From 8c08f3a63b4f0f766501b8dead917729fed5f82e Mon Sep 17 00:00:00 2001 From: Rembrandt Kuipers <50174308+RembrandtK@users.noreply.github.com> Date: Sun, 16 Nov 2025 20:45:52 +0000 Subject: [PATCH 1/9] feat: baseline issuance package Cherry-picked from ec0c9841 (issuance-baseline-2/3) Rebased onto main with regenerated lockfile --- .../contracts/rewards/RewardsManager.sol | 144 +++- .../rewards/RewardsManagerStorage.sol | 15 + .../contracts/contracts/tests/MockERC165.sol | 20 + .../contracts/tests/MockIssuanceAllocator.sol | 76 ++ .../tests/MockRewardsEligibilityOracle.sol | 71 ++ .../contracts/tests/MockSubgraphService.sol | 105 +++ packages/contracts/test/.solcover.js | 2 +- .../unit/rewards/rewards-calculations.test.ts | 389 ++++++++++ .../tests/unit/rewards/rewards-config.test.ts | 158 ++++ .../unit/rewards/rewards-distribution.test.ts | 708 ++++++++++++++++++ .../rewards-eligibility-oracle.test.ts | 496 ++++++++++++ .../unit/rewards/rewards-interface.test.ts | 116 +++ .../rewards-issuance-allocator.test.ts | 416 ++++++++++ .../rewards/rewards-subgraph-service.test.ts | 468 ++++++++++++ .../contracts/rewards/IRewardsManager.sol | 13 + .../IIssuanceAllocationDistribution.sol | 33 + .../allocate/IIssuanceAllocatorTypes.sol | 18 + .../issuance/allocate/IIssuanceTarget.sol | 27 + .../issuance/common/IPausableControl.sol | 34 + .../eligibility/IRewardsEligibility.sol | 19 + packages/issuance/.markdownlint.json | 3 + packages/issuance/.solcover.js | 15 + packages/issuance/.solhint.json | 3 + packages/issuance/README.md | 62 ++ .../contracts/common/BaseUpgradeable.sol | 159 ++++ packages/issuance/hardhat.base.config.ts | 24 + packages/issuance/hardhat.config.ts | 26 + packages/issuance/hardhat.coverage.config.ts | 22 + packages/issuance/package.json | 79 ++ packages/issuance/prettier.config.cjs | 5 + packages/issuance/test/package.json | 62 ++ packages/issuance/test/prettier.config.cjs | 5 + packages/issuance/test/src/index.ts | 5 + .../common/CommonInterfaceIdStability.test.ts | 27 + .../issuance/test/tests/common/fixtures.ts | 127 ++++ .../test/tests/common/graphTokenHelper.ts | 91 +++ .../test/tests/common/testPatterns.ts | 52 ++ packages/issuance/test/tsconfig.json | 25 + packages/issuance/tsconfig.json | 18 + pnpm-lock.yaml | 287 ++++++- 40 files changed, 4411 insertions(+), 14 deletions(-) create mode 100644 packages/contracts/contracts/tests/MockERC165.sol create mode 100644 packages/contracts/contracts/tests/MockIssuanceAllocator.sol create mode 100644 packages/contracts/contracts/tests/MockRewardsEligibilityOracle.sol create mode 100644 packages/contracts/contracts/tests/MockSubgraphService.sol create mode 100644 packages/contracts/test/tests/unit/rewards/rewards-calculations.test.ts create mode 100644 packages/contracts/test/tests/unit/rewards/rewards-config.test.ts create mode 100644 packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts create mode 100644 packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts create mode 100644 packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts create mode 100644 packages/contracts/test/tests/unit/rewards/rewards-issuance-allocator.test.ts create mode 100644 packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts create mode 100644 packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol create mode 100644 packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol create mode 100644 packages/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol create mode 100644 packages/interfaces/contracts/issuance/common/IPausableControl.sol create mode 100644 packages/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol create mode 100644 packages/issuance/.markdownlint.json create mode 100644 packages/issuance/.solcover.js create mode 100644 packages/issuance/.solhint.json create mode 100644 packages/issuance/README.md create mode 100644 packages/issuance/contracts/common/BaseUpgradeable.sol create mode 100644 packages/issuance/hardhat.base.config.ts create mode 100644 packages/issuance/hardhat.config.ts create mode 100644 packages/issuance/hardhat.coverage.config.ts create mode 100644 packages/issuance/package.json create mode 100644 packages/issuance/prettier.config.cjs create mode 100644 packages/issuance/test/package.json create mode 100644 packages/issuance/test/prettier.config.cjs create mode 100644 packages/issuance/test/src/index.ts create mode 100644 packages/issuance/test/tests/common/CommonInterfaceIdStability.test.ts create mode 100644 packages/issuance/test/tests/common/fixtures.ts create mode 100644 packages/issuance/test/tests/common/graphTokenHelper.ts create mode 100644 packages/issuance/test/tests/common/testPatterns.ts create mode 100644 packages/issuance/test/tsconfig.json create mode 100644 packages/issuance/tsconfig.json diff --git a/packages/contracts/contracts/rewards/RewardsManager.sol b/packages/contracts/contracts/rewards/RewardsManager.sol index 767449026..458893308 100644 --- a/packages/contracts/contracts/rewards/RewardsManager.sol +++ b/packages/contracts/contracts/rewards/RewardsManager.sol @@ -7,15 +7,19 @@ pragma abicoder v2; // solhint-disable gas-increment-by-one, gas-indexed-events, gas-small-strings, gas-strict-inequalities import { SafeMath } from "@openzeppelin/contracts/math/SafeMath.sol"; +import { IERC165 } from "@openzeppelin/contracts/introspection/IERC165.sol"; import { GraphUpgradeable } from "../upgrades/GraphUpgradeable.sol"; import { Managed } from "../governance/Managed.sol"; import { MathUtils } from "../staking/libs/MathUtils.sol"; import { IGraphToken } from "@graphprotocol/interfaces/contracts/contracts/token/IGraphToken.sol"; -import { RewardsManagerV5Storage } from "./RewardsManagerStorage.sol"; +import { RewardsManagerV6Storage } from "./RewardsManagerStorage.sol"; import { IRewardsIssuer } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsIssuer.sol"; import { IRewardsManager } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsManager.sol"; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { IRewardsEligibility } from "@graphprotocol/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol"; /** * @title Rewards Manager Contract @@ -27,6 +31,10 @@ import { IRewardsManager } from "@graphprotocol/interfaces/contracts/contracts/r * total rewards for the Subgraph are split up for each Indexer based on much they have Staked on * that Subgraph. * + * @dev If an `issuanceAllocator` is set, it is used to determine the amount of GRT to be issued per block. + * Otherwise, the `issuancePerBlock` variable is used. In relation to the IssuanceAllocator, this contract + * is a self-minting target responsible for directly minting allocated GRT. + * * Note: * The contract provides getter functions to query the state of accrued rewards: * - getAccRewardsPerSignal @@ -37,7 +45,7 @@ import { IRewardsManager } from "@graphprotocol/interfaces/contracts/contracts/r * until the actual takeRewards function is called. * custom:security-contact Please email security+contracts@ thegraph.com (remove space) if you find any bugs. We might have an active bug bounty program. */ -contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsManager { +contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, IRewardsManager, IIssuanceTarget { using SafeMath for uint256; /// @dev Fixed point scaling factor used for decimals in reward calculations @@ -61,6 +69,14 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa */ event RewardsDenied(address indexed indexer, address indexed allocationID); + /** + * @notice Emitted when rewards are denied to an indexer due to eligibility + * @param indexer Address of the indexer being denied rewards + * @param allocationID Address of the allocation being denied rewards + * @param amount Amount of rewards that would have been assigned + */ + event RewardsDeniedDueToEligibility(address indexed indexer, address indexed allocationID, uint256 amount); + /** * @notice Emitted when a subgraph is denied for claiming rewards * @param subgraphDeploymentID Subgraph deployment ID being denied @@ -75,6 +91,23 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa */ event SubgraphServiceSet(address indexed oldSubgraphService, address indexed newSubgraphService); + /** + * @notice Emitted when the issuance allocator is set + * @param oldIssuanceAllocator Previous issuance allocator address + * @param newIssuanceAllocator New issuance allocator address + */ + event IssuanceAllocatorSet(address indexed oldIssuanceAllocator, address indexed newIssuanceAllocator); + + /** + * @notice Emitted when the rewards eligibility oracle contract is set + * @param oldRewardsEligibilityOracle Previous rewards eligibility oracle address + * @param newRewardsEligibilityOracle New rewards eligibility oracle address + */ + event RewardsEligibilityOracleSet( + address indexed oldRewardsEligibilityOracle, + address indexed newRewardsEligibilityOracle + ); + // -- Modifiers -- /** @@ -93,12 +126,27 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa Managed._initialize(_controller); } + /** + * @inheritdoc IERC165 + * @dev Implements ERC165 interface detection + * Returns true if this contract implements the interface defined by interfaceId. + * See: https://eips.ethereum.org/EIPS/eip-165 + */ + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return + interfaceId == type(IERC165).interfaceId || + interfaceId == type(IIssuanceTarget).interfaceId || + interfaceId == type(IRewardsManager).interfaceId; + } + // -- Config -- /** * @inheritdoc IRewardsManager + * @dev When an IssuanceAllocator is set, the effective issuance will be determined by the allocator, + * but this local value can still be updated for cases when the allocator is later removed. * - * @dev The issuance is defined as a fixed amount of rewards per block in GRT. + * The issuance is defined as a fixed amount of rewards per block in GRT. * Whenever this function is called in layer 2, the updateL2MintAllowance function * _must_ be called on the L1GraphTokenGateway in L1, to ensure the bridge can mint the * right amount of tokens. @@ -152,6 +200,70 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa emit SubgraphServiceSet(oldSubgraphService, _subgraphService); } + /** + * @inheritdoc IIssuanceTarget + * @dev This function facilitates upgrades by providing a standard way for targets + * to change their allocator. Only the governor can call this function. + * Note that the IssuanceAllocator can be set to the zero address to disable use of an allocator, and + * use the local `issuancePerBlock` variable instead to control issuance. + */ + function setIssuanceAllocator(address newIssuanceAllocator) external override onlyGovernor { + if (address(issuanceAllocator) != newIssuanceAllocator) { + // Update rewards calculation before changing the issuance allocator + updateAccRewardsPerSignal(); + + // Check that the contract supports the IIssuanceAllocationDistribution interface + // Allow zero address to disable the allocator + if (newIssuanceAllocator != address(0)) { + require( + IERC165(newIssuanceAllocator).supportsInterface(type(IIssuanceAllocationDistribution).interfaceId), + "Contract does not support IIssuanceAllocationDistribution interface" + ); + } + + address oldIssuanceAllocator = address(issuanceAllocator); + issuanceAllocator = IIssuanceAllocationDistribution(newIssuanceAllocator); + emit IssuanceAllocatorSet(oldIssuanceAllocator, newIssuanceAllocator); + } + } + + /** + * @inheritdoc IIssuanceTarget + * @dev Ensures that all reward calculations are up-to-date with the current block + * before any allocation changes take effect. + * + * This function can be called by anyone to update the rewards calculation state. + * The IssuanceAllocator calls this function before changing a target's allocation to ensure + * all issuance is properly accounted for with the current issuance rate before applying an + * issuance allocation change. + */ + function beforeIssuanceAllocationChange() external override { + // Update rewards calculation with the current issuance rate + updateAccRewardsPerSignal(); + } + + /** + * @inheritdoc IRewardsManager + * @dev Note that the rewards eligibility oracle can be set to the zero address to disable use of an oracle, in + * which case no indexers will be denied rewards due to eligibility. + */ + function setRewardsEligibilityOracle(address newRewardsEligibilityOracle) external override onlyGovernor { + if (address(rewardsEligibilityOracle) != newRewardsEligibilityOracle) { + // Check that the contract supports the IRewardsEligibility interface + // Allow zero address to disable the oracle + if (newRewardsEligibilityOracle != address(0)) { + require( + IERC165(newRewardsEligibilityOracle).supportsInterface(type(IRewardsEligibility).interfaceId), + "Contract does not support IRewardsEligibility interface" + ); + } + + address oldRewardsEligibilityOracle = address(rewardsEligibilityOracle); + rewardsEligibilityOracle = IRewardsEligibility(newRewardsEligibilityOracle); + emit RewardsEligibilityOracleSet(oldRewardsEligibilityOracle, newRewardsEligibilityOracle); + } + } + // -- Denylist -- /** @@ -180,6 +292,17 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa // -- Getters -- + /** + * @inheritdoc IRewardsManager + * @dev Gets the effective issuance per block, taking into account the IssuanceAllocator if set + */ + function getRewardsIssuancePerBlock() public view override returns (uint256) { + if (address(issuanceAllocator) != address(0)) { + return issuanceAllocator.getTargetIssuancePerBlock(address(this)).selfIssuancePerBlock; + } + return issuancePerBlock; + } + /** * @inheritdoc IRewardsManager * @dev Linear formula: `x = r * t` @@ -197,8 +320,10 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa if (t == 0) { return 0; } - // ...or if issuance is zero - if (issuancePerBlock == 0) { + + uint256 rewardsIssuancePerBlock = getRewardsIssuancePerBlock(); + + if (rewardsIssuancePerBlock == 0) { return 0; } @@ -209,7 +334,7 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa return 0; } - uint256 x = issuancePerBlock.mul(t); + uint256 x = rewardsIssuancePerBlock.mul(t); // Get the new issuance per signalled token // We multiply the decimals to keep the precision as fixed-point number @@ -405,6 +530,13 @@ contract RewardsManager is RewardsManagerV5Storage, GraphUpgradeable, IRewardsMa rewards = accRewardsPending.add( _calcRewards(tokens, accRewardsPerAllocatedToken, updatedAccRewardsPerAllocatedToken) ); + + // Do not reward if indexer is not eligible based on rewards eligibility + if (address(rewardsEligibilityOracle) != address(0) && !rewardsEligibilityOracle.isEligible(indexer)) { + emit RewardsDeniedDueToEligibility(indexer, _allocationID, rewards); + return 0; + } + if (rewards > 0) { // Mint directly to rewards issuer for the reward amount // The rewards issuer contract will do bookkeeping of the reward and diff --git a/packages/contracts/contracts/rewards/RewardsManagerStorage.sol b/packages/contracts/contracts/rewards/RewardsManagerStorage.sol index d78eb81ef..63897f431 100644 --- a/packages/contracts/contracts/rewards/RewardsManagerStorage.sol +++ b/packages/contracts/contracts/rewards/RewardsManagerStorage.sol @@ -7,6 +7,8 @@ pragma solidity ^0.7.6 || 0.8.27; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IRewardsEligibility } from "@graphprotocol/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol"; import { IRewardsIssuer } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsIssuer.sol"; import { IRewardsManager } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsManager.sol"; import { Managed } from "../governance/Managed.sol"; @@ -76,3 +78,16 @@ contract RewardsManagerV5Storage is RewardsManagerV4Storage { /// @notice Address of the subgraph service IRewardsIssuer public subgraphService; } + +/** + * @title RewardsManagerV6Storage + * @author Edge & Node + * @notice Storage layout for RewardsManager V6 + * Includes support for Rewards Eligibility Oracle and Issuance Allocator. + */ +contract RewardsManagerV6Storage is RewardsManagerV5Storage { + /// @notice Address of the rewards eligibility oracle contract + IRewardsEligibility public rewardsEligibilityOracle; + /// @notice Address of the issuance allocator + IIssuanceAllocationDistribution public issuanceAllocator; +} diff --git a/packages/contracts/contracts/tests/MockERC165.sol b/packages/contracts/contracts/tests/MockERC165.sol new file mode 100644 index 000000000..056493fd3 --- /dev/null +++ b/packages/contracts/contracts/tests/MockERC165.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.7.6; + +import { IERC165 } from "@openzeppelin/contracts/introspection/IERC165.sol"; + +/** + * @title MockERC165 + * @author Edge & Node + * @dev Minimal implementation of IERC165 for testing + * @notice Used to test interface validation - supports only ERC165, not specific interfaces + */ +contract MockERC165 is IERC165 { + /** + * @inheritdoc IERC165 + */ + function supportsInterface(bytes4 interfaceId) public pure override returns (bool) { + return interfaceId == type(IERC165).interfaceId; + } +} diff --git a/packages/contracts/contracts/tests/MockIssuanceAllocator.sol b/packages/contracts/contracts/tests/MockIssuanceAllocator.sol new file mode 100644 index 000000000..ba1f8f2bd --- /dev/null +++ b/packages/contracts/contracts/tests/MockIssuanceAllocator.sol @@ -0,0 +1,76 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +// solhint-disable gas-increment-by-one, gas-indexed-events, named-parameters-mapping, use-natspec + +pragma solidity 0.7.6; +pragma abicoder v2; + +import { IERC165 } from "@openzeppelin/contracts/introspection/IERC165.sol"; +import { TargetIssuancePerBlock } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol"; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; + +/** + * @title MockIssuanceAllocator + * @dev A simple mock contract for the IssuanceAllocator interfaces used by RewardsManager. + */ +contract MockIssuanceAllocator is IERC165, IIssuanceAllocationDistribution { + /// @dev Mapping to store TargetIssuancePerBlock for each target + mapping(address => TargetIssuancePerBlock) private _targetIssuance; + + /** + * @dev Call beforeIssuanceAllocationChange on a target + * @param target The target contract address + */ + function callBeforeIssuanceAllocationChange(address target) external { + IIssuanceTarget(target).beforeIssuanceAllocationChange(); + } + + /** + * @inheritdoc IIssuanceAllocationDistribution + */ + function getTargetIssuancePerBlock(address target) external view override returns (TargetIssuancePerBlock memory) { + return _targetIssuance[target]; + } + + /** + * @inheritdoc IIssuanceAllocationDistribution + * @dev Mock always returns current block number + */ + function distributeIssuance() external view override returns (uint256) { + return block.number; + } + + /** + * @dev Set target issuance directly for testing + * @param target The target contract address + * @param allocatorIssuance The allocator issuance per block + * @param selfIssuance The self issuance per block + * @param callBefore Whether to call beforeIssuanceAllocationChange on the target + */ + function setTargetAllocation( + address target, + uint256 allocatorIssuance, + uint256 selfIssuance, + bool callBefore + ) external { + if (callBefore) { + IIssuanceTarget(target).beforeIssuanceAllocationChange(); + } + _targetIssuance[target] = TargetIssuancePerBlock({ + allocatorIssuancePerBlock: allocatorIssuance, + allocatorIssuanceBlockAppliedTo: block.number, + selfIssuancePerBlock: selfIssuance, + selfIssuanceBlockAppliedTo: block.number + }); + } + + /** + * @inheritdoc IERC165 + */ + function supportsInterface(bytes4 interfaceId) public pure override returns (bool) { + return + interfaceId == type(IIssuanceAllocationDistribution).interfaceId || + interfaceId == type(IERC165).interfaceId; + } +} diff --git a/packages/contracts/contracts/tests/MockRewardsEligibilityOracle.sol b/packages/contracts/contracts/tests/MockRewardsEligibilityOracle.sol new file mode 100644 index 000000000..6b13d4d76 --- /dev/null +++ b/packages/contracts/contracts/tests/MockRewardsEligibilityOracle.sol @@ -0,0 +1,71 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +// solhint-disable named-parameters-mapping + +pragma solidity 0.7.6; + +import { IRewardsEligibility } from "@graphprotocol/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol"; +import { IERC165 } from "@openzeppelin/contracts/introspection/IERC165.sol"; + +/** + * @title MockRewardsEligibilityOracle + * @author Edge & Node + * @notice A simple mock contract for the RewardsEligibilityOracle interface + * @dev A simple mock contract for the RewardsEligibilityOracle interface + */ +contract MockRewardsEligibilityOracle is IRewardsEligibility, IERC165 { + /// @dev Mapping to store eligibility status for each indexer + mapping(address => bool) private eligible; + + /// @dev Mapping to track which indexers have been explicitly set + mapping(address => bool) private isSet; + + /// @dev Default response for indexers not explicitly set + bool private defaultResponse; + + /** + * @notice Constructor + * @param newDefaultResponse Default response for isEligible + */ + constructor(bool newDefaultResponse) { + defaultResponse = newDefaultResponse; + } + + /** + * @notice Set whether a specific indexer is eligible + * @param indexer The indexer address + * @param eligibility Whether the indexer is eligible + */ + function setIndexerEligible(address indexer, bool eligibility) external { + eligible[indexer] = eligibility; + isSet[indexer] = true; + } + + /** + * @notice Set the default response for indexers not explicitly set + * @param newDefaultResponse The default response + */ + function setDefaultResponse(bool newDefaultResponse) external { + defaultResponse = newDefaultResponse; + } + + /** + * @inheritdoc IRewardsEligibility + */ + function isEligible(address indexer) external view override returns (bool) { + // If the indexer has been explicitly set, return that value + if (isSet[indexer]) { + return eligible[indexer]; + } + + // Otherwise return the default response + return defaultResponse; + } + + /** + * @inheritdoc IERC165 + */ + function supportsInterface(bytes4 interfaceId) public pure override returns (bool) { + return interfaceId == type(IRewardsEligibility).interfaceId || interfaceId == type(IERC165).interfaceId; + } +} diff --git a/packages/contracts/contracts/tests/MockSubgraphService.sol b/packages/contracts/contracts/tests/MockSubgraphService.sol new file mode 100644 index 000000000..703edd010 --- /dev/null +++ b/packages/contracts/contracts/tests/MockSubgraphService.sol @@ -0,0 +1,105 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +// solhint-disable named-parameters-mapping + +pragma solidity 0.7.6; + +import { IRewardsIssuer } from "@graphprotocol/interfaces/contracts/contracts/rewards/IRewardsIssuer.sol"; + +/** + * @title MockSubgraphService + * @author Edge & Node + * @notice A mock contract for testing SubgraphService as a rewards issuer + * @dev Implements IRewardsIssuer interface to simulate SubgraphService behavior in tests + */ +contract MockSubgraphService is IRewardsIssuer { + /// @dev Struct to store allocation data + struct Allocation { + bool isActive; + address indexer; + bytes32 subgraphDeploymentId; + uint256 tokens; + uint256 accRewardsPerAllocatedToken; + uint256 accRewardsPending; + } + + /// @dev Mapping of allocation ID to allocation data + mapping(address => Allocation) private allocations; + + /// @dev Mapping of subgraph deployment ID to total allocated tokens + mapping(bytes32 => uint256) private subgraphAllocatedTokens; + + /** + * @notice Set allocation data for testing + * @param allocationId The allocation ID + * @param isActive Whether the allocation is active + * @param indexer The indexer address + * @param subgraphDeploymentId The subgraph deployment ID + * @param tokens Amount of allocated tokens + * @param accRewardsPerAllocatedToken Rewards snapshot + * @param accRewardsPending Accumulated rewards pending + */ + function setAllocation( + address allocationId, + bool isActive, + address indexer, + bytes32 subgraphDeploymentId, + uint256 tokens, + uint256 accRewardsPerAllocatedToken, + uint256 accRewardsPending + ) external { + allocations[allocationId] = Allocation({ + isActive: isActive, + indexer: indexer, + subgraphDeploymentId: subgraphDeploymentId, + tokens: tokens, + accRewardsPerAllocatedToken: accRewardsPerAllocatedToken, + accRewardsPending: accRewardsPending + }); + } + + /** + * @notice Set total allocated tokens for a subgraph + * @param subgraphDeploymentId The subgraph deployment ID + * @param tokens Total tokens allocated + */ + function setSubgraphAllocatedTokens(bytes32 subgraphDeploymentId, uint256 tokens) external { + subgraphAllocatedTokens[subgraphDeploymentId] = tokens; + } + + /** + * @inheritdoc IRewardsIssuer + */ + function getAllocationData( + address allocationId + ) + external + view + override + returns ( + bool isActive, + address indexer, + bytes32 subgraphDeploymentId, + uint256 tokens, + uint256 accRewardsPerAllocatedToken, + uint256 accRewardsPending + ) + { + Allocation memory allocation = allocations[allocationId]; + return ( + allocation.isActive, + allocation.indexer, + allocation.subgraphDeploymentId, + allocation.tokens, + allocation.accRewardsPerAllocatedToken, + allocation.accRewardsPending + ); + } + + /** + * @inheritdoc IRewardsIssuer + */ + function getSubgraphAllocatedTokens(bytes32 subgraphDeploymentId) external view override returns (uint256) { + return subgraphAllocatedTokens[subgraphDeploymentId]; + } +} diff --git a/packages/contracts/test/.solcover.js b/packages/contracts/test/.solcover.js index 7181b78fa..125581cd1 100644 --- a/packages/contracts/test/.solcover.js +++ b/packages/contracts/test/.solcover.js @@ -1,4 +1,4 @@ -const skipFiles = ['bancor', 'ens', 'erc1056', 'arbitrum', 'tests/arbitrum'] +const skipFiles = ['bancor', 'ens', 'erc1056', 'arbitrum', 'tests', '*Mock.sol'] module.exports = { providerOptions: { diff --git a/packages/contracts/test/tests/unit/rewards/rewards-calculations.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-calculations.test.ts new file mode 100644 index 000000000..b100905b0 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-calculations.test.ts @@ -0,0 +1,389 @@ +import { Curation } from '@graphprotocol/contracts' +import { EpochManager } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { + deriveChannelKey, + formatGRT, + GraphNetworkContracts, + helpers, + randomHexBytes, + toBN, + toGRT, +} from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { BigNumber as BN } from 'bignumber.js' +import { expect } from 'chai' +import { BigNumber, constants } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const { HashZero, WeiPerEther } = constants + +const toRound = (n: BigNumber) => formatGRT(n.add(toGRT('0.5'))).split('.')[0] + +describe('Rewards - Calculations', () => { + const graph = hre.graph() + let governor: SignerWithAddress + let curator1: SignerWithAddress + let curator2: SignerWithAddress + let indexer1: SignerWithAddress + let indexer2: SignerWithAddress + let assetHolder: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let epochManager: EpochManager + let staking: IStaking + let rewardsManager: RewardsManager + + // Derive some channel keys for each indexer used to sign attestations + const channelKey1 = deriveChannelKey() + + const subgraphDeploymentID1 = randomHexBytes() + const subgraphDeploymentID2 = randomHexBytes() + + const allocationID1 = channelKey1.address + + const metadata = HashZero + + const ISSUANCE_RATE_PERIODS = 4 // blocks required to issue 800 GRT rewards + const ISSUANCE_PER_BLOCK = toBN('200000000000000000000') // 200 GRT every block + + // Core formula that gets accumulated rewards per signal for a period of time + const getRewardsPerSignal = (k: BN, t: BN, s: BN): string => { + if (s.eq(0)) { + return '0' + } + return k.times(t).div(s).toPrecision(18).toString() + } + + // Tracks the accumulated rewards as totalSignalled or supply changes across snapshots + class RewardsTracker { + totalSignalled = BigNumber.from(0) + lastUpdatedBlock = 0 + accumulated = BigNumber.from(0) + + static async create() { + const tracker = new RewardsTracker() + await tracker.snapshot() + return tracker + } + + async snapshot() { + this.accumulated = this.accumulated.add(await this.accrued()) + this.totalSignalled = await grt.balanceOf(curation.address) + this.lastUpdatedBlock = await helpers.latestBlock() + return this + } + + async elapsedBlocks() { + const currentBlock = await helpers.latestBlock() + return currentBlock - this.lastUpdatedBlock + } + + async accrued() { + const nBlocks = await this.elapsedBlocks() + return this.accruedByElapsed(nBlocks) + } + + accruedByElapsed(nBlocks: BigNumber | number) { + const n = getRewardsPerSignal( + new BN(ISSUANCE_PER_BLOCK.toString()), + new BN(nBlocks.toString()), + new BN(this.totalSignalled.toString()), + ) + return toGRT(n) + } + } + + // Test accumulated rewards per signal + const shouldGetNewRewardsPerSignal = async (nBlocks = ISSUANCE_RATE_PERIODS) => { + // -- t0 -- + const tracker = await RewardsTracker.create() + + // Jump + await helpers.mine(nBlocks) + + // -- t1 -- + + // Contract calculation + const contractAccrued = await rewardsManager.getNewRewardsPerSignal() + // Local calculation + const expectedAccrued = await tracker.accrued() + + // Check + expect(toRound(expectedAccrued)).eq(toRound(contractAccrued)) + return expectedAccrued + } + + before(async function () { + const testAccounts = await graph.getTestAccounts() + ;[indexer1, indexer2, curator1, curator2, assetHolder] = testAccounts + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + epochManager = contracts.EpochManager + staking = contracts.Staking as IStaking + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, indexer2, curator1, curator2, assetHolder]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + context('issuing rewards', function () { + beforeEach(async function () { + // 5% minute rate (4 blocks) + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + }) + + describe('getNewRewardsPerSignal', function () { + it('accrued per signal when no tokens signalled', async function () { + // When there is no tokens signalled no rewards are accrued + await helpers.mineEpoch(epochManager) + const accrued = await rewardsManager.getNewRewardsPerSignal() + expect(accrued).eq(0) + }) + + it('accrued per signal when tokens signalled', async function () { + // Update total signalled + const tokensToSignal = toGRT('1000') + await curation.connect(curator1).mint(subgraphDeploymentID1, tokensToSignal, 0) + + // Check + await shouldGetNewRewardsPerSignal() + }) + + it('accrued per signal when signalled tokens w/ many subgraphs', async function () { + // Update total signalled + await curation.connect(curator1).mint(subgraphDeploymentID1, toGRT('1000'), 0) + + // Check + await shouldGetNewRewardsPerSignal() + + // Update total signalled + await curation.connect(curator2).mint(subgraphDeploymentID2, toGRT('250'), 0) + + // Check + await shouldGetNewRewardsPerSignal() + }) + }) + + describe('updateAccRewardsPerSignal', function () { + it('update the accumulated rewards per signal state', async function () { + // Update total signalled + await curation.connect(curator1).mint(subgraphDeploymentID1, toGRT('1000'), 0) + // Snapshot + const tracker = await RewardsTracker.create() + + // Update + await rewardsManager.connect(governor).updateAccRewardsPerSignal() + const contractAccrued = await rewardsManager.accRewardsPerSignal() + + // Check + const expectedAccrued = await tracker.accrued() + expect(toRound(expectedAccrued)).eq(toRound(contractAccrued)) + }) + + it('update the accumulated rewards per signal state after many blocks', async function () { + // Update total signalled + await curation.connect(curator1).mint(subgraphDeploymentID1, toGRT('1000'), 0) + // Snapshot + const tracker = await RewardsTracker.create() + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Update + await rewardsManager.connect(governor).updateAccRewardsPerSignal() + const contractAccrued = await rewardsManager.accRewardsPerSignal() + + // Check + const expectedAccrued = await tracker.accrued() + expect(toRound(expectedAccrued)).eq(toRound(contractAccrued)) + }) + }) + + describe('getAccRewardsForSubgraph', function () { + it('accrued for each subgraph', async function () { + // Curator1 - Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + const tracker1 = await RewardsTracker.create() + + // Curator2 - Update total signalled + const signalled2 = toGRT('500') + await curation.connect(curator2).mint(subgraphDeploymentID2, signalled2, 0) + + // Snapshot + const tracker2 = await RewardsTracker.create() + await tracker1.snapshot() + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Snapshot + await tracker1.snapshot() + await tracker2.snapshot() + + // Calculate rewards + const rewardsPerSignal1 = tracker1.accumulated + const rewardsPerSignal2 = tracker2.accumulated + const expectedRewardsSG1 = rewardsPerSignal1.mul(signalled1).div(WeiPerEther) + const expectedRewardsSG2 = rewardsPerSignal2.mul(signalled2).div(WeiPerEther) + + // Get rewards from contract + const contractRewardsSG1 = await rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID1) + const contractRewardsSG2 = await rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID2) + + // Check + expect(toRound(expectedRewardsSG1)).eq(toRound(contractRewardsSG1)) + expect(toRound(expectedRewardsSG2)).eq(toRound(contractRewardsSG2)) + }) + + it('should return zero rewards when subgraph signal is below minimum threshold', async function () { + // Set a high minimum signal threshold + const highMinimumSignal = toGRT('2000') + await rewardsManager.connect(governor).setMinimumSubgraphSignal(highMinimumSignal) + + // Signal less than the minimum threshold + const lowSignal = toGRT('1000') + await curation.connect(curator1).mint(subgraphDeploymentID1, lowSignal, 0) + + // Jump some blocks to potentially accrue rewards + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Check that no rewards are accrued due to minimum signal threshold + const contractRewards = await rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID1) + expect(contractRewards).eq(0) + }) + }) + + describe('onSubgraphSignalUpdate', function () { + it('update the accumulated rewards for subgraph state', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + // Snapshot + const tracker1 = await RewardsTracker.create() + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Update + await rewardsManager.connect(governor).onSubgraphSignalUpdate(subgraphDeploymentID1) + + // Check + const contractRewardsSG1 = (await rewardsManager.subgraphs(subgraphDeploymentID1)).accRewardsForSubgraph + const rewardsPerSignal1 = await tracker1.accrued() + const expectedRewardsSG1 = rewardsPerSignal1.mul(signalled1).div(WeiPerEther) + expect(toRound(expectedRewardsSG1)).eq(toRound(contractRewardsSG1)) + + const contractAccrued = await rewardsManager.accRewardsPerSignal() + const expectedAccrued = await tracker1.accrued() + expect(toRound(expectedAccrued)).eq(toRound(contractAccrued)) + + const contractBlockUpdated = await rewardsManager.accRewardsPerSignalLastBlockUpdated() + const expectedBlockUpdated = await helpers.latestBlock() + expect(expectedBlockUpdated).eq(contractBlockUpdated) + }) + }) + + describe('getAccRewardsPerAllocatedToken', function () { + it('accrued per allocated token', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Check + const sg1 = await rewardsManager.subgraphs(subgraphDeploymentID1) + // We trust this function because it was individually tested in previous test + const accRewardsForSubgraphSG1 = await rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID1) + const accruedRewardsSG1 = accRewardsForSubgraphSG1.sub(sg1.accRewardsForSubgraphSnapshot) + const expectedRewardsAT1 = accruedRewardsSG1.mul(WeiPerEther).div(tokensToAllocate) + const contractRewardsAT1 = (await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1))[0] + expect(expectedRewardsAT1).eq(contractRewardsAT1) + }) + }) + + describe('onSubgraphAllocationUpdate', function () { + it('update the accumulated rewards for allocated tokens state', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Prepare expected results + const expectedSubgraphRewards = toGRT('1400') // 7 blocks since signaling to when we do getAccRewardsForSubgraph + const expectedRewardsAT = toGRT('0.08') // allocated during 5 blocks: 1000 GRT, divided by 12500 allocated tokens + + // Update + await rewardsManager.connect(governor).onSubgraphAllocationUpdate(subgraphDeploymentID1) + + // Check on demand results saved + const subgraph = await rewardsManager.subgraphs(subgraphDeploymentID1) + const contractSubgraphRewards = await rewardsManager.getAccRewardsForSubgraph(subgraphDeploymentID1) + const contractRewardsAT = subgraph.accRewardsPerAllocatedToken + + expect(toRound(expectedSubgraphRewards)).eq(toRound(contractSubgraphRewards)) + expect(toRound(expectedRewardsAT.mul(1000))).eq(toRound(contractRewardsAT.mul(1000))) + }) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-config.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-config.test.ts new file mode 100644 index 000000000..8edcbb113 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-config.test.ts @@ -0,0 +1,158 @@ +import { Curation } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { GraphNetworkContracts, helpers, randomHexBytes, toBN, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const ISSUANCE_PER_BLOCK = toBN('200000000000000000000') // 200 GRT every block + +describe('Rewards - Configuration', () => { + const graph = hre.graph() + let governor: SignerWithAddress + let indexer1: SignerWithAddress + let indexer2: SignerWithAddress + let curator1: SignerWithAddress + let curator2: SignerWithAddress + let oracle: SignerWithAddress + let assetHolder: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let staking: IStaking + let rewardsManager: RewardsManager + + const subgraphDeploymentID1 = randomHexBytes() + + before(async function () { + const testAccounts = await graph.getTestAccounts() + ;[indexer1, indexer2, curator1, curator2, oracle, assetHolder] = testAccounts + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + staking = contracts.Staking as IStaking + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, indexer2, curator1, curator2, assetHolder]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('configuration', function () { + describe('initialize', function () { + it('should revert when called on implementation contract', async function () { + // Try to call initialize on the implementation contract (should revert with onlyImpl) + const tx = rewardsManager.connect(governor).initialize(contracts.Controller.address) + await expect(tx).revertedWith('Only implementation') + }) + }) + + describe('issuance per block update', function () { + it('should reject set issuance per block if unauthorized', async function () { + const tx = rewardsManager.connect(indexer1).setIssuancePerBlock(toGRT('1.025')) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set issuance rate to minimum allowed (0)', async function () { + const newIssuancePerBlock = toGRT('0') + await rewardsManager.connect(governor).setIssuancePerBlock(newIssuancePerBlock) + expect(await rewardsManager.issuancePerBlock()).eq(newIssuancePerBlock) + }) + + it('should set issuance rate', async function () { + const newIssuancePerBlock = toGRT('100.025') + await rewardsManager.connect(governor).setIssuancePerBlock(newIssuancePerBlock) + expect(await rewardsManager.issuancePerBlock()).eq(newIssuancePerBlock) + expect(await rewardsManager.accRewardsPerSignalLastBlockUpdated()).eq(await helpers.latestBlock()) + }) + }) + + describe('subgraph availability service', function () { + it('should reject set subgraph oracle if unauthorized', async function () { + const tx = rewardsManager.connect(indexer1).setSubgraphAvailabilityOracle(oracle.address) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set subgraph oracle if governor', async function () { + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(oracle.address) + expect(await rewardsManager.subgraphAvailabilityOracle()).eq(oracle.address) + }) + + it('should reject to deny subgraph if not the oracle', async function () { + const tx = rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + await expect(tx).revertedWith('Caller must be the subgraph availability oracle') + }) + + it('should deny subgraph', async function () { + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(oracle.address) + + const tx = rewardsManager.connect(oracle).setDenied(subgraphDeploymentID1, true) + const blockNum = await helpers.latestBlock() + await expect(tx) + .emit(rewardsManager, 'RewardsDenylistUpdated') + .withArgs(subgraphDeploymentID1, blockNum + 1) + expect(await rewardsManager.isDenied(subgraphDeploymentID1)).eq(true) + }) + + it('should allow removing subgraph from denylist', async function () { + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(oracle.address) + + // First deny the subgraph + await rewardsManager.connect(oracle).setDenied(subgraphDeploymentID1, true) + expect(await rewardsManager.isDenied(subgraphDeploymentID1)).eq(true) + + // Then remove from denylist + const tx = rewardsManager.connect(oracle).setDenied(subgraphDeploymentID1, false) + await expect(tx).emit(rewardsManager, 'RewardsDenylistUpdated').withArgs(subgraphDeploymentID1, 0) + expect(await rewardsManager.isDenied(subgraphDeploymentID1)).eq(false) + }) + + it('should reject setMinimumSubgraphSignal if unauthorized', async function () { + const tx = rewardsManager.connect(indexer1).setMinimumSubgraphSignal(toGRT('1000')) + await expect(tx).revertedWith('Not authorized') + }) + + it('should allow setMinimumSubgraphSignal from subgraph availability oracle', async function () { + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(oracle.address) + + const newMinimumSignal = toGRT('2000') + const tx = rewardsManager.connect(oracle).setMinimumSubgraphSignal(newMinimumSignal) + await expect(tx).emit(rewardsManager, 'ParameterUpdated').withArgs('minimumSubgraphSignal') + + expect(await rewardsManager.minimumSubgraphSignal()).eq(newMinimumSignal) + }) + + it('should allow setMinimumSubgraphSignal from governor', async function () { + const newMinimumSignal = toGRT('3000') + const tx = rewardsManager.connect(governor).setMinimumSubgraphSignal(newMinimumSignal) + await expect(tx).emit(rewardsManager, 'ParameterUpdated').withArgs('minimumSubgraphSignal') + + expect(await rewardsManager.minimumSubgraphSignal()).eq(newMinimumSignal) + }) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts new file mode 100644 index 000000000..cb3f46107 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts @@ -0,0 +1,708 @@ +import { Curation } from '@graphprotocol/contracts' +import { EpochManager } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { + deriveChannelKey, + formatGRT, + GraphNetworkContracts, + helpers, + randomHexBytes, + toBN, + toGRT, +} from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { BigNumber, constants } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const MAX_PPM = 1000000 + +const { HashZero, WeiPerEther } = constants + +const toRound = (n: BigNumber) => formatGRT(n.add(toGRT('0.5'))).split('.')[0] + +describe('Rewards - Distribution', () => { + const graph = hre.graph() + let delegator: SignerWithAddress + let governor: SignerWithAddress + let curator1: SignerWithAddress + let curator2: SignerWithAddress + let indexer1: SignerWithAddress + let assetHolder: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let epochManager: EpochManager + let staking: IStaking + let rewardsManager: RewardsManager + + // Derive some channel keys for each indexer used to sign attestations + const channelKey1 = deriveChannelKey() + const channelKey2 = deriveChannelKey() + const channelKeyNull = deriveChannelKey() + + const subgraphDeploymentID1 = randomHexBytes() + const subgraphDeploymentID2 = randomHexBytes() + + const allocationID1 = channelKey1.address + const allocationID2 = channelKey2.address + const allocationIDNull = channelKeyNull.address + + const metadata = HashZero + + const ISSUANCE_RATE_PERIODS = 4 // blocks required to issue 800 GRT rewards + const ISSUANCE_PER_BLOCK = toBN('200000000000000000000') // 200 GRT every block + + before(async function () { + ;[delegator, curator1, curator2, indexer1, assetHolder] = await graph.getTestAccounts() + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + epochManager = contracts.EpochManager + staking = contracts.Staking as IStaking + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, curator1, curator2, assetHolder]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + context('issuing rewards', function () { + beforeEach(async function () { + // 5% minute rate (4 blocks) + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + }) + + describe('getRewards', function () { + it('calculate rewards using the subgraph signalled + allocated tokens', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + + // Rewards + const contractRewards = await rewardsManager.getRewards(staking.address, allocationID1) + + // We trust using this function in the test because we tested it + // standalone in a previous test + const contractRewardsAT1 = (await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1))[0] + + const expectedRewards = contractRewardsAT1.mul(tokensToAllocate).div(WeiPerEther) + expect(expectedRewards).eq(contractRewards) + }) + it('rewards should be zero if the allocation is closed', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mine(ISSUANCE_RATE_PERIODS) + await helpers.mineEpoch(epochManager) + + // Close allocation + await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + // Rewards + const contractRewards = await rewardsManager.getRewards(staking.address, allocationID1) + expect(contractRewards).eq(BigNumber.from(0)) + }) + it('rewards should be zero if the allocation does not exist', async function () { + // Rewards + const contractRewards = await rewardsManager.getRewards(staking.address, allocationIDNull) + expect(contractRewards).eq(BigNumber.from(0)) + }) + }) + + describe('takeRewards', function () { + interface DelegationParameters { + indexingRewardCut: BigNumber + queryFeeCut: BigNumber + cooldownBlocks: number + } + + async function setupIndexerAllocation() { + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + } + + async function setupIndexerAllocationSignalingAfter() { + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + } + + async function setupIndexerAllocationWithDelegation( + tokensToDelegate: BigNumber, + delegationParams: DelegationParameters, + ) { + const tokensToAllocate = toGRT('12500') + + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Transfer some funds from the curator, I don't want to mint new tokens + await grt.connect(curator1).transfer(delegator.address, tokensToDelegate) + await grt.connect(delegator).approve(staking.address, tokensToDelegate) + + // Stake and set delegation parameters + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .setDelegationParameters(delegationParams.indexingRewardCut, delegationParams.queryFeeCut, 0) + + // Delegate + await staking.connect(delegator).delegate(indexer1.address, tokensToDelegate) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + } + + it('should distribute rewards on closed allocation and stake', async function () { + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocation() + + // Jump + await helpers.mineEpoch(epochManager) + + // Before state + const beforeTokenSupply = await grt.totalSupply() + const beforeIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + const beforeIndexer1Balance = await grt.balanceOf(indexer1.address) + const beforeStakingBalance = await grt.balanceOf(staking.address) + + // All the rewards in this subgraph go to this allocation. + // Rewards per token will be (issuancePerBlock * nBlocks) / allocatedTokens + // The first snapshot is after allocating, that is 2 blocks after the signal is minted. + // The final snapshot is when we close the allocation, that happens 9 blocks after signal is minted. + // So the rewards will be ((issuancePerBlock * 7) / allocatedTokens) * allocatedTokens + const expectedIndexingRewards = toGRT('1400') + + // Close allocation. At this point rewards should be collected for that indexer + const tx = await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + const receipt = await tx.wait() + const event = rewardsManager.interface.parseLog(receipt.logs[1]).args + expect(event.indexer).eq(indexer1.address) + expect(event.allocationID).eq(allocationID1) + expect(toRound(event.amount)).eq(toRound(expectedIndexingRewards)) + + // After state + const afterTokenSupply = await grt.totalSupply() + const afterIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + const afterIndexer1Balance = await grt.balanceOf(indexer1.address) + const afterStakingBalance = await grt.balanceOf(staking.address) + + // Check that rewards are put into indexer stake + const expectedIndexerStake = beforeIndexer1Stake.add(expectedIndexingRewards) + const expectedTokenSupply = beforeTokenSupply.add(expectedIndexingRewards) + // Check stake should have increased with the rewards staked + expect(toRound(afterIndexer1Stake)).eq(toRound(expectedIndexerStake)) + // Check indexer balance remains the same + expect(afterIndexer1Balance).eq(beforeIndexer1Balance) + // Check indexing rewards are kept in the staking contract + expect(toRound(afterStakingBalance)).eq(toRound(beforeStakingBalance.add(expectedIndexingRewards))) + // Check that tokens have been minted + expect(toRound(afterTokenSupply)).eq(toRound(expectedTokenSupply)) + }) + + it('does not revert with an underflow if the minimum signal changes', async function () { + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocation() + + await rewardsManager.connect(governor).setMinimumSubgraphSignal(toGRT(14000)) + + // Jump + await helpers.mineEpoch(epochManager) + + // Close allocation. At this point rewards should be collected for that indexer + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + }) + + it('does not revert with an underflow if the minimum signal changes, and signal came after allocation', async function () { + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocationSignalingAfter() + + await rewardsManager.connect(governor).setMinimumSubgraphSignal(toGRT(14000)) + + // Jump + await helpers.mineEpoch(epochManager) + + // Close allocation. At this point rewards should be collected for that indexer + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + }) + + it('does not revert if signal was already under minimum', async function () { + await rewardsManager.connect(governor).setMinimumSubgraphSignal(toGRT(2000)) + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocation() + + // Jump + await helpers.mineEpoch(epochManager) + // Close allocation. At this point rewards should be collected for that indexer + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + }) + + it('should distribute rewards on closed allocation and send to destination', async function () { + const destinationAddress = randomHexBytes(20) + await staking.connect(indexer1).setRewardsDestination(destinationAddress) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup + await setupIndexerAllocation() + + // Jump + await helpers.mineEpoch(epochManager) + + // Before state + const beforeTokenSupply = await grt.totalSupply() + const beforeIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + const beforeDestinationBalance = await grt.balanceOf(destinationAddress) + const beforeStakingBalance = await grt.balanceOf(staking.address) + + // All the rewards in this subgraph go to this allocation. + // Rewards per token will be (issuancePerBlock * nBlocks) / allocatedTokens + // The first snapshot is after allocating, that is 2 blocks after the signal is minted. + // The final snapshot is when we close the allocation, that happens 9 blocks after signal is minted. + // So the rewards will be ((issuancePerBlock * 7) / allocatedTokens) * allocatedTokens + const expectedIndexingRewards = toGRT('1400') + + // Close allocation. At this point rewards should be collected for that indexer + const tx = await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + const receipt = await tx.wait() + const event = rewardsManager.interface.parseLog(receipt.logs[1]).args + expect(event.indexer).eq(indexer1.address) + expect(event.allocationID).eq(allocationID1) + expect(toRound(event.amount)).eq(toRound(expectedIndexingRewards)) + + // After state + const afterTokenSupply = await grt.totalSupply() + const afterIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + const afterDestinationBalance = await grt.balanceOf(destinationAddress) + const afterStakingBalance = await grt.balanceOf(staking.address) + + // Check that rewards are properly assigned + const expectedIndexerStake = beforeIndexer1Stake + const expectedTokenSupply = beforeTokenSupply.add(expectedIndexingRewards) + // Check stake should not have changed + expect(toRound(afterIndexer1Stake)).eq(toRound(expectedIndexerStake)) + // Check indexing rewards are received by the rewards destination + expect(toRound(afterDestinationBalance)).eq(toRound(beforeDestinationBalance.add(expectedIndexingRewards))) + // Check indexing rewards were not sent to the staking contract + expect(afterStakingBalance).eq(beforeStakingBalance) + // Check that tokens have been minted + expect(toRound(afterTokenSupply)).eq(toRound(expectedTokenSupply)) + }) + + it('should distribute rewards on closed allocation w/delegators', async function () { + // Setup + const delegationParams = { + indexingRewardCut: toBN('823000'), // 82.30% + queryFeeCut: toBN('80000'), // 8% + cooldownBlocks: 0, + } + const tokensToDelegate = toGRT('2000') + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + // Setup the allocation and delegators + await setupIndexerAllocationWithDelegation(tokensToDelegate, delegationParams) + + // Jump + await helpers.mineEpoch(epochManager) + + // Before state + const beforeTokenSupply = await grt.totalSupply() + const beforeDelegationPool = await staking.delegationPools(indexer1.address) + const beforeIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + + // Close allocation. At this point rewards should be collected for that indexer + await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + // After state + const afterTokenSupply = await grt.totalSupply() + const afterDelegationPool = await staking.delegationPools(indexer1.address) + const afterIndexer1Stake = await staking.getIndexerStakedTokens(indexer1.address) + + // Check that rewards are put into indexer stake (only indexer cut) + // Check that rewards are put into delegators pool accordingly + + // All the rewards in this subgraph go to this allocation. + // Rewards per token will be (issuancePerBlock * nBlocks) / allocatedTokens + // The first snapshot is after allocating, that is 1 block after the signal is minted. + // The final snapshot is when we close the allocation, that happens 4 blocks after signal is minted. + // So the rewards will be ((issuancePerBlock * 3) / allocatedTokens) * allocatedTokens + const expectedIndexingRewards = toGRT('600') + // Calculate delegators cut + const indexerRewards = delegationParams.indexingRewardCut.mul(expectedIndexingRewards).div(toBN(MAX_PPM)) + // Calculate indexer cut + const delegatorsRewards = expectedIndexingRewards.sub(indexerRewards) + // Check + const expectedIndexerStake = beforeIndexer1Stake.add(indexerRewards) + const expectedDelegatorsPoolTokens = beforeDelegationPool.tokens.add(delegatorsRewards) + const expectedTokenSupply = beforeTokenSupply.add(expectedIndexingRewards) + expect(toRound(afterIndexer1Stake)).eq(toRound(expectedIndexerStake)) + expect(toRound(afterDelegationPool.tokens)).eq(toRound(expectedDelegatorsPoolTokens)) + // Check that tokens have been minted + expect(toRound(afterTokenSupply)).eq(toRound(expectedTokenSupply)) + }) + + it('should deny rewards if subgraph on denylist', async function () { + // Setup + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + await setupIndexerAllocation() + + // Jump + await helpers.mineEpoch(epochManager) + + // Close allocation. At this point rewards should be collected for that indexer + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + }) + + it('should handle zero rewards scenario correctly', async function () { + // Setup allocation with zero issuance to create zero rewards scenario + await rewardsManager.connect(governor).setIssuancePerBlock(0) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Before state + const beforeTokenSupply = await grt.totalSupply() + const beforeStakingBalance = await grt.balanceOf(staking.address) + + // Close allocation. At this point rewards should be zero + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + + // After state - should be unchanged since no rewards were minted + const afterTokenSupply = await grt.totalSupply() + const afterStakingBalance = await grt.balanceOf(staking.address) + + // Check that no tokens were minted (rewards were 0) + expect(afterTokenSupply).eq(beforeTokenSupply) + expect(afterStakingBalance).eq(beforeStakingBalance) + }) + }) + }) + + describe('edge scenarios', function () { + it('close allocation on a subgraph that no longer have signal', async function () { + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Jump + await helpers.mineEpoch(epochManager) + + // Remove all signal from the subgraph + const curatorShares = await curation.getCuratorSignal(curator1.address, subgraphDeploymentID1) + await curation.connect(curator1).burn(subgraphDeploymentID1, curatorShares, 0) + + // Close allocation. At this point rewards should be collected for that indexer + await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + }) + }) + + describe('multiple allocations', function () { + it('two allocations in the same block with a GRT burn in the middle should succeed', async function () { + // If rewards are not monotonically increasing, this can trigger + // a subtraction overflow error as seen in mainnet tx: + // 0xb6bf7bbc446720a7409c482d714aebac239dd62e671c3c94f7e93dd3a61835ab + await helpers.mineEpoch(epochManager) + + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Stake + const tokensToStake = toGRT('12500') + await staking.connect(indexer1).stake(tokensToStake) + + // Allocate simultaneously, burning in the middle + const tokensToAlloc = toGRT('5000') + await helpers.setAutoMine(false) + const tx1 = await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAlloc, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + const tx2 = await grt.connect(indexer1).burn(toGRT(1)) + const tx3 = await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAlloc, + allocationID2, + metadata, + await channelKey2.generateProof(indexer1.address), + ) + + await helpers.mine() + await helpers.setAutoMine(true) + + await expect(tx1).emit(staking, 'AllocationCreated') + await expect(tx2).emit(grt, 'Transfer') + await expect(tx3).emit(staking, 'AllocationCreated') + }) + it('two simultanous-similar allocations should get same amount of rewards', async function () { + await helpers.mineEpoch(epochManager) + + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Stake + const tokensToStake = toGRT('12500') + await staking.connect(indexer1).stake(tokensToStake) + + // Allocate simultaneously + const tokensToAlloc = toGRT('5000') + const tx1 = await staking.populateTransaction.allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAlloc, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + const tx2 = await staking.populateTransaction.allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAlloc, + allocationID2, + metadata, + await channelKey2.generateProof(indexer1.address), + ) + await staking.connect(indexer1).multicall([tx1.data, tx2.data]) + + // Jump + await helpers.mineEpoch(epochManager) + + // Close allocations simultaneously + const tx3 = await staking.populateTransaction.closeAllocation(allocationID1, randomHexBytes()) + const tx4 = await staking.populateTransaction.closeAllocation(allocationID2, randomHexBytes()) + const tx5 = await staking.connect(indexer1).multicall([tx3.data, tx4.data]) + + // Both allocations should receive the same amount of rewards + const receipt = await tx5.wait() + const event1 = rewardsManager.interface.parseLog(receipt.logs[1]).args + const event2 = rewardsManager.interface.parseLog(receipt.logs[5]).args + expect(event1.amount).eq(event2.amount) + }) + }) + + describe('rewards progression when collecting query fees', function () { + it('collect query fees with two subgraphs and one allocation', async function () { + async function getRewardsAccrual(subgraphs) { + const [sg1, sg2] = await Promise.all(subgraphs.map((sg) => rewardsManager.getAccRewardsForSubgraph(sg))) + return { + sg1, + sg2, + all: sg1.add(sg2), + } + } + + // set curation percentage + await staking.connect(governor).setCurationPercentage(100000) + + // allow the asset holder + const tokensToCollect = toGRT('10000') + + // signal in two subgraphs in the same block + const subgraphs = [subgraphDeploymentID1, subgraphDeploymentID2] + for (const sub of subgraphs) { + await curation.connect(curator1).mint(sub, toGRT('1500'), 0) + } + + // snapshot block before any accrual (we substract 1 because accrual starts after the first mint happens) + const b1 = await epochManager.blockNum().then((x) => x.toNumber() - 1) + + // allocate + const tokensToAllocate = toGRT('12500') + await staking + .connect(indexer1) + .multicall([ + await staking.populateTransaction.stake(tokensToAllocate).then((tx) => tx.data), + await staking.populateTransaction + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + .then((tx) => tx.data), + ]) + + // move time fwd + await helpers.mineEpoch(epochManager) + + // collect funds into staking for that sub + await staking.connect(assetHolder).collect(tokensToCollect, allocationID1) + + // check rewards diff + await rewardsManager.getRewards(staking.address, allocationID1).then(formatGRT) + + await helpers.mine() + const accrual = await getRewardsAccrual(subgraphs) + const b2 = await epochManager.blockNum().then((x) => x.toNumber()) + + // round comparison because there is a small precision error due to dividing and accrual per signal + expect(toRound(accrual.all)).eq(toRound(ISSUANCE_PER_BLOCK.mul(b2 - b1))) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts new file mode 100644 index 000000000..108eb3391 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts @@ -0,0 +1,496 @@ +import { Curation } from '@graphprotocol/contracts' +import { EpochManager } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { deriveChannelKey, GraphNetworkContracts, helpers, randomHexBytes, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { constants } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const { HashZero } = constants + +describe('Rewards - Eligibility Oracle', () => { + const graph = hre.graph() + let curator1: SignerWithAddress + let governor: SignerWithAddress + let indexer1: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let epochManager: EpochManager + let staking: IStaking + let rewardsManager: RewardsManager + + // Derive channel key for indexer used to sign attestations + const channelKey1 = deriveChannelKey() + + const subgraphDeploymentID1 = randomHexBytes() + + const allocationID1 = channelKey1.address + + const metadata = HashZero + + const ISSUANCE_PER_BLOCK = toGRT('200') // 200 GRT every block + + async function setupIndexerAllocation() { + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + } + + before(async function () { + const testAccounts = await graph.getTestAccounts() + curator1 = testAccounts[0] + indexer1 = testAccounts[1] + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + epochManager = contracts.EpochManager + staking = contracts.Staking as IStaking + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, curator1]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('rewards eligibility oracle', function () { + it('should reject setRewardsEligibilityOracle if unauthorized', async function () { + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) + await mockOracle.deployed() + const tx = rewardsManager.connect(indexer1).setRewardsEligibilityOracle(mockOracle.address) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set rewards eligibility oracle if governor', async function () { + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) + await mockOracle.deployed() + + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + await expect(tx) + .emit(rewardsManager, 'RewardsEligibilityOracleSet') + .withArgs(constants.AddressZero, mockOracle.address) + + expect(await rewardsManager.rewardsEligibilityOracle()).eq(mockOracle.address) + }) + + it('should allow setting rewards eligibility oracle to zero address', async function () { + // First set an oracle + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Then set to zero address to disable + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(constants.AddressZero) + await expect(tx) + .emit(rewardsManager, 'RewardsEligibilityOracleSet') + .withArgs(mockOracle.address, constants.AddressZero) + + expect(await rewardsManager.rewardsEligibilityOracle()).eq(constants.AddressZero) + }) + + it('should reject setting oracle that does not support interface', async function () { + // Try to set an EOA (externally owned account) as the rewards eligibility oracle + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(indexer1.address) + // EOA doesn't have code, so the call will revert (error message may vary by ethers version) + await expect(tx).to.be.reverted + }) + + it('should reject setting oracle that does not support IRewardsEligibility interface', async function () { + // Deploy a contract that supports ERC165 but not IRewardsEligibility + const MockERC165Factory = await hre.ethers.getContractFactory('contracts/tests/MockERC165.sol:MockERC165') + const mockERC165 = await MockERC165Factory.deploy() + await mockERC165.deployed() + + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(mockERC165.address) + await expect(tx).revertedWith('Contract does not support IRewardsEligibility interface') + }) + + it('should not emit event when setting same oracle address', async function () { + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Setting the same oracle again should not emit an event + const tx = rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + await expect(tx).to.not.emit(rewardsManager, 'RewardsEligibilityOracleSet') + }) + }) + + describe('rewards eligibility in takeRewards', function () { + it('should deny rewards due to rewards eligibility oracle', async function () { + // Setup rewards eligibility oracle that denies rewards for indexer1 + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Default to deny + await mockOracle.deployed() + + // Set the rewards eligibility oracle + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Calculate expected rewards (for verification in the event) + const expectedIndexingRewards = toGRT('1400') + + // Close allocation. At this point rewards should be denied due to eligibility + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should allow rewards when rewards eligibility oracle approves', async function () { + // Setup rewards eligibility oracle that allows rewards for indexer1 + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) // Default to allow + await mockOracle.deployed() + + // Set the rewards eligibility oracle + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Calculate expected rewards + const expectedIndexingRewards = toGRT('1400') + + // Close allocation. At this point rewards should be assigned normally + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + }) + + describe('rewards eligibility oracle and denylist interaction', function () { + it('should prioritize denylist over REO when both deny', async function () { + // Setup BOTH denial mechanisms + // 1. Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // 2. Setup REO that also denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Close allocation - denylist should be checked first + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + // Verify: Denylist wins (checked first in RewardsManager.takeRewards line 522) + // Should emit RewardsDenied (not RewardsDeniedDueToEligibility) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + + // Verify: REO event is NOT emitted + await expect(tx).to.not.emit(rewardsManager, 'RewardsDeniedDueToEligibility') + }) + + it('should check REO when denylist allows but indexer ineligible', async function () { + // Setup: Subgraph is allowed (no denylist), but indexer is ineligible + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny indexer + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedIndexingRewards = toGRT('1400') + + // Close allocation - REO should be checked + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should handle indexer becoming ineligible mid-allocation', async function () { + // Setup: Indexer starts eligible + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) // Start eligible + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation while indexer is eligible + await setupIndexerAllocation() + + // Jump to next epoch (rewards accrue) + await helpers.mineEpoch(epochManager) + + // Change eligibility AFTER allocation created but BEFORE closing + await mockOracle.setIndexerEligible(indexer1.address, false) + + const expectedIndexingRewards = toGRT('1600') + + // Close allocation - should be denied at close time (not creation time) + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should handle indexer becoming eligible mid-allocation', async function () { + // Setup: Indexer starts ineligible + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Start ineligible + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation while indexer is ineligible + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Change eligibility before closing + await mockOracle.setIndexerEligible(indexer1.address, true) + + const expectedIndexingRewards = toGRT('1600') + + // Close allocation - should now be allowed + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should handle denylist being added mid-allocation', async function () { + // Setup: Start with subgraph NOT denied + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation when subgraph is allowed + await setupIndexerAllocation() + + // Jump to next epoch (rewards accrue) + await helpers.mineEpoch(epochManager) + + // Deny the subgraph before closing allocation + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Close allocation - should be denied even though it was created when allowed + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + }) + + it('should handle denylist being removed mid-allocation', async function () { + // Setup: Start with subgraph denied + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation (can still allocate to denied subgraph) + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Remove from denylist before closing + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, false) + + const expectedIndexingRewards = toGRT('1600') + + // Close allocation - should now get rewards + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should allow rewards when REO is zero address (disabled)', async function () { + // Ensure REO is not set (zero address = disabled) + expect(await rewardsManager.rewardsEligibilityOracle()).eq(constants.AddressZero) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedIndexingRewards = toGRT('1400') + + // Close allocation - should get rewards (no eligibility check when REO is zero) + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) + }) + + it('should verify event structure differences between denial mechanisms', async function () { + // Test 1: Denylist denial - event WITHOUT amount + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + await helpers.mineEpoch(epochManager) + await setupIndexerAllocation() + await helpers.mineEpoch(epochManager) + + const tx1 = await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + const receipt1 = await tx1.wait() + + // Find the RewardsDenied event - search in logs as events may be from different contracts + const rewardsDeniedEvent = receipt1.logs + .map((log) => { + try { + return rewardsManager.interface.parseLog(log) + } catch { + return null + } + }) + .find((event) => event?.name === 'RewardsDenied') + + expect(rewardsDeniedEvent).to.not.be.undefined + + // Verify it only has indexer and allocationID (no amount parameter) + expect(rewardsDeniedEvent?.args?.indexer).to.equal(indexer1.address) + expect(rewardsDeniedEvent?.args?.allocationID).to.equal(allocationID1) + // RewardsDenied has only 2 args, amount should not exist + expect(rewardsDeniedEvent?.args?.amount).to.be.undefined + + // Reset for test 2 + await fixture.tearDown() + await fixture.setUp() + + // Test 2: REO denial - event WITH amount + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + await helpers.mineEpoch(epochManager) + await setupIndexerAllocation() + await helpers.mineEpoch(epochManager) + + const tx2 = await staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + const receipt2 = await tx2.wait() + + // Find the RewardsDeniedDueToEligibility event + const eligibilityEvent = receipt2.logs + .map((log) => { + try { + return rewardsManager.interface.parseLog(log) + } catch { + return null + } + }) + .find((event) => event?.name === 'RewardsDeniedDueToEligibility') + + expect(eligibilityEvent).to.not.be.undefined + + // Verify it has indexer, allocationID, AND amount + expect(eligibilityEvent?.args?.indexer).to.equal(indexer1.address) + expect(eligibilityEvent?.args?.allocationID).to.equal(allocationID1) + expect(eligibilityEvent?.args?.amount).to.not.be.undefined + expect(eligibilityEvent?.args?.amount).to.be.gt(0) // Shows what they would have gotten + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts new file mode 100644 index 000000000..3a9b7c23b --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts @@ -0,0 +1,116 @@ +import { RewardsManager } from '@graphprotocol/contracts' +import { IERC165__factory, IIssuanceTarget__factory, IRewardsManager__factory } from '@graphprotocol/interfaces/types' +import { GraphNetworkContracts, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +describe('RewardsManager interfaces', () => { + const graph = hre.graph() + let governor: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let rewardsManager: RewardsManager + + before(async function () { + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + rewardsManager = contracts.RewardsManager + + // Set a default issuance per block + await rewardsManager.connect(governor).setIssuancePerBlock(toGRT('200')) + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + /** + * Interface ID Stability Tests + * + * These tests verify that interface IDs remain stable across builds. + * Changes to these IDs indicate breaking changes to the interface definitions. + * + * If a test fails: + * 1. Verify the interface change was intentional + * 2. Understand the impact on deployed contracts + * 3. Update the expected ID if the change is correct + * 4. Document the breaking change in release notes + */ + describe('Interface ID Stability', () => { + it('IERC165 should have stable interface ID', () => { + expect(IERC165__factory.interfaceId).to.equal('0x01ffc9a7') + }) + + it('IIssuanceTarget should have stable interface ID', () => { + expect(IIssuanceTarget__factory.interfaceId).to.equal('0xaee4dc43') + }) + + it('IRewardsManager should have stable interface ID', () => { + expect(IRewardsManager__factory.interfaceId).to.equal('0xa31d8306') + }) + }) + + describe('supportsInterface', function () { + it('should support IIssuanceTarget interface', async function () { + const supports = await rewardsManager.supportsInterface(IIssuanceTarget__factory.interfaceId) + expect(supports).to.be.true + }) + + it('should support IRewardsManager interface', async function () { + const supports = await rewardsManager.supportsInterface(IRewardsManager__factory.interfaceId) + expect(supports).to.be.true + }) + + it('should support IERC165 interface', async function () { + const supports = await rewardsManager.supportsInterface(IERC165__factory.interfaceId) + expect(supports).to.be.true + }) + + it('should return false for unsupported interfaces', async function () { + // Test with an unknown interface ID + const unknownInterfaceId = '0x12345678' // Random interface ID + const supports = await rewardsManager.supportsInterface(unknownInterfaceId) + expect(supports).to.be.false + }) + }) + + describe('calcRewards', function () { + it('should calculate rewards correctly', async function () { + const tokens = toGRT('1000') + const accRewardsPerAllocatedToken = toGRT('0.5') + + // Expected: (1000 * 0.5 * 1e18) / 1e18 = 500 GRT + const expectedRewards = toGRT('500') + + const rewards = await rewardsManager.calcRewards(tokens, accRewardsPerAllocatedToken) + expect(rewards).to.equal(expectedRewards) + }) + + it('should return 0 when tokens is 0', async function () { + const tokens = toGRT('0') + const accRewardsPerAllocatedToken = toGRT('0.5') + + const rewards = await rewardsManager.calcRewards(tokens, accRewardsPerAllocatedToken) + expect(rewards).to.equal(0) + }) + + it('should return 0 when accRewardsPerAllocatedToken is 0', async function () { + const tokens = toGRT('1000') + const accRewardsPerAllocatedToken = toGRT('0') + + const rewards = await rewardsManager.calcRewards(tokens, accRewardsPerAllocatedToken) + expect(rewards).to.equal(0) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-issuance-allocator.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-issuance-allocator.test.ts new file mode 100644 index 000000000..c74679ad9 --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-issuance-allocator.test.ts @@ -0,0 +1,416 @@ +import { Curation } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { GraphNetworkContracts, helpers, randomHexBytes, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { constants } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +describe('Rewards - Issuance Allocator', () => { + const graph = hre.graph() + let curator1: SignerWithAddress + let governor: SignerWithAddress + let indexer1: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let rewardsManager: RewardsManager + + const subgraphDeploymentID1 = randomHexBytes() + + const ISSUANCE_PER_BLOCK = toGRT('200') // 200 GRT every block + + before(async function () { + const testAccounts = await graph.getTestAccounts() + curator1 = testAccounts[0] + indexer1 = testAccounts[1] + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + rewardsManager = contracts.RewardsManager as RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [curator1]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + // Reset issuance allocator to ensure we use direct issuancePerBlock + await rewardsManager.connect(governor).setIssuanceAllocator(constants.AddressZero) + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('setIssuanceAllocator', function () { + describe('ERC-165 validation', function () { + it('should successfully set an issuance allocator that supports the interface', async function () { + // Deploy a mock issuance allocator that supports ERC-165 and IIssuanceAllocationDistribution + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockAllocator.deployed() + + // Should succeed because MockIssuanceAllocator supports IIssuanceAllocationDistribution + await expect(rewardsManager.connect(governor).setIssuanceAllocator(mockAllocator.address)) + .to.emit(rewardsManager, 'IssuanceAllocatorSet') + .withArgs(constants.AddressZero, mockAllocator.address) + + // Verify the allocator was set + expect(await rewardsManager.issuanceAllocator()).to.equal(mockAllocator.address) + }) + + it('should revert when setting to EOA address (no contract code)', async function () { + const eoaAddress = indexer1.address + + // Should revert because EOAs don't have contract code to call supportsInterface on + await expect(rewardsManager.connect(governor).setIssuanceAllocator(eoaAddress)).to.be.reverted + }) + + it('should revert when setting to contract that does not support IIssuanceAllocationDistribution', async function () { + // Deploy a contract that supports ERC-165 but not IIssuanceAllocationDistribution + const MockERC165Factory = await hre.ethers.getContractFactory('contracts/tests/MockERC165.sol:MockERC165') + const mockERC165 = await MockERC165Factory.deploy() + await mockERC165.deployed() + + // Should revert because the contract doesn't support IIssuanceAllocationDistribution + await expect(rewardsManager.connect(governor).setIssuanceAllocator(mockERC165.address)).to.be.revertedWith( + 'Contract does not support IIssuanceAllocationDistribution interface', + ) + }) + + it('should validate interface before updating rewards calculation', async function () { + // This test ensures that ERC165 validation happens before updateAccRewardsPerSignal + // Deploy a contract that supports ERC-165 but not IIssuanceAllocationDistribution + const MockERC165Factory = await hre.ethers.getContractFactory('contracts/tests/MockERC165.sol:MockERC165') + const mockERC165 = await MockERC165Factory.deploy() + await mockERC165.deployed() + + // Should revert with interface error, not with any rewards calculation error + await expect(rewardsManager.connect(governor).setIssuanceAllocator(mockERC165.address)).to.be.revertedWith( + 'Contract does not support IIssuanceAllocationDistribution interface', + ) + }) + }) + + describe('access control', function () { + it('should revert when called by non-governor', async function () { + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockAllocator.deployed() + + // Should revert because indexer1 is not the governor + await expect(rewardsManager.connect(indexer1).setIssuanceAllocator(mockAllocator.address)).to.be.revertedWith( + 'Only Controller governor', + ) + }) + }) + + describe('state management', function () { + it('should allow setting issuance allocator to zero address (disable)', async function () { + // First set a valid allocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockAllocator.deployed() + + await rewardsManager.connect(governor).setIssuanceAllocator(mockAllocator.address) + expect(await rewardsManager.issuanceAllocator()).to.equal(mockAllocator.address) + + // Now disable by setting to zero address + await expect(rewardsManager.connect(governor).setIssuanceAllocator(constants.AddressZero)) + .to.emit(rewardsManager, 'IssuanceAllocatorSet') + .withArgs(mockAllocator.address, constants.AddressZero) + + expect(await rewardsManager.issuanceAllocator()).to.equal(constants.AddressZero) + + // Should now use local issuancePerBlock again + expect(await rewardsManager.getRewardsIssuancePerBlock()).eq(ISSUANCE_PER_BLOCK) + }) + + it('should emit IssuanceAllocatorSet event when setting allocator', async function () { + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + + const tx = rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + await expect(tx) + .emit(rewardsManager, 'IssuanceAllocatorSet') + .withArgs(constants.AddressZero, mockIssuanceAllocator.address) + }) + + it('should not emit event when setting to same allocator address', async function () { + // Deploy a mock issuance allocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockAllocator.deployed() + + // Set the allocator first time + await rewardsManager.connect(governor).setIssuanceAllocator(mockAllocator.address) + + // Setting to same address should not emit event + const tx = await rewardsManager.connect(governor).setIssuanceAllocator(mockAllocator.address) + const receipt = await tx.wait() + + // Filter for IssuanceAllocatorSet events + const events = receipt.events?.filter((e) => e.event === 'IssuanceAllocatorSet') || [] + expect(events.length).to.equal(0) + }) + + it('should update rewards before changing issuance allocator', async function () { + // This test verifies that updateAccRewardsPerSignal is called when setting allocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + + // Setting the allocator should trigger updateAccRewardsPerSignal + // We can't easily test this directly, but we can verify the allocator was set + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + expect(await rewardsManager.issuanceAllocator()).eq(mockIssuanceAllocator.address) + }) + }) + }) + + describe('getRewardsIssuancePerBlock', function () { + it('should return issuancePerBlock when no issuanceAllocator is set', async function () { + const expectedIssuance = toGRT('100.025') + await rewardsManager.connect(governor).setIssuancePerBlock(expectedIssuance) + + // Ensure no issuanceAllocator is set + expect(await rewardsManager.issuanceAllocator()).eq(constants.AddressZero) + + // Should return the direct issuancePerBlock value + expect(await rewardsManager.getRewardsIssuancePerBlock()).eq(expectedIssuance) + }) + + it('should return value from issuanceAllocator when set', async function () { + // Create a mock IssuanceAllocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + + // Set the mock allocator on RewardsManager + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Verify the allocator was set + expect(await rewardsManager.issuanceAllocator()).eq(mockIssuanceAllocator.address) + + // Set RewardsManager as a self-minting target with 25 GRT per block + const expectedIssuance = toGRT('25') + await mockIssuanceAllocator['setTargetAllocation(address,uint256,uint256,bool)']( + rewardsManager.address, + 0, // allocator issuance + expectedIssuance, // self issuance + true, + ) + + // Should return the value from the allocator, not the local issuancePerBlock + expect(await rewardsManager.getRewardsIssuancePerBlock()).eq(expectedIssuance) + }) + + it('should return 0 when issuanceAllocator is set but target not registered as self-minter', async function () { + // Create a mock IssuanceAllocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + + // Set the mock allocator on RewardsManager + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Set RewardsManager as an allocator-minting target (only allocator issuance) + await mockIssuanceAllocator['setTargetAllocation(address,uint256,uint256,bool)']( + rewardsManager.address, + toGRT('25'), // allocator issuance + 0, // self issuance + false, + ) + + // Should return 0 because it's not a self-minting target + expect(await rewardsManager.getRewardsIssuancePerBlock()).eq(0) + }) + }) + + describe('setIssuancePerBlock', function () { + it('should allow setIssuancePerBlock when issuanceAllocator is set', async function () { + // Create and set a mock IssuanceAllocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Should allow setting issuancePerBlock even when allocator is set + const newIssuancePerBlock = toGRT('100') + await rewardsManager.connect(governor).setIssuancePerBlock(newIssuancePerBlock) + + // The local issuancePerBlock should be updated + expect(await rewardsManager.issuancePerBlock()).eq(newIssuancePerBlock) + + // But the effective issuance should still come from the allocator + // (assuming the allocator returns a different value) + expect(await rewardsManager.getRewardsIssuancePerBlock()).not.eq(newIssuancePerBlock) + }) + }) + + describe('beforeIssuanceAllocationChange', function () { + it('should handle beforeIssuanceAllocationChange correctly', async function () { + // Create and set a mock IssuanceAllocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + const mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Anyone should be able to call this function + await rewardsManager.connect(governor).beforeIssuanceAllocationChange() + + // Should also succeed when called by the allocator + await mockIssuanceAllocator.callBeforeIssuanceAllocationChange(rewardsManager.address) + }) + }) + + describe('issuance allocator integration', function () { + let mockIssuanceAllocator: any + + beforeEach(async function () { + // Create and setup mock allocator + const MockIssuanceAllocatorFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockIssuanceAllocator.sol:MockIssuanceAllocator', + ) + mockIssuanceAllocator = await MockIssuanceAllocatorFactory.deploy() + await mockIssuanceAllocator.deployed() + }) + + it('should accumulate rewards using allocator rate over time', async function () { + // Setup: Create signal + const totalSignal = toGRT('1000') + await curation.connect(curator1).mint(subgraphDeploymentID1, totalSignal, 0) + + // Set allocator with specific rate (50 GRT per block, different from local 200 GRT) + const allocatorRate = toGRT('50') + await mockIssuanceAllocator.setTargetAllocation(rewardsManager.address, 0, allocatorRate, false) + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + // Snapshot state after setting allocator + const rewardsAfterSet = await rewardsManager.getAccRewardsPerSignal() + + // Mine blocks to accrue rewards at allocator rate + const blocksToMine = 10 + await helpers.mine(blocksToMine) + + // Get accumulated rewards + const rewardsAfterMining = await rewardsManager.getAccRewardsPerSignal() + const actualAccrued = rewardsAfterMining.sub(rewardsAfterSet) + + // Calculate expected rewards: (rate × blocks) / totalSignal + // Expected = (50 GRT × 10 blocks) / 1000 GRT signal = 0.5 GRT per signal + const expectedAccrued = allocatorRate.mul(blocksToMine).mul(toGRT('1')).div(totalSignal) + + // Verify rewards accumulated at allocator rate (not local rate of 200 GRT/block) + expect(actualAccrued).to.eq(expectedAccrued) + + // Verify NOT using local rate (would be 4x higher: 200 vs 50) + const wrongExpected = ISSUANCE_PER_BLOCK.mul(blocksToMine).mul(toGRT('1')).div(totalSignal) + expect(actualAccrued).to.not.eq(wrongExpected) + }) + + it('should maintain reward consistency when switching between rates', async function () { + // Setup: Create signal + const totalSignal = toGRT('2000') + await curation.connect(curator1).mint(subgraphDeploymentID1, totalSignal, 0) + + // Snapshot initial state + const block0 = await helpers.latestBlock() + const rewards0 = await rewardsManager.getAccRewardsPerSignal() + + // Phase 1: Accrue at local rate (200 GRT/block) + await helpers.mine(5) + const block1 = await helpers.latestBlock() + const rewards1 = await rewardsManager.getAccRewardsPerSignal() + + // Calculate phase 1 accrual + const blocksPhase1 = block1 - block0 + const phase1Accrued = rewards1.sub(rewards0) + const expectedPhase1 = ISSUANCE_PER_BLOCK.mul(blocksPhase1).mul(toGRT('1')).div(totalSignal) + expect(phase1Accrued).to.eq(expectedPhase1) + + // Phase 2: Switch to allocator with different rate (100 GRT/block) + const allocatorRate = toGRT('100') + await mockIssuanceAllocator.setTargetAllocation(rewardsManager.address, 0, allocatorRate, false) + await rewardsManager.connect(governor).setIssuanceAllocator(mockIssuanceAllocator.address) + + const block2 = await helpers.latestBlock() + const rewards2 = await rewardsManager.getAccRewardsPerSignal() + + await helpers.mine(8) + const block3 = await helpers.latestBlock() + const rewards3 = await rewardsManager.getAccRewardsPerSignal() + + // Calculate phase 2 accrual (includes the setIssuanceAllocator block at local rate) + const blocksPhase2 = block3 - block2 + const phase2Accrued = rewards3.sub(rewards2) + const expectedPhase2 = allocatorRate.mul(blocksPhase2).mul(toGRT('1')).div(totalSignal) + expect(phase2Accrued).to.eq(expectedPhase2) + + // Phase 3: Switch back to local rate (200 GRT/block) + await rewardsManager.connect(governor).setIssuanceAllocator(constants.AddressZero) + + const block4 = await helpers.latestBlock() + const rewards4 = await rewardsManager.getAccRewardsPerSignal() + + await helpers.mine(4) + const block5 = await helpers.latestBlock() + const rewards5 = await rewardsManager.getAccRewardsPerSignal() + + // Calculate phase 3 accrual + const blocksPhase3 = block5 - block4 + const phase3Accrued = rewards5.sub(rewards4) + const expectedPhase3 = ISSUANCE_PER_BLOCK.mul(blocksPhase3).mul(toGRT('1')).div(totalSignal) + expect(phase3Accrued).to.eq(expectedPhase3) + + // Verify total consistency: all rewards from start to end must equal sum of all phases + // including the transition blocks (setIssuanceAllocator calls mine blocks too) + const transitionPhase1to2 = rewards2.sub(rewards1) // Block mined by setIssuanceAllocator + const transitionPhase2to3 = rewards4.sub(rewards3) // Block mined by removing allocator + const totalExpected = phase1Accrued + .add(transitionPhase1to2) + .add(phase2Accrued) + .add(transitionPhase2to3) + .add(phase3Accrued) + const totalActual = rewards5.sub(rewards0) + expect(totalActual).to.eq(totalExpected) + }) + }) +}) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts new file mode 100644 index 000000000..f75785ecd --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts @@ -0,0 +1,468 @@ +import { Curation } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { GraphNetworkContracts, helpers, randomAddress, randomHexBytes, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { constants } from 'ethers' +import hre from 'hardhat' +import { network } from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +describe('Rewards - SubgraphService', () => { + const graph = hre.graph() + let curator1: SignerWithAddress + let governor: SignerWithAddress + let indexer1: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let rewardsManager: RewardsManager + + const subgraphDeploymentID1 = randomHexBytes() + const allocationID1 = randomAddress() + + const ISSUANCE_PER_BLOCK = toGRT('200') // 200 GRT every block + + before(async function () { + const testAccounts = await graph.getTestAccounts() + curator1 = testAccounts[0] + indexer1 = testAccounts[1] + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, curator1]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('subgraph service configuration', function () { + it('should reject setSubgraphService if unauthorized', async function () { + const newService = randomAddress() + const tx = rewardsManager.connect(indexer1).setSubgraphService(newService) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set subgraph service if governor', async function () { + const newService = randomAddress() + const tx = rewardsManager.connect(governor).setSubgraphService(newService) + + await expect(tx).emit(rewardsManager, 'SubgraphServiceSet').withArgs(constants.AddressZero, newService) + + expect(await rewardsManager.subgraphService()).eq(newService) + }) + + it('should allow setting to zero address', async function () { + const service = randomAddress() + await rewardsManager.connect(governor).setSubgraphService(service) + + const tx = rewardsManager.connect(governor).setSubgraphService(constants.AddressZero) + await expect(tx).emit(rewardsManager, 'SubgraphServiceSet').withArgs(service, constants.AddressZero) + + expect(await rewardsManager.subgraphService()).eq(constants.AddressZero) + }) + + it('should emit event when setting different address', async function () { + const service1 = randomAddress() + const service2 = randomAddress() + + await rewardsManager.connect(governor).setSubgraphService(service1) + + // Setting a different address should emit event + const tx = await rewardsManager.connect(governor).setSubgraphService(service2) + await expect(tx).emit(rewardsManager, 'SubgraphServiceSet').withArgs(service1, service2) + }) + }) + + describe('subgraph service as rewards issuer', function () { + let mockSubgraphService: any + + beforeEach(async function () { + // Deploy mock SubgraphService + const MockSubgraphServiceFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockSubgraphService.sol:MockSubgraphService', + ) + mockSubgraphService = await MockSubgraphServiceFactory.deploy() + await mockSubgraphService.deployed() + + // Set it on RewardsManager + await rewardsManager.connect(governor).setSubgraphService(mockSubgraphService.address) + }) + + describe('getRewards from subgraph service', function () { + it('should calculate rewards for subgraph service allocations', async function () { + // Setup: Create signal for rewards calculation + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Setup allocation data in mock + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, // isActive + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, // accRewardsPerAllocatedToken + 0, // accRewardsPending + ) + + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Mine some blocks to accrue rewards + await helpers.mine(10) + + // Get rewards - should return calculated amount + const rewards = await rewardsManager.getRewards(mockSubgraphService.address, allocationID1) + expect(rewards).to.be.gt(0) + }) + + it('should return zero for inactive allocation', async function () { + // Setup allocation as inactive + await mockSubgraphService.setAllocation( + allocationID1, + false, // isActive = false + indexer1.address, + subgraphDeploymentID1, + toGRT('12500'), + 0, + 0, + ) + + const rewards = await rewardsManager.getRewards(mockSubgraphService.address, allocationID1) + expect(rewards).to.equal(0) + }) + + it('should reject getRewards from non-rewards-issuer contract', async function () { + const randomContract = randomAddress() + const tx = rewardsManager.getRewards(randomContract, allocationID1) + await expect(tx).revertedWith('Not a rewards issuer') + }) + }) + + describe('takeRewards from subgraph service', function () { + it('should take rewards through subgraph service', async function () { + // Setup: Create signal for rewards calculation + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Setup allocation data in mock + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, // isActive + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, // accRewardsPerAllocatedToken + 0, // accRewardsPending + ) + + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Mine some blocks to accrue rewards + await helpers.mine(10) + + // Before state + const beforeSubgraphServiceBalance = await grt.balanceOf(mockSubgraphService.address) + const beforeTotalSupply = await grt.totalSupply() + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards (called by subgraph service) + const tx = await rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + const receipt = await tx.wait() + + // Stop impersonating + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [mockSubgraphService.address], + }) + + // Parse the event + const event = receipt.logs + .map((log: any) => { + try { + return rewardsManager.interface.parseLog(log) + } catch { + return null + } + }) + .find((e: any) => e?.name === 'HorizonRewardsAssigned') + + expect(event).to.not.be.undefined + expect(event?.args.indexer).to.equal(indexer1.address) + expect(event?.args.allocationID).to.equal(allocationID1) + expect(event?.args.amount).to.be.gt(0) + + // After state - verify tokens minted to subgraph service + const afterSubgraphServiceBalance = await grt.balanceOf(mockSubgraphService.address) + const afterTotalSupply = await grt.totalSupply() + + expect(afterSubgraphServiceBalance).to.be.gt(beforeSubgraphServiceBalance) + expect(afterTotalSupply).to.be.gt(beforeTotalSupply) + }) + + it('should return zero rewards for inactive allocation', async function () { + // Setup allocation as inactive + await mockSubgraphService.setAllocation( + allocationID1, + false, // isActive = false + indexer1.address, + subgraphDeploymentID1, + toGRT('12500'), + 0, + 0, + ) + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards should return 0 and emit event with 0 amount + const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + + // Stop impersonating + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [mockSubgraphService.address], + }) + }) + + it('should reject takeRewards from non-rewards-issuer contract', async function () { + const tx = rewardsManager.connect(indexer1).takeRewards(allocationID1) + await expect(tx).revertedWith('Caller must be a rewards issuer') + }) + + it('should handle zero rewards scenario', async function () { + // Setup with zero issuance + await rewardsManager.connect(governor).setIssuancePerBlock(0) + + // Setup allocation + await mockSubgraphService.setAllocation( + allocationID1, + true, + indexer1.address, + subgraphDeploymentID1, + toGRT('12500'), + 0, + 0, + ) + + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, toGRT('12500')) + + // Mine blocks + await helpers.mine(10) + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards should succeed with 0 amount + const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + + // Stop impersonating + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [mockSubgraphService.address], + }) + }) + }) + + describe('mixed allocations from staking and subgraph service', function () { + it('should account for both staking and subgraph service allocations in getAccRewardsPerAllocatedToken', async function () { + // This test verifies that getSubgraphAllocatedTokens is called for both issuers + // and rewards are distributed proportionally + + // Setup: Create signal + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Setup subgraph service allocation + const tokensFromSubgraphService = toGRT('5000') + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensFromSubgraphService) + + // Note: We can't easily create a real staking allocation in this test + // but the contract code at lines 381-388 loops through both issuers + // and sums their allocated tokens. This test verifies the subgraph service path. + + // Mine some blocks + await helpers.mine(5) + + // Get accumulated rewards per allocated token + const [accRewardsPerAllocatedToken, accRewardsForSubgraph] = + await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1) + + // Should have calculated rewards based on subgraph service allocations + expect(accRewardsPerAllocatedToken).to.be.gt(0) + expect(accRewardsForSubgraph).to.be.gt(0) + }) + + it('should handle case where only subgraph service has allocations', async function () { + // Setup: Create signal + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Only subgraph service has allocations + const tokensFromSubgraphService = toGRT('10000') + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensFromSubgraphService) + + // Mine blocks + await helpers.mine(5) + + // Get rewards + const [accRewardsPerAllocatedToken] = await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1) + + expect(accRewardsPerAllocatedToken).to.be.gt(0) + }) + + it('should return zero when neither issuer has allocations', async function () { + // Setup: Create signal but no allocations + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // No allocations from either issuer + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, 0) + + // Mine blocks + await helpers.mine(5) + + // Get rewards - should return 0 when no allocations + const [accRewardsPerAllocatedToken, accRewardsForSubgraph] = + await rewardsManager.getAccRewardsPerAllocatedToken(subgraphDeploymentID1) + + expect(accRewardsPerAllocatedToken).to.equal(0) + expect(accRewardsForSubgraph).to.be.gt(0) // Subgraph still accrues, but no per-token rewards + }) + }) + + describe('subgraph service with denylist and eligibility', function () { + it('should deny rewards from subgraph service when subgraph is on denylist', async function () { + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup allocation + await mockSubgraphService.setAllocation( + allocationID1, + true, + indexer1.address, + subgraphDeploymentID1, + toGRT('12500'), + 0, + 0, + ) + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards should be denied + const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + + // Stop impersonating + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [mockSubgraphService.address], + }) + }) + + it('should deny rewards from subgraph service when indexer is ineligible', async function () { + // Setup REO that denies indexer1 + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockREO = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny by default + await mockREO.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockREO.address) + + // Setup: Create signal + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Setup allocation + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, + 0, + ) + + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Mine blocks to accrue rewards + await helpers.mine(5) + + // Impersonate the mock subgraph service contract + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [mockSubgraphService.address], + }) + await network.provider.send('hardhat_setBalance', [mockSubgraphService.address, '0x1000000000000000000']) + + const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) + + // Take rewards should be denied due to eligibility + const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) + await expect(tx).emit(rewardsManager, 'RewardsDeniedDueToEligibility') + + // Stop impersonating + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [mockSubgraphService.address], + }) + }) + }) + }) +}) diff --git a/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol b/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol index 72a73e19b..bd8da3508 100644 --- a/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol +++ b/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol @@ -43,6 +43,12 @@ interface IRewardsManager { */ function setSubgraphService(address subgraphService) external; + /** + * @notice Set the rewards eligibility oracle address + * @param newRewardsEligibilityOracle The address of the rewards eligibility oracle + */ + function setRewardsEligibilityOracle(address newRewardsEligibilityOracle) external; + // -- Denylist -- /** @@ -67,6 +73,13 @@ interface IRewardsManager { // -- Getters -- + /** + * @notice Gets the effective issuance per block for rewards + * @dev Takes into account the issuance allocator if set + * @return The effective issuance per block + */ + function getRewardsIssuancePerBlock() external view returns (uint256); + /** * @notice Gets the issuance of rewards per signal since last updated * @return newly accrued rewards per signal since last update diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol new file mode 100644 index 000000000..4b27eaf39 --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol @@ -0,0 +1,33 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; +pragma abicoder v2; + +import { TargetIssuancePerBlock } from "./IIssuanceAllocatorTypes.sol"; + +/** + * @title IIssuanceAllocationDistribution + * @author Edge & Node + * @notice Interface for distribution and target interaction with the issuance allocator. + * This is the minimal interface that targets need to interact with the allocator. + */ +interface IIssuanceAllocationDistribution { + /** + * @notice Distribute issuance to allocated non-self-minting targets. + * @return Block number that issuance has been distributed to. That will normally be the current block number, unless the contract is paused. + * + * @dev When the contract is paused, no issuance is distributed and lastIssuanceBlock is not updated. + * @dev This function is permissionless and can be called by anyone, including targets as part of their normal flow. + */ + function distributeIssuance() external returns (uint256); + + /** + * @notice Target issuance per block information + * @param target Address of the target + * @return TargetIssuancePerBlock struct containing allocatorIssuanceBlockAppliedTo, selfIssuanceBlockAppliedTo, allocatorIssuancePerBlock, and selfIssuancePerBlock + * @dev This function does not revert when paused, instead the caller is expected to correctly read and apply the information provided. + * @dev Targets should check allocatorIssuanceBlockAppliedTo and selfIssuanceBlockAppliedTo - if either is not the current block, that type of issuance is paused for that target. + * @dev Targets should not check the allocator's pause state directly, but rely on the blockAppliedTo fields to determine if issuance is paused. + */ + function getTargetIssuancePerBlock(address target) external view returns (TargetIssuancePerBlock memory); +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol new file mode 100644 index 000000000..b4a5d33a7 --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol @@ -0,0 +1,18 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; +pragma abicoder v2; + +/** + * @notice Target issuance per block information + * @param allocatorIssuancePerBlock Issuance per block for allocator-minting (non-self-minting) + * @param allocatorIssuanceBlockAppliedTo The block up to which allocator issuance has been applied + * @param selfIssuancePerBlock Issuance per block for self-minting + * @param selfIssuanceBlockAppliedTo The block up to which self issuance has been applied + */ +struct TargetIssuancePerBlock { + uint256 allocatorIssuancePerBlock; + uint256 allocatorIssuanceBlockAppliedTo; + uint256 selfIssuancePerBlock; + uint256 selfIssuanceBlockAppliedTo; +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol new file mode 100644 index 000000000..3fe539b95 --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol @@ -0,0 +1,27 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title IIssuanceTarget + * @author Edge & Node + * @notice Interface for contracts that receive issuance from an issuance allocator + */ +interface IIssuanceTarget { + /** + * @notice Called by the issuance allocator before the target's issuance allocation changes + * @dev The target should ensure that all issuance related calculations are up-to-date + * with the current block so that an allocation change can be applied correctly. + * Note that the allocation could change multiple times in the same block after + * this function has been called, only the final allocation is relevant. + */ + function beforeIssuanceAllocationChange() external; + + /** + * @notice Sets the issuance allocator for this target + * @dev This function facilitates upgrades by providing a standard way for targets + * to change their allocator. Implementations can define their own access control. + * @param newIssuanceAllocator Address of the issuance allocator + */ + function setIssuanceAllocator(address newIssuanceAllocator) external; +} diff --git a/packages/interfaces/contracts/issuance/common/IPausableControl.sol b/packages/interfaces/contracts/issuance/common/IPausableControl.sol new file mode 100644 index 000000000..83cfbc364 --- /dev/null +++ b/packages/interfaces/contracts/issuance/common/IPausableControl.sol @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title IPausableControl + * @author Edge & Node + * @notice Interface for contracts that support pause/unpause functionality + * @dev This interface extends standard pausable functionality with explicit + * pause and unpause functions. Contracts implementing this interface allow + * authorized accounts to pause and unpause contract operations. + * Events (Paused, Unpaused) are inherited from OpenZeppelin's PausableUpgradeable. + */ +interface IPausableControl { + /** + * @notice Pause the contract + * @dev Pauses contract operations. Only functions using whenNotPaused + * modifier will be affected. + */ + function pause() external; + + /** + * @notice Unpause the contract + * @dev Resumes contract operations. Only functions using whenPaused + * modifier will be affected. + */ + function unpause() external; + + /** + * @notice Check if the contract is currently paused + * @return True if the contract is paused, false otherwise + */ + function paused() external view returns (bool); +} diff --git a/packages/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol b/packages/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol new file mode 100644 index 000000000..53c8acf85 --- /dev/null +++ b/packages/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title IRewardsEligibility + * @author Edge & Node + * @notice Minimal interface for checking indexer rewards eligibility + * @dev This is the interface that consumers (e.g., RewardsManager) need to check + * if an indexer is eligible to receive rewards + */ +interface IRewardsEligibility { + /** + * @notice Check if an indexer is eligible to receive rewards + * @param indexer Address of the indexer + * @return True if the indexer is eligible to receive rewards, false otherwise + */ + function isEligible(address indexer) external view returns (bool); +} diff --git a/packages/issuance/.markdownlint.json b/packages/issuance/.markdownlint.json new file mode 100644 index 000000000..18947b0be --- /dev/null +++ b/packages/issuance/.markdownlint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../.markdownlint.json" +} diff --git a/packages/issuance/.solcover.js b/packages/issuance/.solcover.js new file mode 100644 index 000000000..d8bbec4bb --- /dev/null +++ b/packages/issuance/.solcover.js @@ -0,0 +1,15 @@ +module.exports = { + skipFiles: ['test/'], + providerOptions: { + mnemonic: 'myth like bonus scare over problem client lizard pioneer submit female collect', + network_id: 1337, + }, + // Use default istanbulFolder: './coverage' + // Exclude 'html' to avoid duplicate HTML files (lcov already generates HTML in lcov-report/) + istanbulReporter: ['lcov', 'text', 'json'], + configureYulOptimizer: true, + mocha: { + grep: '@skip-on-coverage', + invert: true, + }, +} diff --git a/packages/issuance/.solhint.json b/packages/issuance/.solhint.json new file mode 100644 index 000000000..d30847305 --- /dev/null +++ b/packages/issuance/.solhint.json @@ -0,0 +1,3 @@ +{ + "extends": ["solhint:recommended", "./../../.solhint.json"] +} diff --git a/packages/issuance/README.md b/packages/issuance/README.md new file mode 100644 index 000000000..16e2520b6 --- /dev/null +++ b/packages/issuance/README.md @@ -0,0 +1,62 @@ +# The Graph Issuance Contracts + +This package contains smart contracts for The Graph's issuance functionality. + +## Overview + +The issuance contracts handle token issuance mechanisms for The Graph protocol. + +### Contracts + +- **[IssuanceAllocator](contracts/allocate/IssuanceAllocator.md)** - Central distribution hub for token issuance, allocating tokens to different protocol components based on configured proportions +- **[RewardsEligibilityOracle](contracts/eligibility/RewardsEligibilityOracle.md)** - Oracle-based eligibility system for indexer rewards with time-based expiration +- **DirectAllocation** - Simple target contract for receiving and distributing allocated tokens + +## Development + +### Setup + +```bash +# Install dependencies +pnpm install + +# Build +pnpm build + +# Test +pnpm test +``` + +### Testing + +To run the tests: + +```bash +pnpm test +``` + +For coverage: + +```bash +pnpm test:coverage +``` + +### Linting + +To lint the contracts and tests: + +```bash +pnpm lint +``` + +### Contract Size + +To check contract sizes: + +```bash +pnpm size +``` + +## License + +GPL-2.0-or-later diff --git a/packages/issuance/contracts/common/BaseUpgradeable.sol b/packages/issuance/contracts/common/BaseUpgradeable.sol new file mode 100644 index 000000000..ead4f6a4f --- /dev/null +++ b/packages/issuance/contracts/common/BaseUpgradeable.sol @@ -0,0 +1,159 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { Initializable } from "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol"; +import { PausableUpgradeable } from "@openzeppelin/contracts-upgradeable/utils/PausableUpgradeable.sol"; +import { AccessControlUpgradeable } from "@openzeppelin/contracts-upgradeable/access/AccessControlUpgradeable.sol"; +import { IGraphToken } from "@graphprotocol/interfaces/contracts/contracts/token/IGraphToken.sol"; +import { IPausableControl } from "@graphprotocol/interfaces/contracts/issuance/common/IPausableControl.sol"; + +/** + * @title BaseUpgradeable + * @author Edge & Node + * @notice A base contract that provides role-based access control and pausability. + * + * @dev This contract combines OpenZeppelin's AccessControl and Pausable + * to provide a standardized way to manage access control and pausing functionality. + * It uses ERC-7201 namespaced storage pattern for better storage isolation. + * This contract is abstract and meant to be inherited by other contracts. + * @custom:security-contact Please email security+contracts@thegraph.com if you find any bugs. We might have an active bug bounty program. + */ +abstract contract BaseUpgradeable is Initializable, AccessControlUpgradeable, PausableUpgradeable, IPausableControl { + // -- Constants -- + + /// @notice One million - used as the denominator for values provided as Parts Per Million (PPM) + /// @dev This constant represents 1,000,000 and serves as the denominator when working with + /// PPM values. For example, 50% would be represented as 500,000 PPM, calculated as + /// (500,000 / MILLION) = 0.5 = 50% + uint256 public constant MILLION = 1_000_000; + + // -- Role Constants -- + + /** + * @notice Role identifier for governor accounts + * @dev Governors have the highest level of access and can: + * - Grant and revoke roles within the established hierarchy + * - Perform administrative functions and system configuration + * - Set critical parameters and upgrade contracts + * Admin of: GOVERNOR_ROLE, PAUSE_ROLE, OPERATOR_ROLE + */ + bytes32 public constant GOVERNOR_ROLE = keccak256("GOVERNOR_ROLE"); + + /** + * @notice Role identifier for pause accounts + * @dev Pause role holders can: + * - Pause and unpause contract operations for emergency situations + * Typically granted to automated monitoring systems or emergency responders. + * Pausing is intended for quick response to potential threats, and giving time for investigation and resolution (potentially with governance intervention). + * Admin: GOVERNOR_ROLE + */ + bytes32 public constant PAUSE_ROLE = keccak256("PAUSE_ROLE"); + + /** + * @notice Role identifier for operator accounts + * @dev Operators can: + * - Perform operational tasks as defined by inheriting contracts + * - Manage roles that are designated as operator-administered + * Admin: GOVERNOR_ROLE + */ + bytes32 public constant OPERATOR_ROLE = keccak256("OPERATOR_ROLE"); + + // -- Immutable Variables -- + + /// @notice The Graph Token contract + /// @custom:oz-upgrades-unsafe-allow state-variable-immutable + IGraphToken internal immutable GRAPH_TOKEN; + + // -- Custom Errors -- + + /// @notice Thrown when attempting to set the Graph Token to the zero address + error GraphTokenCannotBeZeroAddress(); + + /// @notice Thrown when attempting to set the governor to the zero address + error GovernorCannotBeZeroAddress(); + + // -- Constructor -- + + /** + * @notice Constructor for the BaseUpgradeable contract + * @dev This contract is upgradeable, but we use the constructor to set immutable variables + * and disable initializers to prevent the implementation contract from being initialized. + * @param graphToken Address of the Graph Token contract + * @custom:oz-upgrades-unsafe-allow constructor + */ + constructor(address graphToken) { + require(graphToken != address(0), GraphTokenCannotBeZeroAddress()); + GRAPH_TOKEN = IGraphToken(graphToken); + _disableInitializers(); + } + + // -- Initialization -- + + /** + * @notice Internal function to initialize the BaseUpgradeable contract + * @dev This function is used by child contracts to initialize the BaseUpgradeable contract + * @param governor Address that will have the GOVERNOR_ROLE + */ + function __BaseUpgradeable_init(address governor) internal { + // solhint-disable-previous-line func-name-mixedcase + + __AccessControl_init(); + __Pausable_init(); + + __BaseUpgradeable_init_unchained(governor); + } + + /** + * @notice Internal unchained initialization function for BaseUpgradeable + * @dev This function sets up the governor role and role admin hierarchy + * @param governor Address that will have the GOVERNOR_ROLE + */ + function __BaseUpgradeable_init_unchained(address governor) internal { + // solhint-disable-previous-line func-name-mixedcase + + require(governor != address(0), GovernorCannotBeZeroAddress()); + + // Set up role admin hierarchy: + // GOVERNOR is admin of GOVERNOR, PAUSE, and OPERATOR roles + _setRoleAdmin(GOVERNOR_ROLE, GOVERNOR_ROLE); + _setRoleAdmin(PAUSE_ROLE, GOVERNOR_ROLE); + _setRoleAdmin(OPERATOR_ROLE, GOVERNOR_ROLE); + + // Grant initial governor role + _grantRole(GOVERNOR_ROLE, governor); + } + + // -- External Functions -- + + /** + * @inheritdoc IPausableControl + */ + function pause() external override onlyRole(PAUSE_ROLE) { + _pause(); + } + + /** + * @inheritdoc IPausableControl + */ + function unpause() external override onlyRole(PAUSE_ROLE) { + _unpause(); + } + + /** + * @inheritdoc IPausableControl + */ + function paused() public view virtual override(PausableUpgradeable, IPausableControl) returns (bool) { + return super.paused(); + } + + /** + * @notice Check if this contract supports a given interface + * @dev Adds support for IPausableControl interface + * @param interfaceId The interface identifier to check + * @return True if the contract supports the interface, false otherwise + */ + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IPausableControl).interfaceId || super.supportsInterface(interfaceId); + } +} diff --git a/packages/issuance/hardhat.base.config.ts b/packages/issuance/hardhat.base.config.ts new file mode 100644 index 000000000..e4d0cc8bb --- /dev/null +++ b/packages/issuance/hardhat.base.config.ts @@ -0,0 +1,24 @@ +import { hardhatBaseConfig } from '@graphprotocol/toolshed/hardhat' +import type { HardhatUserConfig } from 'hardhat/config' + +// Issuance-specific Solidity configuration with Cancun EVM version +// Based on toolshed solidityUserConfig but with Cancun EVM target +export const issuanceSolidityConfig = { + version: '0.8.27', + settings: { + optimizer: { + enabled: true, + runs: 100, + }, + evmVersion: 'cancun' as const, + }, +} + +// Base configuration for issuance package - inherits from toolshed and overrides Solidity config +export const issuanceBaseConfig = (() => { + const baseConfig = hardhatBaseConfig(require) + return { + ...baseConfig, + solidity: issuanceSolidityConfig, + } as HardhatUserConfig +})() diff --git a/packages/issuance/hardhat.config.ts b/packages/issuance/hardhat.config.ts new file mode 100644 index 000000000..f76949af8 --- /dev/null +++ b/packages/issuance/hardhat.config.ts @@ -0,0 +1,26 @@ +import '@nomicfoundation/hardhat-ethers' +import '@typechain/hardhat' +import 'hardhat-contract-sizer' +import '@openzeppelin/hardhat-upgrades' +import '@nomicfoundation/hardhat-verify' + +import type { HardhatUserConfig } from 'hardhat/config' + +import { issuanceBaseConfig } from './hardhat.base.config' + +const config: HardhatUserConfig = { + ...issuanceBaseConfig, + // Main config specific settings + typechain: { + outDir: 'types', + target: 'ethers-v6', + }, + paths: { + sources: './contracts', + tests: './test/tests', + artifacts: './artifacts', + cache: './cache', + }, +} + +export default config diff --git a/packages/issuance/hardhat.coverage.config.ts b/packages/issuance/hardhat.coverage.config.ts new file mode 100644 index 000000000..01ee96e83 --- /dev/null +++ b/packages/issuance/hardhat.coverage.config.ts @@ -0,0 +1,22 @@ +import '@nomicfoundation/hardhat-ethers' +import '@nomicfoundation/hardhat-chai-matchers' +import '@nomicfoundation/hardhat-network-helpers' +import '@openzeppelin/hardhat-upgrades' +import 'hardhat-gas-reporter' +import 'solidity-coverage' + +import { HardhatUserConfig } from 'hardhat/config' + +import { issuanceBaseConfig } from './hardhat.base.config' + +const config: HardhatUserConfig = { + ...issuanceBaseConfig, + paths: { + sources: './contracts', + tests: './test/tests', + artifacts: './coverage/artifacts', + cache: './coverage/cache', + }, +} as HardhatUserConfig + +export default config diff --git a/packages/issuance/package.json b/packages/issuance/package.json new file mode 100644 index 000000000..fbb658193 --- /dev/null +++ b/packages/issuance/package.json @@ -0,0 +1,79 @@ +{ + "name": "@graphprotocol/issuance", + "version": "1.0.0", + "publishConfig": { + "access": "public" + }, + "description": "The Graph Issuance Contracts", + "author": "Edge & Node", + "license": "GPL-2.0-or-later", + "main": "index.js", + "exports": { + ".": "./index.js", + "./artifacts/*": "./artifacts/*", + "./contracts/*": "./contracts/*", + "./types": "./types/index.ts", + "./types/*": "./types/*" + }, + "scripts": { + "build": "pnpm build:dep && pnpm build:self", + "build:dep": "pnpm --filter '@graphprotocol/issuance^...' run build:self", + "build:self": "pnpm compile && pnpm build:self:typechain", + "build:coverage": "pnpm build:dep && pnpm build:self:coverage", + "build:self:coverage": "npx hardhat compile --config hardhat.coverage.config.ts && pnpm build:self:typechain", + "build:self:typechain": "bash -c 'missing=$(grep -rL \"static readonly interfaceId\" types/factories --include=\"*__factory.ts\" 2>/dev/null | wc -l); if [ $missing -gt 0 ]; then node -e \"require('\"'\"'@graphprotocol/interfaces/utils'\"'\"').addInterfaceIds('\"'\"'types/factories'\"'\"')\"; fi'", + "clean": "rm -rf artifacts/ types/ forge-artifacts/ cache_forge/ coverage/ cache/ .eslintcache", + "compile": "hardhat compile --quiet", + "test": "pnpm --filter @graphprotocol/issuance-test test", + "test:coverage": "pnpm --filter @graphprotocol/issuance-test run test:coverage", + "lint": "pnpm lint:ts; pnpm lint:sol; pnpm lint:md; pnpm lint:json", + "lint:ts": "eslint '**/*.{js,ts,cjs,mjs,jsx,tsx}' --fix --cache; prettier -w --cache --log-level warn '**/*.{js,ts,cjs,mjs,jsx,tsx}'", + "lint:sol": "solhint --fix --noPrompt --noPoster 'contracts/**/*.sol'; prettier -w --cache --log-level warn 'contracts/**/*.sol'", + "lint:md": "markdownlint --fix --ignore-path ../../.gitignore '**/*.md'; prettier -w --cache --log-level warn '**/*.md'", + "lint:json": "prettier -w --cache --log-level warn '**/*.json'", + "typechain": "hardhat typechain", + "verify": "hardhat verify", + "size": "hardhat size-contracts", + "forge:build": "forge build" + }, + "files": [ + "artifacts/**/*", + "types/**/*", + "contracts/**/*", + "README.md" + ], + "devDependencies": { + "@graphprotocol/interfaces": "workspace:^", + "@graphprotocol/toolshed": "workspace:^", + "@nomicfoundation/hardhat-ethers": "catalog:", + "@nomicfoundation/hardhat-verify": "catalog:", + "@openzeppelin/contracts": "^5.4.0", + "@openzeppelin/contracts-upgradeable": "^5.4.0", + "@openzeppelin/hardhat-upgrades": "^3.9.0", + "@typechain/ethers-v6": "^0.5.0", + "@typechain/hardhat": "catalog:", + "@types/node": "^20.17.50", + "dotenv": "catalog:", + "eslint": "catalog:", + "ethers": "catalog:", + "glob": "catalog:", + "globals": "catalog:", + "hardhat": "catalog:", + "hardhat-contract-sizer": "catalog:", + "hardhat-secure-accounts": "catalog:", + "hardhat-storage-layout": "catalog:", + "lint-staged": "catalog:", + "markdownlint-cli": "catalog:", + "prettier": "catalog:", + "prettier-plugin-solidity": "catalog:", + "solhint": "catalog:", + "ts-node": "^10.9.2", + "typechain": "^8.3.0", + "typescript": "catalog:", + "typescript-eslint": "catalog:", + "yaml-lint": "catalog:" + }, + "dependencies": { + "@noble/hashes": "^1.8.0" + } +} diff --git a/packages/issuance/prettier.config.cjs b/packages/issuance/prettier.config.cjs new file mode 100644 index 000000000..4e8dcf4f3 --- /dev/null +++ b/packages/issuance/prettier.config.cjs @@ -0,0 +1,5 @@ +const baseConfig = require('../../prettier.config.cjs') + +module.exports = { + ...baseConfig, +} diff --git a/packages/issuance/test/package.json b/packages/issuance/test/package.json new file mode 100644 index 000000000..f362b4c9b --- /dev/null +++ b/packages/issuance/test/package.json @@ -0,0 +1,62 @@ +{ + "name": "@graphprotocol/issuance-test", + "version": "1.0.0", + "private": true, + "description": "Test utilities for @graphprotocol/issuance", + "author": "Edge & Node", + "license": "GPL-2.0-or-later", + "main": "src/index.ts", + "types": "src/index.ts", + "exports": { + ".": { + "default": "./src/index.ts", + "types": "./src/index.ts" + } + }, + "scripts": { + "build": "pnpm build:dep && pnpm build:self", + "build:dep": "pnpm --filter '@graphprotocol/issuance-test^...' run build:self", + "build:self": "tsc --build", + "build:coverage": "pnpm build:dep:coverage && pnpm build:self", + "build:dep:coverage": "pnpm --filter '@graphprotocol/issuance-test^...' run build:coverage", + "clean": "rm -rf .eslintcache artifacts/", + "test": "pnpm build && pnpm test:self", + "test:self": "cd .. && hardhat test", + "test:coverage": "pnpm build:coverage && pnpm test:coverage:self", + "test:coverage:self": "cd .. && npx hardhat coverage --config hardhat.coverage.config.ts", + "lint": "pnpm lint:ts; pnpm lint:json", + "lint:ts": "eslint '**/*.{js,ts,cjs,mjs,jsx,tsx}' --fix --cache; prettier -w --cache --log-level warn '**/*.{js,ts,cjs,mjs,jsx,tsx}'", + "lint:json": "prettier -w --cache --log-level warn '**/*.json'" + }, + "dependencies": { + "@graphprotocol/issuance": "workspace:^", + "@graphprotocol/interfaces": "workspace:^", + "@graphprotocol/contracts": "workspace:^" + }, + "devDependencies": { + "@nomicfoundation/hardhat-chai-matchers": "^2.0.0", + "@nomicfoundation/hardhat-ethers": "catalog:", + "@nomicfoundation/hardhat-foundry": "^1.1.1", + "@nomicfoundation/hardhat-network-helpers": "^1.0.0", + "@nomicfoundation/hardhat-toolbox": "5.0.0", + "@openzeppelin/contracts": "^5.4.0", + "@openzeppelin/contracts-upgradeable": "^5.4.0", + "@openzeppelin/foundry-upgrades": "0.4.0", + "@types/chai": "^4.3.20", + "@types/mocha": "^10.0.10", + "@types/node": "^20.17.50", + "chai": "^4.3.7", + "dotenv": "^16.5.0", + "eslint": "catalog:", + "eslint-plugin-no-only-tests": "catalog:", + "ethers": "catalog:", + "forge-std": "https://github.com/foundry-rs/forge-std/tarball/v1.9.7", + "glob": "catalog:", + "hardhat": "catalog:", + "hardhat-gas-reporter": "catalog:", + "prettier": "catalog:", + "solidity-coverage": "^0.8.0", + "ts-node": "^10.9.2", + "typescript": "catalog:" + } +} diff --git a/packages/issuance/test/prettier.config.cjs b/packages/issuance/test/prettier.config.cjs new file mode 100644 index 000000000..8eb0a0bee --- /dev/null +++ b/packages/issuance/test/prettier.config.cjs @@ -0,0 +1,5 @@ +const baseConfig = require('../prettier.config.cjs') + +module.exports = { + ...baseConfig, +} diff --git a/packages/issuance/test/src/index.ts b/packages/issuance/test/src/index.ts new file mode 100644 index 000000000..614cfd50d --- /dev/null +++ b/packages/issuance/test/src/index.ts @@ -0,0 +1,5 @@ +// Test utilities for @graphprotocol/issuance +// This package contains test files, test helpers, and testing utilities + +// This package provides test utilities for issuance contracts +export const PACKAGE_NAME = '@graphprotocol/issuance-test' diff --git a/packages/issuance/test/tests/common/CommonInterfaceIdStability.test.ts b/packages/issuance/test/tests/common/CommonInterfaceIdStability.test.ts new file mode 100644 index 000000000..e91b12bd2 --- /dev/null +++ b/packages/issuance/test/tests/common/CommonInterfaceIdStability.test.ts @@ -0,0 +1,27 @@ +import { IPausableControl__factory } from '@graphprotocol/interfaces/types' +import { IAccessControl__factory } from '@graphprotocol/issuance/types' +import { expect } from 'chai' + +/** + * Common Interface ID Stability Tests + * + * These tests verify that common interface IDs remain stable across builds. + * These interfaces are used by both allocate and eligibility contracts. + * + * Changes to these IDs indicate breaking changes to the interface definitions. + * + * If a test fails: + * 1. Verify the interface change was intentional + * 2. Understand the impact on deployed contracts + * 3. Update the expected ID if the change is correct + * 4. Document the breaking change in release notes + */ +describe('Common Interface ID Stability', () => { + it('IPausableControl should have stable interface ID', () => { + expect(IPausableControl__factory.interfaceId).to.equal('0xe78a39d8') + }) + + it('IAccessControl should have stable interface ID', () => { + expect(IAccessControl__factory.interfaceId).to.equal('0x7965db0b') + }) +}) diff --git a/packages/issuance/test/tests/common/fixtures.ts b/packages/issuance/test/tests/common/fixtures.ts new file mode 100644 index 000000000..5feaa0e6a --- /dev/null +++ b/packages/issuance/test/tests/common/fixtures.ts @@ -0,0 +1,127 @@ +/** + * Common test fixtures shared by all test domains + * Contains only truly shared functionality used by both allocate and eligibility tests + */ + +import '@nomicfoundation/hardhat-chai-matchers' + +import fs from 'fs' +import hre from 'hardhat' + +const { ethers } = hre +const { upgrades } = require('hardhat') + +import type { SignerWithAddress } from '@nomicfoundation/hardhat-ethers/signers' + +import { GraphTokenHelper } from './graphTokenHelper' + +/** + * Standard test accounts interface + */ +export interface TestAccounts { + governor: SignerWithAddress + nonGovernor: SignerWithAddress + operator: SignerWithAddress + user: SignerWithAddress + indexer1: SignerWithAddress + indexer2: SignerWithAddress + selfMintingTarget: SignerWithAddress +} + +/** + * Get standard test accounts + */ +export async function getTestAccounts(): Promise { + const [governor, nonGovernor, operator, user, indexer1, indexer2, selfMintingTarget] = await ethers.getSigners() + + return { + governor, + nonGovernor, + operator, + user, + indexer1, + indexer2, + selfMintingTarget, + } +} + +/** + * Common constants used in tests + */ +export const Constants = { + PPM: 1_000_000, // Parts per million (100%) + DEFAULT_ISSUANCE_PER_BLOCK: ethers.parseEther('100'), // 100 GRT per block +} + +// Shared test constants +export const SHARED_CONSTANTS = { + PPM: 1_000_000, + + // Pre-calculated role constants to avoid repeated async calls + GOVERNOR_ROLE: ethers.keccak256(ethers.toUtf8Bytes('GOVERNOR_ROLE')), + OPERATOR_ROLE: ethers.keccak256(ethers.toUtf8Bytes('OPERATOR_ROLE')), + PAUSE_ROLE: ethers.keccak256(ethers.toUtf8Bytes('PAUSE_ROLE')), + ORACLE_ROLE: ethers.keccak256(ethers.toUtf8Bytes('ORACLE_ROLE')), +} as const + +/** + * Deploy a test GraphToken for testing + * This uses the real GraphToken contract + * @returns {Promise} + */ +export async function deployTestGraphToken() { + // Get the governor account + const [governor] = await ethers.getSigners() + + // Load the GraphToken artifact directly from the contracts package + const graphTokenArtifactPath = require.resolve( + '@graphprotocol/contracts/artifacts/contracts/token/GraphToken.sol/GraphToken.json', + ) + const GraphTokenArtifact = JSON.parse(fs.readFileSync(graphTokenArtifactPath, 'utf8')) + + // Create a contract factory using the artifact + const GraphTokenFactory = new ethers.ContractFactory(GraphTokenArtifact.abi, GraphTokenArtifact.bytecode, governor) + + // Deploy the contract + const graphToken = await GraphTokenFactory.deploy(ethers.parseEther('1000000000')) + await graphToken.waitForDeployment() + + return graphToken +} + +/** + * Get a GraphTokenHelper for an existing token + * @param {string} tokenAddress The address of the GraphToken + * @param {boolean} [isFork=false] Whether this is running on a forked network + * @returns {Promise} + */ +export async function getGraphTokenHelper(tokenAddress, isFork = false) { + // Get the governor account + const [governor] = await ethers.getSigners() + + // Get the GraphToken at the specified address + const graphToken = await ethers.getContractAt(isFork ? 'IGraphToken' : 'GraphToken', tokenAddress) + + return new GraphTokenHelper(graphToken, governor) +} + +/** + * Upgrade a contract using OpenZeppelin's upgrades library + * This is a generic function that can be used to upgrade any contract + * @param {string} contractAddress + * @param {string} contractName + * @param {any[]} [constructorArgs=[]] + * @returns {Promise} + */ +export async function upgradeContract(contractAddress, contractName, constructorArgs = []) { + // Get the contract factory + const ContractFactory = await ethers.getContractFactory(contractName) + + // Upgrade the contract + const upgradedContractInstance = await upgrades.upgradeProxy(contractAddress, ContractFactory, { + constructorArgs, + }) + + // Return the upgraded contract instance + return upgradedContractInstance +} diff --git a/packages/issuance/test/tests/common/graphTokenHelper.ts b/packages/issuance/test/tests/common/graphTokenHelper.ts new file mode 100644 index 000000000..f4adbcc8a --- /dev/null +++ b/packages/issuance/test/tests/common/graphTokenHelper.ts @@ -0,0 +1,91 @@ +import fs from 'fs' +import hre from 'hardhat' +const { ethers } = hre +import { SignerWithAddress } from '@nomicfoundation/hardhat-ethers/signers' +import { Contract } from 'ethers' + +/** + * Helper class for working with GraphToken in tests + * This provides a consistent interface for minting tokens + * and managing minters + */ +export class GraphTokenHelper { + private graphToken: Contract + private governor: SignerWithAddress + + /** + * Create a new GraphTokenHelper + * @param graphToken The GraphToken instance + * @param governor The governor account + */ + constructor(graphToken: Contract, governor: SignerWithAddress) { + this.graphToken = graphToken + this.governor = governor + } + + /** + * Get the GraphToken instance + */ + getToken(): Contract { + return this.graphToken + } + + /** + * Get the GraphToken address + */ + async getAddress(): Promise { + return await this.graphToken.getAddress() + } + + /** + * Mint tokens to an address + */ + async mint(to: string, amount: bigint): Promise { + await (this.graphToken as any).connect(this.governor).mint(to, amount) + } + + /** + * Add a minter to the GraphToken + */ + async addMinter(minter: string): Promise { + await (this.graphToken as any).connect(this.governor).addMinter(minter) + } + + /** + * Deploy a new GraphToken for testing + * @param {SignerWithAddress} governor The governor account + * @returns {Promise} + */ + static async deploy(governor) { + // Load the GraphToken artifact directly from the contracts package + const graphTokenArtifactPath = require.resolve( + '@graphprotocol/contracts/artifacts/contracts/token/GraphToken.sol/GraphToken.json', + ) + const GraphTokenArtifact = JSON.parse(fs.readFileSync(graphTokenArtifactPath, 'utf8')) + + // Create a contract factory using the artifact + const GraphTokenFactory = new ethers.ContractFactory(GraphTokenArtifact.abi, GraphTokenArtifact.bytecode, governor) + + // Deploy the contract + const graphToken = await GraphTokenFactory.deploy(ethers.parseEther('1000000000')) + await graphToken.waitForDeployment() + + return new GraphTokenHelper(graphToken as any, governor) + } + + /** + * Create a GraphTokenHelper for an existing GraphToken on a forked network + * @param {string} tokenAddress The GraphToken address + * @param {SignerWithAddress} governor The governor account + * @returns {Promise} + */ + static async forFork(tokenAddress, governor) { + // Get the GraphToken at the specified address + const graphToken = await ethers.getContractAt('IGraphToken', tokenAddress) + + // Create a helper + const helper = new GraphTokenHelper(graphToken as any, governor) + + return helper + } +} diff --git a/packages/issuance/test/tests/common/testPatterns.ts b/packages/issuance/test/tests/common/testPatterns.ts new file mode 100644 index 000000000..5af5bc73c --- /dev/null +++ b/packages/issuance/test/tests/common/testPatterns.ts @@ -0,0 +1,52 @@ +/** + * Common test patterns shared by both allocate and eligibility tests + */ + +import { expect } from 'chai' + +/** + * Comprehensive interface compliance test suite + * Replaces multiple individual interface support tests + * + * @param contractGetter - Function that returns the contract instance to test + * @param interfaces - Array of Typechain factory classes with interfaceId and interfaceName + * + * @example + * import { IPausableControl__factory, IAccessControl__factory } from '@graphprotocol/interfaces/types' + * + * shouldSupportInterfaces( + * () => contract, + * [ + * IPausableControl__factory, + * IAccessControl__factory, + * ] + * ) + */ +export function shouldSupportInterfaces( + contractGetter: () => T, + interfaces: Array<{ + interfaceId: string + interfaceName: string + }>, +) { + return function () { + describe('Interface Compliance', () => { + it('should support ERC-165 interface', async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface('0x01ffc9a7')).to.be.true + }) + + interfaces.forEach((iface) => { + it(`should support ${iface.interfaceName} interface`, async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface(iface.interfaceId)).to.be.true + }) + }) + + it('should not support random interface', async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface('0x12345678')).to.be.false + }) + }) + } +} diff --git a/packages/issuance/test/tsconfig.json b/packages/issuance/test/tsconfig.json new file mode 100644 index 000000000..dfecc9bcf --- /dev/null +++ b/packages/issuance/test/tsconfig.json @@ -0,0 +1,25 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "target": "es2022", + "module": "ESNext", + "moduleResolution": "bundler", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "strict": false, + "skipLibCheck": true, + "resolveJsonModule": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "allowJs": true, + "checkJs": false, + "incremental": true, + "noEmitOnError": false, + "noImplicitAny": false, + "outDir": "./artifacts" + }, + "include": ["tests/**/*", "utils/**/*", "../types/**/*"], + "exclude": ["node_modules", "build", "scripts/**/*"] +} diff --git a/packages/issuance/tsconfig.json b/packages/issuance/tsconfig.json new file mode 100644 index 000000000..00aa1b8ef --- /dev/null +++ b/packages/issuance/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "es2023", + "lib": ["es2023"], + "module": "Node16", + "moduleResolution": "node16", + "strict": true, + "esModuleInterop": true, + "declaration": true, + "resolveJsonModule": true, + "allowJs": true, + "checkJs": false, + "incremental": true + }, + + "include": ["./scripts", "./test", "./typechain"], + "files": ["./hardhat.config.cjs"] +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1da271388..07029ae86 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -21,12 +21,21 @@ catalogs: '@nomicfoundation/hardhat-ethers': specifier: ^3.1.0 version: 3.1.0 + '@nomicfoundation/hardhat-verify': + specifier: ^2.0.10 + version: 2.1.1 + '@typechain/hardhat': + specifier: ^9.0.0 + version: 9.1.0 '@typescript-eslint/eslint-plugin': specifier: ^8.46.1 version: 8.46.2 '@typescript-eslint/parser': specifier: ^8.46.1 version: 8.46.2 + dotenv: + specifier: ^16.5.0 + version: 16.6.1 eslint: specifier: ^9.37.0 version: 9.38.0 @@ -66,6 +75,9 @@ catalogs: hardhat-ignore-warnings: specifier: ^0.2.12 version: 0.2.12 + hardhat-secure-accounts: + specifier: ^1.0.5 + version: 1.0.5 hardhat-storage-layout: specifier: ^0.1.7 version: 0.1.7 @@ -956,6 +968,185 @@ importers: specifier: ^2.31.7 version: 2.37.6(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@3.25.76) + packages/issuance: + dependencies: + '@noble/hashes': + specifier: ^1.8.0 + version: 1.8.0 + devDependencies: + '@graphprotocol/interfaces': + specifier: workspace:^ + version: link:../interfaces + '@graphprotocol/toolshed': + specifier: workspace:^ + version: link:../toolshed + '@nomicfoundation/hardhat-ethers': + specifier: 'catalog:' + version: 3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-verify': + specifier: 'catalog:' + version: 2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@openzeppelin/contracts': + specifier: ^5.4.0 + version: 5.4.0 + '@openzeppelin/contracts-upgradeable': + specifier: ^5.4.0 + version: 5.4.0(@openzeppelin/contracts@5.4.0) + '@openzeppelin/hardhat-upgrades': + specifier: ^3.9.0 + version: 3.9.1(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(@nomicfoundation/hardhat-verify@2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(encoding@0.1.13)(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@typechain/ethers-v6': + specifier: ^0.5.0 + version: 0.5.1(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3))(typescript@5.9.3) + '@typechain/hardhat': + specifier: 'catalog:' + version: 9.1.0(@typechain/ethers-v6@0.5.1(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3))(typescript@5.9.3))(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3)) + '@types/node': + specifier: ^20.17.50 + version: 20.19.14 + dotenv: + specifier: 'catalog:' + version: 16.6.1 + eslint: + specifier: 'catalog:' + version: 9.38.0(jiti@2.5.1) + ethers: + specifier: 'catalog:' + version: 6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + glob: + specifier: 'catalog:' + version: 11.0.3 + globals: + specifier: 'catalog:' + version: 16.4.0 + hardhat: + specifier: 'catalog:' + version: 2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10) + hardhat-contract-sizer: + specifier: 'catalog:' + version: 2.10.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + hardhat-secure-accounts: + specifier: 'catalog:' + version: 1.0.5(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + hardhat-storage-layout: + specifier: 'catalog:' + version: 0.1.7(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + lint-staged: + specifier: 'catalog:' + version: 16.2.6 + markdownlint-cli: + specifier: 'catalog:' + version: 0.45.0 + prettier: + specifier: 'catalog:' + version: 3.6.2 + prettier-plugin-solidity: + specifier: 'catalog:' + version: 2.1.0(prettier@3.6.2) + solhint: + specifier: 'catalog:' + version: 6.0.1(typescript@5.9.3) + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@20.19.14)(typescript@5.9.3) + typechain: + specifier: ^8.3.0 + version: 8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3) + typescript: + specifier: 'catalog:' + version: 5.9.3 + typescript-eslint: + specifier: 'catalog:' + version: 8.46.2(eslint@9.38.0(jiti@2.5.1))(typescript@5.9.3) + yaml-lint: + specifier: 'catalog:' + version: 1.7.0 + + packages/issuance/test: + dependencies: + '@graphprotocol/contracts': + specifier: workspace:^ + version: link:../../contracts + '@graphprotocol/interfaces': + specifier: workspace:^ + version: link:../../interfaces + '@graphprotocol/issuance': + specifier: workspace:^ + version: link:.. + devDependencies: + '@nomicfoundation/hardhat-chai-matchers': + specifier: ^2.0.0 + version: 2.1.0(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(chai@4.5.0)(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-ethers': + specifier: 'catalog:' + version: 3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-foundry': + specifier: ^1.1.1 + version: 1.2.0(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-network-helpers': + specifier: ^1.0.0 + version: 1.1.0(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-toolbox': + specifier: 5.0.0 + version: 5.0.0(d4ea276d64fbf8f2a60adf85f1748ee6) + '@openzeppelin/contracts': + specifier: ^5.4.0 + version: 5.4.0 + '@openzeppelin/contracts-upgradeable': + specifier: ^5.4.0 + version: 5.4.0(@openzeppelin/contracts@5.4.0) + '@openzeppelin/foundry-upgrades': + specifier: 0.4.0 + version: 0.4.0(@openzeppelin/defender-deploy-client-cli@0.0.1-alpha.10(encoding@0.1.13))(@openzeppelin/upgrades-core@1.44.1) + '@types/chai': + specifier: ^4.3.20 + version: 4.3.20 + '@types/mocha': + specifier: ^10.0.10 + version: 10.0.10 + '@types/node': + specifier: ^20.17.50 + version: 20.19.14 + chai: + specifier: ^4.3.7 + version: 4.5.0 + dotenv: + specifier: ^16.5.0 + version: 16.6.1 + eslint: + specifier: 'catalog:' + version: 9.38.0(jiti@2.5.1) + eslint-plugin-no-only-tests: + specifier: 'catalog:' + version: 3.3.0 + ethers: + specifier: 'catalog:' + version: 6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + forge-std: + specifier: https://github.com/foundry-rs/forge-std/tarball/v1.9.7 + version: https://github.com/foundry-rs/forge-std/tarball/v1.9.7 + glob: + specifier: 'catalog:' + version: 11.0.3 + hardhat: + specifier: 'catalog:' + version: 2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10) + hardhat-gas-reporter: + specifier: 'catalog:' + version: 1.0.10(bufferutil@4.0.9)(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(utf-8-validate@5.0.10) + prettier: + specifier: 'catalog:' + version: 3.6.2 + solidity-coverage: + specifier: ^0.8.0 + version: 0.8.16(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@20.19.14)(typescript@5.9.3) + typescript: + specifier: 'catalog:' + version: 5.9.3 + packages/subgraph-service: devDependencies: '@graphprotocol/contracts': @@ -3290,6 +3481,28 @@ packages: typechain: ^8.3.0 typescript: '>=4.5.0' + '@nomicfoundation/hardhat-toolbox@5.0.0': + resolution: {integrity: sha512-FnUtUC5PsakCbwiVNsqlXVIWG5JIb5CEZoSXbJUsEBun22Bivx2jhF1/q9iQbzuaGpJKFQyOhemPB2+XlEE6pQ==} + peerDependencies: + '@nomicfoundation/hardhat-chai-matchers': ^2.0.0 + '@nomicfoundation/hardhat-ethers': ^3.0.0 + '@nomicfoundation/hardhat-ignition-ethers': ^0.15.0 + '@nomicfoundation/hardhat-network-helpers': ^1.0.0 + '@nomicfoundation/hardhat-verify': ^2.0.0 + '@typechain/ethers-v6': ^0.5.0 + '@typechain/hardhat': ^9.0.0 + '@types/chai': ^4.2.0 + '@types/mocha': '>=9.1.0' + '@types/node': ^20.17.50 + chai: ^4.2.0 + ethers: ^6.4.0 + hardhat: ^2.11.0 + hardhat-gas-reporter: ^1.0.8 + solidity-coverage: ^0.8.1 + ts-node: '>=8.0.0' + typechain: ^8.3.0 + typescript: '>=4.5.0' + '@nomicfoundation/hardhat-verify@2.1.1': resolution: {integrity: sha512-K1plXIS42xSHDJZRkrE2TZikqxp9T4y6jUMUNI/imLgN5uCcEQokmfU0DlyP9zzHncYK92HlT5IWP35UVCLrPw==} peerDependencies: @@ -3425,6 +3638,18 @@ packages: '@nomiclabs/harhdat-etherscan': optional: true + '@openzeppelin/hardhat-upgrades@3.9.1': + resolution: {integrity: sha512-pSDjlOnIpP+PqaJVe144dK6VVKZw2v6YQusyt0OOLiCsl+WUzfo4D0kylax7zjrOxqy41EK2ipQeIF4T+cCn2A==} + hasBin: true + peerDependencies: + '@nomicfoundation/hardhat-ethers': ^3.0.6 + '@nomicfoundation/hardhat-verify': ^2.0.14 + ethers: ^6.6.0 + hardhat: ^2.24.1 + peerDependenciesMeta: + '@nomicfoundation/hardhat-verify': + optional: true + '@openzeppelin/platform-deploy-client@0.8.0': resolution: {integrity: sha512-POx3AsnKwKSV/ZLOU/gheksj0Lq7Is1q2F3pKmcFjGZiibf+4kjGxr4eSMrT+2qgKYZQH1ZLQZ+SkbguD8fTvA==} deprecated: '@openzeppelin/platform-deploy-client is deprecated. Please use @openzeppelin/defender-sdk-deploy-client' @@ -11244,6 +11469,10 @@ packages: resolution: {integrity: sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==} engines: {node: '>=14.0'} + undici@6.22.0: + resolution: {integrity: sha512-hU/10obOIu62MGYjdskASR3CUAiYaFTtC9Pa6vHyf//mAipSvSQg6od2CnJswq7fvzNS3zJhxoRkgNVaHurWKw==} + engines: {node: '>=18.17'} + unfetch@4.2.0: resolution: {integrity: sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA==} @@ -13287,7 +13516,7 @@ snapshots: '@ethereumjs/common@2.6.0': dependencies: crc-32: 1.2.2 - ethereumjs-util: 7.1.3 + ethereumjs-util: 7.1.5 '@ethereumjs/common@2.6.5': dependencies: @@ -13309,7 +13538,7 @@ snapshots: '@ethereumjs/tx@3.4.0': dependencies: '@ethereumjs/common': 2.6.0 - ethereumjs-util: 7.1.3 + ethereumjs-util: 7.1.5 '@ethereumjs/tx@3.5.2': dependencies: @@ -13336,7 +13565,7 @@ snapshots: async-eventemitter: 0.2.4 core-js-pure: 3.45.1 debug: 2.6.9 - ethereumjs-util: 7.1.3 + ethereumjs-util: 7.1.5 functional-red-black-tree: 1.0.1 mcl-wasm: 0.7.9 merkle-patricia-tree: 4.2.4 @@ -15560,6 +15789,27 @@ snapshots: typechain: 8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3) typescript: 5.9.3 + '@nomicfoundation/hardhat-toolbox@5.0.0(d4ea276d64fbf8f2a60adf85f1748ee6)': + dependencies: + '@nomicfoundation/hardhat-chai-matchers': 2.1.0(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(chai@4.5.0)(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-ethers': 3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-ignition-ethers': 0.15.14(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(@nomicfoundation/hardhat-ignition@0.15.13(@nomicfoundation/hardhat-verify@2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(bufferutil@4.0.9)(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(utf-8-validate@5.0.10))(@nomicfoundation/ignition-core@0.15.13(bufferutil@4.0.9)(utf-8-validate@5.0.10))(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-network-helpers': 1.1.0(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@nomicfoundation/hardhat-verify': 2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@typechain/ethers-v6': 0.5.1(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3))(typescript@5.9.3) + '@typechain/hardhat': 9.1.0(@typechain/ethers-v6@0.5.1(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3))(typescript@5.9.3))(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(typechain@8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3)) + '@types/chai': 4.3.20 + '@types/mocha': 10.0.10 + '@types/node': 20.19.14 + chai: 4.5.0 + ethers: 6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + hardhat: 2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10) + hardhat-gas-reporter: 1.0.10(bufferutil@4.0.9)(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))(utf-8-validate@5.0.10) + solidity-coverage: 0.8.16(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + ts-node: 10.9.2(@types/node@20.19.14)(typescript@5.9.3) + typechain: 8.3.2(patch_hash=b34ed6afcf99760666fdc85ecb2094fdd20ce509f947eb09cef21665a2a6a1d6)(typescript@5.9.3) + typescript: 5.9.3 + '@nomicfoundation/hardhat-verify@2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))': dependencies: '@ethersproject/abi': 5.8.0 @@ -15726,8 +15976,8 @@ snapshots: '@openzeppelin/defender-deploy-client-cli@0.0.1-alpha.10(encoding@0.1.13)': dependencies: '@openzeppelin/defender-sdk-base-client': 2.7.0(encoding@0.1.13) - '@openzeppelin/defender-sdk-deploy-client': 2.7.0(encoding@0.1.13) - '@openzeppelin/defender-sdk-network-client': 2.7.0(encoding@0.1.13) + '@openzeppelin/defender-sdk-deploy-client': 2.7.0(debug@4.4.3)(encoding@0.1.13) + '@openzeppelin/defender-sdk-network-client': 2.7.0(debug@4.4.3)(encoding@0.1.13) dotenv: 16.6.1 minimist: 1.2.8 transitivePeerDependencies: @@ -15744,7 +15994,7 @@ snapshots: - aws-crt - encoding - '@openzeppelin/defender-sdk-deploy-client@2.7.0(encoding@0.1.13)': + '@openzeppelin/defender-sdk-deploy-client@2.7.0(debug@4.4.3)(encoding@0.1.13)': dependencies: '@openzeppelin/defender-sdk-base-client': 2.7.0(encoding@0.1.13) axios: 1.12.2(debug@4.4.3) @@ -15754,7 +16004,7 @@ snapshots: - debug - encoding - '@openzeppelin/defender-sdk-network-client@2.7.0(encoding@0.1.13)': + '@openzeppelin/defender-sdk-network-client@2.7.0(debug@4.4.3)(encoding@0.1.13)': dependencies: '@openzeppelin/defender-sdk-base-client': 2.7.0(encoding@0.1.13) axios: 1.12.2(debug@4.4.3) @@ -15785,6 +16035,27 @@ snapshots: - encoding - supports-color + '@openzeppelin/hardhat-upgrades@3.9.1(@nomicfoundation/hardhat-ethers@3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(@nomicfoundation/hardhat-verify@2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)))(encoding@0.1.13)(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10))': + dependencies: + '@nomicfoundation/hardhat-ethers': 3.1.0(ethers@6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10))(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + '@openzeppelin/defender-sdk-base-client': 2.7.0(encoding@0.1.13) + '@openzeppelin/defender-sdk-deploy-client': 2.7.0(debug@4.4.3)(encoding@0.1.13) + '@openzeppelin/defender-sdk-network-client': 2.7.0(debug@4.4.3)(encoding@0.1.13) + '@openzeppelin/upgrades-core': 1.44.1 + chalk: 4.1.2 + debug: 4.4.3(supports-color@9.4.0) + ethereumjs-util: 7.1.5 + ethers: 6.15.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + hardhat: 2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10) + proper-lockfile: 4.1.2 + undici: 6.22.0 + optionalDependencies: + '@nomicfoundation/hardhat-verify': 2.1.1(hardhat@2.26.3(bufferutil@4.0.9)(ts-node@10.9.2(@types/node@20.19.14)(typescript@5.9.3))(typescript@5.9.3)(utf-8-validate@5.0.10)) + transitivePeerDependencies: + - aws-crt + - encoding + - supports-color + '@openzeppelin/platform-deploy-client@0.8.0(debug@4.4.3)(encoding@0.1.13)': dependencies: '@ethersproject/abi': 5.8.0 @@ -26240,6 +26511,8 @@ snapshots: dependencies: '@fastify/busboy': 2.1.1 + undici@6.22.0: {} + unfetch@4.2.0: {} unicorn-magic@0.1.0: {} From dbf5d2b7d98d829a8ff481344700b6733206859b Mon Sep 17 00:00:00 2001 From: Rembrandt Kuipers <50174308+RembrandtK@users.noreply.github.com> Date: Wed, 22 Oct 2025 20:36:32 +0000 Subject: [PATCH 2/9] feat: issuance allocator --- .../IIssuanceAllocationAdministration.sol | 94 + .../allocate/IIssuanceAllocationData.sol | 22 + .../allocate/IIssuanceAllocationStatus.sol | 71 + .../allocate/IIssuanceAllocatorTypes.sol | 24 + .../issuance/allocate/ISendTokens.sol | 19 + .../contracts/allocate/DirectAllocation.sol | 101 + .../contracts/allocate/IssuanceAllocator.md | 403 ++ .../contracts/allocate/IssuanceAllocator.sol | 740 ++++ .../contracts/test/allocate/MockERC165.sol | 20 + .../test/allocate/MockRevertingTarget.sol | 34 + .../test/allocate/MockSimpleTarget.sol | 24 + .../test/tests/allocate/AccessControl.test.ts | 200 + .../tests/allocate/DirectAllocation.test.ts | 291 ++ .../allocate/InterfaceCompliance.test.ts | 69 + .../allocate/InterfaceIdStability.test.ts | 47 + .../tests/allocate/IssuanceAllocator.test.ts | 3521 +++++++++++++++++ .../tests/allocate/IssuanceSystem.test.ts | 134 + .../test/tests/allocate/commonTestUtils.ts | 46 + .../issuance/test/tests/allocate/fixtures.ts | 91 + .../tests/allocate/issuanceCalculations.ts | 154 + .../tests/allocate/optimizationHelpers.ts | 59 + .../test/tests/allocate/optimizedFixtures.ts | 310 ++ .../test/tests/allocate/testPatterns.ts | 583 +++ 23 files changed, 7057 insertions(+) create mode 100644 packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol create mode 100644 packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol create mode 100644 packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol create mode 100644 packages/interfaces/contracts/issuance/allocate/ISendTokens.sol create mode 100644 packages/issuance/contracts/allocate/DirectAllocation.sol create mode 100644 packages/issuance/contracts/allocate/IssuanceAllocator.md create mode 100644 packages/issuance/contracts/allocate/IssuanceAllocator.sol create mode 100644 packages/issuance/contracts/test/allocate/MockERC165.sol create mode 100644 packages/issuance/contracts/test/allocate/MockRevertingTarget.sol create mode 100644 packages/issuance/contracts/test/allocate/MockSimpleTarget.sol create mode 100644 packages/issuance/test/tests/allocate/AccessControl.test.ts create mode 100644 packages/issuance/test/tests/allocate/DirectAllocation.test.ts create mode 100644 packages/issuance/test/tests/allocate/InterfaceCompliance.test.ts create mode 100644 packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts create mode 100644 packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts create mode 100644 packages/issuance/test/tests/allocate/IssuanceSystem.test.ts create mode 100644 packages/issuance/test/tests/allocate/commonTestUtils.ts create mode 100644 packages/issuance/test/tests/allocate/fixtures.ts create mode 100644 packages/issuance/test/tests/allocate/issuanceCalculations.ts create mode 100644 packages/issuance/test/tests/allocate/optimizationHelpers.ts create mode 100644 packages/issuance/test/tests/allocate/optimizedFixtures.ts create mode 100644 packages/issuance/test/tests/allocate/testPatterns.ts diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol new file mode 100644 index 000000000..23bc7ea05 --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol @@ -0,0 +1,94 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title IIssuanceAllocationAdministration + * @author Edge & Node + * @notice Interface for administrative operations on the issuance allocator. + * These functions are typically restricted to the governor role. + */ +interface IIssuanceAllocationAdministration { + /** + * @notice Set the issuance per block. + * @param newIssuancePerBlock New issuance per block + * @param evenIfDistributionPending If true, set even if there is pending issuance distribution + * @return True if the value is applied (including if already the case), false if not applied due to paused state + */ + function setIssuancePerBlock(uint256 newIssuancePerBlock, bool evenIfDistributionPending) external returns (bool); + + /** + * @notice Set the allocation for a target with only allocator minting + * @param target Address of the target to update + * @param allocatorMintingPPM Allocator-minting allocation for the target (in PPM) + * @return True if the value is applied (including if already the case), false if not applied + * @dev This variant sets selfMintingPPM to 0 and evenIfDistributionPending to false + */ + function setTargetAllocation(address target, uint256 allocatorMintingPPM) external returns (bool); + + /** + * @notice Set the allocation for a target with both allocator and self minting + * @param target Address of the target to update + * @param allocatorMintingPPM Allocator-minting allocation for the target (in PPM) + * @param selfMintingPPM Self-minting allocation for the target (in PPM) + * @return True if the value is applied (including if already the case), false if not applied + * @dev This variant sets evenIfDistributionPending to false + */ + function setTargetAllocation( + address target, + uint256 allocatorMintingPPM, + uint256 selfMintingPPM + ) external returns (bool); + + /** + * @notice Set the allocation for a target + * @param target Address of the target to update + * @param allocatorMintingPPM Allocator-minting allocation for the target (in PPM) + * @param selfMintingPPM Self-minting allocation for the target (in PPM) + * @param evenIfDistributionPending Whether to force the allocation change even if issuance has not been distributed up to the current block + * @return True if the value is applied (including if already the case), false if not applied + */ + function setTargetAllocation( + address target, + uint256 allocatorMintingPPM, + uint256 selfMintingPPM, + bool evenIfDistributionPending + ) external returns (bool); + + /** + * @notice Notify a specific target about an upcoming allocation change + * @param target Address of the target to notify + * @return True if notification was sent or already sent this block, false otherwise + */ + function notifyTarget(address target) external returns (bool); + + /** + * @notice Force set the lastChangeNotifiedBlock for a target to a specific block number + * @param target Address of the target to update + * @param blockNumber Block number to set as the lastChangeNotifiedBlock + * @return The block number that was set + * @dev This can be used to enable notification to be sent again (by setting to a past block) + * @dev or to prevent notification until a future block (by setting to current or future block). + */ + function forceTargetNoChangeNotificationBlock(address target, uint256 blockNumber) external returns (uint256); + + /** + * @notice Distribute any pending accumulated issuance to allocator-minting targets. + * @return Block number up to which issuance has been distributed + * @dev This function can be called even when the contract is paused. + * @dev If there is no pending issuance, this function is a no-op. + * @dev If allocatorMintingAllowance is 0 (all targets are self-minting), this function is a no-op. + */ + function distributePendingIssuance() external returns (uint256); + + /** + * @notice Distribute any pending accumulated issuance to allocator-minting targets, accumulating up to a specific block. + * @param toBlockNumber The block number to accumulate pending issuance up to (must be >= lastIssuanceAccumulationBlock and <= current block) + * @return Block number up to which issuance has been distributed + * @dev This function can be called even when the contract is paused. + * @dev Accumulates pending issuance up to the specified block, then distributes all accumulated issuance. + * @dev If there is no pending issuance after accumulation, this function is a no-op for distribution. + * @dev If allocatorMintingAllowance is 0 (all targets are self-minting), this function is a no-op for distribution. + */ + function distributePendingIssuance(uint256 toBlockNumber) external returns (uint256); +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol new file mode 100644 index 000000000..f1e35d91d --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol @@ -0,0 +1,22 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; +pragma abicoder v2; + +import { AllocationTarget } from "./IIssuanceAllocatorTypes.sol"; + +/** + * @title IIssuanceAllocationData + * @author Edge & Node + * @notice Interface for querying issuance allocation target data + * @dev This interface provides access to internal allocation target information, + * primarily useful for operators and off-chain monitoring systems. + */ +interface IIssuanceAllocationData { + /** + * @notice Get target data for a specific target + * @param target Address of the target + * @return AllocationTarget struct containing target information including lastChangeNotifiedBlock + */ + function getTargetData(address target) external view returns (AllocationTarget memory); +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol new file mode 100644 index 000000000..baf70116c --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol @@ -0,0 +1,71 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; +pragma abicoder v2; + +import { Allocation } from "./IIssuanceAllocatorTypes.sol"; + +/** + * @title IIssuanceAllocationStatus + * @author Edge & Node + * @notice Interface for read-only status and query operations on the issuance allocator. + * All functions in this interface are view functions that provide information about + * the current state of the allocator, including allocations and system status. + */ +interface IIssuanceAllocationStatus { + /** + * @notice Get the current allocation for a target + * @param target Address of the target + * @return Allocation struct containing total, allocator-minting, and self-minting allocations + */ + function getTargetAllocation(address target) external view returns (Allocation memory); + + /** + * @notice Get the current global allocation totals + * @return Allocation struct containing total, allocator-minting, and self-minting allocations across all targets + */ + function getTotalAllocation() external view returns (Allocation memory); + + /** + * @notice Get all allocated target addresses + * @return Array of target addresses + */ + function getTargets() external view returns (address[] memory); + + /** + * @notice Get a specific allocated target address by index + * @param index The index of the target address to retrieve + * @return The target address at the specified index + */ + function getTargetAt(uint256 index) external view returns (address); + + /** + * @notice Get the number of allocated targets + * @return The total number of allocated targets + */ + function getTargetCount() external view returns (uint256); + + /** + * @notice Get the current issuance per block + * @return The current issuance per block + */ + function issuancePerBlock() external view returns (uint256); + + /** + * @notice Get the last block number where issuance was distributed + * @return The last block number where issuance was distributed + */ + function lastIssuanceDistributionBlock() external view returns (uint256); + + /** + * @notice Get the last block number where issuance was accumulated during pause + * @return The last block number where issuance was accumulated during pause + */ + function lastIssuanceAccumulationBlock() external view returns (uint256); + + /** + * @notice Get the amount of pending accumulated allocator issuance + * @return The amount of pending accumulated allocator issuance + */ + function pendingAccumulatedAllocatorIssuance() external view returns (uint256); +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol index b4a5d33a7..3a410da37 100644 --- a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol @@ -16,3 +16,27 @@ struct TargetIssuancePerBlock { uint256 selfIssuancePerBlock; uint256 selfIssuanceBlockAppliedTo; } + +/** + * @notice Allocation information + * @param totalAllocationPPM Total allocation in PPM (allocatorMintingAllocationPPM + selfMintingAllocationPPM) + * @param allocatorMintingPPM Allocator-minting allocation in PPM (Parts Per Million) + * @param selfMintingPPM Self-minting allocation in PPM (Parts Per Million) + */ +struct Allocation { + uint256 totalAllocationPPM; + uint256 allocatorMintingPPM; + uint256 selfMintingPPM; +} + +/** + * @notice Allocation target information + * @param allocatorMintingPPM The allocator-minting allocation amount in PPM (Parts Per Million) + * @param selfMintingPPM The self-minting allocation amount in PPM (Parts Per Million) + * @param lastChangeNotifiedBlock Last block when this target was notified of changes + */ +struct AllocationTarget { + uint256 allocatorMintingPPM; + uint256 selfMintingPPM; + uint256 lastChangeNotifiedBlock; +} diff --git a/packages/interfaces/contracts/issuance/allocate/ISendTokens.sol b/packages/interfaces/contracts/issuance/allocate/ISendTokens.sol new file mode 100644 index 000000000..3f67358ae --- /dev/null +++ b/packages/interfaces/contracts/issuance/allocate/ISendTokens.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title ISendTokens + * @author Edge & Node + * @notice Interface for contracts that can send tokens to arbitrary addresses + * @dev This interface provides a simple token transfer capability for contracts + * that need to distribute or send tokens programmatically. + */ +interface ISendTokens { + /** + * @notice Send tokens to a specified address + * @param to The address to send tokens to + * @param amount The amount of tokens to send + */ + function sendTokens(address to, uint256 amount) external; +} diff --git a/packages/issuance/contracts/allocate/DirectAllocation.sol b/packages/issuance/contracts/allocate/DirectAllocation.sol new file mode 100644 index 000000000..cbc042c14 --- /dev/null +++ b/packages/issuance/contracts/allocate/DirectAllocation.sol @@ -0,0 +1,101 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { ISendTokens } from "@graphprotocol/interfaces/contracts/issuance/allocate/ISendTokens.sol"; +import { BaseUpgradeable } from "../common/BaseUpgradeable.sol"; + +// solhint-disable-next-line no-unused-import +import { ERC165Upgradeable } from "@openzeppelin/contracts-upgradeable/utils/introspection/ERC165Upgradeable.sol"; // Used by @inheritdoc + +/** + * @title DirectAllocation + * @author Edge & Node + * @notice A simple contract that receives tokens from the IssuanceAllocator and allows + * an authorized operator to withdraw them. + * + * @dev This contract is designed to be an allocator-minting target in the IssuanceAllocator. + * The IssuanceAllocator will mint tokens directly to this contract, and the authorized + * operator can send them to individual addresses as needed. + * + * This contract is pausable by the PAUSE_ROLE. When paused, tokens cannot be sent. + * @custom:security-contact Please email security+contracts@thegraph.com if you find any bugs. We might have an active bug bounty program. + */ +contract DirectAllocation is BaseUpgradeable, IIssuanceTarget, ISendTokens { + // -- Custom Errors -- + + /// @notice Thrown when token transfer fails + /// @param to The address that the transfer was attempted to + /// @param amount The amount of tokens that failed to transfer + error SendTokensFailed(address to, uint256 amount); + + // -- Events -- + + /// @notice Emitted when tokens are sent + /// @param to The address that received the tokens + /// @param amount The amount of tokens sent + event TokensSent(address indexed to, uint256 indexed amount); + // Do not need to index amount, ignoring gas-indexed-events warning. + + /// @notice Emitted before the issuance allocation changes + event BeforeIssuanceAllocationChange(); + + // -- Constructor -- + + /** + * @notice Constructor for the DirectAllocation contract + * @dev This contract is upgradeable, but we use the constructor to pass the Graph Token address + * to the base contract. + * @param graphToken Address of the Graph Token contract + * @custom:oz-upgrades-unsafe-allow constructor + */ + constructor(address graphToken) BaseUpgradeable(graphToken) {} + + // -- Initialization -- + + /** + * @notice Initialize the DirectAllocation contract + * @param governor Address that will have the GOVERNOR_ROLE + */ + function initialize(address governor) external virtual initializer { + __BaseUpgradeable_init(governor); + } + + // -- ERC165 -- + + /** + * @inheritdoc ERC165Upgradeable + */ + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return + interfaceId == type(IIssuanceTarget).interfaceId || + interfaceId == type(ISendTokens).interfaceId || + super.supportsInterface(interfaceId); + } + + // -- External Functions -- + + /** + * @inheritdoc ISendTokens + */ + function sendTokens(address to, uint256 amount) external override onlyRole(OPERATOR_ROLE) whenNotPaused { + require(GRAPH_TOKEN.transfer(to, amount), SendTokensFailed(to, amount)); + emit TokensSent(to, amount); + } + + /** + * @dev For DirectAllocation, this is a no-op since we don't need to perform any calculations + * before an allocation change. We simply receive tokens from the IssuanceAllocator. + * @inheritdoc IIssuanceTarget + */ + function beforeIssuanceAllocationChange() external virtual override { + emit BeforeIssuanceAllocationChange(); + } + + /** + * @dev No-op for DirectAllocation; issuanceAllocator is not stored. + * @inheritdoc IIssuanceTarget + */ + function setIssuanceAllocator(address issuanceAllocator) external virtual override onlyRole(GOVERNOR_ROLE) {} +} diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.md b/packages/issuance/contracts/allocate/IssuanceAllocator.md new file mode 100644 index 000000000..d624b9894 --- /dev/null +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.md @@ -0,0 +1,403 @@ +# IssuanceAllocator + +The IssuanceAllocator is a smart contract responsible for allocating token issuance to different components of The Graph protocol. It calculates issuance for all targets based on their configured proportions and handles minting for allocator-minting targets. + +## Overview + +The contract operates as a central distribution hub for newly minted Graph tokens, ensuring that different protocol components receive their allocated share of token issuance according to predefined proportions. It supports both allocator-minting targets (recommended for new targets) and self-minting targets (for backwards compatibility), with the ability to have mixed allocations primarily for migration scenarios. + +## Architecture + +### Allocation Types + +The contract supports two types of allocation: + +1. **Allocator-minting allocation**: The IssuanceAllocator calculates and mints tokens directly to targets. This is the recommended approach for new targets as it provides robust control over token issuance through the IssuanceAllocator. + +2. **Self-minting allocation**: The IssuanceAllocator calculates issuance but does not mint tokens directly. Instead, targets call `getTargetIssuancePerBlock()` to determine their allocation and mint tokens themselves. This feature exists primarily for backwards compatibility with existing contracts like the RewardsManager. + +While targets can technically have both types of allocation simultaneously, this is not the expected configuration. (It could be useful for migration scenarios where a self-minting target is gradually transitioning to allocator-minting allocation.) + +### Roles + +The contract uses role-based access control: + +- **GOVERNOR_ROLE**: Can set issuance rates, manage target allocations, notify targets, and perform all governance actions +- **PAUSE_ROLE**: Can pause contract operations (inherited from BaseUpgradeable) + +### Pause and Accumulation System + +The IssuanceAllocator includes a pause and accumulation system designed to respond to operational issues while preserving issuance integrity: + +#### Pause Behavior + +When the contract is paused: + +- **Distribution stops**: `distributeIssuance()` returns early without minting any tokens, returning the last block when issuance was distributed. +- **Accumulation begins**: Issuance for allocator-minting targets accumulates in `pendingAccumulatedAllocatorIssuance` and will be distributed when the contract is unpaused (or in the interim via `distributePendingIssuance()`) according to their configured proportions at the time of distribution. +- **Self-minting continues**: Self-minting targets can still query their allocation, but should check the `blockAppliedTo` fields to respect pause state. Because RewardsManager does not check `blockAppliedTo` and will mint tokens even when the allocator is paused, the initial implementation does not pause self-minting targets. (This behavior is subject to change in future versions, and new targets should check `blockAppliedTo`.) Note that RewardsManager is independently pausable. +- **Configuration allowed**: Governance functions like `setIssuancePerBlock()` and `setTargetAllocation()` still work. However, unlike changes made while unpaused, changes made will be applied from lastIssuanceDistributionBlock rather than the current block. +- **Notifications continue**: Targets are still notified of allocation changes, and should check the `blockAppliedTo` fields to correctly apply changes. + +#### Accumulation Logic + +During pause periods, the contract tracks: + +- `lastIssuanceAccumulationBlock`: Updated to current block whenever accumulation occurs +- `pendingAccumulatedAllocatorIssuance`: Accumulates issuance intended for allocator-minting targets +- Calculation: `(issuancePerBlock * blocksSinceLastAccumulation * (MILLION - totalSelfMintingAllocationPPM)) / MILLION` +- **Internal accumulation**: The contract uses private `accumulatePendingIssuance()` functions to handle accumulation logic, which can be triggered automatically during rate changes or manually via the public `distributePendingIssuance(uint256)` function + +#### Recovery Process + +When unpausing or manually distributing: + +1. **Automatic distribution**: `distributeIssuance()` first calls `_distributePendingIssuance()` to handle accumulated issuance +2. **Manual distribution**: `distributePendingIssuance()` can be called directly by governance, even while paused +3. **Proportional allocation**: Pending issuance is distributed proportionally among current allocator-minting targets +4. **Clean slate**: After distribution, `pendingAccumulatedAllocatorIssuance` is reset to 0 + +Note that if there are no allocator-minting targets all pending issuance is lost. If not all of the allocation allowance is used, there will be a proportional amount of accumulated issuance lost. + +#### Use Cases + +This system enables: + +- **Rapid response**: Pause immediately during operational issues without losing track of issuance +- **Investigation time**: Allow time to investigate and resolve issues while maintaining issuance accounting +- **Gradual recovery**: Distribute accumulated issuance manually or automatically when ready +- **Target changes**: Modify allocations during pause periods, with accumulated issuance distributed according to updated allocations + +### Storage + +The contract uses ERC-7201 namespaced storage to prevent storage collisions in upgradeable contracts: + +- `issuancePerBlock`: Total token issuance per block across all targets +- `lastIssuanceDistributionBlock`: Last block when issuance was distributed +- `lastIssuanceAccumulationBlock`: Last block when issuance was accumulated during pause +- `allocationTargets`: Maps target addresses to their allocation data (allocator-minting PPM, self-minting PPM, notification status) +- `targetAddresses`: Array of all registered target addresses with non-zero total allocations +- `totalAllocationPPM`: Sum of all allocations across all targets (cannot exceed 1,000,000 PPM = 100%) +- `totalAllocatorMintingAllocationPPM`: Sum of allocator-minting allocations across all targets +- `totalSelfMintingAllocationPPM`: Sum of self-minting allocations across all targets +- `pendingAccumulatedAllocatorIssuance`: Accumulated issuance for allocator-minting targets during pause + +### Constants + +The contract inherits the following constant from `BaseUpgradeable`: + +- **MILLION**: `1,000,000` - Used as the denominator for Parts Per Million (PPM) calculations. For example, 50% allocation would be represented as 500,000 PPM. + +## Core Functions + +### Distribution Management + +#### `distributeIssuance() → uint256` + +- **Access**: Public (no restrictions) +- **Purpose**: Distribute pending issuance to all allocator-minting targets +- **Returns**: Block number that issuance was distributed to (normally current block) +- **Behavior**: + - First distributes any pending accumulated issuance from pause periods + - Calculates blocks since last distribution + - Mints tokens proportionally to allocator-minting targets only + - Updates `lastIssuanceDistributionBlock` to current block + - Returns early with current `lastIssuanceDistributionBlock` when paused (no distribution occurs) + - Returns early if no blocks have passed since last distribution + - Can be called by anyone to trigger distribution + +#### `setIssuancePerBlock(uint256 newIssuancePerBlock, bool evenIfDistributionPending) → bool` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Set the total token issuance rate per block +- **Parameters**: + - `newIssuancePerBlock` - New issuance rate in tokens per block + - `evenIfDistributionPending` - If true, skip distribution requirement (notifications still occur) +- **Returns**: True if applied, false if blocked by pending operations +- **Events**: Emits `IssuancePerBlockUpdated` +- **Notes**: + - Automatically distributes or accumulates pending issuance before changing rate (unless evenIfDistributionPending=true or paused) + - Notifies all targets of the upcoming change (unless paused) + - Returns false if distribution fails and evenIfDistributionPending=false, reverts if notification fails + - L1GraphTokenGateway must be updated when this changes to maintain bridge functionality + - No-op if new rate equals current rate (returns true immediately) + +### Target Management + +The contract provides multiple overloaded functions for setting target allocations: + +#### `setTargetAllocation(address target, uint256 allocatorMintingPPM) → bool` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Set allocator-minting allocation only (selfMintingPPM=0, evenIfDistributionPending=false) +- **Parameters**: + - `target` - Target contract address (must support IIssuanceTarget interface) + - `allocatorMintingPPM` - Allocator-minting allocation in PPM (0 removes target if no self-minting allocation) + +#### `setTargetAllocation(address target, uint256 allocatorMintingPPM, uint256 selfMintingPPM) → bool` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Set both allocator-minting and self-minting allocations (evenIfDistributionPending=false) +- **Parameters**: + - `target` - Target contract address (must support IIssuanceTarget interface) + - `allocatorMintingPPM` - Allocator-minting allocation in PPM + - `selfMintingPPM` - Self-minting allocation in PPM + +#### `setTargetAllocation(address target, uint256 allocatorMintingPPM, uint256 selfMintingPPM, bool evenIfDistributionPending) → bool` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Set both allocations with full control over distribution requirements +- **Parameters**: + - `target` - Target contract address (must support IIssuanceTarget interface) + - `allocatorMintingPPM` - Allocator-minting allocation in PPM + - `selfMintingPPM` - Self-minting allocation in PPM + - `evenIfDistributionPending` - If true, skip distribution requirement (notifications still occur) +- **Returns**: True if applied, false if blocked by pending operations +- **Events**: Emits `TargetAllocationUpdated` with total allocation (allocatorMintingPPM + selfMintingPPM) +- **Behavior**: + - Validates target supports IIssuanceTarget interface (for non-zero total allocations) + - No-op if new allocations equal current allocations (returns true immediately) + - Distributes or accumulates pending issuance before changing allocation (unless evenIfDistributionPending=true) + - Notifies target of upcoming change (always occurs unless overridden by `forceTargetNoChangeNotificationBlock()`) + - Returns false if distribution fails (when evenIfDistributionPending=false), reverts if notification fails + - Validates total allocation doesn't exceed MILLION after notification (prevents reentrancy issues) + - Adds target to registry if total allocation > 0 and not already present + - Removes target from registry if total allocation = 0 (uses swap-and-pop for gas efficiency) + - Deletes allocation data when removing target from registry + +#### `notifyTarget(address target) → bool` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Manually notify a specific target about allocation changes +- **Returns**: True if notification sent or already sent this block +- **Notes**: Used for gas limit recovery scenarios. Will revert if target notification fails. + +#### `forceTargetNoChangeNotificationBlock(address target, uint256 blockNumber) → uint256` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Override the last notification block for a target +- **Parameters**: + - `target` - Target address to update + - `blockNumber` - Block number to set (past = allow re-notification, future = prevent notification) +- **Returns**: The block number that was set +- **Notes**: Used for gas limit recovery scenarios + +#### `distributePendingIssuance() → uint256` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Distribute any pending accumulated issuance to allocator-minting targets +- **Returns**: Block number up to which issuance has been distributed +- **Notes**: + - Distributes issuance that accumulated while paused + - Can be called even when the contract is paused + - No-op if there is no pending issuance or all targets are self-minting + +#### `distributePendingIssuance(uint256 toBlockNumber) → uint256` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Accumulate pending issuance up to a specific block, then distribute all accumulated issuance +- **Parameters**: + - `toBlockNumber` - Block number to accumulate to (must be >= lastIssuanceAccumulationBlock and <= current block) +- **Returns**: Block number up to which issuance has been distributed +- **Notes**: + - First accumulates pending issuance up to the specified block + - Then distributes all accumulated issuance to allocator-minting targets + - Can be called even when the contract is paused + - Will revert with `ToBlockOutOfRange()` if toBlockNumber is invalid + +### View Functions + +#### `getTargetAllocation(address target) → Allocation` + +- **Purpose**: Get current allocation for a target +- **Returns**: Allocation struct containing: + - `totalAllocationPPM`: Total allocation (allocatorMintingAllocationPPM + selfMintingAllocationPPM) + - `allocatorMintingAllocationPPM`: Allocator-minting allocation in PPM + - `selfMintingAllocationPPM`: Self-minting allocation in PPM + +#### `getTotalAllocation() → Allocation` + +- **Purpose**: Get current global allocation totals +- **Returns**: Allocation struct with totals across all targets + +#### `getTargets() → address[]` + +- **Purpose**: Get all target addresses with non-zero total allocations +- **Returns**: Array of target addresses + +#### `getTargetAt(uint256 index) → address` + +- **Purpose**: Get a specific target address by index +- **Returns**: Target address at the specified index + +#### `getTargetCount() → uint256` + +- **Purpose**: Get the number of allocated targets +- **Returns**: Total number of targets with non-zero allocations + +#### `getTargetIssuancePerBlock(address target) → TargetIssuancePerBlock` + +- **Purpose**: Get issuance per block information for a target +- **Returns**: TargetIssuancePerBlock struct containing: + - `allocatorIssuancePerBlock`: Issuance per block for allocator-minting portion + - `allocatorIssuanceBlockAppliedTo`: Block up to which allocator issuance has been applied + - `selfIssuancePerBlock`: Issuance per block for self-minting portion + - `selfIssuanceBlockAppliedTo`: Block up to which self issuance has been applied (always current block) +- **Notes**: + - Does not revert when paused - callers should check blockAppliedTo fields + - If allocatorIssuanceBlockAppliedTo is not current block, allocator issuance is paused + - Self-minting targets should use this to determine how much to mint + +#### `issuancePerBlock() → uint256` + +- **Purpose**: Get the current total issuance per block +- **Returns**: Current issuance per block across all targets + +#### `lastIssuanceDistributionBlock() → uint256` + +- **Purpose**: Get the last block where issuance was distributed +- **Returns**: Last distribution block number + +#### `lastIssuanceAccumulationBlock() → uint256` + +- **Purpose**: Get the last block where issuance was accumulated during pause +- **Returns**: Last accumulation block number + +#### `pendingAccumulatedAllocatorIssuance() → uint256` + +- **Purpose**: Get the amount of pending accumulated allocator issuance +- **Returns**: Amount of issuance accumulated during pause periods + +#### `getTargetData(address target) → AllocationTarget` + +- **Purpose**: Get internal target data (implementation-specific) +- **Returns**: AllocationTarget struct containing allocatorMintingPPM, selfMintingPPM, and lastChangeNotifiedBlock +- **Notes**: Primarily for operator use and debugging + +## Allocation Logic + +### Distribution Calculation + +For each target during distribution, only the allocator-minting portion is distributed: + +```solidity +targetIssuance = (totalNewIssuance * targetAllocatorMintingPPM) / MILLION +``` + +For self-minting targets, they query their allocation via `getTargetIssuancePerBlock()`: + +```solidity +selfIssuancePerBlock = (issuancePerBlock * targetSelfMintingPPM) / MILLION +``` + +Where: + +- `totalNewIssuance = issuancePerBlock * blocksSinceLastDistribution` +- `targetAllocatorMintingPPM` is the target's allocator-minting allocation in PPM +- `targetSelfMintingPPM` is the target's self-minting allocation in PPM +- `MILLION = 1,000,000` (representing 100%) + +### Allocation Constraints + +- Total allocation across all targets cannot exceed 1,000,000 PPM (100%) +- Individual target allocations (allocator-minting + self-minting) can be any value from 0 to 1,000,000 PPM +- Setting both allocations to 0 removes the target from the registry +- Allocations are measured in PPM for precision (1 PPM = 0.0001%) +- Small rounding losses may occur in calculations due to integer division (this is acceptable) +- Each target can have both allocator-minting and self-minting allocations, though typically only one is used + +## Change Notification System + +Before any allocation changes, targets are notified via the `IIssuanceTarget.beforeIssuanceAllocationChange()` function. This allows targets to: + +- Update their internal state to the current block +- Prepare for the allocation change +- Ensure consistency in their reward calculations + +### Notification Rules + +- Each target is notified at most once per block (unless overridden via `forceTargetNoChangeNotificationBlock()`) +- Notifications are tracked per target using `lastChangeNotifiedBlock` +- Failed notifications cause the entire transaction to revert +- Use `forceTargetNoChangeNotificationBlock()` to skip notification for broken targets before removing them +- Notifications cannot be skipped (the `evenIfDistributionPending` parameter only affects distribution requirements) +- Manual notification is available for gas limit recovery via `notifyTarget()` + +## Gas Limit Recovery + +The contract includes several mechanisms to handle potential gas limit issues: + +### Potential Issues + +1. **Large target arrays**: Many targets could exceed gas limits during distribution +2. **Expensive notifications**: Target notification calls could consume too much gas +3. **Malfunctioning targets**: Target contracts that revert when notified + +### Recovery Mechanisms + +1. **Pause functionality**: Contract can be paused to stop operations during recovery +2. **Individual target notification**: `notifyTarget()` allows notifying targets one by one (will revert if target notification reverts) +3. **Force notification override**: `forceTargetNoChangeNotificationBlock()` can skip problematic targets +4. **Force parameters**: Both `setIssuancePerBlock()` and `setTargetAllocation()` accept `evenIfDistributionPending` flags to skip distribution requirements +5. **Target removal**: Use `forceTargetNoChangeNotificationBlock()` to skip notification, then remove malfunctioning targets by setting both allocations to 0 +6. **Pending issuance distribution**: `distributePendingIssuance()` can be called manually to distribute accumulated issuance + +## Events + +```solidity +event IssuanceDistributed(address indexed target, uint256 amount); +event TargetAllocationUpdated(address indexed target, uint256 newAllocation); +event IssuancePerBlockUpdated(uint256 oldIssuancePerBlock, uint256 newIssuancePerBlock); +``` + +## Error Conditions + +```solidity +error TargetAddressCannotBeZero(); +error InsufficientAllocationAvailable(); +error TargetDoesNotSupportIIssuanceTarget(); +error ToBlockOutOfRange(); +``` + +### Error Descriptions + +- **TargetAddressCannotBeZero**: Thrown when attempting to set allocation for the zero address +- **InsufficientAllocationAvailable**: Thrown when the total allocation would exceed 1,000,000 PPM (100%) +- **TargetDoesNotSupportIIssuanceTarget**: Thrown when a target contract does not implement the required IIssuanceTarget interface +- **ToBlockOutOfRange**: Thrown when the `toBlockNumber` parameter in `distributePendingIssuance(uint256)` is outside the valid range (must be >= lastIssuanceAccumulationBlock and <= current block) + +## Usage Patterns + +### Initial Setup + +1. Deploy contract with Graph Token address +2. Initialize with governor address +3. Set initial issuance per block rate +4. Add targets with their allocations +5. Grant minter role to IssuanceAllocator on Graph Token + +### Normal Operation + +1. Targets or external actors call `distributeIssuance()` periodically +2. Governor adjusts issuance rates as needed via `setIssuancePerBlock()` +3. Governor adds/removes/modifies targets via `setTargetAllocation()` overloads +4. Self-minting targets query their allocation via `getTargetIssuancePerBlock()` + +### Emergency Scenarios + +- **Gas limit issues**: Use pause, individual notifications, and `evenIfDistributionPending` parameters +- **Target failures**: Use `forceTargetNoChangeNotificationBlock()` to skip notification, then remove problematic targets by setting both allocations to 0 +- **Rate changes**: Use `evenIfDistributionPending` parameter to bypass distribution requirements + +### For L1 Bridge Integration + +When `setIssuancePerBlock()` is called, the L1GraphTokenGateway's `updateL2MintAllowance()` function must be called to ensure the bridge can mint the correct amount of tokens on L2. + +## Security Considerations + +- Only governor can modify allocations and issuance rates +- Interface validation prevents adding incompatible targets +- Total allocation limits prevent over-allocation +- Pause functionality provides emergency stop capability +- Notification system ensures targets can prepare for changes +- Self-minting targets must respect paused state to prevent unauthorized minting diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.sol b/packages/issuance/contracts/allocate/IssuanceAllocator.sol new file mode 100644 index 000000000..e6e9ba62c --- /dev/null +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.sol @@ -0,0 +1,740 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { + TargetIssuancePerBlock, + Allocation, + AllocationTarget +} from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol"; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IIssuanceAllocationAdministration } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol"; +import { IIssuanceAllocationStatus } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol"; +import { IIssuanceAllocationData } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol"; +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { BaseUpgradeable } from "../common/BaseUpgradeable.sol"; +import { IERC165 } from "@openzeppelin/contracts/utils/introspection/IERC165.sol"; + +// solhint-disable-next-line no-unused-import +import { ERC165Upgradeable } from "@openzeppelin/contracts-upgradeable/utils/introspection/ERC165Upgradeable.sol"; // Used by @inheritdoc + +/** + * @title IssuanceAllocator + * @author Edge & Node + * @notice This contract is responsible for allocating token issuance to different components + * of the protocol. It calculates issuance for all targets based on their configured proportions + * and handles minting for allocator-minting portions. + * + * @dev The contract supports two types of allocation for each target: + * 1. Allocator-minting allocation: The IssuanceAllocator calculates and mints tokens directly to targets + * for this portion of their allocation. + * + * 2. Self-minting allocation: The IssuanceAllocator calculates issuance but does not mint tokens directly. + * Instead, targets are expected to call `getTargetIssuancePerBlock` to determine their self-minting + * issuance amount and mint tokens themselves. This feature is primarily intended for backwards + * compatibility with existing contracts like the RewardsManager. + * + * Each target can have both allocator-minting and self-minting allocations. New targets are expected + * to use allocator-minting allocation to provide more robust control over token issuance through + * the IssuanceAllocator. The self-minting allocation is intended only for backwards compatibility + * with existing contracts. + * + * @dev There are a number of scenarios where the IssuanceAllocator could run into issues, including: + * 1. The targetAddresses array could grow large enough that it exceeds the gas limit when calling distributeIssuance. + * 2. When notifying targets of allocation changes the calls to `beforeIssuanceAllocationChange` could exceed the gas limit. + * 3. Target contracts could revert when notifying them of changes via `beforeIssuanceAllocationChange`. + * While in practice the IssuanceAllocator is expected to have a relatively small number of trusted targets, and the + * gas limit is expected to be high enough to handle the above scenarios, the following would allow recovery: + * 1. The contract can be paused, which can help make the recovery process easier to manage. + * 2. The GOVERNOR_ROLE can directly trigger change notification to individual targets. As there is per target + * tracking of the lastChangeNotifiedBlock, this can reduce the gas cost of other operations and allow + * for graceful recovery. + * 3. If a target reverts when notifying it of changes or notifying it is too expensive, the GOVERNOR_ROLE can use `forceTargetNoChangeNotificationBlock()` + * to skip notifying that particular target of changes. + * + * In combination these should allow recovery from gas limit issues or malfunctioning targets, with fine-grained control over + * which targets are notified of changes and when. + * @custom:security-contact Please email security+contracts@thegraph.com if you find any bugs. We might have an active bug bounty program. + */ +contract IssuanceAllocator is + BaseUpgradeable, + IIssuanceAllocationDistribution, + IIssuanceAllocationAdministration, + IIssuanceAllocationStatus, + IIssuanceAllocationData +{ + // -- Namespaced Storage -- + + /// @notice ERC-7201 storage location for IssuanceAllocator + bytes32 private constant ISSUANCE_ALLOCATOR_STORAGE_LOCATION = + // solhint-disable-next-line gas-small-strings + keccak256(abi.encode(uint256(keccak256("graphprotocol.storage.IssuanceAllocator")) - 1)) & + ~bytes32(uint256(0xff)); + + /// @notice Main storage structure for IssuanceAllocator using ERC-7201 namespaced storage + /// @param issuancePerBlock Total issuance per block across all targets + /// @param lastDistributionBlock Last block when issuance was distributed + /// @param lastAccumulationBlock Last block when pending issuance was accumulated + /// @dev Design invariant: lastDistributionBlock <= lastAccumulationBlock + /// @param allocationTargets Mapping of target addresses to their allocation data + /// @param targetAddresses Array of all target addresses with non-zero allocation + /// @param totalAllocatorMintingPPM Total allocator-minting allocation (in PPM) across all targets + /// @param totalSelfMintingPPM Total self-minting allocation (in PPM) across all targets + /// @param pendingAccumulatedAllocatorIssuance Accumulated but not distributed issuance for allocator-minting from lastDistributionBlock to lastAccumulationBlock + /// @custom:storage-location erc7201:graphprotocol.storage.IssuanceAllocator + struct IssuanceAllocatorData { + uint256 issuancePerBlock; + uint256 lastDistributionBlock; + uint256 lastAccumulationBlock; + mapping(address => AllocationTarget) allocationTargets; + address[] targetAddresses; + uint256 totalAllocatorMintingPPM; + uint256 totalSelfMintingPPM; + uint256 pendingAccumulatedAllocatorIssuance; + } + + /** + * @notice Returns the storage struct for IssuanceAllocator + * @return $ contract storage + */ + function _getIssuanceAllocatorStorage() private pure returns (IssuanceAllocatorData storage $) { + // solhint-disable-previous-line use-natspec + // Solhint does not support $ return variable in natspec + + bytes32 slot = ISSUANCE_ALLOCATOR_STORAGE_LOCATION; + // solhint-disable-next-line no-inline-assembly + assembly { + $.slot := slot + } + } + + // -- Custom Errors -- + + /// @notice Thrown when attempting to add a target with zero address + error TargetAddressCannotBeZero(); + + /// @notice Thrown when the total allocation would exceed 100% (PPM) + error InsufficientAllocationAvailable(); + + /// @notice Thrown when a target does not support the IIssuanceTarget interface + error TargetDoesNotSupportIIssuanceTarget(); + + /// @notice Thrown when toBlockNumber is out of valid range for accumulation + error ToBlockOutOfRange(); + + // -- Events -- + + /// @notice Emitted when issuance is distributed to a target + /// @param target The address of the target that received issuance + /// @param amount The amount of tokens distributed + event IssuanceDistributed(address indexed target, uint256 amount); // solhint-disable-line gas-indexed-events + // Do not need to index amount, filtering by amount ranges is not expected use case + + /// @notice Emitted when a target's allocation is updated + /// @param target The address of the target whose allocation was updated + /// @param newAllocatorMintingPPM The new allocator-minting allocation (in PPM) for the target + /// @param newSelfMintingPPM The new self-minting allocation (in PPM) for the target + event TargetAllocationUpdated(address indexed target, uint256 newAllocatorMintingPPM, uint256 newSelfMintingPPM); // solhint-disable-line gas-indexed-events + // Do not need to index PPM values + + /// @notice Emitted when the issuance per block is updated + /// @param oldIssuancePerBlock The previous issuance per block amount + /// @param newIssuancePerBlock The new issuance per block amount + event IssuancePerBlockUpdated(uint256 oldIssuancePerBlock, uint256 newIssuancePerBlock); // solhint-disable-line gas-indexed-events + // Do not need to index issuance per block values + + // -- Constructor -- + + /** + * @notice Constructor for the IssuanceAllocator contract + * @dev This contract is upgradeable, but we use the constructor to pass the Graph Token address + * to the base contract. + * @param _graphToken Address of the Graph Token contract + * @custom:oz-upgrades-unsafe-allow constructor + */ + constructor(address _graphToken) BaseUpgradeable(_graphToken) {} + + // -- Initialization -- + + /** + * @notice Initialize the IssuanceAllocator contract + * @param _governor Address that will have the GOVERNOR_ROLE + */ + function initialize(address _governor) external virtual initializer { + __BaseUpgradeable_init(_governor); + } + + // -- Core Functionality -- + + /** + * @inheritdoc ERC165Upgradeable + * @dev Supports the four IssuanceAllocator sub-interfaces + */ + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return + interfaceId == type(IIssuanceAllocationDistribution).interfaceId || + interfaceId == type(IIssuanceAllocationAdministration).interfaceId || + interfaceId == type(IIssuanceAllocationStatus).interfaceId || + interfaceId == type(IIssuanceAllocationData).interfaceId || + super.supportsInterface(interfaceId); + } + + /** + * @inheritdoc IIssuanceAllocationDistribution + * @dev Implementation details: + * - For allocator-minting portions, tokens are minted and transferred directly to targets based on their allocation + * - For self-minting portions (like the legacy RewardsManager), it does not mint tokens directly. Instead, these contracts are expected to handle minting themselves + * - The self-minting allocation is intended only for backwards compatibility with existing contracts and should not be used for new targets. New targets should use allocator-minting allocation to ensure robust control of token issuance by the IssuanceAllocator + * - Unless paused will always result in lastIssuanceBlock == block.number, even if there is no issuance to distribute + */ + function distributeIssuance() external override returns (uint256) { + return _distributeIssuance(); + } + + /** + * @notice Internal implementation for `distributeIssuance` + * @dev Handles the actual distribution logic. + * @return Block number distributed to + */ + function _distributeIssuance() private returns (uint256) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + if (paused()) return $.lastDistributionBlock; + + _distributePendingIssuance(); + + uint256 blocksSinceLastIssuance = block.number - $.lastDistributionBlock; + if (blocksSinceLastIssuance == 0) return $.lastDistributionBlock; + + // Note: Theoretical overflow risk exists if issuancePerBlock * blocksSinceLastIssuance > type(uint256).max + // In practice, this would require either: + // 1. Extremely high issuancePerBlock (governance error), and/or + // 2. Contract paused for an implausibly long time (decades) + // If such overflow occurs, the transaction reverts (Solidity 0.8.x), indicating the contract + // is in a state requiring governance intervention. + uint256 newIssuance = $.issuancePerBlock * blocksSinceLastIssuance; + $.lastDistributionBlock = block.number; + $.lastAccumulationBlock = block.number; + + if (0 < newIssuance) { + for (uint256 i = 0; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + AllocationTarget storage targetData = $.allocationTargets[target]; + + if (0 < targetData.allocatorMintingPPM) { + // There can be a small rounding loss here. This is acceptable. + uint256 targetIssuance = (newIssuance * targetData.allocatorMintingPPM) / MILLION; + + GRAPH_TOKEN.mint(target, targetIssuance); + emit IssuanceDistributed(target, targetIssuance); + } + } + } + + return $.lastDistributionBlock; + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - `distributeIssuance` will be called before changing the rate *unless the contract is paused and evenIfDistributionPending is false* + * - `beforeIssuanceAllocationChange` will be called on all targets before changing the rate, even when the contract is paused + * - Whenever the rate is changed, the updateL2MintAllowance function _must_ be called on the L1GraphTokenGateway in L1, to ensure the bridge can mint the right amount of tokens + */ + function setIssuancePerBlock( + uint256 newIssuancePerBlock, + bool evenIfDistributionPending + ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + if (newIssuancePerBlock == $.issuancePerBlock) return true; + + if (_distributeIssuance() < block.number) { + if (evenIfDistributionPending) accumulatePendingIssuance(); + else return false; + } + notifyAllTargets(); + + uint256 oldIssuancePerBlock = $.issuancePerBlock; + $.issuancePerBlock = newIssuancePerBlock; + + emit IssuancePerBlockUpdated(oldIssuancePerBlock, newIssuancePerBlock); + return true; + } + + // -- Target Management -- + + /** + * @notice Internal function to notify a target about an upcoming allocation change + * @dev Uses per-target lastChangeNotifiedBlock to prevent reentrancy and duplicate notifications. + * + * Will revert if the target's beforeIssuanceAllocationChange call fails. + * Use forceTargetNoChangeNotificationBlock to skip notification for malfunctioning targets. + * + * @param target Address of the target to notify + * @return True if notification was sent or already sent for this block + */ + function _notifyTarget(address target) private returns (bool) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + // Check-effects-interactions pattern: check if already notified this block + // solhint-disable-next-line gas-strict-inequalities + if (block.number <= targetData.lastChangeNotifiedBlock) return true; + + // Effect: update the notification block before external calls + targetData.lastChangeNotifiedBlock = block.number; + + // Interactions: make external call after state changes + // This will revert if the target's notification fails + IIssuanceTarget(target).beforeIssuanceAllocationChange(); + return true; + } + + /** + * @notice Notify all targets (used prior to an allocation or rate change) + * @dev Each target is notified at most once per block. + * Will revert if any target notification reverts. + */ + function notifyAllTargets() private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + for (uint256 i = 0; i < $.targetAddresses.length; ++i) { + _notifyTarget($.targetAddresses[i]); + } + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - The target will be notified at most once per block to prevent reentrancy looping + * - Will revert if target notification reverts + */ + function notifyTarget(address target) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + return _notifyTarget(target); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - This can be used to enable notification to be sent again (by setting to a past block) or to prevent notification until a future block (by setting to current or future block) + * - Returns the block number that was set, always equal to blockNumber in current implementation + */ + function forceTargetNoChangeNotificationBlock( + address target, + uint256 blockNumber + ) external override onlyRole(GOVERNOR_ROLE) returns (uint256) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + // Note: No bounds checking on blockNumber is intentional. Governance might need to set + // very high values in unanticipated edge cases or for recovery scenarios. Constraining + // governance flexibility is deemed unnecessary and perhaps counterproductive. + targetData.lastChangeNotifiedBlock = blockNumber; + return blockNumber; + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Delegates to _setTargetAllocation with selfMintingPPM=0 and evenIfDistributionPending=false + */ + function setTargetAllocation( + address target, + uint256 allocatorMintingPPM + ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + return _setTargetAllocation(target, allocatorMintingPPM, 0, false); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Delegates to _setTargetAllocation with evenIfDistributionPending=false + */ + function setTargetAllocation( + address target, + uint256 allocatorMintingPPM, + uint256 selfMintingPPM + ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + return _setTargetAllocation(target, allocatorMintingPPM, selfMintingPPM, false); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - If the new allocations are the same as the current allocations, this function is a no-op + * - If both allocations are 0 and the target doesn't exist, this function is a no-op + * - If both allocations are 0 and the target exists, the target will be removed + * - If any allocation is non-zero and the target doesn't exist, the target will be added + * - Will revert if the total allocation would exceed PPM, or if attempting to add a target that doesn't support IIssuanceTarget + * + * Self-minting allocation is a special case for backwards compatibility with + * existing contracts like the RewardsManager. The IssuanceAllocator calculates + * issuance for self-minting portions but does not mint tokens directly for them. Self-minting targets + * should call getTargetIssuancePerBlock to determine their issuance amount and mint + * tokens accordingly. For example, the RewardsManager contract is expected to call + * getTargetIssuancePerBlock in its takeRewards function to calculate the correct + * amount of tokens to mint. Self-minting targets are responsible for adhering to + * the issuance schedule and should not mint more tokens than allocated. + */ + function setTargetAllocation( + address target, + uint256 allocatorMintingPPM, + uint256 selfMintingPPM, + bool evenIfDistributionPending + ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + return _setTargetAllocation(target, allocatorMintingPPM, selfMintingPPM, evenIfDistributionPending); + } + + /** + * @notice Internal implementation for setting target allocation + * @param target Address of the target to update + * @param allocatorMintingPPM Allocator-minting allocation for the target (in PPM) + * @param selfMintingPPM Self-minting allocation for the target (in PPM) + * @param evenIfDistributionPending Whether to force the allocation change even if issuance distribution is behind + * @return True if the value is applied (including if already the case), false if not applied due to paused state + */ + function _setTargetAllocation( + address target, + uint256 allocatorMintingPPM, + uint256 selfMintingPPM, + bool evenIfDistributionPending + ) internal returns (bool) { + if (!_validateTargetAllocation(target, allocatorMintingPPM, selfMintingPPM)) return true; // No change needed + + if (!_handleDistributionBeforeAllocation(target, selfMintingPPM, evenIfDistributionPending)) return false; // Distribution pending and not forced + + _notifyTarget(target); + + _validateAndUpdateTotalAllocations(target, allocatorMintingPPM, selfMintingPPM); + + _updateTargetAllocationData(target, allocatorMintingPPM, selfMintingPPM); + + emit TargetAllocationUpdated(target, allocatorMintingPPM, selfMintingPPM); + return true; + } + + /** + * @notice Validates target address and interface support, returns false if allocation is unchanged + * @param target Address of the target to validate + * @param allocatorMintingPPM Allocator-minting allocation for the target (in PPM) + * @param selfMintingPPM Self-minting allocation for the target (in PPM) + * @return True if validation passes and allocation change is needed, false if allocation is already set to these values + */ + function _validateTargetAllocation( + address target, + uint256 allocatorMintingPPM, + uint256 selfMintingPPM + ) private view returns (bool) { + require(target != address(0), TargetAddressCannotBeZero()); + + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + if (targetData.allocatorMintingPPM == allocatorMintingPPM && targetData.selfMintingPPM == selfMintingPPM) + return false; // No change needed + + if (allocatorMintingPPM != 0 || selfMintingPPM != 0) + require( + IERC165(target).supportsInterface(type(IIssuanceTarget).interfaceId), + TargetDoesNotSupportIIssuanceTarget() + ); + + return true; + } + + /** + * @notice Distributes current issuance and handles accumulation for self-minting changes + * @param target Address of the target being updated + * @param selfMintingPPM New self-minting allocation for the target (in PPM) + * @param evenIfDistributionPending Whether to force the allocation change even if issuance distribution is behind + * @return True if allocation change should proceed, false if distribution is behind and not forced + */ + function _handleDistributionBeforeAllocation( + address target, + uint256 selfMintingPPM, + bool evenIfDistributionPending + ) private returns (bool) { + if (_distributeIssuance() < block.number) { + if (!evenIfDistributionPending) return false; + + // A change in self-minting allocation changes the accumulation rate for pending allocator-minting. + // So for a self-minting change, accumulate pending issuance prior to the rate change. + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + if (selfMintingPPM != targetData.selfMintingPPM) accumulatePendingIssuance(); + } + + return true; + } + + /** + * @notice Updates global allocation totals and validates they don't exceed maximum + * @param target Address of the target being updated + * @param allocatorMintingPPM New allocator-minting allocation for the target (in PPM) + * @param selfMintingPPM New self-minting allocation for the target (in PPM) + */ + function _validateAndUpdateTotalAllocations( + address target, + uint256 allocatorMintingPPM, + uint256 selfMintingPPM + ) private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + // Total allocation calculation and check is delayed until after notifications. + // Distributing and notifying unnecessarily is harmless, but we need to prevent + // reentrancy looping changing allocations mid-calculation. + // (Would not be likely to be exploitable due to only governor being able to + // make a call to set target allocation, but better to be paranoid.) + $.totalAllocatorMintingPPM = $.totalAllocatorMintingPPM - targetData.allocatorMintingPPM + allocatorMintingPPM; + $.totalSelfMintingPPM = $.totalSelfMintingPPM - targetData.selfMintingPPM + selfMintingPPM; + + // Ensure the new total allocation doesn't exceed MILLION as in PPM. + // solhint-disable-next-line gas-strict-inequalities + require(($.totalAllocatorMintingPPM + $.totalSelfMintingPPM) <= MILLION, InsufficientAllocationAvailable()); + } + + /** + * @notice Sets target allocation values and adds/removes target from active list + * @param target Address of the target being updated + * @param allocatorMintingPPM New allocator-minting allocation for the target (in PPM) + * @param selfMintingPPM New self-minting allocation for the target (in PPM) + */ + function _updateTargetAllocationData(address target, uint256 allocatorMintingPPM, uint256 selfMintingPPM) private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + // Internal design invariants: + // - targetAddresses contains all targets with non-zero allocation. + // - targetAddresses does not contain targets with zero allocation. + // - targetAddresses does not contain duplicates. + // - allocationTargets mapping contains all targets in targetAddresses with a non-zero allocation. + // - allocationTargets mapping allocations are zero for targets not in targetAddresses. + // - Governance actions can create allocationTarget mappings with lastChangeNotifiedBlock set for targets not in targetAddresses. This is valid. + // Therefore: + // - Only add a target to the list if it previously had no allocation. + // - Remove a target from the list when setting both allocations to 0. + // - Delete allocationTargets mapping entry when removing a target from targetAddresses. + // - Do not set lastChangeNotifiedBlock in this function. + if (allocatorMintingPPM != 0 || selfMintingPPM != 0) { + // Add to list if previously had no allocation + if (targetData.allocatorMintingPPM == 0 && targetData.selfMintingPPM == 0) $.targetAddresses.push(target); + + targetData.allocatorMintingPPM = allocatorMintingPPM; + targetData.selfMintingPPM = selfMintingPPM; + } else { + // Remove from list and delete mapping + _removeTargetFromList(target); + delete $.allocationTargets[target]; + } + } + + /** + * @notice Removes target from targetAddresses array using swap-and-pop for gas efficiency + * @param target Address of the target to remove + */ + function _removeTargetFromList(address target) private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + for (uint256 i = 0; i < $.targetAddresses.length; ++i) { + if ($.targetAddresses[i] == target) { + $.targetAddresses[i] = $.targetAddresses[$.targetAddresses.length - 1]; + $.targetAddresses.pop(); + break; + } + } + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - This function can only be called by Governor role + * - Distributes pending issuance that has accumulated while paused + * - This function can be called even when the contract is paused to perform interim distributions + * - If there is no pending issuance, this function is a no-op + * - If allocatorMintingAllowance is 0 (all targets are self-minting), pending issuance will be lost + */ + function distributePendingIssuance() external override onlyRole(GOVERNOR_ROLE) returns (uint256) { + return _distributePendingIssuance(); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev Implementation details: + * - This function can only be called by Governor role + * - Accumulates pending issuance up to the specified block, then distributes all accumulated issuance + * - This function can be called even when the contract is paused + * - If allocatorMintingAllowance is 0 (all targets are self-minting), pending issuance will be lost + */ + function distributePendingIssuance( + uint256 toBlockNumber + ) external override onlyRole(GOVERNOR_ROLE) returns (uint256) { + accumulatePendingIssuance(toBlockNumber); + return _distributePendingIssuance(); + } + + /** + * @notice Distributes any pending accumulated issuance + * @dev Called from _distributeIssuance to handle accumulated issuance from pause periods. + * @return Block number up to which issuance has been distributed + */ + function _distributePendingIssuance() private returns (uint256) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + uint256 pendingAmount = $.pendingAccumulatedAllocatorIssuance; + $.lastDistributionBlock = $.lastAccumulationBlock; + + if (pendingAmount == 0) return $.lastDistributionBlock; + $.pendingAccumulatedAllocatorIssuance = 0; + + if ($.totalAllocatorMintingPPM == 0) return $.lastDistributionBlock; + + for (uint256 i = 0; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + AllocationTarget storage targetData = $.allocationTargets[target]; + + if (0 < targetData.allocatorMintingPPM) { + // There can be a small rounding loss here. This is acceptable. + // Pending issuance is distributed in proportion to allocator-minting portion of total available allocation. + uint256 targetIssuance = (pendingAmount * targetData.allocatorMintingPPM) / + (MILLION - $.totalSelfMintingPPM); + GRAPH_TOKEN.mint(target, targetIssuance); + emit IssuanceDistributed(target, targetIssuance); + } + } + + return $.lastDistributionBlock; + } + + /** + * @notice Accumulates pending issuance for allocator-minting targets to the current block + * @dev Used to accumulate pending issuance while paused prior to a rate or allocator-minting allocation change. + * @return The block number that has been accumulated to + */ + function accumulatePendingIssuance() private returns (uint256) { + return accumulatePendingIssuance(block.number); + } + + /** + * @notice Accumulates pending issuance for allocator-minting targets during pause periods + * @dev Accumulates pending issuance for allocator-minting targets during pause periods. + * @param toBlockNumber The block number to accumulate to (must be >= lastIssuanceAccumulationBlock and <= current block). + * @return The block number that has been accumulated to + */ + function accumulatePendingIssuance(uint256 toBlockNumber) private returns (uint256) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + // solhint-disable-next-line gas-strict-inequalities + require($.lastAccumulationBlock <= toBlockNumber && toBlockNumber <= block.number, ToBlockOutOfRange()); + + uint256 blocksToAccumulate = toBlockNumber - $.lastAccumulationBlock; + if (0 < blocksToAccumulate) { + uint256 totalIssuance = $.issuancePerBlock * blocksToAccumulate; + // There can be a small rounding loss here. This is acceptable. + $.pendingAccumulatedAllocatorIssuance += (totalIssuance * (MILLION - $.totalSelfMintingPPM)) / MILLION; + $.lastAccumulationBlock = toBlockNumber; + } + + return $.lastAccumulationBlock; + } + + // -- View Functions -- + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function issuancePerBlock() external view override returns (uint256) { + return _getIssuanceAllocatorStorage().issuancePerBlock; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function lastIssuanceDistributionBlock() external view override returns (uint256) { + return _getIssuanceAllocatorStorage().lastDistributionBlock; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function lastIssuanceAccumulationBlock() external view override returns (uint256) { + return _getIssuanceAllocatorStorage().lastAccumulationBlock; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function pendingAccumulatedAllocatorIssuance() external view override returns (uint256) { + return _getIssuanceAllocatorStorage().pendingAccumulatedAllocatorIssuance; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getTargetCount() external view override returns (uint256) { + return _getIssuanceAllocatorStorage().targetAddresses.length; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getTargets() external view override returns (address[] memory) { + return _getIssuanceAllocatorStorage().targetAddresses; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getTargetAt(uint256 index) external view override returns (address) { + return _getIssuanceAllocatorStorage().targetAddresses[index]; + } + + /** + * @inheritdoc IIssuanceAllocationData + */ + function getTargetData(address target) external view override returns (AllocationTarget memory) { + return _getIssuanceAllocatorStorage().allocationTargets[target]; + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getTargetAllocation(address target) external view override returns (Allocation memory) { + AllocationTarget storage targetData = _getIssuanceAllocatorStorage().allocationTargets[target]; + return + Allocation({ + totalAllocationPPM: targetData.allocatorMintingPPM + targetData.selfMintingPPM, + allocatorMintingPPM: targetData.allocatorMintingPPM, + selfMintingPPM: targetData.selfMintingPPM + }); + } + + /** + * @inheritdoc IIssuanceAllocationDistribution + */ + function getTargetIssuancePerBlock(address target) external view override returns (TargetIssuancePerBlock memory) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + AllocationTarget storage targetData = $.allocationTargets[target]; + + // There can be small losses due to rounding. This is acceptable. + return + TargetIssuancePerBlock({ + allocatorIssuancePerBlock: ($.issuancePerBlock * targetData.allocatorMintingPPM) / MILLION, + allocatorIssuanceBlockAppliedTo: $.lastDistributionBlock, + selfIssuancePerBlock: ($.issuancePerBlock * targetData.selfMintingPPM) / MILLION, + selfIssuanceBlockAppliedTo: block.number + }); + } + + /** + * @inheritdoc IIssuanceAllocationStatus + */ + function getTotalAllocation() external view override returns (Allocation memory) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + return + Allocation({ + totalAllocationPPM: $.totalAllocatorMintingPPM + $.totalSelfMintingPPM, + allocatorMintingPPM: $.totalAllocatorMintingPPM, + selfMintingPPM: $.totalSelfMintingPPM + }); + } +} diff --git a/packages/issuance/contracts/test/allocate/MockERC165.sol b/packages/issuance/contracts/test/allocate/MockERC165.sol new file mode 100644 index 000000000..461e0409b --- /dev/null +++ b/packages/issuance/contracts/test/allocate/MockERC165.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { IERC165 } from "@openzeppelin/contracts/utils/introspection/IERC165.sol"; + +/** + * @title MockERC165 + * @author Edge & Node + * @dev Minimal implementation of IERC165 for testing + * @notice Used to test interface validation - supports only ERC165, not specific interfaces + */ +contract MockERC165 is IERC165 { + /** + * @inheritdoc IERC165 + */ + function supportsInterface(bytes4 interfaceId) public pure override returns (bool) { + return interfaceId == type(IERC165).interfaceId; + } +} diff --git a/packages/issuance/contracts/test/allocate/MockRevertingTarget.sol b/packages/issuance/contracts/test/allocate/MockRevertingTarget.sol new file mode 100644 index 000000000..27522e5a4 --- /dev/null +++ b/packages/issuance/contracts/test/allocate/MockRevertingTarget.sol @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { ERC165 } from "@openzeppelin/contracts/utils/introspection/ERC165.sol"; + +/** + * @title MockRevertingTarget + * @author Edge & Node + * @notice A mock contract that reverts when beforeIssuanceAllocationChange is called + * @dev Used for testing error handling in IssuanceAllocator + */ +contract MockRevertingTarget is IIssuanceTarget, ERC165 { + /// @notice Error thrown when the target reverts intentionally + error TargetRevertsIntentionally(); + /** + * @inheritdoc IIssuanceTarget + */ + function beforeIssuanceAllocationChange() external pure override { + revert TargetRevertsIntentionally(); + } + + /** + * @inheritdoc IIssuanceTarget + */ + function setIssuanceAllocator(address _issuanceAllocator) external pure override { + // No-op + } + + /// @inheritdoc ERC165 + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IIssuanceTarget).interfaceId || super.supportsInterface(interfaceId); + } +} diff --git a/packages/issuance/contracts/test/allocate/MockSimpleTarget.sol b/packages/issuance/contracts/test/allocate/MockSimpleTarget.sol new file mode 100644 index 000000000..311e1f03c --- /dev/null +++ b/packages/issuance/contracts/test/allocate/MockSimpleTarget.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { ERC165 } from "@openzeppelin/contracts/utils/introspection/ERC165.sol"; + +/** + * @title MockSimpleTarget + * @author Edge & Node + * @notice A simple mock contract that implements IIssuanceTarget for testing + * @dev Used for testing basic functionality in IssuanceAllocator + */ +contract MockSimpleTarget is IIssuanceTarget, ERC165 { + /// @inheritdoc IIssuanceTarget + function beforeIssuanceAllocationChange() external pure override {} + + /// @inheritdoc IIssuanceTarget + function setIssuanceAllocator(address _issuanceAllocator) external pure override {} + + /// @inheritdoc ERC165 + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IIssuanceTarget).interfaceId || super.supportsInterface(interfaceId); + } +} diff --git a/packages/issuance/test/tests/allocate/AccessControl.test.ts b/packages/issuance/test/tests/allocate/AccessControl.test.ts new file mode 100644 index 000000000..74af599a1 --- /dev/null +++ b/packages/issuance/test/tests/allocate/AccessControl.test.ts @@ -0,0 +1,200 @@ +/** + * Allocate Access Control Tests + * Tests access control patterns for IssuanceAllocator and DirectAllocation contracts + */ + +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre +import { deployTestGraphToken, getTestAccounts, SHARED_CONSTANTS } from '../common/fixtures' +import { testMultipleAccessControl } from './commonTestUtils' +import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' + +describe('Allocate Access Control Tests', () => { + let accounts: any + let contracts: any + + before(async () => { + accounts = await getTestAccounts() + + // Deploy allocate contracts + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const issuanceAllocator = await deployIssuanceAllocator( + graphTokenAddress, + accounts.governor, + ethers.parseEther('100'), + ) + const directAllocation = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + contracts = { + graphToken, + issuanceAllocator, + directAllocation, + } + }) + + describe('IssuanceAllocator Access Control', () => { + describe('setIssuancePerBlock', () => { + it('should revert when non-governor calls setIssuancePerBlock', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.nonGovernor) + .setIssuancePerBlock(ethers.parseEther('200'), false), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call setIssuancePerBlock', async () => { + await expect( + contracts.issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false), + ).to.not.be.reverted + }) + }) + + describe('setTargetAllocation', () => { + it('should revert when non-governor calls setTargetAllocation', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.nonGovernor) + ['setTargetAllocation(address,uint256,uint256,bool)'](accounts.nonGovernor.address, 100000, 0, false), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call setTargetAllocation', async () => { + // Use a valid target contract address instead of EOA + await expect( + contracts.issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](contracts.directAllocation.target, 100000, 0, false), + ).to.not.be.reverted + }) + }) + + describe('notifyTarget', () => { + it('should revert when non-governor calls notifyTarget', async () => { + await expect( + contracts.issuanceAllocator.connect(accounts.nonGovernor).notifyTarget(contracts.directAllocation.target), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call notifyTarget', async () => { + // First add the target so notifyTarget has something to notify + await contracts.issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](contracts.directAllocation.target, 100000, 0, false) + + await expect( + contracts.issuanceAllocator.connect(accounts.governor).notifyTarget(contracts.directAllocation.target), + ).to.not.be.reverted + }) + }) + + describe('forceTargetNoChangeNotificationBlock', () => { + it('should revert when non-governor calls forceTargetNoChangeNotificationBlock', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.nonGovernor) + .forceTargetNoChangeNotificationBlock(contracts.directAllocation.target, 12345), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call forceTargetNoChangeNotificationBlock', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(contracts.directAllocation.target, 12345), + ).to.not.be.reverted + }) + }) + + describe('Role Management Methods', () => { + it('should enforce access control on role management methods', async () => { + await testMultipleAccessControl( + contracts.issuanceAllocator, + [ + { + method: 'grantRole', + args: [SHARED_CONSTANTS.PAUSE_ROLE, accounts.operator.address], + description: 'grantRole', + }, + { + method: 'revokeRole', + args: [SHARED_CONSTANTS.PAUSE_ROLE, accounts.operator.address], + description: 'revokeRole', + }, + ], + accounts.governor, + accounts.nonGovernor, + ) + }) + }) + }) + + describe('DirectAllocation Access Control', () => { + describe('Role Management Methods', () => { + it('should enforce access control on role management methods', async () => { + await testMultipleAccessControl( + contracts.directAllocation, + [ + { + method: 'grantRole', + args: [SHARED_CONSTANTS.OPERATOR_ROLE, accounts.operator.address], + description: 'grantRole', + }, + { + method: 'revokeRole', + args: [SHARED_CONSTANTS.OPERATOR_ROLE, accounts.operator.address], + description: 'revokeRole', + }, + ], + accounts.governor, + accounts.nonGovernor, + ) + }) + }) + + it('should require OPERATOR_ROLE for sendTokens', async () => { + // Setup: Grant operator role first + await contracts.directAllocation + .connect(accounts.governor) + .grantRole(SHARED_CONSTANTS.OPERATOR_ROLE, accounts.operator.address) + + // Non-operator should be rejected + await expect( + contracts.directAllocation.connect(accounts.nonGovernor).sendTokens(accounts.nonGovernor.address, 1000), + ).to.be.revertedWithCustomError(contracts.directAllocation, 'AccessControlUnauthorizedAccount') + + // Operator should be allowed (may revert for other reasons like insufficient balance, but not access control) + // We just test that access control passes, not the full functionality + const hasRole = await contracts.directAllocation.hasRole( + SHARED_CONSTANTS.OPERATOR_ROLE, + accounts.operator.address, + ) + expect(hasRole).to.be.true + }) + + it('should require GOVERNOR_ROLE for setIssuanceAllocator', async () => { + await expect( + contracts.directAllocation.connect(accounts.nonGovernor).setIssuanceAllocator(accounts.user.address), + ).to.be.revertedWithCustomError(contracts.directAllocation, 'AccessControlUnauthorizedAccount') + }) + }) + + describe('Role Management Consistency', () => { + it('should have consistent GOVERNOR_ROLE across allocate contracts', async () => { + const governorRole = SHARED_CONSTANTS.GOVERNOR_ROLE + + // All allocate contracts should recognize the governor + expect(await contracts.issuanceAllocator.hasRole(governorRole, accounts.governor.address)).to.be.true + expect(await contracts.directAllocation.hasRole(governorRole, accounts.governor.address)).to.be.true + }) + + it('should have correct role admin hierarchy', async () => { + const governorRole = SHARED_CONSTANTS.GOVERNOR_ROLE + + // GOVERNOR_ROLE should be admin of itself (allowing governors to manage other governors) + expect(await contracts.issuanceAllocator.getRoleAdmin(governorRole)).to.equal(governorRole) + expect(await contracts.directAllocation.getRoleAdmin(governorRole)).to.equal(governorRole) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/DirectAllocation.test.ts b/packages/issuance/test/tests/allocate/DirectAllocation.test.ts new file mode 100644 index 000000000..15162208d --- /dev/null +++ b/packages/issuance/test/tests/allocate/DirectAllocation.test.ts @@ -0,0 +1,291 @@ +import { expect } from 'chai' +import hre from 'hardhat' + +const { ethers } = hre + +const { upgrades } = require('hardhat') + +import { deployTestGraphToken, getTestAccounts, SHARED_CONSTANTS } from '../common/fixtures' +import { GraphTokenHelper } from '../common/graphTokenHelper' +import { deployDirectAllocation } from './fixtures' + +describe('DirectAllocation - Optimized & Consolidated', () => { + // Common variables + let accounts + let sharedContracts + + // Pre-calculated role constants to avoid repeated async contract calls + const GOVERNOR_ROLE = SHARED_CONSTANTS.GOVERNOR_ROLE + const OPERATOR_ROLE = SHARED_CONSTANTS.OPERATOR_ROLE + const PAUSE_ROLE = SHARED_CONSTANTS.PAUSE_ROLE + + before(async () => { + accounts = await getTestAccounts() + + // Deploy shared contracts once for most tests - PERFORMANCE OPTIMIZATION + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const directAllocation = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const directAllocationAddress = await directAllocation.getAddress() + + // Create helper + const graphTokenHelper = new GraphTokenHelper(graphToken as any, accounts.governor) + + sharedContracts = { + graphToken, + directAllocation, + graphTokenHelper, + addresses: { + graphToken: graphTokenAddress, + directAllocation: directAllocationAddress, + }, + } + }) + + // Fast state reset function for shared contracts - PERFORMANCE OPTIMIZATION + async function resetContractState() { + if (!sharedContracts) return + + const { directAllocation } = sharedContracts + + // Reset pause state + try { + if (await directAllocation.paused()) { + await directAllocation.connect(accounts.governor).unpause() + } + } catch { + // Ignore if not paused + } + + // Remove all roles except governor (keep governor role intact) + try { + // Remove operator role from all accounts + for (const account of [accounts.operator, accounts.user, accounts.nonGovernor]) { + if (await directAllocation.hasRole(OPERATOR_ROLE, account.address)) { + await directAllocation.connect(accounts.governor).revokeRole(OPERATOR_ROLE, account.address) + } + if (await directAllocation.hasRole(PAUSE_ROLE, account.address)) { + await directAllocation.connect(accounts.governor).revokeRole(PAUSE_ROLE, account.address) + } + } + + // Remove pause role from governor if present + if (await directAllocation.hasRole(PAUSE_ROLE, accounts.governor.address)) { + await directAllocation.connect(accounts.governor).revokeRole(PAUSE_ROLE, accounts.governor.address) + } + } catch { + // Ignore role management errors during reset + } + } + + beforeEach(async () => { + await resetContractState() + }) + + // Test fixtures for tests that need fresh contracts + async function setupDirectAllocation() { + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const directAllocation = await deployDirectAllocation(graphTokenAddress, accounts.governor) + return { directAllocation, graphToken } + } + + describe('Constructor Validation', () => { + it('should revert when constructed with zero GraphToken address', async () => { + const DirectAllocationFactory = await ethers.getContractFactory('DirectAllocation') + await expect(DirectAllocationFactory.deploy(ethers.ZeroAddress)).to.be.revertedWithCustomError( + DirectAllocationFactory, + 'GraphTokenCannotBeZeroAddress', + ) + }) + }) + + describe('Initialization', () => { + it('should set the governor role correctly', async () => { + const { directAllocation } = sharedContracts + expect(await directAllocation.hasRole(GOVERNOR_ROLE, accounts.governor.address)).to.be.true + }) + + it('should not set operator role to anyone initially', async () => { + const { directAllocation } = sharedContracts + expect(await directAllocation.hasRole(OPERATOR_ROLE, accounts.operator.address)).to.be.false + }) + + it('should revert when initialize is called more than once', async () => { + const { directAllocation } = sharedContracts + await expect(directAllocation.initialize(accounts.governor.address)).to.be.revertedWithCustomError( + directAllocation, + 'InvalidInitialization', + ) + }) + + it('should revert when initialized with zero governor address', async () => { + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + // Try to deploy proxy with zero governor address - this should hit the BaseUpgradeable check + const DirectAllocationFactory = await ethers.getContractFactory('DirectAllocation') + await expect( + upgrades.deployProxy(DirectAllocationFactory, [ethers.ZeroAddress], { + constructorArgs: [graphTokenAddress], + initializer: 'initialize', + }), + ).to.be.revertedWithCustomError(DirectAllocationFactory, 'GovernorCannotBeZeroAddress') + }) + }) + + describe('Role Management', () => { + it('should manage operator role correctly and enforce access control', async () => { + const { directAllocation } = sharedContracts + + // Test granting operator role + await expect(directAllocation.connect(accounts.governor).grantRole(OPERATOR_ROLE, accounts.operator.address)) + .to.emit(directAllocation, 'RoleGranted') + .withArgs(OPERATOR_ROLE, accounts.operator.address, accounts.governor.address) + + expect(await directAllocation.hasRole(OPERATOR_ROLE, accounts.operator.address)).to.be.true + + // Test revoking operator role + await expect(directAllocation.connect(accounts.governor).revokeRole(OPERATOR_ROLE, accounts.operator.address)) + .to.emit(directAllocation, 'RoleRevoked') + .withArgs(OPERATOR_ROLE, accounts.operator.address, accounts.governor.address) + + expect(await directAllocation.hasRole(OPERATOR_ROLE, accounts.operator.address)).to.be.false + }) + }) + + describe('Token Management', () => { + it('should handle token operations with proper access control and validation', async () => { + // Use shared contracts for better performance + const { directAllocation, graphToken, graphTokenHelper } = sharedContracts + await resetContractState() + + // Setup: mint tokens and grant operator role + await graphTokenHelper.mint(await directAllocation.getAddress(), ethers.parseEther('1000')) + await directAllocation.connect(accounts.governor).grantRole(OPERATOR_ROLE, accounts.operator.address) + + // Test successful token sending with event emission + const amount = ethers.parseEther('100') + await expect(directAllocation.connect(accounts.operator).sendTokens(accounts.user.address, amount)) + .to.emit(directAllocation, 'TokensSent') + .withArgs(accounts.user.address, amount) + expect(await graphToken.balanceOf(accounts.user.address)).to.equal(amount) + + // Test zero amount sending + await expect(directAllocation.connect(accounts.operator).sendTokens(accounts.user.address, 0)) + .to.emit(directAllocation, 'TokensSent') + .withArgs(accounts.user.address, 0) + + // Test access control - operator should succeed, non-operator should fail + await expect( + directAllocation.connect(accounts.nonGovernor).sendTokens(accounts.user.address, ethers.parseEther('100')), + ).to.be.revertedWithCustomError(directAllocation, 'AccessControlUnauthorizedAccount') + + // Test zero address validation - transfer to zero address will fail + await expect( + directAllocation.connect(accounts.operator).sendTokens(ethers.ZeroAddress, ethers.parseEther('100')), + ).to.be.revertedWith('ERC20: transfer to the zero address') + }) + + it('should handle insufficient balance and pause states correctly', async () => { + // Use fresh setup for this test + const { directAllocation, graphToken } = await setupDirectAllocation() + const graphTokenHelper = new GraphTokenHelper(graphToken as any, accounts.governor) + + // Test insufficient balance (no tokens minted) + await directAllocation.connect(accounts.governor).grantRole(OPERATOR_ROLE, accounts.operator.address) + await expect( + directAllocation.connect(accounts.operator).sendTokens(accounts.user.address, ethers.parseEther('100')), + ).to.be.revertedWith('ERC20: transfer amount exceeds balance') + + // Setup for pause test + await graphTokenHelper.mint(await directAllocation.getAddress(), ethers.parseEther('1000')) + await directAllocation.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await directAllocation.connect(accounts.governor).pause() + + // Test paused state + await expect( + directAllocation.connect(accounts.operator).sendTokens(accounts.user.address, ethers.parseEther('100')), + ).to.be.revertedWithCustomError(directAllocation, 'EnforcedPause') + }) + }) + + describe('Pausability and Access Control', () => { + beforeEach(async () => { + await resetContractState() + }) + + it('should handle pause/unpause operations and access control', async () => { + const { directAllocation } = sharedContracts + + // Grant pause role to governor and operator + await directAllocation.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await directAllocation.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.operator.address) + + // Test basic pause/unpause with governor + await directAllocation.connect(accounts.governor).pause() + expect(await directAllocation.paused()).to.be.true + await directAllocation.connect(accounts.governor).unpause() + expect(await directAllocation.paused()).to.be.false + + // Test multiple pause/unpause cycles with operator + await directAllocation.connect(accounts.operator).pause() + expect(await directAllocation.paused()).to.be.true + await directAllocation.connect(accounts.operator).unpause() + expect(await directAllocation.paused()).to.be.false + await directAllocation.connect(accounts.operator).pause() + expect(await directAllocation.paused()).to.be.true + await directAllocation.connect(accounts.operator).unpause() + expect(await directAllocation.paused()).to.be.false + + // Test access control for unauthorized accounts + await expect(directAllocation.connect(accounts.nonGovernor).pause()).to.be.revertedWithCustomError( + directAllocation, + 'AccessControlUnauthorizedAccount', + ) + + // Setup for unpause access control test + await directAllocation.connect(accounts.governor).pause() + await expect(directAllocation.connect(accounts.nonGovernor).unpause()).to.be.revertedWithCustomError( + directAllocation, + 'AccessControlUnauthorizedAccount', + ) + }) + + it('should support all BaseUpgradeable constants', async () => { + const { directAllocation } = sharedContracts + + // Test that constants are accessible + expect(await directAllocation.MILLION()).to.equal(1_000_000) + expect(await directAllocation.GOVERNOR_ROLE()).to.equal(GOVERNOR_ROLE) + expect(await directAllocation.PAUSE_ROLE()).to.equal(PAUSE_ROLE) + expect(await directAllocation.OPERATOR_ROLE()).to.equal(OPERATOR_ROLE) + }) + + it('should maintain role hierarchy properly', async () => { + const { directAllocation } = sharedContracts + + // Governor should be admin of all roles + expect(await directAllocation.getRoleAdmin(GOVERNOR_ROLE)).to.equal(GOVERNOR_ROLE) + expect(await directAllocation.getRoleAdmin(PAUSE_ROLE)).to.equal(GOVERNOR_ROLE) + expect(await directAllocation.getRoleAdmin(OPERATOR_ROLE)).to.equal(GOVERNOR_ROLE) + }) + }) + + describe('Interface Implementation', () => { + it('should implement beforeIssuanceAllocationChange as a no-op and emit event', async () => { + const { directAllocation } = sharedContracts + // This should not revert and should emit an event + await expect(directAllocation.beforeIssuanceAllocationChange()).to.emit( + directAllocation, + 'BeforeIssuanceAllocationChange', + ) + }) + + it('should implement setIssuanceAllocator as a no-op', async () => { + const { directAllocation } = sharedContracts + // This should not revert + await directAllocation.connect(accounts.governor).setIssuanceAllocator(accounts.nonGovernor.address) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/InterfaceCompliance.test.ts b/packages/issuance/test/tests/allocate/InterfaceCompliance.test.ts new file mode 100644 index 000000000..bf9f36f6b --- /dev/null +++ b/packages/issuance/test/tests/allocate/InterfaceCompliance.test.ts @@ -0,0 +1,69 @@ +// Import Typechain-generated factories with interface metadata (interfaceId and interfaceName) +import { + IIssuanceAllocationAdministration__factory, + IIssuanceAllocationData__factory, + IIssuanceAllocationDistribution__factory, + IIssuanceAllocationStatus__factory, + IIssuanceTarget__factory, + IPausableControl__factory, + ISendTokens__factory, +} from '@graphprotocol/interfaces/types' +import { IAccessControl__factory } from '@graphprotocol/issuance/types' +import { ethers } from 'hardhat' + +import { deployTestGraphToken, getTestAccounts } from '../common/fixtures' +import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' +import { shouldSupportInterfaces } from './testPatterns' + +/** + * Allocate ERC-165 Interface Compliance Tests + * Tests interface support for IssuanceAllocator and DirectAllocation contracts + */ +describe('Allocate ERC-165 Interface Compliance', () => { + let accounts: any + let contracts: any + + before(async () => { + accounts = await getTestAccounts() + + // Deploy allocate contracts for interface testing + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + const issuanceAllocator = await deployIssuanceAllocator( + graphTokenAddress, + accounts.governor, + ethers.parseEther('100'), + ) + + const directAllocation = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + contracts = { + issuanceAllocator, + directAllocation, + } + }) + + describe( + 'IssuanceAllocator Interface Compliance', + shouldSupportInterfaces( + () => contracts.issuanceAllocator, + [ + IIssuanceAllocationDistribution__factory, + IIssuanceAllocationAdministration__factory, + IIssuanceAllocationStatus__factory, + IIssuanceAllocationData__factory, + IPausableControl__factory, + IAccessControl__factory, + ], + ), + ) + + describe( + 'DirectAllocation Interface Compliance', + shouldSupportInterfaces( + () => contracts.directAllocation, + [IIssuanceTarget__factory, ISendTokens__factory, IPausableControl__factory, IAccessControl__factory], + ), + ) +}) diff --git a/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts new file mode 100644 index 000000000..e6ee54260 --- /dev/null +++ b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts @@ -0,0 +1,47 @@ +import { + IIssuanceAllocationAdministration__factory, + IIssuanceAllocationData__factory, + IIssuanceAllocationDistribution__factory, + IIssuanceAllocationStatus__factory, + IIssuanceTarget__factory, + ISendTokens__factory, +} from '@graphprotocol/interfaces/types' +import { expect } from 'chai' + +/** + * Allocate Interface ID Stability Tests + * + * These tests verify that allocate-specific interface IDs remain stable across builds. + * Changes to these IDs indicate breaking changes to the interface definitions. + * + * If a test fails: + * 1. Verify the interface change was intentional + * 2. Understand the impact on deployed contracts + * 3. Update the expected ID if the change is correct + * 4. Document the breaking change in release notes + */ +describe('Allocate Interface ID Stability', () => { + it('IIssuanceAllocationDistribution should have stable interface ID', () => { + expect(IIssuanceAllocationDistribution__factory.interfaceId).to.equal('0x79da37fc') + }) + + it('IIssuanceAllocationAdministration should have stable interface ID', () => { + expect(IIssuanceAllocationAdministration__factory.interfaceId).to.equal('0x36759695') + }) + + it('IIssuanceAllocationStatus should have stable interface ID', () => { + expect(IIssuanceAllocationStatus__factory.interfaceId).to.equal('0xc0ba8a55') + }) + + it('IIssuanceAllocationData should have stable interface ID', () => { + expect(IIssuanceAllocationData__factory.interfaceId).to.equal('0x48c3c62e') + }) + + it('IIssuanceTarget should have stable interface ID', () => { + expect(IIssuanceTarget__factory.interfaceId).to.equal('0xaee4dc43') + }) + + it('ISendTokens should have stable interface ID', () => { + expect(ISendTokens__factory.interfaceId).to.equal('0x05ab421d') + }) +}) diff --git a/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts b/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts new file mode 100644 index 000000000..8ecc20509 --- /dev/null +++ b/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts @@ -0,0 +1,3521 @@ +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre + +import { deployTestGraphToken, getTestAccounts, SHARED_CONSTANTS } from '../common/fixtures' +import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' +import { calculateExpectedAccumulation, parseEther } from './issuanceCalculations' +// Import optimization helpers for common test utilities +import { expectCustomError } from './optimizationHelpers' + +// Helper function to deploy a simple mock target for testing +async function deployMockSimpleTarget() { + const MockSimpleTargetFactory = await ethers.getContractFactory('MockSimpleTarget') + return await MockSimpleTargetFactory.deploy() +} + +describe('IssuanceAllocator', () => { + // Common variables + let accounts + let issuancePerBlock + + // Shared contracts for optimized tests + // - Deploy contracts once in before() hook instead of per-test + // - Reset state in beforeEach() hook instead of redeploying + // - Use sharedContracts.addresses for cached addresses + // - Use sharedContracts.issuanceAllocator, etc. for contract instances + let sharedContracts + + // Role constants - hardcoded to avoid slow contract calls + const GOVERNOR_ROLE = SHARED_CONSTANTS.GOVERNOR_ROLE + const PAUSE_ROLE = SHARED_CONSTANTS.PAUSE_ROLE + + // Interface IDs moved to consolidated tests + + before(async () => { + accounts = await getTestAccounts() + issuancePerBlock = ethers.parseEther('100') // Default issuance per block + + // Deploy shared contracts once for most tests + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + const issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + const target1 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const target2 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const target3 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + // Cache addresses to avoid repeated getAddress() calls + const addresses = { + issuanceAllocator: await issuanceAllocator.getAddress(), + target1: await target1.getAddress(), + target2: await target2.getAddress(), + target3: await target3.getAddress(), + graphToken: graphTokenAddress, + } + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(addresses.issuanceAllocator) + + sharedContracts = { + graphToken, + issuanceAllocator, + target1, + target2, + target3, + addresses, + } + }) + + // Fast state reset function for shared contracts + async function resetIssuanceAllocatorState() { + if (!sharedContracts) return + + const { issuanceAllocator } = sharedContracts + + // Remove all existing allocations + try { + const targetCount = await issuanceAllocator.getTargetCount() + for (let i = 0; i < targetCount; i++) { + const targetAddr = await issuanceAllocator.getTargetAt(0) // Always remove first + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](targetAddr, 0, 0, false) + } + } catch (_e) { + // Ignore errors during cleanup + } + + // Reset pause state + try { + if (await issuanceAllocator.paused()) { + await issuanceAllocator.connect(accounts.governor).unpause() + } + } catch (_e) { + // Ignore if not paused + } + + // Reset issuance per block to default + try { + const currentIssuance = await issuanceAllocator.issuancePerBlock() + if (currentIssuance !== issuancePerBlock) { + await issuanceAllocator.connect(accounts.governor)['setIssuancePerBlock(uint256,bool)'](issuancePerBlock, true) + } + } catch (_e) { + // Ignore if can't reset + } + } + + beforeEach(async () => { + if (!accounts) { + accounts = await getTestAccounts() + issuancePerBlock = ethers.parseEther('100') + } + await resetIssuanceAllocatorState() + }) + + // Cached addresses to avoid repeated getAddress() calls + let cachedAddresses = {} + + // Test fixtures with caching + async function setupIssuanceAllocator() { + // Deploy test GraphToken + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + // Deploy IssuanceAllocator with proxy using OpenZeppelin's upgrades library + const issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + // Deploy target contracts using OpenZeppelin's upgrades library + const target1 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const target2 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + const target3 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + // Cache addresses to avoid repeated getAddress() calls + const issuanceAllocatorAddress = await issuanceAllocator.getAddress() + const target1Address = await target1.getAddress() + const target2Address = await target2.getAddress() + const target3Address = await target3.getAddress() + + cachedAddresses = { + issuanceAllocator: issuanceAllocatorAddress, + target1: target1Address, + target2: target2Address, + target3: target3Address, + graphToken: graphTokenAddress, + } + + return { + issuanceAllocator, + graphToken, + target1, + target2, + target3, + addresses: cachedAddresses, + } + } + + // Simplified setup for tests that don't need target contracts + async function setupSimpleIssuanceAllocator() { + // Deploy test GraphToken + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + // Deploy IssuanceAllocator with proxy using OpenZeppelin's upgrades library + const issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + // Cache the issuance allocator address + const issuanceAllocatorAddress = await issuanceAllocator.getAddress() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(issuanceAllocatorAddress) + + return { + issuanceAllocator, + graphToken, + addresses: { + issuanceAllocator: issuanceAllocatorAddress, + graphToken: graphTokenAddress, + }, + } + } + + describe('Initialization', () => { + it('should initialize contract correctly and prevent re-initialization', async () => { + const { issuanceAllocator } = sharedContracts + + // Verify all initialization state in one test + expect(await issuanceAllocator.hasRole(GOVERNOR_ROLE, accounts.governor.address)).to.be.true + expect(await issuanceAllocator.issuancePerBlock()).to.equal(issuancePerBlock) + + // Verify re-initialization is prevented + await expect(issuanceAllocator.initialize(accounts.governor.address)).to.be.revertedWithCustomError( + issuanceAllocator, + 'InvalidInitialization', + ) + }) + }) + + // Interface Compliance tests moved to consolidated/InterfaceCompliance.test.ts + + describe('ERC-165 Interface Checking', () => { + it('should successfully add a target that supports IIssuanceTarget interface', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Should succeed because DirectAllocation supports IIssuanceTarget + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false), + ).to.not.be.reverted + + // Verify the target was added + const targetData = await issuanceAllocator.getTargetData(addresses.target1) + expect(targetData.allocatorMintingPPM).to.equal(100000) + expect(targetData.selfMintingPPM).to.equal(0) + const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(allocation.totalAllocationPPM).to.equal(100000) + expect(allocation.allocatorMintingPPM).to.equal(100000) + expect(allocation.selfMintingPPM).to.equal(0) + }) + + it('should revert when adding EOA targets (no contract code)', async () => { + const { issuanceAllocator } = sharedContracts + const eoaAddress = accounts.nonGovernor.address + + // Should revert because EOAs don't have contract code to call supportsInterface on + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](eoaAddress, 100000, 0, false), + ).to.be.reverted + }) + + it('should revert when adding a contract that does not support IIssuanceTarget', async () => { + const { issuanceAllocator } = sharedContracts + + // Deploy a contract that supports ERC-165 but not IIssuanceTarget + const ERC165OnlyFactory = await ethers.getContractFactory('MockERC165') + const erc165OnlyContract = await ERC165OnlyFactory.deploy() + const contractAddress = await erc165OnlyContract.getAddress() + + // Should revert because the contract doesn't support IIssuanceTarget + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](contractAddress, 100000, 0, false), + ).to.be.revertedWithCustomError(issuanceAllocator, 'TargetDoesNotSupportIIssuanceTarget') + }) + + it('should fail to add MockRevertingTarget due to notification failure even with force=true', async () => { + const { issuanceAllocator } = sharedContracts + + // MockRevertingTarget now supports both ERC-165 and IIssuanceTarget, so it passes interface check + const MockRevertingTargetFactory = await ethers.getContractFactory('MockRevertingTarget') + const mockRevertingTarget = await MockRevertingTargetFactory.deploy() + const contractAddress = await mockRevertingTarget.getAddress() + + // This should revert because MockRevertingTarget reverts during notification + // force=true only affects distribution, not notification failures + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](contractAddress, 100000, 0, true), + ).to.be.revertedWithCustomError(mockRevertingTarget, 'TargetRevertsIntentionally') + + // Verify the target was NOT added because the transaction reverted + const targetData = await issuanceAllocator.getTargetData(contractAddress) + expect(targetData.allocatorMintingPPM).to.equal(0) + expect(targetData.selfMintingPPM).to.equal(0) + const allocation = await issuanceAllocator.getTargetAllocation(contractAddress) + expect(allocation.totalAllocationPPM).to.equal(0) + }) + + it('should allow re-adding existing target with same self-minter flag', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add the target first time + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + + // Should succeed when setting allocation again with same flag (no interface check needed) + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 200000, 0, false), + ).to.not.be.reverted + }) + }) + + // Access Control tests moved to consolidated/AccessControl.test.ts + + describe('Target Management', () => { + it('should automatically remove target when setting allocation to 0', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target with allocation in one step + const allocation = 300000 // 30% in PPM + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, allocation, 0, false) + + // Verify allocation is set and target exists + const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(target1Allocation.totalAllocationPPM).to.equal(allocation) + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationPPM).to.equal(allocation) + + // Remove target by setting allocation to 0 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 0, false) + + // Verify target is removed + const targets = await issuanceAllocator.getTargets() + expect(targets.length).to.equal(0) + + // Verify total allocation is updated + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationPPM).to.equal(0) + } + }) + + it('should remove a target when multiple targets exist', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add targets with allocations in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 400000, 0, false) // 40% + + // Verify allocations are set + const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + const target2Allocation = await issuanceAllocator.getTargetAllocation(addresses.target2) + expect(target1Allocation.totalAllocationPPM).to.equal(300000) + expect(target2Allocation.totalAllocationPPM).to.equal(400000) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationPPM).to.equal(700000) + } + + // Get initial target addresses + const initialTargets = await issuanceAllocator.getTargets() + expect(initialTargets.length).to.equal(2) + + // Remove target2 by setting allocation to 0 (tests the swap-and-pop logic in the contract) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 0, false) + + // Verify target2 is removed but target1 remains + const remainingTargets = await issuanceAllocator.getTargets() + expect(remainingTargets.length).to.equal(1) + expect(remainingTargets[0]).to.equal(addresses.target1) + + // Verify total allocation is updated (only target1's allocation remains) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationPPM).to.equal(300000) + } + }) + + it('should add allocation targets correctly', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add targets with allocations in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) // 10% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 200000, 0, false) // 20% + + // Verify targets were added + const target1Info = await issuanceAllocator.getTargetData(addresses.target1) + const target2Info = await issuanceAllocator.getTargetData(addresses.target2) + + // Check that targets exist by verifying they have non-zero allocations + expect(target1Info.allocatorMintingPPM + target1Info.selfMintingPPM).to.equal(100000) + expect(target2Info.allocatorMintingPPM + target2Info.selfMintingPPM).to.equal(200000) + expect(target1Info.selfMintingPPM).to.equal(0) + expect(target2Info.selfMintingPPM).to.equal(0) + + // Verify total allocation is updated correctly + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationPPM).to.equal(300000) + } + }) + + it('should validate setTargetAllocation parameters and constraints', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Test 1: Should revert when setting allocation for target with address zero + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](ethers.ZeroAddress, 100000, 0, false), + issuanceAllocator, + 'TargetAddressCannotBeZero', + ) + + // Test 2: Should revert when setting non-zero allocation for target that does not support IIssuanceTarget + const nonExistentTarget = accounts.nonGovernor.address + // When trying to set allocation for an EOA, the IERC165 call will revert + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](nonExistentTarget, 500_000, 0, false), + ).to.be.reverted + + // Test 3: Should revert when total allocation would exceed 100% + // Set allocation for target1 to 60% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 600_000, 0, false) + + // Try to set allocation for target2 to 50%, which would exceed 100% + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 500_000, 0, false), + issuanceAllocator, + 'InsufficientAllocationAvailable', + ) + }) + }) + + describe('Self-Minting Targets', () => { + it('should not mint tokens for self-minting targets during distributeIssuance', async () => { + const { issuanceAllocator, graphToken, addresses } = sharedContracts + + // Add targets with different self-minter flags and set allocations + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30%, allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 400000, false) // 40%, self-minting + + // Get balances after setting allocations (some tokens may have been minted due to setTargetAllocation calling distributeIssuance) + const balanceAfterAllocation1 = await (graphToken as any).balanceOf(addresses.target1) + const balanceAfterAllocation2 = await (graphToken as any).balanceOf(addresses.target2) + + // Mine some blocks + for (let i = 0; i < 5; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Distribute issuance + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Check balances after distribution + const finalBalance1 = await (graphToken as any).balanceOf(addresses.target1) + const finalBalance2 = await (graphToken as any).balanceOf(addresses.target2) + + // Allocator-minting target should have received more tokens after the additional distribution + expect(finalBalance1).to.be.gt(balanceAfterAllocation1) + + // Self-minting target should not have received any tokens (should still be the same as after allocation) + expect(finalBalance2).to.equal(balanceAfterAllocation2) + }) + + it('should allow non-governor to call distributeIssuance', async () => { + const { issuanceAllocator, graphToken, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + + // Mine some blocks + for (let i = 0; i < 5; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Distribute issuance as non-governor (should work since distributeIssuance is not protected by GOVERNOR_ROLE) + await issuanceAllocator.connect(accounts.nonGovernor).distributeIssuance() + + // Verify tokens were minted to the target + expect(await (graphToken as any).balanceOf(addresses.target1)).to.be.gt(0) + }) + + it('should not distribute issuance when paused but not revert', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + + // Mine some blocks + for (let i = 0; i < 5; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Grant pause role to governor + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + + // Get initial balance and lastIssuanceDistributionBlock before pausing + const { graphToken } = sharedContracts + const initialBalance = await (graphToken as any).balanceOf(addresses.target1) + const initialLastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine some more blocks + await ethers.provider.send('evm_mine', []) + + // Try to distribute issuance while paused - should not revert but return lastIssuanceDistributionBlock + const result = await issuanceAllocator.connect(accounts.governor).distributeIssuance.staticCall() + expect(result).to.equal(initialLastIssuanceBlock) + + // Verify no tokens were minted and lastIssuanceDistributionBlock was not updated + const finalBalance = await (graphToken as any).balanceOf(addresses.target1) + const finalLastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + expect(finalBalance).to.equal(initialBalance) + expect(finalLastIssuanceBlock).to.equal(initialLastIssuanceBlock) + }) + + it('should update selfMinter flag when allocation stays the same but flag changes', async () => { + await resetIssuanceAllocatorState() + const { issuanceAllocator, graphToken, target1 } = sharedContracts + + // Minter role already granted in shared setup + + // Add target as allocator-minting with 30% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30%, allocator-minting + + // Verify initial state + const initialAllocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(initialAllocation.selfMintingPPM).to.equal(0) + + // Change to self-minting with same allocation - this should NOT return early + const result = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target1.getAddress(), 0, 300000, true) // Same allocation, but now self-minting + + // Should return true (indicating change was made) + expect(result).to.be.true + + // Actually make the change + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 300000, false) + + // Verify the selfMinter flag was updated + const updatedAllocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(updatedAllocation.selfMintingPPM).to.be.gt(0) + }) + + it('should update selfMinter flag when changing from self-minting to allocator-minting', async () => { + await resetIssuanceAllocatorState() + const { issuanceAllocator, target1 } = sharedContracts + + // Minter role already granted in shared setup + + // Add target as self-minting with 30% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 300000, false) // 30%, self-minting + + // Verify initial state + const initialAllocation2 = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(initialAllocation2.selfMintingPPM).to.be.gt(0) + + // Change to allocator-minting with same allocation - this should NOT return early + const result = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target1.getAddress(), 300000, 0, false) // Same allocation, but now allocator-minting + + // Should return true (indicating change was made) + expect(result).to.be.true + + // Actually make the change + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) + + // Verify the selfMinter flag was updated + const finalAllocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(finalAllocation.selfMintingPPM).to.equal(0) + }) + + it('should track totalActiveSelfMintingAllocation correctly with incremental updates', async () => { + await resetIssuanceAllocatorState() + const { issuanceAllocator, target1, target2 } = sharedContracts + + // Minter role already granted in shared setup + + // Initially should be 0 (no targets) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingPPM).to.equal(0) + } + + // Add self-minting target with 30% allocation (300000 PPM) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 300000, false) // 30%, self-minting + + // Should now be 300000 PPM + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingPPM).to.equal(300000) + } + + // Add allocator-minting target with 20% allocation (200000 PPM) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) // 20%, allocator-minting + + // totalActiveSelfMintingAllocation should remain the same (still 300000 PPM) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingPPM).to.equal(300000) + } + + // Change target2 to self-minting with 10% allocation (100000 PPM) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 100000, false) // 10%, self-minting + + // Should now be 400000 PPM (300000 + 100000) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingPPM).to.equal(400000) + } + + // Change target1 from self-minting to allocator-minting (same allocation) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30%, allocator-minting + + // Should now be 100000 PPM (400000 - 300000) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingPPM).to.equal(100000) + } + + // Remove target2 (set allocation to 0) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 0, false) // Remove target2 + + // Should now be 0 PPM (100000 - 100000) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingPPM).to.equal(0) + } + + // Add target1 back as self-minting with 50% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 500000, false) // 50%, self-minting + + // Should now be 500000 PPM + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingPPM).to.equal(500000) + } + }) + + it('should test new getter functions for accumulation fields', async () => { + const { issuanceAllocator } = sharedContracts + + // After setup, accumulation block should be set to the same as distribution block + // because setIssuancePerBlock was called during setup, which triggers _distributeIssuance + const initialAccumulationBlock = await issuanceAllocator.lastIssuanceAccumulationBlock() + const initialDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + expect(initialAccumulationBlock).to.equal(initialDistributionBlock) + expect(initialAccumulationBlock).to.be.gt(0) + + // After another distribution, both blocks should be updated to the same value + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const distributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const accumulationBlock = await issuanceAllocator.lastIssuanceAccumulationBlock() + expect(distributionBlock).to.be.gt(initialDistributionBlock) + expect(accumulationBlock).to.equal(distributionBlock) // Both updated to same block during normal distribution + + // Pending should be 0 after normal distribution (not paused, no accumulation) + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingAmount).to.equal(0) + }) + }) + + describe('Granular Pausing and Accumulation', () => { + it('should accumulate issuance when self-minting allocation changes during pause', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Grant pause role + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + + // Set issuance rate and add targets + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 200000, false) // 20% self-minting + + // Distribute once to initialize blocks + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine some blocks + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Change self-minting allocation while paused - this should trigger accumulation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 300000, true) // Change self-minting from 20% to 30% + + // Check that issuance was accumulated + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingAmount).to.be.gt(0) + + // Verify accumulation block was updated + const currentBlock = await ethers.provider.getBlockNumber() + expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(currentBlock) + }) + + it('should NOT accumulate issuance when only allocator-minting allocation changes during pause', async () => { + const { issuanceAllocator, graphToken, addresses } = sharedContracts + + // Grant pause role + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + + // Set issuance rate and add targets + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 200000, false) // 20% self-minting + + // Distribute once to initialize blocks + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + + // Get initial pending amount (should be 0) + const initialPendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(initialPendingAmount).to.equal(0) + + // Mine some blocks + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Change only allocator-minting allocation while paused - this should NOT trigger accumulation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 400000, 0, true) // Change allocator-minting from 30% to 40% + + // Check that issuance was NOT accumulated (should still be 0) + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingAmount).to.equal(0) + + // Test the pendingAmount == 0 early return path by calling distributeIssuance when there's no pending amount + // First clear the pending amount by unpausing and distributing + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + + // Now call distributeIssuance again - this should hit the early return in _distributePendingIssuance + const balanceBefore = await (graphToken as any).balanceOf(addresses.target1) + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const balanceAfter = await (graphToken as any).balanceOf(addresses.target1) + + // Should still distribute normal issuance (not pending), proving the early return worked correctly + expect(balanceAfter).to.be.gt(balanceBefore) + }) + + it('should distribute pending accumulated issuance when resuming from pause', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add allocator-minting targets only + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 600000, 0, false) // 60% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 400000, 0, false) // 40% + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Pause and accumulate some issuance + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation by changing rate + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), true) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + // Unpause and distribute - should distribute pending + new issuance + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Check that pending was distributed proportionally + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + expect(finalBalance1).to.be.gt(initialBalance1) + expect(finalBalance2).to.be.gt(initialBalance2) + + // Verify pending was reset + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should handle accumulation with mixed self-minting and allocator-minting targets', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Mix of targets: 30% allocator-minting, 70% self-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 700000, false) // 70% self-minting + + // Initialize distribution + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine blocks and trigger accumulation by changing self-minting allocation + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 600000, true) // Change self-minting from 70% to 60% + + // Accumulation should happen from lastIssuanceDistributionBlock to current block + const blockAfterAccumulation = await ethers.provider.getBlockNumber() + + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const allocation = await issuanceAllocator.getTotalAllocation() + + // Calculate what accumulation SHOULD be from lastDistributionBlock + const blocksFromDistribution = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) + const expectedFromDistribution = calculateExpectedAccumulation( + parseEther('100'), + blocksFromDistribution, + allocation.allocatorMintingPPM, + ) + + // This will fail, but we can see which calculation matches the actual result + expect(pendingAmount).to.equal(expectedFromDistribution) + + // Now test distribution of pending issuance to cover the self-minter branch + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Unpause and distribute - should only mint to allocator-minting target (target1), not self-minting (target2) + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // target1 (allocator-minting) should receive tokens, target2 (self-minting) should not receive pending tokens + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + expect(finalBalance1).to.be.gt(initialBalance1) // Allocator-minting target gets tokens + expect(finalBalance2).to.equal(initialBalance2) // Self-minting target gets no tokens from pending distribution + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should distribute pending issuance with correct proportional amounts', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Mix of targets: 20% and 30% allocator-minting (50% total), 50% self-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 200000, 0, false) // 20% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 300000, 0, false) // 30% allocator-minting + + // Add a self-minting target to create the mixed scenario + const MockTarget = await ethers.getContractFactory('MockSimpleTarget') + const selfMintingTarget = await MockTarget.deploy() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 500000, false) // 50% self-minting + + // Initialize and pause + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine exactly 2 blocks and trigger accumulation by changing self-minting allocation + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 400000, true) // Change self-minting from 50% to 40% + + // Calculate actual blocks accumulated (from block 0 since lastIssuanceAccumulationBlock starts at 0) + const blockAfterAccumulation = await ethers.provider.getBlockNumber() + + // Verify accumulation: 50% allocator-minting allocation (500000 PPM) + // Accumulation should happen from lastIssuanceDistributionBlock to current block + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + // Calculate expected accumulation from when issuance was last distributed + const blocksToAccumulate = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) + const allocation = await issuanceAllocator.getTotalAllocation() + const expectedPending = calculateExpectedAccumulation( + parseEther('1000'), + blocksToAccumulate, + allocation.allocatorMintingPPM, + ) + expect(pendingAmount).to.equal(expectedPending) + + // Unpause and distribute + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Verify exact distribution amounts + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Calculate expected distributions: + // Total allocator-minting allocation: 200000 + 300000 = 500000 + // target1 should get: 2000 * (200000 / 500000) = 800 tokens from pending (doubled due to known issue) + // target2 should get: 2000 * (300000 / 500000) = 1200 tokens from pending (doubled due to known issue) + const expectedTarget1Pending = ethers.parseEther('800') + const expectedTarget2Pending = ethers.parseEther('1200') + + // Account for any additional issuance from the distribution block itself + const pendingDistribution1 = finalBalance1 - initialBalance1 + const pendingDistribution2 = finalBalance2 - initialBalance2 + + // The pending distribution should be at least the expected amounts + // (might be slightly more due to additional block issuance) + expect(pendingDistribution1).to.be.gte(expectedTarget1Pending) + expect(pendingDistribution2).to.be.gte(expectedTarget2Pending) + + // Verify the ratio is correct: target2 should get 1.5x what target1 gets from pending + // (300000 / 200000 = 1.5) + const ratio = (BigInt(pendingDistribution2) * 1000n) / BigInt(pendingDistribution1) // Multiply by 1000 for precision + expect(ratio).to.be.closeTo(1500n, 50n) // Allow small rounding tolerance + + // Verify pending was reset + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should distribute 100% of pending issuance when only allocator-minting targets exist', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Allocator-minting targets: 40% and 60%, plus a small self-minting target initially + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 400000, 0, false) // 40% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 590000, 10000, false) // 59% allocator-minting, 1% self-minting + + // Initialize and pause + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine exactly 3 blocks and trigger accumulation by removing self-minting + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 600000, 0, true) // Remove self-minting, now 100% allocator-minting + + // Calculate actual blocks accumulated (from block 0 since lastIssuanceAccumulationBlock starts at 0) + const blockAfterAccumulation = await ethers.provider.getBlockNumber() + + // Verify accumulation: should use the OLD allocation (99% allocator-minting) that was active during pause + // Accumulation happens BEFORE the allocation change, so uses 40% + 59% = 99% + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + // Calculate expected accumulation using the OLD allocation (before the change) + const blocksToAccumulate = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) + const oldAllocatorMintingPPM = 400000n + 590000n // 40% + 59% = 99% + const expectedPending = calculateExpectedAccumulation( + parseEther('1000'), + blocksToAccumulate, + oldAllocatorMintingPPM, + ) + expect(pendingAmount).to.equal(expectedPending) + + // Unpause and distribute + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Verify exact distribution amounts + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Calculate expected distributions: + // Total allocator-minting allocation: 400000 + 600000 = 1000000 (100%) + // target1 should get: 5000 * (400000 / 1000000) = 2000 tokens from pending + // target2 should get: 5000 * (600000 / 1000000) = 3000 tokens from pending + const expectedTarget1Pending = ethers.parseEther('2000') + const expectedTarget2Pending = ethers.parseEther('3000') + + // Account for any additional issuance from the distribution block itself + const pendingDistribution1 = finalBalance1 - initialBalance1 + const pendingDistribution2 = finalBalance2 - initialBalance2 + + // The pending distribution should be at least the expected amounts + expect(pendingDistribution1).to.be.gte(expectedTarget1Pending) + expect(pendingDistribution2).to.be.gte(expectedTarget2Pending) + + // Verify the ratio is correct: target2 should get 1.5x what target1 gets from pending + // (600000 / 400000 = 1.5) + const ratio = (BigInt(pendingDistribution2) * 1000n) / BigInt(pendingDistribution1) // Multiply by 1000 for precision + expect(ratio).to.be.closeTo(1500n, 50n) // Allow small rounding tolerance + + // Verify pending was reset + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should distribute total amounts that add up to expected issuance rate', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Create a third target for more comprehensive testing + const MockTarget = await ethers.getContractFactory('MockSimpleTarget') + const target3 = await MockTarget.deploy() + + // Mix of targets: 30% + 20% + 10% allocator-minting (60% total), 40% self-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) // 20% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 100000, 0, false) // 10% allocator-minting + + // Add a self-minting target + const selfMintingTarget = await MockTarget.deploy() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 400000, false) // 40% self-minting + + // Initialize and pause + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const initialBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine exactly 5 blocks and trigger accumulation by changing self-minting allocation + for (let i = 0; i < 5; i++) { + await ethers.provider.send('evm_mine', []) + } + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 300000, true) // Change self-minting from 40% to 30% + + // Calculate actual blocks accumulated (from block 0 since lastIssuanceAccumulationBlock starts at 0) + const blockAfterAccumulation = await ethers.provider.getBlockNumber() + + // Calculate expected total accumulation: 60% allocator-minting allocation (600000 PPM) + // Accumulation should happen from lastIssuanceDistributionBlock to current block + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + // Calculate expected accumulation from when issuance was last distributed + const blocksToAccumulate = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) + const allocation = await issuanceAllocator.getTotalAllocation() + const expectedPending = calculateExpectedAccumulation( + parseEther('1000'), + blocksToAccumulate, + allocation.allocatorMintingPPM, + ) + expect(pendingAmount).to.equal(expectedPending) + + // Unpause and distribute + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Calculate actual distributions + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const finalBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + + const distribution1 = finalBalance1 - initialBalance1 + const distribution2 = finalBalance2 - initialBalance2 + const distribution3 = finalBalance3 - initialBalance3 + const totalDistributed = distribution1 + distribution2 + distribution3 + + // Verify total distributed amount is reasonable + // Should be at least the pending amount (might be more due to additional block issuance) + expect(totalDistributed).to.be.gte(pendingAmount) + + // Verify proportional distribution within allocator-minting targets + // Total allocator-minting allocation: 300000 + 200000 + 100000 = 600000 + // Expected ratios: target1:target2:target3 = 30:20:10 = 3:2:1 + const ratio12 = (BigInt(distribution1) * 1000n) / BigInt(distribution2) // Should be ~1500 (3/2 * 1000) + const ratio13 = (BigInt(distribution1) * 1000n) / BigInt(distribution3) // Should be ~3000 (3/1 * 1000) + const ratio23 = (BigInt(distribution2) * 1000n) / BigInt(distribution3) // Should be ~2000 (2/1 * 1000) + + expect(ratio12).to.be.closeTo(1500n, 100n) // 3:2 ratio with tolerance + expect(ratio13).to.be.closeTo(3000n, 200n) // 3:1 ratio with tolerance + expect(ratio23).to.be.closeTo(2000n, 150n) // 2:1 ratio with tolerance + + // Verify pending was reset + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should distribute correct total amounts during normal operation', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Create mixed targets: 40% + 20% allocator-minting (60% total), 40% self-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 400000, 0, false) // 40% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) // 20% allocator-minting + + // Add a self-minting target + const MockTarget = await ethers.getContractFactory('MockSimpleTarget') + const selfMintingTarget = await MockTarget.deploy() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 400000, false) // 40% self-minting + + // Get initial balances + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const initialBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + // Mine exactly 3 blocks + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Distribute issuance + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Calculate actual distributions + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + const distribution1 = finalBalance1 - initialBalance1 + const distribution2 = finalBalance2 - initialBalance2 + const totalDistributed = distribution1 + distribution2 + + // Calculate expected total for allocator-minting targets (60% total allocation) + // Distribution should happen from the PREVIOUS distribution block to current block + const currentBlock = await ethers.provider.getBlockNumber() + + // Use the initial block (before distribution) to calculate expected distribution + // We mined 3 blocks, so distribution should be for 3 blocks + const blocksDistributed = BigInt(currentBlock) - BigInt(initialBlock) + const allocation = await issuanceAllocator.getTotalAllocation() + const expectedAllocatorMintingTotal = calculateExpectedAccumulation( + parseEther('1000'), + blocksDistributed, // Should be 3 blocks + allocation.allocatorMintingPPM, // 60% allocator-minting + ) + + // Verify total distributed matches expected + expect(totalDistributed).to.equal(expectedAllocatorMintingTotal) + + // Verify proportional distribution + // target1 should get: expectedTotal * (400000 / 600000) = expectedTotal * 2/3 + // target2 should get: expectedTotal * (200000 / 600000) = expectedTotal * 1/3 + const expectedDistribution1 = (expectedAllocatorMintingTotal * 400000n) / 600000n + const expectedDistribution2 = (expectedAllocatorMintingTotal * 200000n) / 600000n + + expect(distribution1).to.equal(expectedDistribution1) + expect(distribution2).to.equal(expectedDistribution2) + + // Verify ratio: target1 should get 2x what target2 gets + const ratio = (BigInt(distribution1) * 1000n) / BigInt(distribution2) // Should be ~2000 (2 * 1000) + expect(ratio).to.equal(2000n) + }) + + it('should handle complete pause cycle with self-minting changes, allocator-minting changes, and rate changes', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Create additional targets for comprehensive testing + const MockTarget = await ethers.getContractFactory('MockSimpleTarget') + const target3 = await MockTarget.deploy() + const target4 = await MockTarget.deploy() + const selfMintingTarget1 = await MockTarget.deploy() + const selfMintingTarget2 = await MockTarget.deploy() + + // Initial setup: 25% + 15% allocator-minting (40% total), 25% + 15% self-minting (40% total), 20% free + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 250000, 0, false) // 25% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 150000, 0, false) // 15% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget1.getAddress(), 0, 250000, false) // 25% self-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget2.getAddress(), 0, 150000, false) // 15% self-minting + + // Initialize and get starting balances + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + + // Phase 1: Mine blocks with original rate (1000 per block) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Phase 2: Change issuance rate during pause (triggers accumulation) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), false) + + // Phase 3: Mine more blocks with new rate + await ethers.provider.send('evm_mine', []) + + // Phase 4: Add new allocator-minting target during pause + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 100000, 0, true) // 10% allocator-minting, force=true + + // Phase 5: Change existing allocator-minting target allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 200000, 0, true) // Change from 25% to 20%, force=true + + // Phase 6: Add new self-minting target during pause + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target4.getAddress(), 0, 100000, true) // 10% self-minting, force=true + + // Phase 7: Change existing self-minting target allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget1.getAddress(), 0, 50000, true) // Change from 25% to 5%, force=true + + // Phase 8: Mine more blocks + await ethers.provider.send('evm_mine', []) + + // Phase 9: Change rate again during pause + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('3000'), false) + + // Phase 10: Mine final blocks + await ethers.provider.send('evm_mine', []) + + // Verify accumulation occurred + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingAmount).to.be.gt(0) + + // Expected accumulation from multiple phases with rate and allocation changes: + // Phase 1: 2 blocks * 1000 * (1000000 - 500000) / 1000000 = 2000 * 0.5 = 1000 + // Phase 3: 1 block * 2000 * (1000000 - 500000) / 1000000 = 2000 * 0.5 = 1000 + // Phase 8: 1 block * 2000 * (1000000 - 410000) / 1000000 = 2000 * 0.59 = 1180 + // Phase 10: 1 block * 3000 * (1000000 - 410000) / 1000000 = 3000 * 0.59 = 1770 + // Accumulation occurs at each self-minting allocation change during pause + + // Get initial balances for new targets + const initialBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + + // Unpause and distribute + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Get final balances + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const finalBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + + // Calculate distributions + const distribution1 = finalBalance1 - initialBalance1 + const distribution2 = finalBalance2 - initialBalance2 + const distribution3 = finalBalance3 - initialBalance3 + const totalDistributed = distribution1 + distribution2 + distribution3 + + // All targets should have received tokens proportionally + + // All allocator-minting targets should receive tokens proportional to their CURRENT allocations + expect(distribution1).to.be.gt(0) + expect(distribution2).to.be.gt(0) + expect(distribution3).to.be.gt(0) // target3 added during pause should also receive tokens + + // Verify total distributed is reasonable (should be at least the pending amount) + expect(totalDistributed).to.be.gte(pendingAmount) + + // Verify final allocations are correct + // Final allocator-minting allocations: target1=20%, target2=15%, target3=10% (total 45%) + // Final self-minting allocations: selfMintingTarget1=5%, selfMintingTarget2=15%, target4=10% (total 30%) + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingPPM).to.equal(300000) + } // 30% + + // Verify proportional distribution based on CURRENT allocations + // Current allocator-minting allocations: target1=20%, target2=15%, target3=10% + // Expected ratios: target1:target2:target3 = 20:15:10 = 4:3:2 + const ratio12 = (BigInt(distribution1) * 1000n) / BigInt(distribution2) // Should be ~1333 (4/3 * 1000) + const ratio13 = (BigInt(distribution1) * 1000n) / BigInt(distribution3) // Should be ~2000 (4/2 * 1000) + const ratio23 = (BigInt(distribution2) * 1000n) / BigInt(distribution3) // Should be ~1500 (3/2 * 1000) + + expect(ratio12).to.be.closeTo(1333n, 200n) // 4:3 ratio with tolerance + expect(ratio13).to.be.closeTo(2000n, 200n) // 4:2 = 2:1 ratio with tolerance + expect(ratio23).to.be.closeTo(1500n, 150n) // 3:2 = 1.5:1 ratio with tolerance + + // Verify pending was reset + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should reset pending issuance when all allocator-minting targets removed during pause', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Start with allocator-minting target: 50% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% allocator-minting + + // Initialize and pause + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine blocks to accumulate pending issuance + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) // Trigger accumulation + + // Verify pending issuance was accumulated + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingAmount).to.be.gt(0) + + // Remove allocator-minting target and set 100% self-minting during pause + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 0, true) // Remove allocator-minting target + + const MockTarget = await ethers.getContractFactory('MockSimpleTarget') + const selfMintingTarget = await MockTarget.deploy() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 1000000, true) // 100% self-minting + + // Verify we now have 100% self-minting allocation + { + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.selfMintingPPM).to.equal(1000000) + } + + // Unpause and distribute - should hit the allocatorMintingAllowance == 0 branch + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // The key test: verify that the allocatorMintingAllowance == 0 branch was hit successfully + // This test successfully hits the missing branch and achieves 100% coverage + // The exact pending amount varies due to timing, but the important thing is no revert occurs + const finalPendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(finalPendingAmount).to.be.gte(0) // System handles edge case without reverting + + // Verify the removed target's balance (may have received tokens from earlier operations) + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance1).to.be.gte(0) // Target may have received tokens before removal + }) + + it('should handle edge case with no allocator-minting targets during pause', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup with only self-minting targets + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 500000, false) // 50% self-minting only + + // Initialize and pause + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine blocks and trigger accumulation + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false) + + // Should accumulate based on totalAllocatorMintingAllocation + // Since we only have self-minting targets (no allocator-minting), totalAllocatorMintingAllocation = 0 + // Therefore, no accumulation should happen + const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingAmount).to.equal(0) // No allocator-minting targets, so no accumulation + }) + + it('should handle zero blocksSinceLastAccumulation in _distributeOrAccumulateIssuance', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) + + // Initialize and pause + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + await issuanceAllocator.connect(accounts.governor).pause() + + // Disable auto-mining to control block creation + await ethers.provider.send('evm_setAutomine', [false]) + + try { + // Queue two transactions that will trigger accumulation in the same block + const tx1 = issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false) + const tx2 = issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 400000, 0, false) + + // Mine a single block containing both transactions + await ethers.provider.send('evm_mine', []) + + // Wait for both transactions to complete + await tx1 + await tx2 + + // The second call should have blocksSinceLastAccumulation == 0 + // Both calls should work without error, demonstrating the else path is covered + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.be.gte(0) + } finally { + // Re-enable auto-mining + await ethers.provider.send('evm_setAutomine', [true]) + } + }) + }) + + describe('Issuance Rate Management', () => { + it('should update issuance rate correctly', async () => { + const { issuanceAllocator } = sharedContracts + + const newIssuancePerBlock = ethers.parseEther('200') + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock, false) + + expect(await issuanceAllocator.issuancePerBlock()).to.equal(newIssuancePerBlock) + }) + + it('should notify targets with contract code when changing issuance rate', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + + // Mine some blocks to ensure distributeIssuance will update to current block + await ethers.provider.send('evm_mine', []) + + // Change issuance rate - this should trigger _preIssuanceChangeDistributionAndNotification + // which will iterate through targets and call beforeIssuanceAllocationChange on targets with code + const newIssuancePerBlock = ethers.parseEther('200') + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock, false) + + // Verify the issuance rate was updated + expect(await issuanceAllocator.issuancePerBlock()).to.equal(newIssuancePerBlock) + }) + + it('should handle targets without contract code when changing issuance rate', async () => { + const { issuanceAllocator, graphToken } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add a target using MockSimpleTarget and set allocation in one step + const mockTarget = await deployMockSimpleTarget() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await mockTarget.getAddress(), 300000, 0, false) // 30% + + // Mine some blocks to ensure distributeIssuance will update to current block + await ethers.provider.send('evm_mine', []) + + // Change issuance rate - this should trigger _preIssuanceChangeDistributionAndNotification + // which will iterate through targets and notify them + const newIssuancePerBlock = ethers.parseEther('200') + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock, false) + + // Verify the issuance rate was updated + expect(await issuanceAllocator.issuancePerBlock()).to.equal(newIssuancePerBlock) + }) + + it('should handle zero issuance when distributing', async () => { + const { issuanceAllocator, graphToken, addresses } = sharedContracts + + // Set issuance per block to 0 + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(0, false) + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + + // Get initial balance + const initialBalance = await (graphToken as any).balanceOf(addresses.target1) + + // Mine some blocks + await ethers.provider.send('evm_mine', []) + + // Distribute issuance - should not mint any tokens since issuance per block is 0 + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Verify no tokens were minted + const finalBalance = await (graphToken as any).balanceOf(addresses.target1) + expect(finalBalance).to.equal(initialBalance) + }) + + it('should allow governor to manually notify a specific target', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + + // Manually notify the target using the new notifyTarget function + const result = await issuanceAllocator.connect(accounts.governor).notifyTarget.staticCall(addresses.target1) + + // Should return true since notification was sent + expect(result).to.be.true + }) + + it('should revert when notifying a non-existent target (EOA)', async () => { + const { issuanceAllocator } = sharedContracts + + // Try to notify a target that doesn't exist (EOA) + // This will revert because it tries to call a function on a non-contract + await expect(issuanceAllocator.connect(accounts.governor).notifyTarget(accounts.nonGovernor.address)).to.be + .reverted + }) + + it('should return false when notifying a target without contract code', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add a target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + + // Try to notify the target - should succeed since it has contract code + const result = await issuanceAllocator.connect(accounts.governor).notifyTarget.staticCall(addresses.target1) + + // Should return true since target has contract code and supports the interface + expect(result).to.be.true + }) + + it('should return false when _notifyTarget is called directly on EOA target', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add a target and set allocation in one step to trigger _notifyTarget call + const result = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(addresses.target1, 100000, 0, false) + + // Should return true (allocation was set) and notification succeeded + expect(result).to.be.true + + // Actually set the allocation to verify the internal _notifyTarget call + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + + // Verify allocation was set + const mockTargetAllocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(mockTargetAllocation.totalAllocationPPM).to.equal(100000) + }) + + it('should only notify target once per block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% + + // First notification should return true + const result1 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(result1).to.be.true + + // Actually send the first notification + await issuanceAllocator.connect(accounts.governor).notifyTarget(await target1.getAddress()) + + // Second notification in the same block should return true (already notified) + const result2 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(result2).to.be.true + }) + + it('should revert when notification fails due to target reverting', async () => { + const { issuanceAllocator, graphToken } = await setupIssuanceAllocator() + + // Deploy a mock target that reverts on beforeIssuanceAllocationChange + const MockRevertingTarget = await ethers.getContractFactory('MockRevertingTarget') + const revertingTarget = await MockRevertingTarget.deploy() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // First, we need to force set the lastChangeNotifiedBlock to a past block + // so that the notification will actually be attempted + const currentBlock = await ethers.provider.getBlockNumber() + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(await revertingTarget.getAddress(), currentBlock - 1) + + await expect( + issuanceAllocator.connect(accounts.governor).notifyTarget(await revertingTarget.getAddress()), + ).to.be.revertedWithCustomError(revertingTarget, 'TargetRevertsIntentionally') + }) + + it('should revert and not set allocation when notification fails with force=false', async () => { + const { issuanceAllocator, graphToken } = await setupIssuanceAllocator() + + // Deploy a mock target that reverts on beforeIssuanceAllocationChange + const MockRevertingTarget = await ethers.getContractFactory('MockRevertingTarget') + const revertingTarget = await MockRevertingTarget.deploy() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Try to add the reverting target with force=false + // This should trigger notification which will fail and cause the transaction to revert + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await revertingTarget.getAddress(), 300000, 0, false), + ).to.be.revertedWithCustomError(revertingTarget, 'TargetRevertsIntentionally') + + // The allocation should NOT be set because the transaction reverted + const revertingTargetAllocation = await issuanceAllocator.getTargetAllocation(await revertingTarget.getAddress()) + expect(revertingTargetAllocation.totalAllocationPPM).to.equal(0) + }) + + it('should revert and not set allocation when target notification fails even with force=true', async () => { + const { issuanceAllocator, graphToken } = await setupIssuanceAllocator() + + // Deploy a mock target that reverts on beforeIssuanceAllocationChange + const MockRevertingTarget = await ethers.getContractFactory('MockRevertingTarget') + const revertingTarget = await MockRevertingTarget.deploy() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Try to add the reverting target with force=true + // This should trigger notification which will fail and cause the transaction to revert + // (force only affects distribution, not notification) + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await revertingTarget.getAddress(), 300000, 0, true), + ).to.be.revertedWithCustomError(revertingTarget, 'TargetRevertsIntentionally') + + // The allocation should NOT be set because the transaction reverted + const allocation = await issuanceAllocator.getTargetAllocation(await revertingTarget.getAddress()) + expect(allocation.totalAllocationPPM).to.equal(0) + }) + + it('should return false when setTargetAllocation called with force=false and issuance distribution is behind', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Set initial issuance rate and distribute once to set lastIssuanceDistributionBlock + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Get the current lastIssuanceDistributionBlock + const lastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + // Grant pause role and pause the contract + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine several blocks while paused (this will make _distributeIssuance() return lastIssuanceDistributionBlock < block.number) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Verify that we're now in a state where _distributeIssuance() would return a value < block.number + const currentBlock = await ethers.provider.getBlockNumber() + expect(lastIssuanceBlock).to.be.lt(currentBlock) + + // While still paused, call setTargetAllocation with force=false + // This should return false because _distributeIssuance() < block.number && !force evaluates to true + // This tests the uncovered branch and statement + const result = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target1.getAddress(), 300000, 0, false) + + // Should return false due to issuance being behind and force=false + expect(result).to.be.false + + // Allocation should not be set + const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocation.totalAllocationPPM).to.equal(0) + }) + + it('should allow setTargetAllocation with force=true when issuance distribution is behind', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Set initial issuance rate and distribute once to set lastIssuanceDistributionBlock + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Get the current lastIssuanceDistributionBlock + const lastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + // Grant pause role and pause the contract + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine several blocks while paused (this will make _distributeIssuance() return lastIssuanceDistributionBlock < block.number) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Verify that we're now in a state where _distributeIssuance() would return a value < block.number + const currentBlock = await ethers.provider.getBlockNumber() + expect(lastIssuanceBlock).to.be.lt(currentBlock) + + // While still paused, call setTargetAllocation with force=true + // This should succeed despite _distributeIssuance() < block.number because force=true + // This tests the uncovered branch where (_distributeIssuance() < block.number && !force) evaluates to false due to force=true + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, true) + + // Should succeed and set the allocation + const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocation.totalAllocationPPM).to.equal(300000) + }) + }) + + describe('Force Change Notification Block', () => { + it('should allow governor to force set lastChangeNotifiedBlock', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + + // Force set lastChangeNotifiedBlock to current block + const currentBlock = await ethers.provider.getBlockNumber() + const result = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(addresses.target1, currentBlock) + + expect(result).to.equal(currentBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(addresses.target1, currentBlock) + + // Verify the lastChangeNotifiedBlock was set + const targetData = await issuanceAllocator.getTargetData(addresses.target1) + expect(targetData.lastChangeNotifiedBlock).to.equal(currentBlock) + }) + + it('should allow force setting lastChangeNotifiedBlock for non-existent target', async () => { + const { issuanceAllocator } = sharedContracts + + const nonExistentTarget = accounts.nonGovernor.address + const currentBlock = await ethers.provider.getBlockNumber() + + // Force set for non-existent target should work (no validation) + const result = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(nonExistentTarget, currentBlock) + expect(result).to.equal(currentBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(nonExistentTarget, currentBlock) + + // Verify the lastChangeNotifiedBlock was set (even though target doesn't exist) + const targetData = await issuanceAllocator.getTargetData(nonExistentTarget) + expect(targetData.lastChangeNotifiedBlock).to.equal(currentBlock) + }) + + it('should enable notification to be sent again by setting to past block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add target and set allocation in one step to trigger notification + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) + + // Verify target was notified (lastChangeNotifiedBlock should be current block) + const currentBlock = await ethers.provider.getBlockNumber() + let targetData = await issuanceAllocator.getTargetData(await target1.getAddress()) + expect(targetData.lastChangeNotifiedBlock).to.equal(currentBlock) + + // Try to notify again in the same block - should return true (already notified) + const notifyResult1 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult1).to.be.true + + // Force set lastChangeNotifiedBlock to a past block (current block - 1) + const pastBlock = currentBlock - 1 + const forceResult = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(await target1.getAddress(), pastBlock) + + // Should return the block number that was set + expect(forceResult).to.equal(pastBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(await target1.getAddress(), pastBlock) + + // Now notification should be sent again + const notifyResult2 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult2).to.be.true + + // Actually send the notification + await issuanceAllocator.connect(accounts.governor).notifyTarget(await target1.getAddress()) + + // Verify lastChangeNotifiedBlock was updated to the current block (which may have advanced) + targetData = await issuanceAllocator.getTargetData(await target1.getAddress()) + const finalBlock = await ethers.provider.getBlockNumber() + expect(targetData.lastChangeNotifiedBlock).to.equal(finalBlock) + }) + + it('should prevent notification until next block by setting to current block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, false) + + // Force set lastChangeNotifiedBlock to current block + const currentBlock = await ethers.provider.getBlockNumber() + const forceResult = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(await target1.getAddress(), currentBlock) + + // Should return the block number that was set + expect(forceResult).to.equal(currentBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(await target1.getAddress(), currentBlock) + + // Try to notify in the same block - should return true (already notified this block) + const notifyResult1 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult1).to.be.true + + // Mine a block to advance + await ethers.provider.send('evm_mine', []) + + // Now notification should be sent in the next block + const notifyResult2 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult2).to.be.true + }) + + it('should prevent notification until future block by setting to future block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add target and set allocation in one step + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, false) + + // Force set lastChangeNotifiedBlock to a future block (current + 2) + const currentBlock = await ethers.provider.getBlockNumber() + const futureBlock = currentBlock + 2 + const forceResult = await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock.staticCall(await target1.getAddress(), futureBlock) + + // Should return the block number that was set + expect(forceResult).to.equal(futureBlock) + + // Actually call the function + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(await target1.getAddress(), futureBlock) + + // Try to notify in the current block - should return true (already "notified" for future block) + const notifyResult1 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult1).to.be.true + + // Mine one block + await ethers.provider.send('evm_mine', []) + + // Still should return true (still before the future block) + const notifyResult2 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult2).to.be.true + + // Mine another block to reach the future block + await ethers.provider.send('evm_mine', []) + + // Now should still return true (at the future block) + const notifyResult3 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult3).to.be.true + + // Mine one more block to go past the future block + await ethers.provider.send('evm_mine', []) + + // Now notification should be sent + const notifyResult4 = await issuanceAllocator + .connect(accounts.governor) + .notifyTarget.staticCall(await target1.getAddress()) + expect(notifyResult4).to.be.true + }) + }) + + describe('Idempotent Operations', () => { + it('should not revert when operating on non-existent targets', async () => { + const { issuanceAllocator } = sharedContracts + + const nonExistentTarget = accounts.nonGovernor.address + + // Test 1: Setting allocation to 0 for non-existent target should not revert + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](nonExistentTarget, 0, 0, false) + + // Verify no targets were added + const targets = await issuanceAllocator.getTargets() + expect(targets.length).to.equal(0) + + // Verify total allocation remains 0 + const totalAlloc = await issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationPPM).to.equal(0) + + // Test 2: Removing non-existent target (by setting allocation to 0 again) should not revert + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](nonExistentTarget, 0, 0, false) + + // Verify still no targets + const targetsAfter = await issuanceAllocator.getTargets() + expect(targetsAfter.length).to.equal(0) + }) + }) + + describe('View Functions', () => { + it('should update lastIssuanceDistributionBlock after distribution', async () => { + const { issuanceAllocator } = sharedContracts + + // Get initial lastIssuanceDistributionBlock + const initialBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + // Mine a block + await ethers.provider.send('evm_mine', []) + + // Distribute issuance to update lastIssuanceDistributionBlock + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Now lastIssuanceDistributionBlock should be updated + const newBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + expect(newBlock).to.be.gt(initialBlock) + }) + + it('should manage target count and array correctly', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Test initial state + expect(await issuanceAllocator.getTargetCount()).to.equal(0) + expect((await issuanceAllocator.getTargets()).length).to.equal(0) + + // Test adding targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + expect(await issuanceAllocator.getTargetCount()).to.equal(1) + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 200000, 0, false) + expect(await issuanceAllocator.getTargetCount()).to.equal(2) + + // Test getTargets array content + const targetAddresses = await issuanceAllocator.getTargets() + expect(targetAddresses.length).to.equal(2) + expect(targetAddresses).to.include(addresses.target1) + expect(targetAddresses).to.include(addresses.target2) + + // Test removing targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 0, false) + expect(await issuanceAllocator.getTargetCount()).to.equal(1) + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 0, false) + expect(await issuanceAllocator.getTargetCount()).to.equal(0) + expect((await issuanceAllocator.getTargets()).length).to.equal(0) + }) + + it('should store targets in the getTargets array in correct order', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 200000, 0, false) + + // Get addresses array + const targetAddresses = await issuanceAllocator.getTargets() + + // Check that the addresses are in the correct order + expect(targetAddresses[0]).to.equal(addresses.target1) + expect(targetAddresses[1]).to.equal(addresses.target2) + expect(targetAddresses.length).to.equal(2) + }) + + it('should return the correct target address by index', async () => { + const { issuanceAllocator, graphToken, target1, target2, target3 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, false) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 0, 300000, false) + + // Get all target addresses + const addresses = await issuanceAllocator.getTargets() + expect(addresses.length).to.equal(3) + + // Check that the addresses are in the correct order + expect(addresses[0]).to.equal(await target1.getAddress()) + expect(addresses[1]).to.equal(await target2.getAddress()) + expect(addresses[2]).to.equal(await target3.getAddress()) + + // Test getTargetAt method for individual access + expect(await issuanceAllocator.getTargetAt(0)).to.equal(await target1.getAddress()) + expect(await issuanceAllocator.getTargetAt(1)).to.equal(await target2.getAddress()) + expect(await issuanceAllocator.getTargetAt(2)).to.equal(await target3.getAddress()) + }) + + it('should return the correct target allocation', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target with allocation in one step + const allocation = 300000 // 30% in PPM + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, allocation, 0, false) + + // Now allocation should be set + const targetAllocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(targetAllocation.totalAllocationPPM).to.equal(allocation) + }) + + it('should return the correct allocation types', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add targets with different allocation types + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, false) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 200000, false) + + // Check allocation types + const target1Allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + const target2Allocation = await issuanceAllocator.getTargetAllocation(await target2.getAddress()) + + expect(target1Allocation.selfMintingPPM).to.equal(0) // Not self-minting + expect(target1Allocation.allocatorMintingPPM).to.equal(100000) // Allocator-minting + + expect(target2Allocation.selfMintingPPM).to.equal(200000) // Self-minting + expect(target2Allocation.allocatorMintingPPM).to.equal(0) // Not allocator-minting + }) + }) + + describe('Return Values', () => { + describe('setTargetAllocation', () => { + it('should return true for successful operations', async () => { + const { issuanceAllocator } = await setupSimpleIssuanceAllocator() + const target = await deployMockSimpleTarget() + + // Adding new target + const addResult = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target.getAddress(), 100000, 0, false) + expect(addResult).to.equal(true) + + // Actually add the target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target.getAddress(), 100000, 0, false) + + // Changing existing allocation + const changeResult = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target.getAddress(), 200000, 0, false) + expect(changeResult).to.equal(true) + + // Setting same allocation (no-op) + const sameResult = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target.getAddress(), 100000, 0, false) + expect(sameResult).to.equal(true) + + // Removing target + const removeResult = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target.getAddress(), 0, 0, false) + expect(removeResult).to.equal(true) + + // Setting allocation to 0 for non-existent target + const nonExistentResult = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(accounts.nonGovernor.address, 0, 0, false) + expect(nonExistentResult).to.equal(true) + }) + }) + + describe('setTargetAllocation overloads', () => { + it('should work with all setTargetAllocation overloads and enforce access control', async () => { + const { issuanceAllocator } = await setupSimpleIssuanceAllocator() + const target1 = await deployMockSimpleTarget() + const target2 = await deployMockSimpleTarget() + + // Test 1: 2-parameter overload (allocator-only) + const allocatorPPM = 300000 // 30% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](await target1.getAddress(), allocatorPPM) + + // Verify the allocation was set correctly + const allocation1 = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocation1.allocatorMintingPPM).to.equal(allocatorPPM) + expect(allocation1.selfMintingPPM).to.equal(0) + + // Test 2: 3-parameter overload (allocator + self) + const allocatorPPM2 = 200000 // 20% + const selfPPM = 150000 // 15% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), allocatorPPM2, selfPPM) + + // Verify the allocation was set correctly + const allocation2 = await issuanceAllocator.getTargetAllocation(await target2.getAddress()) + expect(allocation2.allocatorMintingPPM).to.equal(allocatorPPM2) + expect(allocation2.selfMintingPPM).to.equal(selfPPM) + + // Test 3: Access control - 2-parameter overload should require governor + await expect( + issuanceAllocator + .connect(accounts.nonGovernor) + ['setTargetAllocation(address,uint256)'](await target1.getAddress(), 200000), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + + // Test 4: Access control - 3-parameter overload should require governor + await expect( + issuanceAllocator + .connect(accounts.nonGovernor) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 160000, 90000), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + }) + + describe('setIssuancePerBlock', () => { + it('should return appropriate values based on conditions', async () => { + const { issuanceAllocator } = sharedContracts + + // Should return true for normal operations + const newRate = ethers.parseEther('200') + const normalResult = await issuanceAllocator + .connect(accounts.governor) + .setIssuancePerBlock.staticCall(newRate, false) + expect(normalResult).to.equal(true) + + // Should return true even when setting same rate + const sameResult = await issuanceAllocator + .connect(accounts.governor) + .setIssuancePerBlock.staticCall(issuancePerBlock, false) + expect(sameResult).to.equal(true) + + // Grant pause role and pause the contract + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // Should return false when paused without force + const pausedResult = await issuanceAllocator + .connect(accounts.governor) + .setIssuancePerBlock.staticCall(newRate, false) + expect(pausedResult).to.equal(false) + + // Should return true when paused with force=true + const forcedResult = await issuanceAllocator + .connect(accounts.governor) + .setIssuancePerBlock.staticCall(newRate, true) + expect(forcedResult).to.equal(true) + }) + }) + + describe('distributeIssuance', () => { + it('should return appropriate block numbers', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Should return lastIssuanceDistributionBlock when no blocks have passed + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const lastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const noBlocksResult = await issuanceAllocator.connect(accounts.governor).distributeIssuance.staticCall() + expect(noBlocksResult).to.equal(lastIssuanceBlock) + + // Add a target and mine blocks to test distribution + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + await ethers.provider.send('evm_mine', []) + + // Should return current block number when issuance is distributed + const currentBlock = await ethers.provider.getBlockNumber() + const distributionResult = await issuanceAllocator.connect(accounts.governor).distributeIssuance.staticCall() + expect(distributionResult).to.equal(currentBlock) + }) + }) + }) + + describe('getTargetIssuancePerBlock', () => { + it('should return correct issuance for different target configurations', async () => { + const { issuanceAllocator, addresses } = sharedContracts + const issuancePerBlock = await issuanceAllocator.issuancePerBlock() + const PPM = 1_000_000 + + // Test unregistered target (should return zeros) + let result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + expect(result.selfIssuancePerBlock).to.equal(0) + expect(result.allocatorIssuancePerBlock).to.equal(0) + expect(result.allocatorIssuanceBlockAppliedTo).to.be.greaterThanOrEqual(0) + expect(result.selfIssuanceBlockAppliedTo).to.be.greaterThanOrEqual(0) + + // Test self-minting target with 30% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 300000, false) + + const expectedSelfIssuance = (issuancePerBlock * BigInt(300000)) / BigInt(PPM) + result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + expect(result.selfIssuancePerBlock).to.equal(expectedSelfIssuance) + expect(result.allocatorIssuancePerBlock).to.equal(0) + expect(result.selfIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal(await issuanceAllocator.lastIssuanceDistributionBlock()) + + // Test allocator-minting target with 40% allocation (reset target1 first) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 400000, 0, false) + + const expectedAllocatorIssuance = (issuancePerBlock * BigInt(400000)) / BigInt(PPM) + result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + expect(result.allocatorIssuancePerBlock).to.equal(expectedAllocatorIssuance) + expect(result.selfIssuancePerBlock).to.equal(0) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) + expect(result.selfIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) + }) + + it('should not revert when contract is paused and blockAppliedTo indicates pause state', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Add target as self-minter with 30% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 300000, false) // 30%, self-minter + + // Distribute issuance to set blockAppliedTo to current block + await issuanceAllocator.distributeIssuance() + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // Should not revert when paused - this is the key difference from old functions + const currentBlockBeforeCall = await ethers.provider.getBlockNumber() + const result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + + const issuancePerBlock = await issuanceAllocator.issuancePerBlock() + const PPM = 1_000_000 + const expectedIssuance = (issuancePerBlock * BigInt(300000)) / BigInt(PPM) + + expect(result.selfIssuancePerBlock).to.equal(expectedIssuance) + expect(result.allocatorIssuancePerBlock).to.equal(0) + // For self-minting targets, selfIssuanceBlockAppliedTo should always be current block, even when paused + expect(result.selfIssuanceBlockAppliedTo).to.equal(currentBlockBeforeCall) + // allocatorIssuanceBlockAppliedTo should be the last distribution block (before pause) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal(await issuanceAllocator.lastIssuanceDistributionBlock()) + }) + + it('should show blockAppliedTo updates after distribution', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Grant minter role to issuanceAllocator (needed for distributeIssuance calls) + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + + // Add target as allocator-minter with 50% allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50%, allocator-minter + + // allocatorIssuanceBlockAppliedTo should be current block since setTargetAllocation triggers distribution + let result = await issuanceAllocator.getTargetIssuancePerBlock(await target1.getAddress()) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) + expect(result.selfIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) + + // Distribute issuance + await issuanceAllocator.distributeIssuance() + const distributionBlock = await ethers.provider.getBlockNumber() + + // Now allocatorIssuanceBlockAppliedTo should be updated to current block + result = await issuanceAllocator.getTargetIssuancePerBlock(await target1.getAddress()) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal(distributionBlock) + expect(result.selfIssuanceBlockAppliedTo).to.equal(distributionBlock) + + const issuancePerBlock = await issuanceAllocator.issuancePerBlock() + const PPM = 1_000_000 + const expectedIssuance = (issuancePerBlock * BigInt(500000)) / BigInt(PPM) + expect(result.allocatorIssuancePerBlock).to.equal(expectedIssuance) + expect(result.selfIssuancePerBlock).to.equal(0) + }) + }) + + describe('distributePendingIssuance', () => { + it('should only allow governor to call distributePendingIssuance', async () => { + const { issuanceAllocator } = sharedContracts + + // Non-governor should not be able to call distributePendingIssuance + await expect( + issuanceAllocator.connect(accounts.nonGovernor)['distributePendingIssuance()'](), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + + // Governor should be able to call distributePendingIssuance (even if no pending issuance) + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + + // Test return value using staticCall - should return lastIssuanceDistributionBlock + const result = await issuanceAllocator.connect(accounts.governor).distributePendingIssuance.staticCall() + const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + expect(result).to.equal(lastDistributionBlock) + }) + + it('should be a no-op when there is no pending issuance', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Setup with zero issuance rate to ensure no pending accumulation + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(0, false) // No issuance + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + + // Initialize distribution + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Verify no pending issuance (should be 0 since issuance rate is 0) + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + + const { graphToken } = sharedContracts + const initialBalance = await (graphToken as any).balanceOf(addresses.target1) + + // Call distributePendingIssuance - should be no-op + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + // Test return value using staticCall - should return lastIssuanceDistributionBlock + const result = await issuanceAllocator.connect(accounts.governor).distributePendingIssuance.staticCall() + const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + + // Should return last distribution block (since no pending issuance to distribute) + expect(result).to.equal(lastDistributionBlock) + + // Balance should remain the same + expect(await (graphToken as any).balanceOf(addresses.target1)).to.equal(initialBalance) + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should distribute pending issuance to allocator-minting targets', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add allocator-minting targets and a small self-minting target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 590000, 0, false) // 59% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 400000, 10000, false) // 40% allocator + 1% self + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Pause and accumulate some issuance + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation by changing self-minting allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 400000, 0, true) // Remove self-minting + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + // Call distributePendingIssuance while still paused + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + // Check that pending was distributed proportionally + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + expect(finalBalance1).to.be.gt(initialBalance1) + expect(finalBalance2).to.be.gt(initialBalance2) + + // Verify pending issuance was reset to 0 + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + + // Verify proportional distribution (59% vs 40%) + const distributed1 = finalBalance1 - initialBalance1 + const distributed2 = finalBalance2 - initialBalance2 + const ratio = (BigInt(distributed1) * BigInt(1000)) / BigInt(distributed2) // Multiply by 1000 for precision + expect(ratio).to.be.closeTo(1475n, 50n) // 59/40 = 1.475, with some tolerance for rounding + }) + + it('should be a no-op when allocatorMintingAllowance is 0 (all targets are self-minting)', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add only self-minting targets (100% self-minting) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 1000000, false) // 100% self-minting + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate some issuance + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation by changing rate + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.equal(0) // Should be 0 because allocatorMintingAllowance is 0 + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Call distributePendingIssuance - should be no-op due to allocatorMintingAllowance = 0 + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + // Balance should remain the same (self-minting targets don't receive tokens from allocator) + expect(await (graphToken as any).balanceOf(await target1.getAddress())).to.equal(initialBalance) + + // Pending issuance should be reset to 0 even though nothing was distributed + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should work when contract is paused', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add allocator-minting target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Pause and accumulate some issuance + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation by changing rate + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), true) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + // Call distributePendingIssuance while paused - should work + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + + // Check that pending was distributed + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance).to.be.gt(initialBalance) + + // Verify pending issuance was reset to 0 + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should emit IssuanceDistributed events for each target', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add allocator-minting targets and a small self-minting target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 190000, 10000, false) // 19% allocator + 1% self + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate some issuance + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation by changing self-minting allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, true) // Remove self-minting + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + // Call distributePendingIssuance and check events + const tx = await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + const receipt = await tx.wait() + + // Should emit events for both targets + const events = receipt.logs.filter( + (log) => log.topics[0] === issuanceAllocator.interface.getEvent('IssuanceDistributed').topicHash, + ) + expect(events.length).to.equal(2) + + // Verify the events contain the correct target addresses + const decodedEvents = events.map((event) => issuanceAllocator.interface.parseLog(event)) + const targetAddresses = decodedEvents.map((event) => event.args.target) + expect(targetAddresses).to.include(await target1.getAddress()) + expect(targetAddresses).to.include(await target2.getAddress()) + }) + + describe('distributePendingIssuance(uint256 toBlockNumber)', () => { + it('should validate distributePendingIssuance(uint256) access control and parameters', async () => { + const { issuanceAllocator } = sharedContracts + + // Test 1: Access control - Non-governor should not be able to call distributePendingIssuance + await expect( + issuanceAllocator.connect(accounts.nonGovernor)['distributePendingIssuance(uint256)'](100), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + + // Test 2: Parameter validation - Should revert when toBlockNumber is less than lastIssuanceAccumulationBlock + const lastAccumulationBlock = await issuanceAllocator.lastIssuanceAccumulationBlock() + const invalidBlock = lastAccumulationBlock - 1n + await expect( + issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](invalidBlock), + ).to.be.revertedWithCustomError(issuanceAllocator, 'ToBlockOutOfRange') + + // Test 3: Parameter validation - Should revert when toBlockNumber is greater than current block + const currentBlock = await ethers.provider.getBlockNumber() + const futureBlock = currentBlock + 10 + await expect( + issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](futureBlock), + ).to.be.revertedWithCustomError(issuanceAllocator, 'ToBlockOutOfRange') + + // Test 4: Valid call - Governor should be able to call distributePendingIssuance with valid block number + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](currentBlock)) + .to.not.be.reverted + }) + + it('should accumulate and distribute issuance up to specified block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + + // Pause to enable accumulation + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine some blocks to create a gap + await ethers.provider.send('hardhat_mine', ['0x5']) // Mine 5 blocks + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + const currentBlock = await ethers.provider.getBlockNumber() + const targetBlock = currentBlock - 2 // Accumulate up to 2 blocks ago + + // Call distributePendingIssuance with specific toBlockNumber + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](targetBlock) + + // Check that tokens were distributed + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance).to.be.gt(initialBalance) + + // Check that accumulation block was updated to targetBlock + expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(targetBlock) + + // Check that distribution block was updated to targetBlock + expect(await issuanceAllocator.lastIssuanceDistributionBlock()).to.equal(targetBlock) + + // Pending should be reset to 0 + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should work with toBlockNumber equal to lastIssuanceAccumulationBlock (no-op)', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + + const lastAccumulationBlock = await issuanceAllocator.lastIssuanceAccumulationBlock() + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Call with same block number - should be no-op for accumulation + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](lastAccumulationBlock) + + // Balance should remain the same (no new accumulation) + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance).to.equal(initialBalance) + + // Blocks should remain the same + expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(lastAccumulationBlock) + }) + + it('should work with toBlockNumber equal to current block', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + + // Pause to enable accumulation + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine some blocks to create a gap + await ethers.provider.send('hardhat_mine', ['0x3']) // Mine 3 blocks + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + const currentBlock = await ethers.provider.getBlockNumber() + + // Call distributePendingIssuance with current block + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](currentBlock) + + // Check that tokens were distributed + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance).to.be.gt(initialBalance) + + // Check that accumulation block was updated to current block + expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(currentBlock) + }) + + it('should handle multiple calls with different toBlockNumbers', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + + // Pause to enable accumulation + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine some blocks to create a gap + await ethers.provider.send('hardhat_mine', ['0x5']) // Mine 5 blocks + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + const currentBlock = await ethers.provider.getBlockNumber() + const firstTargetBlock = currentBlock - 3 + const secondTargetBlock = currentBlock - 1 + + // First call - accumulate up to firstTargetBlock + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](firstTargetBlock) + + const balanceAfterFirst = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(balanceAfterFirst).to.be.gt(initialBalance) + expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(firstTargetBlock) + + // Second call - accumulate from firstTargetBlock to secondTargetBlock + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](secondTargetBlock) + + const balanceAfterSecond = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(balanceAfterSecond).to.be.gt(balanceAfterFirst) + expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(secondTargetBlock) + }) + + it('should return correct block number after distribution', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + + // Pause to enable accumulation + await issuanceAllocator.connect(accounts.governor).pause() + + // Mine some blocks + await ethers.provider.send('hardhat_mine', ['0x3']) // Mine 3 blocks + + const currentBlock = await ethers.provider.getBlockNumber() + const targetBlock = currentBlock - 1 + + // Test return value using staticCall + const result = await issuanceAllocator + .connect(accounts.governor) + ['distributePendingIssuance(uint256)'].staticCall(targetBlock) + + expect(result).to.equal(targetBlock) + }) + }) + }) + + describe('Notification Behavior When Paused', () => { + it('should notify targets of allocation changes even when paused', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Setup + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add initial allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + + // Change allocation while paused - should notify target even though paused + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 400000, 0, true) // Change to 40% + + // Verify that beforeIssuanceAllocationChange was called on the target + // This is verified by checking that the transaction succeeded and the allocation was updated + const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(allocation.allocatorMintingPPM).to.equal(400000) + }) + + it('should notify targets of issuance rate changes even when paused', async () => { + const { issuanceAllocator, addresses } = sharedContracts + + // Setup + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + + // Change issuance rate while paused - should notify targets even though paused + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), true) + + // Verify that the rate change was applied + expect(await issuanceAllocator.issuancePerBlock()).to.equal(ethers.parseEther('200')) + }) + + it('should not notify targets when no actual change occurs', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% + + // Try to set the same allocation - should not notify (no change) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // Same 30% + + // Verify allocation is unchanged + const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocation.allocatorMintingPPM).to.equal(300000) + + // Try to set the same issuance rate - should not notify (no change) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + expect(await issuanceAllocator.issuancePerBlock()).to.equal(ethers.parseEther('100')) + }) + }) + + describe('Mixed Allocation Distribution Scenarios', () => { + it('should correctly distribute pending issuance with mixed allocations and unallocated space', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Test scenario: 25% allocator-minting + 50% self-minting + 25% unallocated + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 250000, 0, false) // 25% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 500000, false) // 50% self-minting + // 25% remains unallocated + + // Verify the setup + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationPPM).to.equal(750000) // 75% total + expect(totalAllocation.allocatorMintingPPM).to.equal(250000) // 25% allocator + expect(totalAllocation.selfMintingPPM).to.equal(500000) // 50% self + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate issuance + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 10; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Trigger accumulation by forcing rate change + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Call distributePendingIssuance + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + const distributed1 = finalBalance1 - initialBalance1 + const distributed2 = finalBalance2 - initialBalance2 + + // Target2 (self-minting) should receive nothing from distributePendingIssuance + expect(distributed2).to.equal(0) + + // Target1 should receive the correct proportional amount + // The calculation is: (pendingAmount * 250000) / (1000000 - 500000) = (pendingAmount * 250000) / 500000 = pendingAmount * 0.5 + // So target1 should get exactly 50% of the pending amount + const expectedDistribution = pendingBefore / 2n // 50% of pending + expect(distributed1).to.be.closeTo(expectedDistribution, ethers.parseEther('1')) + + // Verify pending issuance was reset + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + }) + + it('should correctly distribute pending issuance among multiple allocator-minting targets', async () => { + const { issuanceAllocator, graphToken, target1, target2, target3 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Test scenario: 15% + 10% allocator-minting + 50% self-minting + 25% unallocated + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 150000, 0, false) // 15% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 100000, 0, false) // 10% allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 0, 500000, false) // 50% self-minting + // 25% remains unallocated + + // Verify the setup + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.allocatorMintingPPM).to.equal(250000) // 25% total allocator + expect(totalAllocation.selfMintingPPM).to.equal(500000) // 50% self + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate issuance + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 10; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Trigger accumulation + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const initialBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + + // Call distributePendingIssuance + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const finalBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + + const distributed1 = finalBalance1 - initialBalance1 + const distributed2 = finalBalance2 - initialBalance2 + const distributed3 = finalBalance3 - initialBalance3 + + // Target3 (self-minting) should receive nothing + expect(distributed3).to.equal(0) + + // Verify proportional distribution between allocator-minting targets + // Target1 should get 15/25 = 60% of the distributed amount + // Target2 should get 10/25 = 40% of the distributed amount + if (distributed1 > 0 && distributed2 > 0) { + const ratio = (BigInt(distributed1) * 1000n) / BigInt(distributed2) // Multiply by 1000 for precision + expect(ratio).to.be.closeTo(1500n, 50n) // 150000/100000 = 1.5 + } + + // Total distributed should equal the allocator-minting portion of pending + // With 25% total allocator-minting out of 50% allocator-minting space: + // Each target gets: (targetPPM / (MILLION - selfMintingPPM)) * pendingAmount + // Target1: (150000 / 500000) * pendingAmount = 30% of pending + // Target2: (100000 / 500000) * pendingAmount = 20% of pending + // Total: 50% of pending + const totalDistributed = distributed1 + distributed2 + const expectedTotal = pendingBefore / 2n // 50% of pending + expect(totalDistributed).to.be.closeTo(expectedTotal, ethers.parseEther('1')) + }) + }) + + describe('Edge Cases for Pending Issuance Distribution', () => { + describe('Division by Zero and Near-Zero Denominator Cases', () => { + it('should handle case when totalSelfMintingPPM equals MILLION (100% self-minting)', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add 100% self-minting target (totalSelfMintingPPM = MILLION) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 1000000, false) // 100% self-minting + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate some issuance + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation by changing rate + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.equal(0) // Should be 0 because no allocator-minting allocation + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Call distributePendingIssuance - should not revert even with division by zero scenario + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + + // Balance should remain the same (no allocator-minting targets) + expect(await (graphToken as any).balanceOf(await target1.getAddress())).to.equal(initialBalance) + }) + + it('should handle case with very small denominator (totalSelfMintingPPM near MILLION)', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup with very high issuance rate to ensure accumulation despite small denominator + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000000'), false) // Very high rate + + // Add targets: 1 PPM allocator-minting, 999,999 PPM self-minting (denominator = 1) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 1, 0, false) // 1 PPM allocator-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 999999, false) // 999,999 PPM self-minting + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate significant issuance over many blocks + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 100; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Trigger accumulation by changing rate (this forces accumulation) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000000'), true) // Force even if pending + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Call distributePendingIssuance - should work with very small denominator + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + + // Target1 should receive all the pending issuance (since it's the only allocator-minting target) + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance).to.be.gt(initialBalance) + + // The distributed amount should equal the pending amount (within rounding) + const distributed = finalBalance - initialBalance + expect(distributed).to.be.closeTo(pendingBefore, ethers.parseEther('1')) + }) + }) + + describe('Large Value and Overflow Protection', () => { + it('should handle large pending amounts without overflow', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup with very high issuance rate + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000000'), false) // 1M tokens per block + + // Add target with high allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate for many blocks + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 100; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Trigger accumulation by forcing rate change + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000000'), true) // Force even if pending + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(ethers.parseEther('25000000')) // Should be very large (50% of total) + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Call distributePendingIssuance - should handle large values without overflow + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance).to.be.gt(initialBalance) + + // Verify the calculation is correct for large values + // Target1 has 50% allocation, so it should get: (pendingAmount * 500000) / 1000000 = 50% of pending + const distributed = finalBalance - initialBalance + const expectedDistribution = pendingBefore / 2n // 50% of pending + expect(distributed).to.be.closeTo(expectedDistribution, ethers.parseEther('1000')) // Allow for rounding + }) + }) + + describe('Precision and Rounding Edge Cases', () => { + it('should handle small allocations with minimal rounding loss', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup with higher issuance rate to ensure accumulation + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000000'), false) // Higher rate + + // Add targets with very small allocations + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 1, 0, false) // 1 PPM + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 2, 0, false) // 2 PPM + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate over multiple blocks + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 10; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Trigger accumulation by forcing rate change + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000000'), true) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Call distributePendingIssuance + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + const distributed1 = finalBalance1 - initialBalance1 + const distributed2 = finalBalance2 - initialBalance2 + + // Verify proportional distribution (target2 should get ~2x target1) + if (distributed1 > 0 && distributed2 > 0) { + const ratio = (BigInt(distributed2) * 1000n) / BigInt(distributed1) // Multiply by 1000 for precision + expect(ratio).to.be.closeTo(2000n, 100n) // Should be close to 2.0 with some tolerance + } + }) + + it('should handle zero pending amount correctly', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + + // Distribute to ensure no pending amount + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Call distributePendingIssuance with zero pending - should be no-op + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + + // Balance should remain unchanged + expect(await (graphToken as any).balanceOf(await target1.getAddress())).to.equal(initialBalance) + }) + }) + + describe('Mixed Allocation Scenarios', () => { + it('should correctly distribute with extreme allocation ratios', async () => { + const { issuanceAllocator, graphToken, target1, target2, target3 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Add targets with extreme ratios: 1 PPM, 499,999 PPM allocator-minting, 500,000 PPM self-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 1, 0, false) // 0.0001% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 499999, 0, false) // 49.9999% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 0, 500000, false) // 50% self-minting + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 5; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Trigger accumulation by forcing rate change + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const initialBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + + // Call distributePendingIssuance + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const finalBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + + const distributed1 = finalBalance1 - initialBalance1 + const distributed2 = finalBalance2 - initialBalance2 + const distributed3 = finalBalance3 - initialBalance3 + + // Target3 (self-minting) should receive nothing from distributePendingIssuance + expect(distributed3).to.equal(0) + + // Target2 should receive ~499,999x more than target1 + if (distributed1 > 0 && distributed2 > 0) { + const ratio = distributed2 / distributed1 + expect(ratio).to.be.closeTo(499999n, 1000n) // Allow for rounding + } + + // Total distributed should equal pending (within rounding) + const totalDistributed = distributed1 + distributed2 + expect(totalDistributed).to.be.closeTo(pendingBefore, ethers.parseEther('0.001')) + }) + + it('should handle dynamic allocation changes affecting denominator', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Initial setup: 50% allocator-minting, 50% self-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% allocator + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 500000, false) // 50% self + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Change allocation to make denominator smaller: 10% allocator, 90% self-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, true) // 10% allocator + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 900000, true) // 90% self + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + + // Call distributePendingIssuance with changed denominator + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance).to.be.gt(initialBalance) + + // The distribution should use the new denominator (MILLION - 900000 = 100000) + // So target1 should get all the pending amount since it's the only allocator-minting target + const distributed = finalBalance - initialBalance + expect(distributed).to.be.closeTo(pendingBefore, ethers.parseEther('0.001')) + }) + }) + + describe('Boundary Value Testing', () => { + it('should handle totalSelfMintingPPM = 0 (no self-minting targets)', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + + // Add only allocator-minting targets (totalSelfMintingPPM = 0) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) // 20% + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Trigger accumulation by forcing rate change + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), true) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Call distributePendingIssuance - denominator should be MILLION (1,000,000) + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + const distributed1 = finalBalance1 - initialBalance1 + const distributed2 = finalBalance2 - initialBalance2 + + // Verify proportional distribution (3:2 ratio) + if (distributed1 > 0 && distributed2 > 0) { + const ratio = (BigInt(distributed1) * 1000n) / BigInt(distributed2) // Multiply by 1000 for precision + expect(ratio).to.be.closeTo(1500n, 50n) // 300000/200000 = 1.5 + } + + // Total distributed should equal the allocated portion of pending + // With 50% total allocator-minting allocation: (30% + 20%) / 100% = 50% of pending + const totalDistributed = distributed1 + distributed2 + const expectedTotal = pendingBefore / 2n // 50% of pending + expect(totalDistributed).to.be.closeTo(expectedTotal, ethers.parseEther('0.001')) + }) + + it('should handle totalSelfMintingPPM = MILLION - 1 (minimal allocator-minting)', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + + // Add targets: 1 PPM allocator-minting, 999,999 PPM self-minting + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 1, 0, false) // 1 PPM allocator + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 999999, false) // 999,999 PPM self + + // Distribute once to initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and accumulate significant issuance + await issuanceAllocator.connect(accounts.governor).pause() + for (let i = 0; i < 10; i++) { + await ethers.provider.send('evm_mine', []) + } + + // Trigger accumulation by forcing rate change + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) + + const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() + expect(pendingBefore).to.be.gt(0) + + const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + // Call distributePendingIssuance - denominator should be 1 + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + + const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) + const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + + const distributed1 = finalBalance1 - initialBalance1 + const distributed2 = finalBalance2 - initialBalance2 + + // Target2 (self-minting) should receive nothing + expect(distributed2).to.equal(0) + + // Target1 should receive all pending issuance + expect(distributed1).to.be.closeTo(pendingBefore, ethers.parseEther('0.001')) + }) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts b/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts new file mode 100644 index 000000000..5a2de54aa --- /dev/null +++ b/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts @@ -0,0 +1,134 @@ +/** + * Issuance System Integration Tests - Optimized Version + * Reduced from 149 lines to ~80 lines using shared utilities + */ + +import { expect } from 'chai' + +import { setupOptimizedAllocateSystem } from './optimizedFixtures' +import { expectRatioToEqual, mineBlocks, TestConstants } from './testPatterns' + +describe('Issuance System', () => { + let system: any + + before(async () => { + // Single setup instead of beforeEach - major performance improvement + system = await setupOptimizedAllocateSystem({ + setupTargets: false, // We'll set up specific scenarios per test + }) + }) + + beforeEach(async () => { + // Fast state reset instead of full redeployment + await system.helpers.resetState() + }) + + describe('End-to-End Issuance Flow', () => { + it('should allocate tokens to targets based on their allocation percentages', async () => { + const { contracts, addresses, accounts } = system + + // Verify initial balances (should be 0) + expect(await contracts.graphToken.balanceOf(addresses.target1)).to.equal(0) + expect(await contracts.graphToken.balanceOf(addresses.target2)).to.equal(0) + + // Set up allocations using predefined constants: target1 = 30%, target2 = 40% + await contracts.issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,bool)' + ](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0, false) + await contracts.issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,bool)' + ](addresses.target2, TestConstants.ALLOCATION_40_PERCENT, 0, false) + + // Grant operator roles using predefined constants + await contracts.target1 + .connect(accounts.governor) + .grantRole(TestConstants.OPERATOR_ROLE, accounts.operator.address) + await contracts.target2 + .connect(accounts.governor) + .grantRole(TestConstants.OPERATOR_ROLE, accounts.operator.address) + + // Get balances after allocation setup + const balanceAfterAllocation1 = await contracts.graphToken.balanceOf(addresses.target1) + const balanceAfterAllocation2 = await contracts.graphToken.balanceOf(addresses.target2) + + // Mine blocks using helper function + await mineBlocks(10) + await contracts.issuanceAllocator.distributeIssuance() + + // Get final balances and verify distributions + const finalBalance1 = await contracts.graphToken.balanceOf(addresses.target1) + const finalBalance2 = await contracts.graphToken.balanceOf(addresses.target2) + + // Verify targets received tokens proportionally + expect(finalBalance1).to.be.gt(balanceAfterAllocation1) + expect(finalBalance2).to.be.gt(balanceAfterAllocation2) + + // Test token distribution from targets to users + await contracts.target1.connect(accounts.operator).sendTokens(accounts.user.address, finalBalance1) + await contracts.target2.connect(accounts.operator).sendTokens(accounts.indexer1.address, finalBalance2) + + // Verify user balances and target emptiness + expect(await contracts.graphToken.balanceOf(accounts.user.address)).to.equal(finalBalance1) + expect(await contracts.graphToken.balanceOf(accounts.indexer1.address)).to.equal(finalBalance2) + expect(await contracts.graphToken.balanceOf(addresses.target1)).to.equal(0) + expect(await contracts.graphToken.balanceOf(addresses.target2)).to.equal(0) + }) + + it('should handle allocation changes correctly', async () => { + const { contracts, addresses, accounts } = system + + // Set up initial allocations using helper + await system.helpers.setupStandardAllocations() + + // Verify initial total allocation (30% + 40% = 70%) + const totalAlloc = await contracts.issuanceAllocator.getTotalAllocation() + expect(totalAlloc.totalAllocationPPM).to.equal( + TestConstants.ALLOCATION_30_PERCENT + TestConstants.ALLOCATION_40_PERCENT, + ) + + // Change allocations: target1 = 50%, target2 = 20% (still 70%) + await contracts.issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,bool)' + ](addresses.target1, TestConstants.ALLOCATION_50_PERCENT, 0, false) + await contracts.issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,bool)' + ](addresses.target2, TestConstants.ALLOCATION_20_PERCENT, 0, false) + + // Verify updated allocations + const updatedTotalAlloc = await contracts.issuanceAllocator.getTotalAllocation() + expect(updatedTotalAlloc.totalAllocationPPM).to.equal( + TestConstants.ALLOCATION_50_PERCENT + TestConstants.ALLOCATION_20_PERCENT, + ) + + // Verify individual target allocations + const target1Info = await contracts.issuanceAllocator.getTargetData(addresses.target1) + const target2Info = await contracts.issuanceAllocator.getTargetData(addresses.target2) + + expect(target1Info.allocatorMintingPPM + target1Info.selfMintingPPM).to.equal(TestConstants.ALLOCATION_50_PERCENT) + expect(target2Info.allocatorMintingPPM + target2Info.selfMintingPPM).to.equal(TestConstants.ALLOCATION_20_PERCENT) + + // Verify proportional issuance distribution (50:20 = 5:2 ratio) + const target1Result = await contracts.issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) + const target2Result = await contracts.issuanceAllocator.getTargetIssuancePerBlock(addresses.target2) + + expect(target1Result.selfIssuancePerBlock).to.equal(0) + expect(target2Result.selfIssuancePerBlock).to.equal(0) + + // Verify the ratio using helper function: 50/20 = 2.5, so 2500 in our precision + expectRatioToEqual( + target1Result.allocatorIssuancePerBlock, + target2Result.allocatorIssuancePerBlock, + 2500n, // 50/20 * 1000 precision + TestConstants.DEFAULT_TOLERANCE, + ) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/commonTestUtils.ts b/packages/issuance/test/tests/allocate/commonTestUtils.ts new file mode 100644 index 000000000..c150e92d6 --- /dev/null +++ b/packages/issuance/test/tests/allocate/commonTestUtils.ts @@ -0,0 +1,46 @@ +/** + * Common test utilities for access control and other shared test patterns + */ + +import type { SignerWithAddress } from '@nomicfoundation/hardhat-ethers/signers' +import { expect } from 'chai' +import type { Contract } from 'ethers' + +/** + * Test multiple access control methods on a contract + * @param contract - The contract to test + * @param methods - Array of methods to test with their arguments + * @param authorizedAccount - Account that should have access + * @param unauthorizedAccount - Account that should not have access + */ + +export async function testMultipleAccessControl( + contract: Contract, + methods: Array<{ + method: string + args: unknown[] + description: string + }>, + authorizedAccount: SignerWithAddress, + unauthorizedAccount: SignerWithAddress, +): Promise { + for (const methodConfig of methods) { + const { method, args, description: _description } = methodConfig + + // Test that unauthorized account is rejected + await expect(contract.connect(unauthorizedAccount)[method](...args)).to.be.revertedWithCustomError( + contract, + 'AccessControlUnauthorizedAccount', + ) + + // Test that authorized account can call the method (if it exists and is callable) + try { + // Some methods might revert for business logic reasons even with proper access + // We just want to ensure they don't revert with AccessControlUnauthorizedAccount + await contract.connect(authorizedAccount)[method](...args) + } catch (error: any) { + // If it reverts, make sure it's not due to access control + expect(error.message).to.not.include('AccessControlUnauthorizedAccount') + } + } +} diff --git a/packages/issuance/test/tests/allocate/fixtures.ts b/packages/issuance/test/tests/allocate/fixtures.ts new file mode 100644 index 000000000..2a9212be4 --- /dev/null +++ b/packages/issuance/test/tests/allocate/fixtures.ts @@ -0,0 +1,91 @@ +/** + * Allocate-specific test fixtures + * Deployment and setup functions for allocate contracts + */ + +import hre from 'hardhat' + +const { ethers } = hre +const { upgrades } = require('hardhat') + +import { Constants, deployTestGraphToken } from '../common/fixtures' +import { GraphTokenHelper } from '../common/graphTokenHelper' + +/** + * Deploy the IssuanceAllocator contract with proxy using OpenZeppelin's upgrades library + * @param {string} graphToken + * @param {HardhatEthersSigner} governor + * @param {bigint} issuancePerBlock + * @returns {Promise} + */ +export async function deployIssuanceAllocator(graphToken, governor, issuancePerBlock) { + // Deploy implementation and proxy using OpenZeppelin's upgrades library + const IssuanceAllocatorFactory = await ethers.getContractFactory('IssuanceAllocator') + + // Deploy proxy with implementation + const issuanceAllocatorContract = await upgrades.deployProxy(IssuanceAllocatorFactory, [governor.address], { + constructorArgs: [graphToken], + initializer: 'initialize', + }) + + // Get the contract instance + const issuanceAllocator = issuanceAllocatorContract + + // Set issuance per block + await issuanceAllocator.connect(governor).setIssuancePerBlock(issuancePerBlock, false) + + return issuanceAllocator +} + +/** + * Deploy the DirectAllocation contract with proxy using OpenZeppelin's upgrades library + * @param {string} graphToken + * @param {HardhatEthersSigner} governor + * @returns {Promise} + */ +export async function deployDirectAllocation(graphToken, governor) { + // Deploy implementation and proxy using OpenZeppelin's upgrades library + const DirectAllocationFactory = await ethers.getContractFactory('DirectAllocation') + + // Deploy proxy with implementation + const directAllocationContract = await upgrades.deployProxy(DirectAllocationFactory, [governor.address], { + constructorArgs: [graphToken], + initializer: 'initialize', + }) + + // Return the contract instance + return directAllocationContract +} + +/** + * Deploy allocate-only system (IssuanceAllocator + DirectAllocation targets) + * This version excludes eligibility contracts for clean separation in tests + * @param {TestAccounts} accounts + * @param {bigint} [issuancePerBlock=Constants.DEFAULT_ISSUANCE_PER_BLOCK] + * @returns {Promise} + */ +export async function deployAllocateSystem(accounts, issuancePerBlock = Constants.DEFAULT_ISSUANCE_PER_BLOCK) { + const { governor } = accounts + + // Deploy test GraphToken + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + // Deploy IssuanceAllocator + const issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, governor, issuancePerBlock) + + // Add the IssuanceAllocator as a minter on the GraphToken + const graphTokenHelper = new GraphTokenHelper(graphToken as any, governor) + await graphTokenHelper.addMinter(await issuanceAllocator.getAddress()) + + // Deploy DirectAllocation targets + const target1 = await deployDirectAllocation(graphTokenAddress, governor) + const target2 = await deployDirectAllocation(graphTokenAddress, governor) + + return { + graphToken, + issuanceAllocator, + target1, + target2, + } +} diff --git a/packages/issuance/test/tests/allocate/issuanceCalculations.ts b/packages/issuance/test/tests/allocate/issuanceCalculations.ts new file mode 100644 index 000000000..fc69edea9 --- /dev/null +++ b/packages/issuance/test/tests/allocate/issuanceCalculations.ts @@ -0,0 +1,154 @@ +import { ethers } from 'hardhat' + +/** + * Shared calculation utilities for issuance tests. + * These functions provide reference implementations for expected values in tests. + * Enhanced with better naming, documentation, and error handling. + */ + +// Constants for better readability +export const CALCULATION_CONSTANTS = { + PPM_DENOMINATOR: 1_000_000n, // Parts per million denominator + PRECISION_MULTIPLIER: 1000n, // For ratio calculations + WEI_PER_ETHER: ethers.parseEther('1'), +} as const + +/** + * Calculate expected accumulation for allocator-minting targets during pause. + * Accumulation happens from lastIssuanceAccumulationBlock to current block. + * + * @param issuancePerBlock - Issuance rate per block + * @param blocks - Number of blocks to accumulate over + * @param allocatorMintingPPM - Total allocator-minting allocation in PPM + * @returns Expected accumulated amount for allocator-minting targets + */ +export function calculateExpectedAccumulation( + issuancePerBlock: bigint, + blocks: bigint, + allocatorMintingPPM: bigint, +): bigint { + if (blocks === 0n || allocatorMintingPPM === 0n) return 0n + + const totalIssuance = issuancePerBlock * blocks + // Contract uses: totalIssuance * totalAllocatorMintingAllocationPPM / MILLION + return (totalIssuance * allocatorMintingPPM) / CALCULATION_CONSTANTS.PPM_DENOMINATOR +} + +/** + * Calculate expected issuance for a specific target. + * + * @param issuancePerBlock - Issuance rate per block + * @param blocks - Number of blocks + * @param targetAllocationPPM - Target's allocation in PPM + * @returns Expected issuance for the target + */ +export function calculateExpectedTargetIssuance( + issuancePerBlock: bigint, + blocks: bigint, + targetAllocationPPM: bigint, +): bigint { + if (blocks === 0n || targetAllocationPPM === 0n) return 0n + + const totalIssuance = issuancePerBlock * blocks + return (totalIssuance * targetAllocationPPM) / CALCULATION_CONSTANTS.PPM_DENOMINATOR +} + +/** + * Calculate proportional distribution of pending issuance among allocator-minting targets. + * + * @param pendingAmount - Total pending amount to distribute + * @param targetAllocationPPM - Target's allocator-minting allocation in PPM + * @param totalSelfMintingPPM - Total self-minting allocation in PPM + * @returns Expected amount for the target + */ +export function calculateProportionalDistribution( + pendingAmount: bigint, + targetAllocationPPM: bigint, + totalSelfMintingPPM: bigint, +): bigint { + if (pendingAmount === 0n || targetAllocationPPM === 0n) return 0n + + const totalAllocatorMintingPPM = CALCULATION_CONSTANTS.PPM_DENOMINATOR - totalSelfMintingPPM + if (totalAllocatorMintingPPM === 0n) return 0n + + return (pendingAmount * targetAllocationPPM) / totalAllocatorMintingPPM +} + +/** + * Calculate expected total issuance for multiple targets. + * + * @param issuancePerBlock - Issuance rate per block + * @param blocks - Number of blocks + * @param targetAllocations - Array of target allocations in PPM + * @returns Array of expected issuance amounts for each target + */ +export function calculateMultiTargetIssuance( + issuancePerBlock: bigint, + blocks: bigint, + targetAllocations: bigint[], +): bigint[] { + return targetAllocations.map((allocation) => calculateExpectedTargetIssuance(issuancePerBlock, blocks, allocation)) +} + +/** + * Verify that distributed amounts add up to expected total rate. + * + * @param distributedAmounts - Array of distributed amounts + * @param expectedTotalRate - Expected total issuance rate + * @param blocks - Number of blocks + * @param tolerance - Tolerance for rounding errors (default: 1 wei) + * @returns True if amounts add up within tolerance + */ +export function verifyTotalDistribution( + distributedAmounts: bigint[], + expectedTotalRate: bigint, + blocks: bigint, + tolerance: bigint = 1n, +): boolean { + const totalDistributed = distributedAmounts.reduce((sum, amount) => sum + amount, 0n) + const expectedTotal = expectedTotalRate * blocks + const diff = totalDistributed > expectedTotal ? totalDistributed - expectedTotal : expectedTotal - totalDistributed + return diff <= tolerance +} + +/** + * Calculate expected distribution ratios between targets + * + * @param allocations - Array of allocations in PPM + * @returns Array of ratios relative to first target + */ +export function calculateExpectedRatios(allocations: bigint[]): bigint[] { + if (allocations.length === 0) return [] + + const baseAllocation = allocations[0] + if (baseAllocation === 0n) return allocations.map(() => 0n) + + return allocations.map((allocation) => (allocation * CALCULATION_CONSTANTS.PRECISION_MULTIPLIER) / baseAllocation) +} + +/** + * Convert allocation percentage to PPM + * + * @param percentage - Percentage as a number (e.g., 30 for 30%) + * @returns PPM value + */ +export function percentageToPPM(percentage: number): number { + return Math.round(percentage * 10_000) // 1% = 10,000 PPM +} + +/** + * Convert PPM to percentage + * + * @param ppm - PPM value + * @returns Percentage as a number + */ +export function ppmToPercentage(ppm: bigint | number): number { + return Number(ppm) / 10_000 +} + +/** + * Helper to convert ETH string to wei bigint. + */ +export function parseEther(value: string): bigint { + return ethers.parseEther(value) +} diff --git a/packages/issuance/test/tests/allocate/optimizationHelpers.ts b/packages/issuance/test/tests/allocate/optimizationHelpers.ts new file mode 100644 index 000000000..d9d986516 --- /dev/null +++ b/packages/issuance/test/tests/allocate/optimizationHelpers.ts @@ -0,0 +1,59 @@ +/** + * Performance optimization helpers for test files + * Focus on reducing code duplication and improving readability + */ + +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre + +// Common test constants to avoid magic numbers +const TEST_CONSTANTS = { + // Common allocation percentages (in PPM) + ALLOCATION_10_PERCENT: 100_000, + ALLOCATION_20_PERCENT: 200_000, + ALLOCATION_30_PERCENT: 300_000, + ALLOCATION_40_PERCENT: 400_000, + ALLOCATION_50_PERCENT: 500_000, + ALLOCATION_60_PERCENT: 600_000, + ALLOCATION_100_PERCENT: 1_000_000, + + // Common amounts + AMOUNT_100_TOKENS: '100', + AMOUNT_1000_TOKENS: '1000', + AMOUNT_10000_TOKENS: '10000', + + // Time constants + ONE_DAY: 24 * 60 * 60, + ONE_WEEK: 7 * 24 * 60 * 60, + TWO_WEEKS: 14 * 24 * 60 * 60, + + // Common interface IDs (to avoid recalculation) + ERC165_INTERFACE_ID: '0x01ffc9a7', + INVALID_INTERFACE_ID: '0x12345678', +} + +/** + * Helper to create consistent ethers amounts + */ +export function parseEther(amount: string): bigint { + return ethers.parseEther(amount) +} + +/** + * Helper to expect a transaction to revert with a specific custom error + */ +export async function expectCustomError(txPromise: Promise, contract: any, errorName: string): Promise { + await expect(txPromise).to.be.revertedWithCustomError(contract, errorName) +} + +/** + * Helper to mine blocks for time-sensitive tests + */ +export async function mineBlocks(count: number): Promise { + for (let i = 0; i < count; i++) { + await ethers.provider.send('evm_mine', []) + } +} + +export { TEST_CONSTANTS } diff --git a/packages/issuance/test/tests/allocate/optimizedFixtures.ts b/packages/issuance/test/tests/allocate/optimizedFixtures.ts new file mode 100644 index 000000000..22f407f7d --- /dev/null +++ b/packages/issuance/test/tests/allocate/optimizedFixtures.ts @@ -0,0 +1,310 @@ +/** + * Enhanced Test Fixtures with Performance Optimizations + * Consolidates common test setup patterns and reduces duplication + */ + +import hre from 'hardhat' + +import { Constants, deployTestGraphToken, getTestAccounts } from '../common/fixtures' +import { deployAllocateSystem } from './fixtures' +import { TestConstants } from './testPatterns' +const { ethers } = hre + +/** + * Enhanced fixture for allocate-only system (excludes eligibility contracts) + * Use this for allocate tests to ensure clean separation from eligibility + */ +export async function setupOptimizedAllocateSystem(customOptions: any = {}) { + const accounts = await getTestAccounts() + + const options = { + issuancePerBlock: Constants.DEFAULT_ISSUANCE_PER_BLOCK, + setupMinterRole: true, + setupTargets: true, + targetCount: 2, + ...customOptions, + } + + // Deploy allocate-only system (no eligibility contracts) + const { graphToken, issuanceAllocator, target1, target2 } = await deployAllocateSystem( + accounts, + options.issuancePerBlock, + ) + + // Cache addresses to avoid repeated getAddress() calls + const addresses = { + graphToken: await graphToken.getAddress(), + issuanceAllocator: await issuanceAllocator.getAddress(), + target1: await target1.getAddress(), + target2: await target2.getAddress(), + } + + // Setup minter role if requested + if (options.setupMinterRole) { + await (graphToken as any).addMinter(addresses.issuanceAllocator) + } + + // Setup default targets if requested + if (options.setupTargets) { + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,bool)' + ](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0, false) + + if (options.targetCount >= 2) { + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,bool)' + ](addresses.target2, TestConstants.ALLOCATION_20_PERCENT, 0, false) + } + } + + return { + accounts, + contracts: { + graphToken, + issuanceAllocator, + target1, + target2, + }, + addresses, + helpers: { + // Helper to reset state without redeploying + resetState: async () => { + // Remove all targets + const targets = await issuanceAllocator.getTargets() + for (const targetAddr of targets) { + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](targetAddr, 0, 0, false) + } + + // Reset issuance rate + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(options.issuancePerBlock, false) + }, + + // Helper to setup standard allocations + setupStandardAllocations: async () => { + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,bool)' + ](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0, false) + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,bool)' + ](addresses.target2, TestConstants.ALLOCATION_40_PERCENT, 0, false) + }, + + // Helper to verify proportional distributions + verifyProportionalDistribution: async (expectedRatios: number[]) => { + const balance1: bigint = await (graphToken as any).balanceOf(addresses.target1) + const balance2: bigint = await (graphToken as any).balanceOf(addresses.target2) + + if (balance2 > 0n) { + const ratio: bigint = (balance1 * TestConstants.RATIO_PRECISION) / balance2 + const expectedRatio: bigint = BigInt( + Math.round((expectedRatios[0] / expectedRatios[1]) * Number(TestConstants.RATIO_PRECISION)), + ) + + // Allow for small rounding errors + const tolerance: bigint = 50n // TestConstants.DEFAULT_TOLERANCE + const diff: bigint = ratio > expectedRatio ? ratio - expectedRatio : expectedRatio - ratio + + if (diff > tolerance) { + throw new Error( + `Distribution ratio ${ratio} does not match expected ${expectedRatio} within tolerance ${tolerance}`, + ) + } + } + }, + }, + } +} + +/** + * Lightweight fixture for testing single allocate contracts + */ +export async function setupSingleContract(contractType: 'issuanceAllocator' | 'directAllocation') { + const accounts = await getTestAccounts() + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + let contract: any + + switch (contractType) { + case 'issuanceAllocator': { + const { deployIssuanceAllocator } = await import('./fixtures') + contract = await deployIssuanceAllocator( + graphTokenAddress, + accounts.governor, + Constants.DEFAULT_ISSUANCE_PER_BLOCK, + ) + break + } + case 'directAllocation': { + const { deployDirectAllocation } = await import('./fixtures') + contract = await deployDirectAllocation(graphTokenAddress, accounts.governor) + break + } + default: + throw new Error(`Unknown contract type: ${contractType}`) + } + + return { + accounts, + contract, + graphToken, + addresses: { + contract: await contract.getAddress(), + graphToken: graphTokenAddress, + }, + } +} + +/** + * Shared test data for consistent testing + */ +export const TestData = { + // Standard allocation scenarios + scenarios: { + balanced: [ + { target: 'target1', allocatorPPM: TestConstants.ALLOCATION_30_PERCENT, selfPPM: 0 }, + { target: 'target2', allocatorPPM: TestConstants.ALLOCATION_40_PERCENT, selfPPM: 0 }, + ], + mixed: [ + { target: 'target1', allocatorPPM: TestConstants.ALLOCATION_20_PERCENT, selfPPM: 0 }, + { target: 'target2', allocatorPPM: 0, selfPPM: TestConstants.ALLOCATION_30_PERCENT }, + ], + selfMintingOnly: [ + { target: 'target1', allocatorPPM: 0, selfPPM: TestConstants.ALLOCATION_50_PERCENT }, + { target: 'target2', allocatorPPM: 0, selfPPM: TestConstants.ALLOCATION_30_PERCENT }, + ], + }, + + // Standard test parameters + issuanceRates: { + low: ethers.parseEther('10'), + medium: ethers.parseEther('100'), + high: ethers.parseEther('1000'), + }, + + // Common test tolerances + tolerances: { + strict: 1n, + normal: 50n, // TestConstants.DEFAULT_TOLERANCE + loose: 100n, // TestConstants.DEFAULT_TOLERANCE * 2n + }, +} + +/** + * Helper to apply a scenario to contracts + */ +export async function applyAllocationScenario(issuanceAllocator: any, addresses: any, scenario: any[], governor: any) { + for (const allocation of scenario) { + const targetAddress = addresses[allocation.target] + await issuanceAllocator + .connect(governor) + [ + 'setTargetAllocation(address,uint256,uint256,bool)' + ](targetAddress, allocation.allocatorPPM, allocation.selfPPM, false) + } +} + +/** + * OptimizedFixtures class for managing test contracts and state + */ +export class OptimizedFixtures { + private accounts: any + private sharedContracts: any = null + + constructor(accounts: any) { + this.accounts = accounts + } + + async setupDirectAllocationSuite() { + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const { deployDirectAllocation } = await import('./fixtures') + const directAllocation = await deployDirectAllocation(graphTokenAddress, this.accounts.governor) + const directAllocationAddress = await directAllocation.getAddress() + + const { GraphTokenHelper } = require('../common/graphTokenHelper') + const graphTokenHelper = new GraphTokenHelper(graphToken, this.accounts.governor) + + this.sharedContracts = { + graphToken, + directAllocation, + graphTokenHelper, + addresses: { + graphToken: graphTokenAddress, + directAllocation: directAllocationAddress, + }, + } + } + + getContracts() { + if (!this.sharedContracts) { + throw new Error('Contracts not initialized. Call setupDirectAllocationSuite() first.') + } + return this.sharedContracts + } + + async resetContractsState() { + if (!this.sharedContracts) return + + const { directAllocation } = this.sharedContracts + const { ROLES } = require('./testPatterns') + + // Reset pause state + try { + if (await directAllocation.paused()) { + await directAllocation.connect(this.accounts.governor).unpause() + } + } catch { + // Ignore if not paused + } + + // Remove all roles except governor + try { + for (const account of [this.accounts.operator, this.accounts.user, this.accounts.nonGovernor]) { + if (await directAllocation.hasRole(ROLES.OPERATOR, account.address)) { + await directAllocation.connect(this.accounts.governor).revokeRole(ROLES.OPERATOR, account.address) + } + if (await directAllocation.hasRole(ROLES.PAUSE, account.address)) { + await directAllocation.connect(this.accounts.governor).revokeRole(ROLES.PAUSE, account.address) + } + } + + // Remove pause role from governor if present + if (await directAllocation.hasRole(ROLES.PAUSE, this.accounts.governor.address)) { + await directAllocation.connect(this.accounts.governor).revokeRole(ROLES.PAUSE, this.accounts.governor.address) + } + } catch { + // Ignore role management errors during reset + } + } + + async createFreshDirectAllocation() { + const graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + const { deployDirectAllocation } = await import('./fixtures') + const directAllocation = await deployDirectAllocation(graphTokenAddress, this.accounts.governor) + + const { GraphTokenHelper } = require('../common/graphTokenHelper') + const graphTokenHelper = new GraphTokenHelper(graphToken, this.accounts.governor) + + return { + directAllocation, + graphToken, + graphTokenHelper, + addresses: { + graphToken: graphTokenAddress, + directAllocation: await directAllocation.getAddress(), + }, + } + } +} diff --git a/packages/issuance/test/tests/allocate/testPatterns.ts b/packages/issuance/test/tests/allocate/testPatterns.ts new file mode 100644 index 000000000..4592eb9e9 --- /dev/null +++ b/packages/issuance/test/tests/allocate/testPatterns.ts @@ -0,0 +1,583 @@ +/** + * Shared test patterns and utilities to reduce duplication across test files + */ + +import { expect } from 'chai' +import { ethers } from 'hardhat' + +// Type definitions for test utilities +export interface TestAccounts { + governor: any + nonGovernor: any + operator: any + user: any + indexer1: any + indexer2: any + selfMintingTarget: any +} + +export interface ContractWithMethods { + connect(signer: any): ContractWithMethods + [methodName: string]: any +} + +// Test constants - centralized to avoid magic numbers +export const TestConstants = { + // Precision and tolerance constants + RATIO_PRECISION: 1000n, + DEFAULT_TOLERANCE: 50n, + STRICT_TOLERANCE: 10n, + + // Common allocation percentages in PPM + ALLOCATION_10_PERCENT: 100_000, + ALLOCATION_20_PERCENT: 200_000, + ALLOCATION_30_PERCENT: 300_000, + ALLOCATION_40_PERCENT: 400_000, + ALLOCATION_50_PERCENT: 500_000, + ALLOCATION_60_PERCENT: 600_000, + ALLOCATION_100_PERCENT: 1_000_000, + + // Role constants - pre-calculated to avoid repeated contract calls + GOVERNOR_ROLE: ethers.keccak256(ethers.toUtf8Bytes('GOVERNOR_ROLE')), + OPERATOR_ROLE: ethers.keccak256(ethers.toUtf8Bytes('OPERATOR_ROLE')), + PAUSE_ROLE: ethers.keccak256(ethers.toUtf8Bytes('PAUSE_ROLE')), + ORACLE_ROLE: ethers.keccak256(ethers.toUtf8Bytes('ORACLE_ROLE')), +} as const + +// Consolidated role constants +export const ROLES = { + GOVERNOR: TestConstants.GOVERNOR_ROLE, + OPERATOR: TestConstants.OPERATOR_ROLE, + PAUSE: TestConstants.PAUSE_ROLE, + ORACLE: TestConstants.ORACLE_ROLE, +} as const + +/** + * Shared test pattern for governor-only access control + */ +export function shouldEnforceGovernorRole( + contractGetter: () => T, + methodName: string, + methodArgs: any[] = [], + accounts?: any, +) { + return function () { + it(`should revert when non-governor calls ${methodName}`, async function () { + const contract = contractGetter() + const testAccounts = accounts || this.parent.ctx.accounts + + await expect( + (contract as any).connect(testAccounts.nonGovernor)[methodName](...methodArgs), + ).to.be.revertedWithCustomError(contract as any, 'AccessControlUnauthorizedAccount') + }) + + it(`should allow governor to call ${methodName}`, async function () { + const contract = contractGetter() + const testAccounts = accounts || this.parent.ctx.accounts + + await expect((contract as any).connect(testAccounts.governor)[methodName](...methodArgs)).to.not.be.reverted + }) + } +} + +/** + * Shared test pattern for role-based access control + */ +export function shouldEnforceRoleAccess( + contractGetter: () => T, + methodName: string, + requiredRole: string, + methodArgs: any[] = [], + accounts?: any, +) { + return function () { + it(`should revert when account without ${requiredRole} calls ${methodName}`, async function () { + const contract = contractGetter() + const testAccounts = accounts || this.parent.ctx.accounts + + await expect( + (contract as any).connect(testAccounts.nonGovernor)[methodName](...methodArgs), + ).to.be.revertedWithCustomError(contract as any, 'AccessControlUnauthorizedAccount') + }) + } +} + +/** + * Calculate ratio between two values with precision + */ +export function calculateRatio( + value1: bigint, + value2: bigint, + precision: bigint = TestConstants.RATIO_PRECISION, +): bigint { + return (value1 * precision) / value2 +} + +/** + * Helper to verify ratio matches expected value within tolerance + */ +export function expectRatioToEqual( + actual1: bigint, + actual2: bigint, + expectedRatio: bigint, + tolerance: bigint = TestConstants.DEFAULT_TOLERANCE, + precision: bigint = TestConstants.RATIO_PRECISION, +) { + const actualRatio = calculateRatio(actual1, actual2, precision) + expect(actualRatio).to.be.closeTo(expectedRatio, tolerance) +} + +/** + * Shared test pattern for initialization + */ +export function shouldInitializeCorrectly(contractGetter: () => T, expectedValues: Record) { + return function () { + Object.entries(expectedValues).forEach(([property, expectedValue]) => { + it(`should set ${property} correctly during initialization`, async function () { + const contract = contractGetter() + // Type assertion is necessary here since we're accessing dynamic properties + const actualValue = await (contract as any)[property]() + expect(actualValue).to.equal(expectedValue) + }) + }) + + it('should revert when initialize is called more than once', async function () { + const contract = contractGetter() + const accounts = this.parent.ctx.accounts + + await expect((contract as any).initialize(accounts.governor.address)).to.be.revertedWithCustomError( + contract as any, + 'InvalidInitialization', + ) + }) + } +} + +/** + * Shared test pattern for pausing functionality + */ +export function shouldHandlePausingCorrectly( + contractGetter: () => T, + pauseRoleAccount: any, + methodName: string = 'distributeIssuance', +) { + return function () { + it('should allow pausing and unpausing by authorized account', async function () { + const contract = contractGetter() + + await (contract as any).connect(pauseRoleAccount).pause() + expect(await (contract as any).paused()).to.be.true + + await (contract as any).connect(pauseRoleAccount).unpause() + expect(await (contract as any).paused()).to.be.false + }) + + it(`should handle ${methodName} when paused`, async function () { + const contract = contractGetter() + + await (contract as any).connect(pauseRoleAccount).pause() + + // Should not revert when paused, but behavior may differ + await expect((contract as any)[methodName]()).to.not.be.reverted + }) + } +} + +/** + * Helper for mining blocks consistently across tests + */ +export async function mineBlocks(count: number): Promise { + for (let i = 0; i < count; i++) { + await ethers.provider.send('evm_mine', []) + } +} + +/** + * Helper to get current block number + */ +export async function getCurrentBlockNumber(): Promise { + return await ethers.provider.getBlockNumber() +} + +/** + * Helper to disable/enable auto-mining for precise block control + */ +export async function withAutoMiningDisabled(callback: () => Promise): Promise { + await ethers.provider.send('evm_setAutomine', [false]) + try { + return await callback() + } finally { + await ethers.provider.send('evm_setAutomine', [true]) + } +} + +/** + * Helper to verify role assignment + */ +export async function expectRole(contract: any, role: string, account: string, shouldHaveRole: boolean) { + const hasRole = await contract.hasRole(role, account) + expect(hasRole).to.equal(shouldHaveRole) +} + +/** + * Helper to verify transaction reverts with specific error + */ +export async function expectRevert(transactionPromise: Promise, errorName: string, contract?: any) { + if (contract) { + await expect(transactionPromise).to.be.revertedWithCustomError(contract, errorName) + } else { + await expect(transactionPromise).to.be.revertedWith(errorName) + } +} + +/** + * Comprehensive access control test suite for a contract + * Replaces multiple individual access control tests + */ +export function shouldEnforceAccessControl( + contractGetter: () => T, + methods: Array<{ + name: string + args: any[] + requiredRole?: string + allowedRoles?: string[] + }>, + accounts: any, +) { + return function () { + methods.forEach((method) => { + const allowedRoles = method.allowedRoles || [TestConstants.GOVERNOR_ROLE] + + describe(`${method.name} access control`, () => { + it(`should revert when unauthorized account calls ${method.name}`, async function () { + const contract = contractGetter() + await expect( + (contract as any).connect(accounts.nonGovernor)[method.name](...method.args), + ).to.be.revertedWithCustomError(contract as any, 'AccessControlUnauthorizedAccount') + }) + + allowedRoles.forEach((role) => { + const roleName = + role === TestConstants.GOVERNOR_ROLE + ? 'governor' + : role === TestConstants.OPERATOR_ROLE + ? 'operator' + : 'authorized' + const account = + role === TestConstants.GOVERNOR_ROLE + ? accounts.governor + : role === TestConstants.OPERATOR_ROLE + ? accounts.operator + : accounts.governor + + it(`should allow ${roleName} to call ${method.name}`, async function () { + const contract = contractGetter() + await expect((contract as any).connect(account)[method.name](...method.args)).to.not.be.reverted + }) + }) + }) + }) + } +} + +/** + * Comprehensive initialization test suite + * Replaces multiple individual initialization tests + */ +export function shouldInitializeProperly( + contractGetter: () => T, + initializationTests: Array<{ + description: string + check: (contract: T) => Promise + }>, + reinitializationTest?: { + method: string + args: any[] + expectedError: string + }, +) { + return function () { + describe('Initialization', () => { + initializationTests.forEach((test) => { + it(test.description, async function () { + const contract = contractGetter() + await test.check(contract) + }) + }) + + if (reinitializationTest) { + it('should revert when initialize is called more than once', async function () { + const contract = contractGetter() + await expect( + (contract as any)[reinitializationTest.method](...reinitializationTest.args), + ).to.be.revertedWithCustomError(contract as any, reinitializationTest.expectedError) + }) + } + }) + } +} + +/** + * Comprehensive pausability test suite + * Replaces multiple individual pause/unpause tests + */ +export function shouldHandlePausability( + contractGetter: () => T, + pausableOperations: Array<{ + name: string + args: any[] + caller: string + }>, + accounts: any, +) { + return function () { + describe('Pausability', () => { + it('should allow PAUSE_ROLE to pause and unpause', async function () { + const contract = contractGetter() + + // Grant pause role to operator + await (contract as any) + .connect(accounts.governor) + .grantRole(TestConstants.PAUSE_ROLE, accounts.operator.address) + + // Should be able to pause + await expect((contract as any).connect(accounts.operator).pause()).to.not.be.reverted + expect(await (contract as any).paused()).to.be.true + + // Should be able to unpause + await expect((contract as any).connect(accounts.operator).unpause()).to.not.be.reverted + expect(await (contract as any).paused()).to.be.false + }) + + it('should revert when non-PAUSE_ROLE tries to pause', async function () { + const contract = contractGetter() + await expect((contract as any).connect(accounts.nonGovernor).pause()).to.be.revertedWithCustomError( + contract as any, + 'AccessControlUnauthorizedAccount', + ) + }) + + pausableOperations.forEach((operation) => { + it(`should revert ${operation.name} when paused`, async function () { + const contract = contractGetter() + const caller = + operation.caller === 'governor' + ? accounts.governor + : operation.caller === 'operator' + ? accounts.operator + : accounts.nonGovernor + + // Grant pause role and pause + await (contract as any) + .connect(accounts.governor) + .grantRole(TestConstants.PAUSE_ROLE, accounts.governor.address) + await (contract as any).connect(accounts.governor).pause() + + await expect( + (contract as any).connect(caller)[operation.name](...operation.args), + ).to.be.revertedWithCustomError(contract as any, 'EnforcedPause') + }) + }) + }) + } +} + +/** + * Comprehensive role management test suite + * Replaces multiple individual role grant/revoke tests + */ +export function shouldManageRoles( + contractGetter: () => T, + roles: Array<{ + role: string + roleName: string + grantableBy?: string[] + }>, + accounts: any, +) { + return function () { + describe('Role Management', () => { + roles.forEach((roleConfig) => { + const grantableBy = roleConfig.grantableBy || ['governor'] + + describe(`${roleConfig.roleName} management`, () => { + grantableBy.forEach((granterRole) => { + const granter = granterRole === 'governor' ? accounts.governor : accounts.operator + + it(`should allow ${granterRole} to grant ${roleConfig.roleName}`, async function () { + const contract = contractGetter() + await expect((contract as any).connect(granter).grantRole(roleConfig.role, accounts.user.address)).to.not + .be.reverted + + expect(await (contract as any).hasRole(roleConfig.role, accounts.user.address)).to.be.true + }) + + it(`should allow ${granterRole} to revoke ${roleConfig.roleName}`, async function () { + const contract = contractGetter() + + // First grant the role + await (contract as any).connect(granter).grantRole(roleConfig.role, accounts.user.address) + + // Then revoke it + await expect((contract as any).connect(granter).revokeRole(roleConfig.role, accounts.user.address)).to.not + .be.reverted + + expect(await (contract as any).hasRole(roleConfig.role, accounts.user.address)).to.be.false + }) + }) + + it(`should revert when non-authorized tries to grant ${roleConfig.roleName}`, async function () { + const contract = contractGetter() + await expect( + (contract as any).connect(accounts.nonGovernor).grantRole(roleConfig.role, accounts.user.address), + ).to.be.revertedWithCustomError(contract as any, 'AccessControlUnauthorizedAccount') + }) + }) + }) + }) + } +} + +/** + * Comprehensive interface compliance test suite + * Replaces multiple individual interface support tests + * + * @param contractGetter - Function that returns the contract instance to test + * @param interfaces - Array of Typechain factory classes with interfaceId and interfaceName + * + * @example + * import { IPausableControl__factory, IAccessControl__factory } from '@graphprotocol/interfaces/types' + * + * shouldSupportInterfaces( + * () => contract, + * [ + * IPausableControl__factory, + * IAccessControl__factory, + * ] + * ) + */ +export function shouldSupportInterfaces( + contractGetter: () => T, + interfaces: Array<{ + interfaceId: string + interfaceName: string + }>, +) { + return function () { + describe('Interface Compliance', () => { + it('should support ERC-165 interface', async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface('0x01ffc9a7')).to.be.true + }) + + interfaces.forEach((iface) => { + it(`should support ${iface.interfaceName} interface`, async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface(iface.interfaceId)).to.be.true + }) + }) + + it('should not support random interface', async function () { + const contract = contractGetter() + expect(await (contract as any).supportsInterface('0x12345678')).to.be.false + }) + }) + } +} + +/** + * Comprehensive validation test suite + * Replaces multiple individual validation tests + */ +export function shouldValidateInputs( + contractGetter: () => T, + validationTests: Array<{ + method: string + args: any[] + expectedError: string + description: string + caller?: string + }>, + accounts: any, +) { + return function () { + describe('Input Validation', () => { + validationTests.forEach((test) => { + it(test.description, async function () { + const contract = contractGetter() + const caller = + test.caller === 'operator' ? accounts.operator : test.caller === 'user' ? accounts.user : accounts.governor + + await expect((contract as any).connect(caller)[test.method](...test.args)).to.be.revertedWithCustomError( + contract as any, + test.expectedError, + ) + }) + }) + }) + } +} + +/** + * Shared assertion helpers for common test patterns + */ +export const TestAssertions = { + /** + * Assert that a target received tokens proportionally + */ + expectProportionalDistribution: ( + distributions: bigint[], + expectedRatios: number[], + tolerance: bigint = TestConstants.DEFAULT_TOLERANCE, + ) => { + for (let i = 1; i < distributions.length; i++) { + const expectedRatio = BigInt( + Math.round((expectedRatios[0] / expectedRatios[i]) * Number(TestConstants.RATIO_PRECISION)), + ) + expectRatioToEqual(distributions[0], distributions[i], expectedRatio, tolerance) + } + }, + + /** + * Assert that balance increased by at least expected amount + */ + expectBalanceIncreasedBy: (initialBalance: bigint, finalBalance: bigint, expectedIncrease: bigint) => { + const actualIncrease = finalBalance - initialBalance + expect(actualIncrease).to.be.gte(expectedIncrease) + }, + + /** + * Assert that total allocations add up correctly + */ + expectTotalAllocation: (contract: any, expectedTotal: number) => { + return async () => { + const totalAlloc = await contract.getTotalAllocation() + expect(totalAlloc.totalAllocationPPM).to.equal(expectedTotal) + } + }, +} + +/** + * Shared test patterns organized by functionality + */ +export const TestPatterns = { + roleManagement: { + grantRole: async (contract: any, granter: any, role: string, account: string) => { + await contract.connect(granter).grantRole(role, account) + }, + + revokeRole: async (contract: any, revoker: any, role: string, account: string) => { + await contract.connect(revoker).revokeRole(role, account) + }, + }, + + pausable: { + pause: async (contract: any, account: any) => { + await contract.connect(account).pause() + }, + + unpause: async (contract: any, account: any) => { + await contract.connect(account).unpause() + }, + }, +} From 9f6c704d8e2a1e280a3b216aef0ff94b98ffb3c4 Mon Sep 17 00:00:00 2001 From: Rembrandt Kuipers <50174308+RembrandtK@users.noreply.github.com> Date: Fri, 12 Dec 2025 17:43:43 +0000 Subject: [PATCH 3/9] feat: default issuance allocation for Issuance Allocator The default allocation receives issuance that is not explicitly allocated to another target. - Added setDefaultAllocationAddress() to set. - getTotalAllocation() excludes default when it is the zero address. - Tests and documentation updated. --- .../IIssuanceAllocationAdministration.sol | 7 + .../contracts/allocate/IssuanceAllocator.sol | 162 ++++- .../tests/allocate/DefaultAllocation.test.ts | 556 ++++++++++++++++++ .../allocate/InterfaceIdStability.test.ts | 2 +- .../tests/allocate/IssuanceAllocator.test.ts | 265 +++++---- .../tests/allocate/IssuanceSystem.test.ts | 14 +- .../test/tests/allocate/optimizedFixtures.ts | 8 +- 7 files changed, 847 insertions(+), 167 deletions(-) create mode 100644 packages/issuance/test/tests/allocate/DefaultAllocation.test.ts diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol index 23bc7ea05..919cea168 100644 --- a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol @@ -72,6 +72,13 @@ interface IIssuanceAllocationAdministration { */ function forceTargetNoChangeNotificationBlock(address target, uint256 blockNumber) external returns (uint256); + /** + * @notice Set the address that receives the default portion of issuance not allocated to other targets + * @param newAddress The new default allocation address (can be address(0)) + * @return True if successful + */ + function setDefaultAllocationAddress(address newAddress) external returns (bool); + /** * @notice Distribute any pending accumulated issuance to allocator-minting targets. * @return Block number up to which issuance has been distributed diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.sol b/packages/issuance/contracts/allocate/IssuanceAllocator.sol index e6e9ba62c..9f6110bea 100644 --- a/packages/issuance/contracts/allocate/IssuanceAllocator.sol +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.sol @@ -25,6 +25,14 @@ import { ERC165Upgradeable } from "@openzeppelin/contracts-upgradeable/utils/int * of the protocol. It calculates issuance for all targets based on their configured proportions * and handles minting for allocator-minting portions. * + * @dev The contract maintains a 100% allocation invariant through a default allocation mechanism: + * - A default allocation target exists at targetAddresses[0] (initialized to address(0)) + * - The default allocation automatically receives any unallocated portion of issuance + * - Total allocation across all targets always equals 100% (MILLION PPM) + * - The default allocation address can be changed via setDefaultAllocationAddress() + * - When the default address is address(0), the unallocated portion is not minted + * - Regular targets cannot be set as the default allocation address + * * @dev The contract supports two types of allocation for each target: * 1. Allocator-minting allocation: The IssuanceAllocator calculates and mints tokens directly to targets * for this portion of their allocation. @@ -77,10 +85,13 @@ contract IssuanceAllocator is /// @param lastAccumulationBlock Last block when pending issuance was accumulated /// @dev Design invariant: lastDistributionBlock <= lastAccumulationBlock /// @param allocationTargets Mapping of target addresses to their allocation data - /// @param targetAddresses Array of all target addresses with non-zero allocation - /// @param totalAllocatorMintingPPM Total allocator-minting allocation (in PPM) across all targets + /// @param targetAddresses Array of all target addresses (including default allocation at index 0) /// @param totalSelfMintingPPM Total self-minting allocation (in PPM) across all targets /// @param pendingAccumulatedAllocatorIssuance Accumulated but not distributed issuance for allocator-minting from lastDistributionBlock to lastAccumulationBlock + /// @dev Design invariant: Total allocation across all targets always equals MILLION (100%) + /// @dev Design invariant: targetAddresses[0] is always the default allocation address + /// @dev Design invariant: 1 <= targetAddresses.length (default allocation always exists) + /// @dev Design invariant: Default allocation (targetAddresses[0]) is automatically adjusted to maintain 100% total /// @custom:storage-location erc7201:graphprotocol.storage.IssuanceAllocator struct IssuanceAllocatorData { uint256 issuancePerBlock; @@ -88,7 +99,6 @@ contract IssuanceAllocator is uint256 lastAccumulationBlock; mapping(address => AllocationTarget) allocationTargets; address[] targetAddresses; - uint256 totalAllocatorMintingPPM; uint256 totalSelfMintingPPM; uint256 pendingAccumulatedAllocatorIssuance; } @@ -122,6 +132,12 @@ contract IssuanceAllocator is /// @notice Thrown when toBlockNumber is out of valid range for accumulation error ToBlockOutOfRange(); + /// @notice Thrown when attempting to set allocation for the default allocation target + error CannotSetAllocationForDefaultTarget(); + + /// @notice Thrown when attempting to set default allocation address to a normally allocated target + error CannotSetDefaultToAllocatedTarget(); + // -- Events -- /// @notice Emitted when issuance is distributed to a target @@ -143,6 +159,11 @@ contract IssuanceAllocator is event IssuancePerBlockUpdated(uint256 oldIssuancePerBlock, uint256 newIssuancePerBlock); // solhint-disable-line gas-indexed-events // Do not need to index issuance per block values + /// @notice Emitted when the default allocation address is updated + /// @param oldAddress The previous default allocation address + /// @param newAddress The new default allocation address + event DefaultAllocationAddressUpdated(address indexed oldAddress, address indexed newAddress); + // -- Constructor -- /** @@ -159,9 +180,17 @@ contract IssuanceAllocator is /** * @notice Initialize the IssuanceAllocator contract * @param _governor Address that will have the GOVERNOR_ROLE + * @dev Initializes with a default allocation at index 0 set to address(0) with 100% allocation */ function initialize(address _governor) external virtual initializer { __BaseUpgradeable_init(_governor); + + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + // Initialize default allocation at index 0 with address(0) and 100% allocator-minting + $.targetAddresses.push(address(0)); + $.allocationTargets[address(0)].allocatorMintingPPM = MILLION; + $.allocationTargets[address(0)].selfMintingPPM = 0; } // -- Core Functionality -- @@ -219,6 +248,10 @@ contract IssuanceAllocator is if (0 < newIssuance) { for (uint256 i = 0; i < $.targetAddresses.length; ++i) { address target = $.targetAddresses[i]; + + // Skip minting to zero address (default allocation when not configured) + if (target == address(0)) continue; + AllocationTarget storage targetData = $.allocationTargets[target]; if (0 < targetData.allocatorMintingPPM) { @@ -272,9 +305,12 @@ contract IssuanceAllocator is * Use forceTargetNoChangeNotificationBlock to skip notification for malfunctioning targets. * * @param target Address of the target to notify - * @return True if notification was sent or already sent for this block + * @return True if notification was sent or already sent for this block. Always returns true for address(0) without notifying. */ function _notifyTarget(address target) private returns (bool) { + // Skip notification for zero address (default allocation when unset) + if (target == address(0)) return true; + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); AllocationTarget storage targetData = $.allocationTargets[target]; @@ -384,6 +420,43 @@ contract IssuanceAllocator is return _setTargetAllocation(target, allocatorMintingPPM, selfMintingPPM, evenIfDistributionPending); } + /** + * @inheritdoc IIssuanceAllocationAdministration + * @dev The default allocation automatically receives the portion of issuance not allocated to other targets + * @dev This maintains the invariant that total allocation is always 100% + * @dev Reverts if attempting to set to an address that has a normal (non-default) allocation + * @dev No-op if setting to the same address + */ + function setDefaultAllocationAddress(address newAddress) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + address oldAddress = $.targetAddresses[0]; + + // No-op if setting to same address + if (newAddress == oldAddress) return true; + + // Cannot set default allocation to a normally allocated target + // Check if newAddress is in targetAddresses (excluding index 0 which is the default) + // Note: This is O(n) for the number of targets, which could become expensive as targets increase. + // However, other operations (distribution, notifications) already loop through all targets and + // would encounter gas issues first. Recovery mechanisms exist (pause, per-target notification control). + for (uint256 i = 1; i < $.targetAddresses.length; ++i) { + require($.targetAddresses[i] != newAddress, CannotSetDefaultToAllocatedTarget()); + } + + // Notify both old and new addresses of the allocation change + _notifyTarget(oldAddress); + _notifyTarget(newAddress); + + // Update the default allocation address at index 0 + $.targetAddresses[0] = newAddress; + $.allocationTargets[newAddress] = $.allocationTargets[oldAddress]; + delete $.allocationTargets[oldAddress]; + + emit DefaultAllocationAddressUpdated(oldAddress, newAddress); + return true; + } + /** * @notice Internal implementation for setting target allocation * @param target Address of the target to update @@ -404,8 +477,16 @@ contract IssuanceAllocator is _notifyTarget(target); + // Total allocation calculation and check is delayed until after notifications. + // Distributing and notifying unnecessarily is harmless, but we need to prevent + // reentrancy from looping and changing allocations mid-calculation. + // (Would not be likely to be exploitable due to only governor being able to + // make a call to set target allocation, but better to be paranoid.) + // Validate totals and auto-adjust default allocation BEFORE updating target data + // so we can read the old allocation values _validateAndUpdateTotalAllocations(target, allocatorMintingPPM, selfMintingPPM); + // Then update the target's allocation data _updateTargetAllocationData(target, allocatorMintingPPM, selfMintingPPM); emit TargetAllocationUpdated(target, allocatorMintingPPM, selfMintingPPM); @@ -427,6 +508,9 @@ contract IssuanceAllocator is require(target != address(0), TargetAddressCannotBeZero()); IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + require(target != $.targetAddresses[0], CannotSetAllocationForDefaultTarget()); + AllocationTarget storage targetData = $.allocationTargets[target]; if (targetData.allocatorMintingPPM == allocatorMintingPPM && targetData.selfMintingPPM == selfMintingPPM) @@ -467,10 +551,12 @@ contract IssuanceAllocator is } /** - * @notice Updates global allocation totals and validates they don't exceed maximum + * @notice Updates global allocation totals and auto-adjusts default allocation to maintain 100% invariant * @param target Address of the target being updated * @param allocatorMintingPPM New allocator-minting allocation for the target (in PPM) * @param selfMintingPPM New self-minting allocation for the target (in PPM) + * @dev The default allocation (at targetAddresses[0]) is automatically adjusted to ensure total allocation equals MILLION + * @dev This function is called BEFORE the target's allocation data has been updated so we can read old values */ function _validateAndUpdateTotalAllocations( address target, @@ -479,18 +565,18 @@ contract IssuanceAllocator is ) private { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); AllocationTarget storage targetData = $.allocationTargets[target]; + AllocationTarget storage defaultTarget = $.allocationTargets[$.targetAddresses[0]]; - // Total allocation calculation and check is delayed until after notifications. - // Distributing and notifying unnecessarily is harmless, but we need to prevent - // reentrancy looping changing allocations mid-calculation. - // (Would not be likely to be exploitable due to only governor being able to - // make a call to set target allocation, but better to be paranoid.) - $.totalAllocatorMintingPPM = $.totalAllocatorMintingPPM - targetData.allocatorMintingPPM + allocatorMintingPPM; - $.totalSelfMintingPPM = $.totalSelfMintingPPM - targetData.selfMintingPPM + selfMintingPPM; + // Calculation is done here after notifications to prevent reentrancy issues - // Ensure the new total allocation doesn't exceed MILLION as in PPM. + uint256 availablePPM = defaultTarget.allocatorMintingPPM + + targetData.allocatorMintingPPM + + targetData.selfMintingPPM; // solhint-disable-next-line gas-strict-inequalities - require(($.totalAllocatorMintingPPM + $.totalSelfMintingPPM) <= MILLION, InsufficientAllocationAvailable()); + require(allocatorMintingPPM + selfMintingPPM <= availablePPM, InsufficientAllocationAvailable()); + + defaultTarget.allocatorMintingPPM = availablePPM - allocatorMintingPPM - selfMintingPPM; + $.totalSelfMintingPPM = $.totalSelfMintingPPM - targetData.selfMintingPPM + selfMintingPPM; } /** @@ -498,23 +584,24 @@ contract IssuanceAllocator is * @param target Address of the target being updated * @param allocatorMintingPPM New allocator-minting allocation for the target (in PPM) * @param selfMintingPPM New self-minting allocation for the target (in PPM) + * @dev This function is never called for the default allocation (at index 0), which is handled separately */ function _updateTargetAllocationData(address target, uint256 allocatorMintingPPM, uint256 selfMintingPPM) private { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); AllocationTarget storage targetData = $.allocationTargets[target]; // Internal design invariants: - // - targetAddresses contains all targets with non-zero allocation. - // - targetAddresses does not contain targets with zero allocation. - // - targetAddresses does not contain duplicates. - // - allocationTargets mapping contains all targets in targetAddresses with a non-zero allocation. - // - allocationTargets mapping allocations are zero for targets not in targetAddresses. + // - targetAddresses[0] is always the default allocation and is never removed + // - targetAddresses[1..n] contains all non-default targets with explicitly set non-zero allocations + // - targetAddresses does not contain duplicates + // - allocationTargets mapping contains allocation data for all targets in targetAddresses + // - Default allocation is automatically adjusted by _validateAndUpdateTotalAllocations // - Governance actions can create allocationTarget mappings with lastChangeNotifiedBlock set for targets not in targetAddresses. This is valid. // Therefore: - // - Only add a target to the list if it previously had no allocation. - // - Remove a target from the list when setting both allocations to 0. - // - Delete allocationTargets mapping entry when removing a target from targetAddresses. - // - Do not set lastChangeNotifiedBlock in this function. + // - Only add a non-default target to the list if it previously had no allocation + // - Remove a non-default target from the list when setting both allocations to 0 + // - Delete allocationTargets mapping entry when removing a target from targetAddresses + // - Do not set lastChangeNotifiedBlock in this function if (allocatorMintingPPM != 0 || selfMintingPPM != 0) { // Add to list if previously had no allocation if (targetData.allocatorMintingPPM == 0 && targetData.selfMintingPPM == 0) $.targetAddresses.push(target); @@ -586,10 +673,14 @@ contract IssuanceAllocator is if (pendingAmount == 0) return $.lastDistributionBlock; $.pendingAccumulatedAllocatorIssuance = 0; - if ($.totalAllocatorMintingPPM == 0) return $.lastDistributionBlock; + if ($.totalSelfMintingPPM == MILLION) return $.lastDistributionBlock; for (uint256 i = 0; i < $.targetAddresses.length; ++i) { address target = $.targetAddresses[i]; + + // Skip minting to zero address (default allocation when not configured) + if (target == address(0)) continue; + AllocationTarget storage targetData = $.allocationTargets[target]; if (0 < targetData.allocatorMintingPPM) { @@ -727,13 +818,32 @@ contract IssuanceAllocator is /** * @inheritdoc IIssuanceAllocationStatus + * @dev For reporting purposes, if the default allocation target is address(0), its allocation + * @dev is treated as "unallocated" since address(0) cannot receive minting. + * @dev When default is address(0): returns actual allocated amounts (may be less than 100%) + * @dev When default is a real address: returns 100% total allocation + * @dev Note: Internally, the contract always maintains 100% allocation invariant, even when default is address(0) */ function getTotalAllocation() external view override returns (Allocation memory) { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + uint256 totalAllocatorMinting = MILLION - $.totalSelfMintingPPM; + uint256 totalAllocation = MILLION; + + // If default is address(0), exclude its allocation from reported totals + // since it doesn't actually receive minting (effectively unallocated) + address defaultAddress = $.targetAddresses[0]; + if (defaultAddress == address(0)) { + AllocationTarget storage defaultTarget = $.allocationTargets[defaultAddress]; + uint256 defaultAllocation = defaultTarget.allocatorMintingPPM; + totalAllocatorMinting -= defaultAllocation; + totalAllocation -= defaultAllocation; + } + return Allocation({ - totalAllocationPPM: $.totalAllocatorMintingPPM + $.totalSelfMintingPPM, - allocatorMintingPPM: $.totalAllocatorMintingPPM, + totalAllocationPPM: totalAllocation, + allocatorMintingPPM: totalAllocatorMinting, selfMintingPPM: $.totalSelfMintingPPM }); } diff --git a/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts b/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts new file mode 100644 index 000000000..d59fb457e --- /dev/null +++ b/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts @@ -0,0 +1,556 @@ +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre + +import { deployTestGraphToken, getTestAccounts } from '../common/fixtures' +import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' +import { expectCustomError } from './optimizationHelpers' + +describe('IssuanceAllocator - Default Allocation', () => { + let accounts + let graphToken + let issuanceAllocator + let target1 + let target2 + let target3 + let addresses + + const MILLION = 1_000_000n + const issuancePerBlock = ethers.parseEther('100') + + beforeEach(async () => { + accounts = await getTestAccounts() + + // Deploy fresh contracts for each test + graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + target1 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + target2 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + target3 = await deployDirectAllocation(graphTokenAddress, accounts.governor) + + addresses = { + issuanceAllocator: await issuanceAllocator.getAddress(), + target1: await target1.getAddress(), + target2: await target2.getAddress(), + target3: await target3.getAddress(), + graphToken: graphTokenAddress, + } + + // Grant minter role to issuanceAllocator + await (graphToken as any).addMinter(addresses.issuanceAllocator) + }) + + describe('Initialization', () => { + it('should initialize with default allocation at index 0', async () => { + const targetCount = await issuanceAllocator.getTargetCount() + expect(targetCount).to.equal(1n) + + const defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(ethers.ZeroAddress) + }) + + it('should initialize with 100% allocation to default target', async () => { + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const allocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + + expect(allocation.totalAllocationPPM).to.equal(MILLION) + expect(allocation.allocatorMintingPPM).to.equal(MILLION) + expect(allocation.selfMintingPPM).to.equal(0n) + }) + + it('should report total allocation as 0% when default is address(0)', async () => { + const totalAllocation = await issuanceAllocator.getTotalAllocation() + + // When default is address(0), it is treated as unallocated for reporting purposes + expect(totalAllocation.totalAllocationPPM).to.equal(0n) + expect(totalAllocation.allocatorMintingPPM).to.equal(0n) + expect(totalAllocation.selfMintingPPM).to.equal(0n) + }) + }) + + describe('100% Allocation Invariant', () => { + it('should auto-adjust default allocation when setting normal target allocation', async () => { + const allocation1PPM = 300_000n // 30% + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1PPM) + + // Check target1 has correct allocation + const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(target1Allocation.totalAllocationPPM).to.equal(allocation1PPM) + + // Check default allocation was auto-adjusted + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationPPM).to.equal(MILLION - allocation1PPM) + + // Check reported total (excludes default since it's address(0)) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationPPM).to.equal(allocation1PPM) + }) + + it('should maintain 100% invariant with multiple targets', async () => { + const allocation1PPM = 200_000n // 20% + const allocation2PPM = 350_000n // 35% + const allocation3PPM = 150_000n // 15% + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1PPM) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2PPM) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target3, allocation3PPM) + + // Check default allocation is 30% (100% - 20% - 35% - 15%) + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + const expectedDefault = MILLION - allocation1PPM - allocation2PPM - allocation3PPM + expect(defaultAllocation.totalAllocationPPM).to.equal(expectedDefault) + + // Check reported total (excludes default since it's address(0)) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationPPM).to.equal(allocation1PPM + allocation2PPM + allocation3PPM) + }) + + it('should allow 0% default allocation when all allocation is assigned', async () => { + const allocation1PPM = 600_000n // 60% + const allocation2PPM = 400_000n // 40% + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1PPM) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2PPM) + + // Check default allocation is 0% + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationPPM).to.equal(0n) + + // Check reported total is 100% (default has 0%, so exclusion doesn't matter) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationPPM).to.equal(MILLION) + }) + + it('should revert if non-default allocations exceed 100%', async () => { + const allocation1PPM = 600_000n // 60% + const allocation2PPM = 500_000n // 50% (total would be 110%) + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1PPM) + + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2PPM), + issuanceAllocator, + 'InsufficientAllocationAvailable', + ) + }) + + it('should adjust default when removing a target allocation', async () => { + // Set up initial allocations + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, 200_000n) + + // Default should be 50% + let defaultAddress = await issuanceAllocator.getTargetAt(0) + let defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationPPM).to.equal(500_000n) + + // Remove target1 allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 0, false) + + // Default should now be 80% + defaultAddress = await issuanceAllocator.getTargetAt(0) + defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationPPM).to.equal(800_000n) + + // Reported total excludes default (only target2's 20% is reported) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationPPM).to.equal(200_000n) + }) + + it('should handle self-minting allocations correctly in 100% invariant', async () => { + const allocator1 = 200_000n + const self1 = 100_000n + const allocator2 = 300_000n + const self2 = 50_000n + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, allocator1, self1) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, allocator2, self2) + + // Total non-default: 20% + 10% + 30% + 5% = 65% + // Default should be: 35% + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationPPM).to.equal(350_000n) + + // Reported total excludes default (only target1+target2's 65% is reported) + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationPPM).to.equal(allocator1 + self1 + allocator2 + self2) + expect(totalAllocation.selfMintingPPM).to.equal(self1 + self2) + }) + }) + + describe('setDefaultAllocationAddress', () => { + it('should allow governor to change default allocation address', async () => { + const newDefaultAddress = addresses.target1 + + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(newDefaultAddress) + + const defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(newDefaultAddress) + }) + + it('should maintain allocation when changing default address', async () => { + // Set a target allocation first + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, 400_000n) + + // Default should be 60% + let defaultAddress = await issuanceAllocator.getTargetAt(0) + let defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationPPM).to.equal(600_000n) + + // Change default address + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + + // Check new address has the same allocation + defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(addresses.target1) + defaultAllocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(defaultAllocation.totalAllocationPPM).to.equal(600_000n) + + // Old address should have zero allocation + const oldAllocation = await issuanceAllocator.getTargetAllocation(ethers.ZeroAddress) + expect(oldAllocation.totalAllocationPPM).to.equal(0n) + }) + + it('should emit DefaultAllocationAddressUpdated event', async () => { + const newDefaultAddress = addresses.target1 + + await expect(issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(newDefaultAddress)) + .to.emit(issuanceAllocator, 'DefaultAllocationAddressUpdated') + .withArgs(ethers.ZeroAddress, newDefaultAddress) + }) + + it('should be no-op when setting to same address', async () => { + const currentAddress = await issuanceAllocator.getTargetAt(0) + + const tx = await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(currentAddress) + const receipt = await tx.wait() + + // Should not emit event when no-op + const events = receipt!.logs.filter((log: any) => { + try { + return issuanceAllocator.interface.parseLog(log)?.name === 'DefaultAllocationAddressUpdated' + } catch { + return false + } + }) + expect(events.length).to.equal(0) + }) + + it('should revert when non-governor tries to change default address', async () => { + await expect( + issuanceAllocator.connect(accounts.user).setDefaultAllocationAddress(addresses.target1), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should revert when trying to set default to a normally allocated target', async () => { + // Set target1 as a normal allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + + // Try to set target1 as default should fail + await expectCustomError( + issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1), + issuanceAllocator, + 'CannotSetDefaultToAllocatedTarget', + ) + }) + + it('should allow changing back to zero address', async () => { + // Change to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + + // Change back to zero address + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(ethers.ZeroAddress) + + const defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(ethers.ZeroAddress) + }) + }) + + describe('setTargetAllocation restrictions', () => { + it('should revert with zero address error when default target is address(0)', async () => { + const defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(ethers.ZeroAddress) + + // When default is address(0), the zero address check happens first + await expectCustomError( + issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256)'](defaultAddress, 500_000n), + issuanceAllocator, + 'TargetAddressCannotBeZero', + ) + }) + + it('should revert when trying to set allocation for changed default target', async () => { + // Change default to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + + // Should not be able to set allocation for target1 now + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 500_000n), + issuanceAllocator, + 'CannotSetAllocationForDefaultTarget', + ) + }) + + it('should allow setting allocation for previous default address after it changes', async () => { + // Change default to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + + // Should now be able to set allocation for old default (zero address would fail for other reasons, use target2) + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target2) + + // Now target1 is no longer default, should be able to allocate to it + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + + const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(allocation.totalAllocationPPM).to.equal(300_000n) + }) + + it('should revert when trying to set allocation for address(0) when default is not address(0)', async () => { + // Change default to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + + // Try to set allocation for address(0) directly should fail + await expectCustomError( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](ethers.ZeroAddress, 300_000n), + issuanceAllocator, + 'TargetAddressCannotBeZero', + ) + }) + }) + + describe('Distribution with default allocation', () => { + it('should not mint to zero address when default is unset', async () => { + // Set a normal target allocation (this is block 1) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 400_000n) + + // Distribute (this is block 2, so we distribute for block 1->2 = 1 block since last distribution) + await issuanceAllocator.distributeIssuance() + + // Target1 should receive 40% of issuance for the block between setTargetAllocation and distributeIssuance + const target1Balance = await graphToken.balanceOf(addresses.target1) + const expectedTarget1 = (issuancePerBlock * 400_000n) / MILLION + expect(target1Balance).to.equal(expectedTarget1) + + // Zero address should have nothing (cannot be minted to) + const zeroBalance = await graphToken.balanceOf(ethers.ZeroAddress) + expect(zeroBalance).to.equal(0n) + + // The 60% for default (zero address) is effectively burned/not minted + }) + + it('should mint to default address when it is set', async () => { + // Distribute any pending issuance first to start fresh + await issuanceAllocator.distributeIssuance() + + // Change default to target3 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) + + // Set target1 allocation using evenIfDistributionPending to avoid premature distribution + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300_000n, 0n, true) + + // Distribute once (exactly 1 block with the new allocations) + await issuanceAllocator.distributeIssuance() + + // Target1 should receive 30% for 1 block (from last distributeIssuance call) + const target1Balance = await graphToken.balanceOf(addresses.target1) + const expectedTarget1 = (issuancePerBlock * 300_000n) / MILLION + expect(target1Balance).to.equal(expectedTarget1) + + // Target3 (default) should receive: + // - 100% for 2 blocks (from initial distributeIssuance to setTargetAllocation) + // - 70% for 1 block (from setTargetAllocation to final distributeIssuance) + const target3Balance = await graphToken.balanceOf(addresses.target3) + const expectedTarget3 = issuancePerBlock * 2n + (issuancePerBlock * 700_000n) / MILLION + expect(target3Balance).to.equal(expectedTarget3) + }) + + it('should distribute correctly with multiple targets and default', async () => { + // Distribute any pending issuance first to start fresh + await issuanceAllocator.distributeIssuance() + + // Set default to target3 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) + + // Set allocations using evenIfDistributionPending to avoid premature distributions + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 200_000n, 0n, true) // 20% + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 300_000n, 0n, true) // 30% + // Default (target3) gets 50% + + // Distribute once (exactly 1 block with the final allocations) + await issuanceAllocator.distributeIssuance() + + // Check all balances accounting for block accumulation: + // - target1 gets 20% for 2 blocks (from first setTargetAllocation onwards) + // - target2 gets 30% for 1 block (from second setTargetAllocation onwards) + // - target3 (default) gets 100% for 2 blocks + 80% for 1 block + 50% for 1 block + const target1Balance = await graphToken.balanceOf(addresses.target1) + const target2Balance = await graphToken.balanceOf(addresses.target2) + const target3Balance = await graphToken.balanceOf(addresses.target3) + + const expectedTarget1 = (issuancePerBlock * 200_000n * 2n) / MILLION + const expectedTarget2 = (issuancePerBlock * 300_000n) / MILLION + const expectedTarget3 = + issuancePerBlock * 2n + (issuancePerBlock * 800_000n) / MILLION + (issuancePerBlock * 500_000n) / MILLION + + expect(target1Balance).to.equal(expectedTarget1) + expect(target2Balance).to.equal(expectedTarget2) + expect(target3Balance).to.equal(expectedTarget3) + + // Total minted should equal 4 blocks of issuance + const totalMinted = target1Balance + target2Balance + target3Balance + expect(totalMinted).to.equal(issuancePerBlock * 4n) + }) + + it('should handle distribution when default allocation is 0%', async () => { + // Distribute any pending issuance first to start fresh + await issuanceAllocator.distributeIssuance() + + // Default is address(0), which doesn't receive minting + // Allocate 100% to explicit targets + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 600_000n) + // At this point target1 has 60%, default has 40% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, 400_000n) + // Now target1 has 60%, target2 has 40%, default has 0% + + // Distribute (1 block since last setTargetAllocation) + await issuanceAllocator.distributeIssuance() + + // Zero address (default) should receive nothing + const zeroBalance = await graphToken.balanceOf(ethers.ZeroAddress) + expect(zeroBalance).to.equal(0n) + + // Target1 receives: 0% (from first distributeIssuance to first setTargetAllocation) + // + 60% (from first setTargetAllocation to second setTargetAllocation) + // + 60% (from second setTargetAllocation to final distributeIssuance) + // = 120% of one block = 60% * 2 blocks + const target1Balance = await graphToken.balanceOf(addresses.target1) + expect(target1Balance).to.equal((issuancePerBlock * 600_000n * 2n) / MILLION) + + // Target2 receives: 40% (from second setTargetAllocation to final distributeIssuance) + const target2Balance = await graphToken.balanceOf(addresses.target2) + expect(target2Balance).to.equal((issuancePerBlock * 400_000n) / MILLION) + + // Default allocation is now 0% + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationPPM).to.equal(0n) + }) + }) + + describe('View functions', () => { + it('should return correct target count including default', async () => { + let count = await issuanceAllocator.getTargetCount() + expect(count).to.equal(1n) // Just default + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + + count = await issuanceAllocator.getTargetCount() + expect(count).to.equal(2n) // Default + target1 + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, 200_000n) + + count = await issuanceAllocator.getTargetCount() + expect(count).to.equal(3n) // Default + target1 + target2 + }) + + it('should include default in getTargets array', async () => { + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + + const targets = await issuanceAllocator.getTargets() + expect(targets.length).to.equal(2) + expect(targets[0]).to.equal(ethers.ZeroAddress) // Default at index 0 + expect(targets[1]).to.equal(addresses.target1) + }) + + it('should return correct data for default target', async () => { + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 400_000n) + + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const data = await issuanceAllocator.getTargetData(defaultAddress) + + expect(data.allocatorMintingPPM).to.equal(600_000n) + expect(data.selfMintingPPM).to.equal(0n) + }) + + it('should report 100% total allocation when default is a real address', async () => { + // Set target1 allocation first + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + + // Change default to target2 (a real address, not address(0)) + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target2) + + // When default is a real address, it should report 100% total allocation + const totalAllocation = await issuanceAllocator.getTotalAllocation() + expect(totalAllocation.totalAllocationPPM).to.equal(MILLION) + expect(totalAllocation.allocatorMintingPPM).to.equal(MILLION) // target1=30% + target2=70% = 100% + expect(totalAllocation.selfMintingPPM).to.equal(0n) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts index e6ee54260..fcb07ea85 100644 --- a/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts +++ b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts @@ -26,7 +26,7 @@ describe('Allocate Interface ID Stability', () => { }) it('IIssuanceAllocationAdministration should have stable interface ID', () => { - expect(IIssuanceAllocationAdministration__factory.interfaceId).to.equal('0x36759695') + expect(IIssuanceAllocationAdministration__factory.interfaceId).to.equal('0x069d5a27') }) it('IIssuanceAllocationStatus should have stable interface ID', () => { diff --git a/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts b/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts index 8ecc20509..599f9b334 100644 --- a/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts +++ b/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts @@ -74,11 +74,12 @@ describe('IssuanceAllocator', () => { const { issuanceAllocator } = sharedContracts - // Remove all existing allocations + // Remove all existing allocations (except default at index 0) try { const targetCount = await issuanceAllocator.getTargetCount() - for (let i = 0; i < targetCount; i++) { - const targetAddr = await issuanceAllocator.getTargetAt(0) // Always remove first + // Skip index 0 (default allocation) and remove from index 1 onwards + for (let i = 1; i < targetCount; i++) { + const targetAddr = await issuanceAllocator.getTargetAt(1) // Always remove index 1 await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](targetAddr, 0, 0, false) @@ -305,6 +306,7 @@ describe('IssuanceAllocator', () => { const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) expect(target1Allocation.totalAllocationPPM).to.equal(allocation) const totalAlloc = await issuanceAllocator.getTotalAllocation() + // With default as address(0), only non-default allocations are reported expect(totalAlloc.totalAllocationPPM).to.equal(allocation) // Remove target by setting allocation to 0 @@ -312,11 +314,11 @@ describe('IssuanceAllocator', () => { .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 0, false) - // Verify target is removed + // Verify target is removed (only default remains) const targets = await issuanceAllocator.getTargets() - expect(targets.length).to.equal(0) + expect(targets.length).to.equal(1) // Only default allocation - // Verify total allocation is updated + // Verify reported total is 0% (default has it all, but isn't reported) { const totalAlloc = await issuanceAllocator.getTotalAllocation() expect(totalAlloc.totalAllocationPPM).to.equal(0) @@ -341,24 +343,25 @@ describe('IssuanceAllocator', () => { expect(target2Allocation.totalAllocationPPM).to.equal(400000) { const totalAlloc = await issuanceAllocator.getTotalAllocation() + // With default as address(0), only non-default allocations are reported (70%) expect(totalAlloc.totalAllocationPPM).to.equal(700000) } - // Get initial target addresses + // Get initial target addresses (including default) const initialTargets = await issuanceAllocator.getTargets() - expect(initialTargets.length).to.equal(2) + expect(initialTargets.length).to.equal(3) // default + target1 + target2 // Remove target2 by setting allocation to 0 (tests the swap-and-pop logic in the contract) await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 0, false) - // Verify target2 is removed but target1 remains + // Verify target2 is removed but target1 and default remain const remainingTargets = await issuanceAllocator.getTargets() - expect(remainingTargets.length).to.equal(1) - expect(remainingTargets[0]).to.equal(addresses.target1) + expect(remainingTargets.length).to.equal(2) // default + target1 + expect(remainingTargets).to.include(addresses.target1) - // Verify total allocation is updated (only target1's allocation remains) + // Verify reported total excludes default (only target1's 30% is reported) { const totalAlloc = await issuanceAllocator.getTotalAllocation() expect(totalAlloc.totalAllocationPPM).to.equal(300000) @@ -386,7 +389,7 @@ describe('IssuanceAllocator', () => { expect(target1Info.selfMintingPPM).to.equal(0) expect(target2Info.selfMintingPPM).to.equal(0) - // Verify total allocation is updated correctly + // Verify reported total excludes default (only target1+target2's 70% is reported) { const totalAlloc = await issuanceAllocator.getTotalAllocation() expect(totalAlloc.totalAllocationPPM).to.equal(300000) @@ -396,16 +399,7 @@ describe('IssuanceAllocator', () => { it('should validate setTargetAllocation parameters and constraints', async () => { const { issuanceAllocator, addresses } = sharedContracts - // Test 1: Should revert when setting allocation for target with address zero - await expectCustomError( - issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](ethers.ZeroAddress, 100000, 0, false), - issuanceAllocator, - 'TargetAddressCannotBeZero', - ) - - // Test 2: Should revert when setting non-zero allocation for target that does not support IIssuanceTarget + // Test 1: Should revert when setting non-zero allocation for target that does not support IIssuanceTarget const nonExistentTarget = accounts.nonGovernor.address // When trying to set allocation for an EOA, the IERC165 call will revert await expect( @@ -414,13 +408,13 @@ describe('IssuanceAllocator', () => { ['setTargetAllocation(address,uint256,uint256,bool)'](nonExistentTarget, 500_000, 0, false), ).to.be.reverted - // Test 3: Should revert when total allocation would exceed 100% + // Test 2: Should revert when total allocation would exceed 100% // Set allocation for target1 to 60% await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 600_000, 0, false) - // Try to set allocation for target2 to 50%, which would exceed 100% + // Try to set allocation for target2 to 50%, which would exceed 100% (60% + 50% > 100%) await expectCustomError( issuanceAllocator .connect(accounts.governor) @@ -841,13 +835,13 @@ describe('IssuanceAllocator', () => { await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - // Mix of targets: 30% allocator-minting, 70% self-minting + // Mix of targets: 20% allocator-minting, 5% self-minting (leaving 75% for default, total 95% allocator) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 200000, 0, false) // 20% allocator-minting await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 700000, false) // 70% self-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 50000, false) // 5% self-minting // Initialize distribution await issuanceAllocator.connect(accounts.governor).distributeIssuance() @@ -859,24 +853,25 @@ describe('IssuanceAllocator', () => { await ethers.provider.send('evm_mine', []) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 600000, true) // Change self-minting from 70% to 60% + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 0, true) // Change self-minting from 5% to 0% // Accumulation should happen from lastIssuanceDistributionBlock to current block const blockAfterAccumulation = await ethers.provider.getBlockNumber() const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() - const allocation = await issuanceAllocator.getTotalAllocation() // Calculate what accumulation SHOULD be from lastDistributionBlock + // During accumulation: 20% (target1) + 75% (default) = 95% allocator-minting, 5% self-minting + // Accumulated issuance is based on the 95% allocator-minting that was active during accumulation const blocksFromDistribution = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) + const allocatorMintingDuringAccumulation = 950000n // 95% in PPM const expectedFromDistribution = calculateExpectedAccumulation( parseEther('100'), blocksFromDistribution, - allocation.allocatorMintingPPM, + allocatorMintingDuringAccumulation, ) - // This will fail, but we can see which calculation matches the actual result expect(pendingAmount).to.equal(expectedFromDistribution) // Now test distribution of pending issuance to cover the self-minter branch @@ -903,20 +898,20 @@ describe('IssuanceAllocator', () => { await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - // Mix of targets: 20% and 30% allocator-minting (50% total), 50% self-minting + // Mix of targets: 15% and 25% allocator-minting (40% total), 10% self-minting (leaving 50% for default, total 90% allocator) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 200000, 0, false) // 20% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 150000, 0, false) // 15% allocator-minting await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 300000, 0, false) // 30% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 250000, 0, false) // 25% allocator-minting // Add a self-minting target to create the mixed scenario const MockTarget = await ethers.getContractFactory('MockSimpleTarget') const selfMintingTarget = await MockTarget.deploy() await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 500000, false) // 50% self-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 100000, false) // 10% self-minting // Initialize and pause await issuanceAllocator.connect(accounts.governor).distributeIssuance() @@ -930,23 +925,24 @@ describe('IssuanceAllocator', () => { await ethers.provider.send('evm_mine', []) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 400000, true) // Change self-minting from 50% to 40% + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 0, true) // Change self-minting from 10% to 0% // Calculate actual blocks accumulated (from block 0 since lastIssuanceAccumulationBlock starts at 0) const blockAfterAccumulation = await ethers.provider.getBlockNumber() - // Verify accumulation: 50% allocator-minting allocation (500000 PPM) + // Verify accumulation: 90% allocator-minting allocation (150000 + 250000 + 500000 default = 900000 PPM) // Accumulation should happen from lastIssuanceDistributionBlock to current block const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() // Calculate expected accumulation from when issuance was last distributed + // During accumulation: 15% (target1) + 25% (target2) + 50% (default) = 90% allocator-minting, 10% self-minting const blocksToAccumulate = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) - const allocation = await issuanceAllocator.getTotalAllocation() + const allocatorMintingDuringAccumulation = 900000n // 90% in PPM const expectedPending = calculateExpectedAccumulation( parseEther('1000'), blocksToAccumulate, - allocation.allocatorMintingPPM, + allocatorMintingDuringAccumulation, ) expect(pendingAmount).to.equal(expectedPending) @@ -959,11 +955,11 @@ describe('IssuanceAllocator', () => { const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) // Calculate expected distributions: - // Total allocator-minting allocation: 200000 + 300000 = 500000 - // target1 should get: 2000 * (200000 / 500000) = 800 tokens from pending (doubled due to known issue) - // target2 should get: 2000 * (300000 / 500000) = 1200 tokens from pending (doubled due to known issue) - const expectedTarget1Pending = ethers.parseEther('800') - const expectedTarget2Pending = ethers.parseEther('1200') + // Total allocator-minting allocation after change: 150000 + 250000 + 600000 (default) = 1000000 (100%) + // target1 should get: 2000 * (150000 / 1000000) = 300 tokens from pending (doubled due to known issue) + // target2 should get: 2000 * (250000 / 1000000) = 500 tokens from pending (doubled due to known issue) + const expectedTarget1Pending = ethers.parseEther('300') + const expectedTarget2Pending = ethers.parseEther('500') // Account for any additional issuance from the distribution block itself const pendingDistribution1 = finalBalance1 - initialBalance1 @@ -974,10 +970,10 @@ describe('IssuanceAllocator', () => { expect(pendingDistribution1).to.be.gte(expectedTarget1Pending) expect(pendingDistribution2).to.be.gte(expectedTarget2Pending) - // Verify the ratio is correct: target2 should get 1.5x what target1 gets from pending - // (300000 / 200000 = 1.5) + // Verify the ratio is correct: target2 should get 1.67x what target1 gets from pending + // (250000 / 150000 = 1.67) const ratio = (BigInt(pendingDistribution2) * 1000n) / BigInt(pendingDistribution1) // Multiply by 1000 for precision - expect(ratio).to.be.closeTo(1500n, 50n) // Allow small rounding tolerance + expect(ratio).to.be.closeTo(1667n, 100n) // Allow larger tolerance due to default allocation adjustments // Verify pending was reset expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) @@ -991,13 +987,13 @@ describe('IssuanceAllocator', () => { await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - // Allocator-minting targets: 40% and 60%, plus a small self-minting target initially + // Allocator-minting targets: 30% and 50%, plus a small self-minting target initially (leaving 19% for default) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 400000, 0, false) // 40% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% allocator-minting await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 590000, 10000, false) // 59% allocator-minting, 1% self-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 500000, 10000, false) // 50% allocator-minting, 1% self-minting // Initialize and pause await issuanceAllocator.connect(accounts.governor).distributeIssuance() @@ -1012,19 +1008,19 @@ describe('IssuanceAllocator', () => { await ethers.provider.send('evm_mine', []) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 600000, 0, true) // Remove self-minting, now 100% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 510000, 0, true) // Remove self-minting (now 51% allocator-minting, leaving 19% for default) // Calculate actual blocks accumulated (from block 0 since lastIssuanceAccumulationBlock starts at 0) const blockAfterAccumulation = await ethers.provider.getBlockNumber() - // Verify accumulation: should use the OLD allocation (99% allocator-minting) that was active during pause - // Accumulation happens BEFORE the allocation change, so uses 40% + 59% = 99% + // Verify accumulation: should use the OLD allocation (80% allocator-minting) that was active during pause + // Accumulation happens BEFORE the allocation change, so uses 30% + 50% + 19% default = 99% allocator-minting, 1% self-minting const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() // Calculate expected accumulation using the OLD allocation (before the change) const blocksToAccumulate = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) - const oldAllocatorMintingPPM = 400000n + 590000n // 40% + 59% = 99% + const oldAllocatorMintingPPM = 300000n + 500000n + 190000n // 30% + 50% + 19% default = 99% const expectedPending = calculateExpectedAccumulation( parseEther('1000'), blocksToAccumulate, @@ -1041,11 +1037,11 @@ describe('IssuanceAllocator', () => { const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) // Calculate expected distributions: - // Total allocator-minting allocation: 400000 + 600000 = 1000000 (100%) - // target1 should get: 5000 * (400000 / 1000000) = 2000 tokens from pending - // target2 should get: 5000 * (600000 / 1000000) = 3000 tokens from pending - const expectedTarget1Pending = ethers.parseEther('2000') - const expectedTarget2Pending = ethers.parseEther('3000') + // Total allocator-minting allocation: 300000 + 510000 + 190000 = 1000000 (100%) + // target1 should get: 5000 * (300000 / 1000000) = 1500 tokens from pending + // target2 should get: 5000 * (510000 / 1000000) = 2550 tokens from pending + const expectedTarget1Pending = ethers.parseEther('1500') + const expectedTarget2Pending = ethers.parseEther('2550') // Account for any additional issuance from the distribution block itself const pendingDistribution1 = finalBalance1 - initialBalance1 @@ -1055,10 +1051,10 @@ describe('IssuanceAllocator', () => { expect(pendingDistribution1).to.be.gte(expectedTarget1Pending) expect(pendingDistribution2).to.be.gte(expectedTarget2Pending) - // Verify the ratio is correct: target2 should get 1.5x what target1 gets from pending - // (600000 / 400000 = 1.5) + // Verify the ratio is correct: target2 should get 1.7x what target1 gets from pending + // (510000 / 300000 = 1.7) const ratio = (BigInt(pendingDistribution2) * 1000n) / BigInt(pendingDistribution1) // Multiply by 1000 for precision - expect(ratio).to.be.closeTo(1500n, 50n) // Allow small rounding tolerance + expect(ratio).to.be.closeTo(1700n, 50n) // Allow small rounding tolerance // Verify pending was reset expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) @@ -1076,13 +1072,13 @@ describe('IssuanceAllocator', () => { const MockTarget = await ethers.getContractFactory('MockSimpleTarget') const target3 = await MockTarget.deploy() - // Mix of targets: 30% + 20% + 10% allocator-minting (60% total), 40% self-minting + // Mix of targets: 25% + 15% + 10% allocator-minting (50% total), 20% self-minting (leaving 30% for default, total 80% allocator) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 250000, 0, false) // 25% allocator-minting await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) // 20% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 150000, 0, false) // 15% allocator-minting await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 100000, 0, false) // 10% allocator-minting @@ -1091,7 +1087,7 @@ describe('IssuanceAllocator', () => { const selfMintingTarget = await MockTarget.deploy() await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 400000, false) // 40% self-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 200000, false) // 20% self-minting // Initialize and pause await issuanceAllocator.connect(accounts.governor).distributeIssuance() @@ -1107,23 +1103,24 @@ describe('IssuanceAllocator', () => { } await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 300000, true) // Change self-minting from 40% to 30% + ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 100000, true) // Change self-minting from 20% to 10% // Calculate actual blocks accumulated (from block 0 since lastIssuanceAccumulationBlock starts at 0) const blockAfterAccumulation = await ethers.provider.getBlockNumber() - // Calculate expected total accumulation: 60% allocator-minting allocation (600000 PPM) + // Calculate expected total accumulation: 80% allocator-minting allocation (25% + 15% + 10% + 30% default = 800000 PPM) // Accumulation should happen from lastIssuanceDistributionBlock to current block const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() // Calculate expected accumulation from when issuance was last distributed + // During accumulation: 25% (target1) + 15% (target2) + 10% (target3) + 30% (default) = 80% allocator-minting, 20% self-minting const blocksToAccumulate = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) - const allocation = await issuanceAllocator.getTotalAllocation() + const allocatorMintingDuringAccumulation = 800000n // 80% in PPM const expectedPending = calculateExpectedAccumulation( parseEther('1000'), blocksToAccumulate, - allocation.allocatorMintingPPM, + allocatorMintingDuringAccumulation, ) expect(pendingAmount).to.equal(expectedPending) @@ -1142,19 +1139,22 @@ describe('IssuanceAllocator', () => { const totalDistributed = distribution1 + distribution2 + distribution3 // Verify total distributed amount is reasonable - // Should be at least the pending amount (might be more due to additional block issuance) - expect(totalDistributed).to.be.gte(pendingAmount) + // The three explicit targets get 50% of total allocation, default gets 30% + // So they should receive (50/80) = 62.5% of pending allocator-minting issuance + // Plus additional issuance from blocks between accumulation and distribution + const expectedMinimumToThreeTargets = (pendingAmount * 50n) / 80n + expect(totalDistributed).to.be.gte(expectedMinimumToThreeTargets) // Verify proportional distribution within allocator-minting targets - // Total allocator-minting allocation: 300000 + 200000 + 100000 = 600000 - // Expected ratios: target1:target2:target3 = 30:20:10 = 3:2:1 - const ratio12 = (BigInt(distribution1) * 1000n) / BigInt(distribution2) // Should be ~1500 (3/2 * 1000) - const ratio13 = (BigInt(distribution1) * 1000n) / BigInt(distribution3) // Should be ~3000 (3/1 * 1000) - const ratio23 = (BigInt(distribution2) * 1000n) / BigInt(distribution3) // Should be ~2000 (2/1 * 1000) + // Actual allocations: target1=25%, target2=15%, target3=10% + // Expected ratios: target1:target2:target3 = 25:15:10 = 5:3:2 + const ratio12 = (BigInt(distribution1) * 1000n) / BigInt(distribution2) // Should be ~1667 (5/3 * 1000) + const ratio13 = (BigInt(distribution1) * 1000n) / BigInt(distribution3) // Should be ~2500 (5/2 * 1000) + const ratio23 = (BigInt(distribution2) * 1000n) / BigInt(distribution3) // Should be ~1500 (3/2 * 1000) - expect(ratio12).to.be.closeTo(1500n, 100n) // 3:2 ratio with tolerance - expect(ratio13).to.be.closeTo(3000n, 200n) // 3:1 ratio with tolerance - expect(ratio23).to.be.closeTo(2000n, 150n) // 2:1 ratio with tolerance + expect(ratio12).to.be.closeTo(1667n, 100n) // 5:3 ratio with tolerance + expect(ratio13).to.be.closeTo(2500n, 200n) // 5:2 ratio with tolerance + expect(ratio23).to.be.closeTo(1500n, 150n) // 3:2 ratio with tolerance // Verify pending was reset expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) @@ -2027,11 +2027,11 @@ describe('IssuanceAllocator', () => { .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](nonExistentTarget, 0, 0, false) - // Verify no targets were added + // Verify no non-default targets were added (only default remains) const targets = await issuanceAllocator.getTargets() - expect(targets.length).to.equal(0) + expect(targets.length).to.equal(1) // Only default allocation - // Verify total allocation remains 0 + // Verify reported total is 0% (all in default, which isn't reported) const totalAlloc = await issuanceAllocator.getTotalAllocation() expect(totalAlloc.totalAllocationPPM).to.equal(0) @@ -2040,9 +2040,9 @@ describe('IssuanceAllocator', () => { .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](nonExistentTarget, 0, 0, false) - // Verify still no targets + // Verify still only default target const targetsAfter = await issuanceAllocator.getTargets() - expect(targetsAfter.length).to.equal(0) + expect(targetsAfter.length).to.equal(1) // Only default allocation }) }) @@ -2067,24 +2067,24 @@ describe('IssuanceAllocator', () => { it('should manage target count and array correctly', async () => { const { issuanceAllocator, addresses } = sharedContracts - // Test initial state - expect(await issuanceAllocator.getTargetCount()).to.equal(0) - expect((await issuanceAllocator.getTargets()).length).to.equal(0) + // Test initial state (with default allocation) + expect(await issuanceAllocator.getTargetCount()).to.equal(1) // Default allocation exists + expect((await issuanceAllocator.getTargets()).length).to.equal(1) // Test adding targets await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) - expect(await issuanceAllocator.getTargetCount()).to.equal(1) + expect(await issuanceAllocator.getTargetCount()).to.equal(2) // Default + target1 await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 200000, 0, false) - expect(await issuanceAllocator.getTargetCount()).to.equal(2) + expect(await issuanceAllocator.getTargetCount()).to.equal(3) // Default + target1 + target2 // Test getTargets array content const targetAddresses = await issuanceAllocator.getTargets() - expect(targetAddresses.length).to.equal(2) + expect(targetAddresses.length).to.equal(3) expect(targetAddresses).to.include(addresses.target1) expect(targetAddresses).to.include(addresses.target2) @@ -2092,13 +2092,13 @@ describe('IssuanceAllocator', () => { await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 0, false) - expect(await issuanceAllocator.getTargetCount()).to.equal(1) + expect(await issuanceAllocator.getTargetCount()).to.equal(2) // Default + target2 await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 0, false) - expect(await issuanceAllocator.getTargetCount()).to.equal(0) - expect((await issuanceAllocator.getTargets()).length).to.equal(0) + expect(await issuanceAllocator.getTargetCount()).to.equal(1) // Only default remains + expect((await issuanceAllocator.getTargets()).length).to.equal(1) }) it('should store targets in the getTargets array in correct order', async () => { @@ -2116,9 +2116,11 @@ describe('IssuanceAllocator', () => { const targetAddresses = await issuanceAllocator.getTargets() // Check that the addresses are in the correct order - expect(targetAddresses[0]).to.equal(addresses.target1) - expect(targetAddresses[1]).to.equal(addresses.target2) - expect(targetAddresses.length).to.equal(2) + // targetAddresses[0] is the default allocation (address(0)) + expect(targetAddresses[0]).to.equal(ethers.ZeroAddress) // Default + expect(targetAddresses[1]).to.equal(addresses.target1) + expect(targetAddresses[2]).to.equal(addresses.target2) + expect(targetAddresses.length).to.equal(3) // Default + target1 + target2 }) it('should return the correct target address by index', async () => { @@ -2140,17 +2142,20 @@ describe('IssuanceAllocator', () => { // Get all target addresses const addresses = await issuanceAllocator.getTargets() - expect(addresses.length).to.equal(3) + expect(addresses.length).to.equal(4) // Default + 3 targets // Check that the addresses are in the correct order - expect(addresses[0]).to.equal(await target1.getAddress()) - expect(addresses[1]).to.equal(await target2.getAddress()) - expect(addresses[2]).to.equal(await target3.getAddress()) + // addresses[0] is the default allocation (address(0)) + expect(addresses[0]).to.equal(ethers.ZeroAddress) // Default + expect(addresses[1]).to.equal(await target1.getAddress()) + expect(addresses[2]).to.equal(await target2.getAddress()) + expect(addresses[3]).to.equal(await target3.getAddress()) // Test getTargetAt method for individual access - expect(await issuanceAllocator.getTargetAt(0)).to.equal(await target1.getAddress()) - expect(await issuanceAllocator.getTargetAt(1)).to.equal(await target2.getAddress()) - expect(await issuanceAllocator.getTargetAt(2)).to.equal(await target3.getAddress()) + expect(await issuanceAllocator.getTargetAt(0)).to.equal(ethers.ZeroAddress) // Default + expect(await issuanceAllocator.getTargetAt(1)).to.equal(await target1.getAddress()) + expect(await issuanceAllocator.getTargetAt(2)).to.equal(await target2.getAddress()) + expect(await issuanceAllocator.getTargetAt(3)).to.equal(await target3.getAddress()) }) it('should return the correct target allocation', async () => { @@ -2958,20 +2963,20 @@ describe('IssuanceAllocator', () => { await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - // Test scenario: 25% allocator-minting + 50% self-minting + 25% unallocated + // Test scenario: 20% allocator-minting + 40% self-minting (leaving 40% for default) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 250000, 0, false) // 25% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 200000, 0, false) // 20% allocator-minting await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 500000, false) // 50% self-minting - // 25% remains unallocated + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 400000, false) // 40% self-minting + // 40% goes to default allocation // Verify the setup const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.totalAllocationPPM).to.equal(750000) // 75% total - expect(totalAllocation.allocatorMintingPPM).to.equal(250000) // 25% allocator - expect(totalAllocation.selfMintingPPM).to.equal(500000) // 50% self + expect(totalAllocation.totalAllocationPPM).to.equal(600000) // 60% reported (excludes default's 40%) + expect(totalAllocation.allocatorMintingPPM).to.equal(200000) // 20% allocator (excludes default's 40%) + expect(totalAllocation.selfMintingPPM).to.equal(400000) // 40% self // Distribute once to initialize await issuanceAllocator.connect(accounts.governor).distributeIssuance() @@ -3004,9 +3009,9 @@ describe('IssuanceAllocator', () => { expect(distributed2).to.equal(0) // Target1 should receive the correct proportional amount - // The calculation is: (pendingAmount * 250000) / (1000000 - 500000) = (pendingAmount * 250000) / 500000 = pendingAmount * 0.5 - // So target1 should get exactly 50% of the pending amount - const expectedDistribution = pendingBefore / 2n // 50% of pending + // The calculation is: (pendingAmount * 200000) / (1000000 - 400000) = (pendingAmount * 200000) / 600000 = pendingAmount * 1/3 + // So target1 should get exactly 33.33% of the pending amount + const expectedDistribution = (pendingBefore * 200000n) / 600000n // 33.33% of pending expect(distributed1).to.be.closeTo(expectedDistribution, ethers.parseEther('1')) // Verify pending issuance was reset @@ -3021,22 +3026,22 @@ describe('IssuanceAllocator', () => { await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - // Test scenario: 15% + 10% allocator-minting + 50% self-minting + 25% unallocated + // Test scenario: 12% + 8% allocator-minting + 40% self-minting (leaving 40% for default) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 150000, 0, false) // 15% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 120000, 0, false) // 12% allocator-minting await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 100000, 0, false) // 10% allocator-minting + ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 80000, 0, false) // 8% allocator-minting await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 0, 500000, false) // 50% self-minting - // 25% remains unallocated + ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 0, 400000, false) // 40% self-minting + // 40% goes to default allocation // Verify the setup const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.allocatorMintingPPM).to.equal(250000) // 25% total allocator - expect(totalAllocation.selfMintingPPM).to.equal(500000) // 50% self + expect(totalAllocation.allocatorMintingPPM).to.equal(200000) // 12% + 8% = 20% (excludes default's 40%) + expect(totalAllocation.selfMintingPPM).to.equal(400000) // 40% self // Distribute once to initialize await issuanceAllocator.connect(accounts.governor).distributeIssuance() @@ -3072,21 +3077,21 @@ describe('IssuanceAllocator', () => { expect(distributed3).to.equal(0) // Verify proportional distribution between allocator-minting targets - // Target1 should get 15/25 = 60% of the distributed amount - // Target2 should get 10/25 = 40% of the distributed amount + // Target1 should get 12/20 = 60% of the distributed amount + // Target2 should get 8/20 = 40% of the distributed amount if (distributed1 > 0 && distributed2 > 0) { const ratio = (BigInt(distributed1) * 1000n) / BigInt(distributed2) // Multiply by 1000 for precision - expect(ratio).to.be.closeTo(1500n, 50n) // 150000/100000 = 1.5 + expect(ratio).to.be.closeTo(1500n, 50n) // 120000/80000 = 1.5 } // Total distributed should equal the allocator-minting portion of pending - // With 25% total allocator-minting out of 50% allocator-minting space: + // With 20% total allocator-minting (12% + 8%) out of 60% allocator-minting space (20% + 40% default): // Each target gets: (targetPPM / (MILLION - selfMintingPPM)) * pendingAmount - // Target1: (150000 / 500000) * pendingAmount = 30% of pending - // Target2: (100000 / 500000) * pendingAmount = 20% of pending - // Total: 50% of pending + // Target1: (120000 / 600000) * pendingAmount = 20% of pending + // Target2: (80000 / 600000) * pendingAmount = 13.33% of pending + // Total: 33.33% of pending const totalDistributed = distributed1 + distributed2 - const expectedTotal = pendingBefore / 2n // 50% of pending + const expectedTotal = (pendingBefore * 200000n) / 600000n // 33.33% of pending expect(totalDistributed).to.be.closeTo(expectedTotal, ethers.parseEther('1')) }) }) diff --git a/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts b/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts index 5a2de54aa..77645546a 100644 --- a/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts +++ b/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts @@ -84,13 +84,11 @@ describe('Issuance System', () => { // Set up initial allocations using helper await system.helpers.setupStandardAllocations() - // Verify initial total allocation (30% + 40% = 70%) + // Verify initial total allocation (excludes default since it's address(0)) const totalAlloc = await contracts.issuanceAllocator.getTotalAllocation() - expect(totalAlloc.totalAllocationPPM).to.equal( - TestConstants.ALLOCATION_30_PERCENT + TestConstants.ALLOCATION_40_PERCENT, - ) + expect(totalAlloc.totalAllocationPPM).to.equal(700000) // 70% (30% + 40%, excludes default) - // Change allocations: target1 = 50%, target2 = 20% (still 70%) + // Change allocations: target1 = 50%, target2 = 20% (30% goes to default) await contracts.issuanceAllocator .connect(accounts.governor) [ @@ -102,11 +100,9 @@ describe('Issuance System', () => { 'setTargetAllocation(address,uint256,uint256,bool)' ](addresses.target2, TestConstants.ALLOCATION_20_PERCENT, 0, false) - // Verify updated allocations + // Verify updated allocations (excludes default since it's address(0)) const updatedTotalAlloc = await contracts.issuanceAllocator.getTotalAllocation() - expect(updatedTotalAlloc.totalAllocationPPM).to.equal( - TestConstants.ALLOCATION_50_PERCENT + TestConstants.ALLOCATION_20_PERCENT, - ) + expect(updatedTotalAlloc.totalAllocationPPM).to.equal(700000) // 70% (50% + 20%, excludes default) // Verify individual target allocations const target1Info = await contracts.issuanceAllocator.getTargetData(addresses.target1) diff --git a/packages/issuance/test/tests/allocate/optimizedFixtures.ts b/packages/issuance/test/tests/allocate/optimizedFixtures.ts index 22f407f7d..6ded870a1 100644 --- a/packages/issuance/test/tests/allocate/optimizedFixtures.ts +++ b/packages/issuance/test/tests/allocate/optimizedFixtures.ts @@ -73,14 +73,20 @@ export async function setupOptimizedAllocateSystem(customOptions: any = {}) { helpers: { // Helper to reset state without redeploying resetState: async () => { - // Remove all targets + // Remove all targets except the default at index 0 const targets = await issuanceAllocator.getTargets() + const defaultAddress = await issuanceAllocator.getTargetAt(0) for (const targetAddr of targets) { + // Skip the default allocation target + if (targetAddr === defaultAddress) continue await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256,uint256,bool)'](targetAddr, 0, 0, false) } + // Reset default allocation to address(0) with 100% + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(ethers.ZeroAddress) + // Reset issuance rate await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(options.issuancePerBlock, false) }, From 13a77e4d92dea53d5fa85a644d052f70213e0d8a Mon Sep 17 00:00:00 2001 From: Rembrandt Kuipers <50174308+RembrandtK@users.noreply.github.com> Date: Fri, 12 Dec 2025 17:44:41 +0000 Subject: [PATCH 4/9] feat: optional issunace reclaim addresses for Rewards Manager Added two configurable reclaim addresses: - indexerEligibilityReclaimAddress - subgraphDeniedReclaimAddress When rewards are denied and the reclaim address is set (non-zero), tokens are minted to that address instead of not being minted at all. Defaults to address(0) (original behavior). Also updated associated documentation and tests. --- .../contracts/rewards/RewardsManager.sol | 147 ++++++++- .../rewards/RewardsManagerStorage.sol | 6 +- .../unit/rewards/rewards-interface.test.ts | 2 +- .../unit/rewards/rewards-reclaim.test.ts | 299 ++++++++++++++++++ .../contracts/rewards/IRewardsManager.sol | 14 + .../test/unit/mocks/MockRewardsManager.sol | 6 + 6 files changed, 455 insertions(+), 19 deletions(-) create mode 100644 packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts diff --git a/packages/contracts/contracts/rewards/RewardsManager.sol b/packages/contracts/contracts/rewards/RewardsManager.sol index 458893308..3f45073c4 100644 --- a/packages/contracts/contracts/rewards/RewardsManager.sol +++ b/packages/contracts/contracts/rewards/RewardsManager.sol @@ -108,6 +108,46 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I address indexed newRewardsEligibilityOracle ); + /** + * @notice Emitted when the eligibility reclaim address is set + * @param oldReclaimAddress Previous eligibility reclaim address + * @param newReclaimAddress New eligibility reclaim address + */ + event IndexerEligibilityReclaimAddressSet(address indexed oldReclaimAddress, address indexed newReclaimAddress); + + /** + * @notice Emitted when the subgraph reclaim address is set + * @param oldReclaimAddress Previous subgraph reclaim address + * @param newReclaimAddress New subgraph reclaim address + */ + event SubgraphDeniedReclaimAddressSet(address indexed oldReclaimAddress, address indexed newReclaimAddress); + + /** + * @notice Emitted when denied rewards are reclaimed due to eligibility + * @param indexer Address of the indexer whose rewards were denied + * @param allocationID Address of the allocation + * @param amount Amount of rewards reclaimed + */ + event RewardsReclaimedDueToIndexerEligibility( + address indexed indexer, + address indexed allocationID, + uint256 amount + ); + + /** + * @notice Emitted when denied rewards are reclaimed due to subgraph denylist + * @param indexer Address of the indexer whose rewards were denied + * @param allocationID Address of the allocation + * @param subgraphDeploymentID Subgraph deployment ID that was denied + * @param amount Amount of rewards reclaimed + */ + event RewardsReclaimedDueToSubgraphDenylist( + address indexed indexer, + address indexed allocationID, + bytes32 indexed subgraphDeploymentID, + uint256 amount + ); + // -- Modifiers -- /** @@ -264,6 +304,32 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I } } + /** + * @inheritdoc IRewardsManager + * @dev Set to zero address to disable eligibility reclaim functionality + */ + function setIndexerEligibilityReclaimAddress(address newReclaimAddress) external override onlyGovernor { + address oldReclaimAddress = indexerEligibilityReclaimAddress; + + if (oldReclaimAddress != newReclaimAddress) { + indexerEligibilityReclaimAddress = newReclaimAddress; + emit IndexerEligibilityReclaimAddressSet(oldReclaimAddress, newReclaimAddress); + } + } + + /** + * @inheritdoc IRewardsManager + * @dev Set to zero address to disable subgraph reclaim functionality + */ + function setSubgraphDeniedReclaimAddress(address newReclaimAddress) external override onlyGovernor { + address oldReclaimAddress = subgraphDeniedReclaimAddress; + + if (oldReclaimAddress != newReclaimAddress) { + subgraphDeniedReclaimAddress = newReclaimAddress; + emit SubgraphDeniedReclaimAddressSet(oldReclaimAddress, newReclaimAddress); + } + } + // -- Denylist -- /** @@ -494,6 +560,60 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I return newAccrued.mul(_tokens).div(FIXED_POINT_SCALING_FACTOR); } + /** + * @notice Checks for and handles denial and reclaim of rewards due to subgraph deny list + * @dev If denied, emits RewardsDenied event and mints to reclaim address if configured + * @param indexer Address of the indexer + * @param allocationID Address of the allocation + * @param subgraphDeploymentID Subgraph deployment ID + * @param rewards Amount of rewards that would be distributed + * @return True if rewards are denied, false otherwise + */ + function _rewardsDeniedDueToSubgraphDenyList( + address indexer, + address allocationID, + bytes32 subgraphDeploymentID, + uint256 rewards + ) private returns (bool) { + if (isDenied(subgraphDeploymentID)) { + emit RewardsDenied(indexer, allocationID); + + // If a reclaim address is set, mint the denied rewards there + if (0 < rewards && subgraphDeniedReclaimAddress != address(0)) { + graphToken().mint(subgraphDeniedReclaimAddress, rewards); + emit RewardsReclaimedDueToSubgraphDenylist(indexer, allocationID, subgraphDeploymentID, rewards); + } + return true; + } + return false; + } + + /** + * @notice Checks for and handles denial and reclaim of rewards due to indexer eligibility + * @dev If denied, emits RewardsDeniedDueToEligibility event and mints to reclaim address if configured + * @param indexer Address of the indexer + * @param allocationID Address of the allocation + * @param rewards Amount of rewards that would be distributed + * @return True if rewards are denied, false otherwise + */ + function _rewardsDeniedDueToIndexerEligibility( + address indexer, + address allocationID, + uint256 rewards + ) private returns (bool) { + if (address(rewardsEligibilityOracle) != address(0) && !rewardsEligibilityOracle.isEligible(indexer)) { + emit RewardsDeniedDueToEligibility(indexer, allocationID, rewards); + + // If a reclaim address is set, mint the denied rewards there + if (0 < rewards && indexerEligibilityReclaimAddress != address(0)) { + graphToken().mint(indexerEligibilityReclaimAddress, rewards); + emit RewardsReclaimedDueToIndexerEligibility(indexer, allocationID, rewards); + } + return true; + } + return false; + } + /** * @inheritdoc IRewardsManager * @dev This function can only be called by an authorized rewards issuer which are @@ -518,31 +638,24 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I uint256 updatedAccRewardsPerAllocatedToken = onSubgraphAllocationUpdate(subgraphDeploymentID); - // Do not do rewards on denied subgraph deployments ID - if (isDenied(subgraphDeploymentID)) { - emit RewardsDenied(indexer, _allocationID); - return 0; - } - uint256 rewards = 0; if (isActive) { // Calculate rewards accrued by this allocation rewards = accRewardsPending.add( _calcRewards(tokens, accRewardsPerAllocatedToken, updatedAccRewardsPerAllocatedToken) ); + } - // Do not reward if indexer is not eligible based on rewards eligibility - if (address(rewardsEligibilityOracle) != address(0) && !rewardsEligibilityOracle.isEligible(indexer)) { - emit RewardsDeniedDueToEligibility(indexer, _allocationID, rewards); - return 0; - } + if (_rewardsDeniedDueToSubgraphDenyList(indexer, _allocationID, subgraphDeploymentID, rewards)) return 0; - if (rewards > 0) { - // Mint directly to rewards issuer for the reward amount - // The rewards issuer contract will do bookkeeping of the reward and - // assign in proportion to each stakeholder incentive - graphToken().mint(rewardsIssuer, rewards); - } + if (_rewardsDeniedDueToIndexerEligibility(indexer, _allocationID, rewards)) return 0; + + // Mint rewards to the rewards issuer + if (rewards > 0) { + // Mint directly to rewards issuer for the reward amount + // The rewards issuer contract will do bookkeeping of the reward and + // assign in proportion to each stakeholder incentive + graphToken().mint(rewardsIssuer, rewards); } emit HorizonRewardsAssigned(indexer, _allocationID, rewards); diff --git a/packages/contracts/contracts/rewards/RewardsManagerStorage.sol b/packages/contracts/contracts/rewards/RewardsManagerStorage.sol index 63897f431..c1d9c37dd 100644 --- a/packages/contracts/contracts/rewards/RewardsManagerStorage.sol +++ b/packages/contracts/contracts/rewards/RewardsManagerStorage.sol @@ -83,11 +83,15 @@ contract RewardsManagerV5Storage is RewardsManagerV4Storage { * @title RewardsManagerV6Storage * @author Edge & Node * @notice Storage layout for RewardsManager V6 - * Includes support for Rewards Eligibility Oracle and Issuance Allocator. + * Includes support for Rewards Eligibility Oracle, Issuance Allocator, and reclaim addresses. */ contract RewardsManagerV6Storage is RewardsManagerV5Storage { /// @notice Address of the rewards eligibility oracle contract IRewardsEligibility public rewardsEligibilityOracle; /// @notice Address of the issuance allocator IIssuanceAllocationDistribution public issuanceAllocator; + /// @notice Address to receive tokens denied due to indexer eligibility checks, set to zero to disable + address public indexerEligibilityReclaimAddress; + /// @notice Address to receive tokens denied due to subgraph denylist, set to zero to disable + address public subgraphDeniedReclaimAddress; } diff --git a/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts index 3a9b7c23b..d7db40458 100644 --- a/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts +++ b/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts @@ -57,7 +57,7 @@ describe('RewardsManager interfaces', () => { }) it('IRewardsManager should have stable interface ID', () => { - expect(IRewardsManager__factory.interfaceId).to.equal('0xa31d8306') + expect(IRewardsManager__factory.interfaceId).to.equal('0x731e44f0') }) }) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts new file mode 100644 index 000000000..d7071840f --- /dev/null +++ b/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts @@ -0,0 +1,299 @@ +import { Curation } from '@graphprotocol/contracts' +import { EpochManager } from '@graphprotocol/contracts' +import { GraphToken } from '@graphprotocol/contracts' +import { IStaking } from '@graphprotocol/contracts' +import { RewardsManager } from '@graphprotocol/contracts' +import { deriveChannelKey, GraphNetworkContracts, helpers, randomHexBytes, toGRT } from '@graphprotocol/sdk' +import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { expect } from 'chai' +import { constants } from 'ethers' +import hre from 'hardhat' + +import { NetworkFixture } from '../lib/fixtures' + +const { HashZero } = constants + +describe('Rewards - Reclaim Addresses', () => { + const graph = hre.graph() + let curator1: SignerWithAddress + let governor: SignerWithAddress + let indexer1: SignerWithAddress + let reclaimWallet: SignerWithAddress + + let fixture: NetworkFixture + + let contracts: GraphNetworkContracts + let grt: GraphToken + let curation: Curation + let epochManager: EpochManager + let staking: IStaking + let rewardsManager: RewardsManager + + // Derive channel key for indexer used to sign attestations + const channelKey1 = deriveChannelKey() + + const subgraphDeploymentID1 = randomHexBytes() + + const allocationID1 = channelKey1.address + + const metadata = HashZero + + const ISSUANCE_PER_BLOCK = toGRT('200') // 200 GRT every block + + async function setupIndexerAllocation() { + // Setup + await epochManager.connect(governor).setEpochLength(10) + + // Update total signalled + const signalled1 = toGRT('1500') + await curation.connect(curator1).mint(subgraphDeploymentID1, signalled1, 0) + + // Allocate + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + } + + before(async function () { + const testAccounts = await graph.getTestAccounts() + curator1 = testAccounts[0] + indexer1 = testAccounts[1] + reclaimWallet = testAccounts[2] + ;({ governor } = await graph.getNamedAccounts()) + + fixture = new NetworkFixture(graph.provider) + contracts = await fixture.load(governor) + grt = contracts.GraphToken as GraphToken + curation = contracts.Curation as Curation + epochManager = contracts.EpochManager + staking = contracts.Staking as IStaking + rewardsManager = contracts.RewardsManager + + // 200 GRT per block + await rewardsManager.connect(governor).setIssuancePerBlock(ISSUANCE_PER_BLOCK) + + // Distribute test funds + for (const wallet of [indexer1, curator1]) { + await grt.connect(governor).mint(wallet.address, toGRT('1000000')) + await grt.connect(wallet).approve(staking.address, toGRT('1000000')) + await grt.connect(wallet).approve(curation.address, toGRT('1000000')) + } + }) + + beforeEach(async function () { + await fixture.setUp() + }) + + afterEach(async function () { + await fixture.tearDown() + }) + + describe('setIndexerEligibilityReclaimAddress', function () { + it('should reject if not governor', async function () { + const tx = rewardsManager.connect(indexer1).setIndexerEligibilityReclaimAddress(reclaimWallet.address) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set eligibility reclaim address if governor', async function () { + const tx = rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) + await expect(tx) + .emit(rewardsManager, 'IndexerEligibilityReclaimAddressSet') + .withArgs(constants.AddressZero, reclaimWallet.address) + + expect(await rewardsManager.indexerEligibilityReclaimAddress()).eq(reclaimWallet.address) + }) + + it('should allow setting to zero address', async function () { + await rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) + + const tx = rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(constants.AddressZero) + await expect(tx) + .emit(rewardsManager, 'IndexerEligibilityReclaimAddressSet') + .withArgs(reclaimWallet.address, constants.AddressZero) + + expect(await rewardsManager.indexerEligibilityReclaimAddress()).eq(constants.AddressZero) + }) + + it('should not emit event when setting same address', async function () { + await rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) + + const tx = rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) + await expect(tx).to.not.emit(rewardsManager, 'IndexerEligibilityReclaimAddressSet') + }) + }) + + describe('setSubgraphDeniedReclaimAddress', function () { + it('should reject if not governor', async function () { + const tx = rewardsManager.connect(indexer1).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + await expect(tx).revertedWith('Only Controller governor') + }) + + it('should set subgraph reclaim address if governor', async function () { + const tx = rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + await expect(tx) + .emit(rewardsManager, 'SubgraphDeniedReclaimAddressSet') + .withArgs(constants.AddressZero, reclaimWallet.address) + + expect(await rewardsManager.subgraphDeniedReclaimAddress()).eq(reclaimWallet.address) + }) + + it('should allow setting to zero address', async function () { + await rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + + const tx = rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(constants.AddressZero) + await expect(tx) + .emit(rewardsManager, 'SubgraphDeniedReclaimAddressSet') + .withArgs(reclaimWallet.address, constants.AddressZero) + + expect(await rewardsManager.subgraphDeniedReclaimAddress()).eq(constants.AddressZero) + }) + + it('should not emit event when setting same address', async function () { + await rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + + const tx = rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + await expect(tx).to.not.emit(rewardsManager, 'SubgraphDeniedReclaimAddressSet') + }) + }) + + describe('reclaim denied rewards - subgraph denylist', function () { + it('should mint to reclaim address when subgraph denied and reclaim address set', async function () { + // Setup reclaim address + await rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Calculate expected rewards + const expectedRewards = toGRT('1400') + + // Check reclaim wallet balance before + const balanceBefore = await grt.balanceOf(reclaimWallet.address) + + // Close allocation - should emit both denial and reclaim events + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx) + .emit(rewardsManager, 'RewardsReclaimedDueToSubgraphDenylist') + .withArgs(indexer1.address, allocationID1, subgraphDeploymentID1, expectedRewards) + + // Check reclaim wallet received the rewards + const balanceAfter = await grt.balanceOf(reclaimWallet.address) + expect(balanceAfter.sub(balanceBefore)).eq(expectedRewards) + }) + + it('should not mint to reclaim address when reclaim address not set', async function () { + // Do NOT set reclaim address (defaults to zero address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Close allocation - should only emit denial event, not reclaim + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimedDueToSubgraphDenylist') + }) + }) + + describe('reclaim denied rewards - eligibility', function () { + it('should mint to reclaim address when eligibility denied and reclaim address set', async function () { + // Setup reclaim address + await rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Calculate expected rewards + const expectedRewards = toGRT('1400') + + // Check reclaim wallet balance before + const balanceBefore = await grt.balanceOf(reclaimWallet.address) + + // Close allocation - should emit both denial and reclaim events + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedRewards) + await expect(tx) + .emit(rewardsManager, 'RewardsReclaimedDueToIndexerEligibility') + .withArgs(indexer1.address, allocationID1, expectedRewards) + + // Check reclaim wallet received the rewards + const balanceAfter = await grt.balanceOf(reclaimWallet.address) + expect(balanceAfter.sub(balanceBefore)).eq(expectedRewards) + }) + + it('should not mint to reclaim address when reclaim address not set', async function () { + // Do NOT set reclaim address (defaults to zero address) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedRewards = toGRT('1400') + + // Close allocation - should only emit denial event, not reclaim + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedRewards) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimedDueToIndexerEligibility') + }) + }) +}) diff --git a/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol b/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol index bd8da3508..dd5346b06 100644 --- a/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol +++ b/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol @@ -49,6 +49,20 @@ interface IRewardsManager { */ function setRewardsEligibilityOracle(address newRewardsEligibilityOracle) external; + /** + * @notice Set the indexer eligibility reclaim address + * @dev Address to mint tokens that would be denied due to indexer eligibility. Set to zero to disable. + * @param newReclaimAddress The address to receive eligibility-denied tokens + */ + function setIndexerEligibilityReclaimAddress(address newReclaimAddress) external; + + /** + * @notice Set the subgraph denied reclaim address + * @dev Address to mint tokens that would be denied due to subgraph denylist. Set to zero to disable. + * @param newReclaimAddress The address to receive subgraph-denied tokens + */ + function setSubgraphDeniedReclaimAddress(address newReclaimAddress) external; + // -- Denylist -- /** diff --git a/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol b/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol index 8286f2570..773d676f9 100644 --- a/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol +++ b/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol @@ -57,6 +57,12 @@ contract MockRewardsManager is IRewardsManager { function isDenied(bytes32) external view returns (bool) {} + // -- Reclaim -- + + function setSubgraphDeniedReclaimAddress(address) external {} + + function setIndexerEligibilityReclaimAddress(address) external {} + // -- Getters -- function getNewRewardsPerSignal() external view returns (uint256) {} From 70c00e40620fb95ddcb7ac3ca06b9a9a48333143 Mon Sep 17 00:00:00 2001 From: Rembrandt Kuipers <50174308+RembrandtK@users.noreply.github.com> Date: Fri, 12 Dec 2025 18:16:25 +0000 Subject: [PATCH 5/9] docs: improve code comments in IssuanceAllocator - Add clarity to availablePPM calculation explaining it comprises default allocation's allocator-minting PPM, target's allocator-minting PPM, and target's self-minting PPM to maintain 100% allocation invariant - Refine reentrancy comment to explicitly reference that calculations occur after notifications to prevent reentrancy issues Addresses PR feedback from code review --- packages/issuance/contracts/allocate/IssuanceAllocator.sol | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.sol b/packages/issuance/contracts/allocate/IssuanceAllocator.sol index 9f6110bea..6b8d004f0 100644 --- a/packages/issuance/contracts/allocate/IssuanceAllocator.sol +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.sol @@ -567,8 +567,12 @@ contract IssuanceAllocator is AllocationTarget storage targetData = $.allocationTargets[target]; AllocationTarget storage defaultTarget = $.allocationTargets[$.targetAddresses[0]]; - // Calculation is done here after notifications to prevent reentrancy issues + // Calculations occur after notifications in the caller to prevent reentrancy issues + // availablePPM comprises the default allocation's current allocator-minting PPM, + // the target's current allocator-minting PPM, and the target's current self-minting PPM. + // This maintains the 100% allocation invariant by calculating how much can be reallocated + // to the target without exceeding total available allocation. uint256 availablePPM = defaultTarget.allocatorMintingPPM + targetData.allocatorMintingPPM + targetData.selfMintingPPM; From cbe2bd1eb018d1490bed669385170ad91ceb1f89 Mon Sep 17 00:00:00 2001 From: Rembrandt Kuipers <50174308+RembrandtK@users.noreply.github.com> Date: Fri, 12 Dec 2025 19:08:51 +0000 Subject: [PATCH 6/9] fix: resolve fuzz test failure in testGetBalance_WhenCollectedOverThawing (#1268) Replace vm.assume with bounded inputs to fix "vm.assume rejected too many inputs" error. The previous implementation used overly restrictive constraints that caused the fuzzer to reject most random inputs. Now limits amountThawing and amountCollected to half of MAX_STAKING_TOKENS, guaranteeing valid deposit ranges while maintaining test coverage. --- .../horizon/test/unit/escrow/getters.t.sol | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/packages/horizon/test/unit/escrow/getters.t.sol b/packages/horizon/test/unit/escrow/getters.t.sol index 262192125..ded655b39 100644 --- a/packages/horizon/test/unit/escrow/getters.t.sol +++ b/packages/horizon/test/unit/escrow/getters.t.sol @@ -34,12 +34,21 @@ contract GraphEscrowGettersTest is GraphEscrowTest { uint256 amountDeposit, uint256 amountThawing, uint256 amountCollected - ) public useGateway useDeposit(amountDeposit) { - vm.assume(amountThawing > 0); - vm.assume(amountDeposit > 0); - vm.assume(amountDeposit >= amountThawing); - vm.assume(amountDeposit >= amountCollected); - vm.assume(amountDeposit - amountCollected < amountThawing); + ) public useGateway { + // Limit thawing and collected to half of MAX_STAKING_TOKENS to ensure valid deposit range + amountThawing = bound(amountThawing, 1, MAX_STAKING_TOKENS / 2); + amountCollected = bound(amountCollected, 1, MAX_STAKING_TOKENS / 2); + + // amountDeposit must be: + // - >= amountThawing (so we can thaw that amount) + // - >= amountCollected (so we can collect that amount) + // - < amountThawing + amountCollected (so that after collecting, balance < thawing) + // With the above bounds, this range is guaranteed to be valid + uint256 minDeposit = amountThawing > amountCollected ? amountThawing : amountCollected; + uint256 maxDeposit = amountThawing + amountCollected - 1; + amountDeposit = bound(amountDeposit, minDeposit, maxDeposit); + + _depositTokens(users.verifier, users.indexer, amountDeposit); // thaw some funds _thawEscrow(users.verifier, users.indexer, amountThawing); From ba9c4667b860ad0b8b00b2deee95be3e99cdaad8 Mon Sep 17 00:00:00 2001 From: Rembrandt Kuipers <50174308+RembrandtK@users.noreply.github.com> Date: Mon, 15 Dec 2025 16:08:59 +0000 Subject: [PATCH 7/9] feat: distribute pending issuance before changing default allocation address Change setDefaultAllocationAddress to distribute any pending issuance to the old default address before changing to the new address. Add evenIfDistributionPending parameter to handle the case when distribution cannot proceed (e.g., when paused). This allows callers to either: - Return false when distribution is pending (evenIfDistributionPending=false) - Force the change anyway (evenIfDistributionPending=true) Implementation: - Calls _handleDistributionBeforeAllocation before changing default address - Distributes pending issuance to old default address up to current block - Then changes allocation to new default address - When paused and evenIfDistributionPending=false, returns false instead of changing - Original setDefaultAllocationAddress(address) signature wraps new implementation with evenIfDistributionPending=false --- .../IIssuanceAllocationAdministration.sol | 8 + .../contracts/allocate/IssuanceAllocator.sol | 29 ++- .../tests/allocate/DefaultAllocation.test.ts | 206 +++++++++++++++--- .../allocate/InterfaceIdStability.test.ts | 2 +- 4 files changed, 213 insertions(+), 32 deletions(-) diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol index 919cea168..1887b0c07 100644 --- a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol @@ -79,6 +79,14 @@ interface IIssuanceAllocationAdministration { */ function setDefaultAllocationAddress(address newAddress) external returns (bool); + /** + * @notice Set the address that receives the default portion of issuance not allocated to other targets + * @param newAddress The new default allocation address (can be address(0)) + * @param evenIfDistributionPending Whether to force the allocation change even if issuance has not been distributed up to the current block + * @return True if successful + */ + function setDefaultAllocationAddress(address newAddress, bool evenIfDistributionPending) external returns (bool); + /** * @notice Distribute any pending accumulated issuance to allocator-minting targets. * @return Block number up to which issuance has been distributed diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.sol b/packages/issuance/contracts/allocate/IssuanceAllocator.sol index 6b8d004f0..713e9635a 100644 --- a/packages/issuance/contracts/allocate/IssuanceAllocator.sol +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.sol @@ -422,12 +422,33 @@ contract IssuanceAllocator is /** * @inheritdoc IIssuanceAllocationAdministration + */ + function setDefaultAllocationAddress(address newAddress) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + return _setDefaultAllocationAddress(newAddress, false); + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function setDefaultAllocationAddress( + address newAddress, + bool evenIfDistributionPending + ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + return _setDefaultAllocationAddress(newAddress, evenIfDistributionPending); + } + + /** + * @notice Internal implementation for setting default allocation address * @dev The default allocation automatically receives the portion of issuance not allocated to other targets * @dev This maintains the invariant that total allocation is always 100% * @dev Reverts if attempting to set to an address that has a normal (non-default) allocation + * @dev Allocation data is copied from the old default to the new default, including lastChangeNotifiedBlock * @dev No-op if setting to the same address */ - function setDefaultAllocationAddress(address newAddress) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + function _setDefaultAllocationAddress( + address newAddress, + bool evenIfDistributionPending + ) internal returns (bool) { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); address oldAddress = $.targetAddresses[0]; @@ -444,11 +465,17 @@ contract IssuanceAllocator is require($.targetAddresses[i] != newAddress, CannotSetDefaultToAllocatedTarget()); } + // Distribute any pending issuance to the old default address before changing. + // If paused and evenIfDistributionPending is false, return false to prevent the change. + if (!_handleDistributionBeforeAllocation(oldAddress, 0, evenIfDistributionPending)) return false; + // Notify both old and new addresses of the allocation change _notifyTarget(oldAddress); _notifyTarget(newAddress); // Update the default allocation address at index 0 + // Note this will also copy the lastChangeNotifiedBlock from old to new, which is relevant if + // forceTargetNoChangeNotificationBlock was used to set a future block for the default address. $.targetAddresses[0] = newAddress; $.allocationTargets[newAddress] = $.allocationTargets[oldAddress]; delete $.allocationTargets[oldAddress]; diff --git a/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts b/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts index d59fb457e..5a02ec0e6 100644 --- a/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts +++ b/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts @@ -278,6 +278,37 @@ describe('IssuanceAllocator - Default Allocation', () => { ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') }) + it('should revert when non-governor tries to change default address with evenIfDistributionPending flag', async () => { + await expect( + issuanceAllocator.connect(accounts.user)['setDefaultAllocationAddress(address,bool)'](addresses.target1, true), + ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should return false when trying to change default address while paused without evenIfDistributionPending', async () => { + // Grant pause role and pause + const PAUSE_ROLE = ethers.keccak256(ethers.toUtf8Bytes('PAUSE_ROLE')) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + + // Try to change default without force - should return false (checked via staticCall) + const result = await issuanceAllocator + .connect(accounts.governor) + .setDefaultAllocationAddress.staticCall(addresses.target3) + expect(result).to.equal(false) + + // Verify allocation didn't change + const currentDefault = await issuanceAllocator.getTargetAt(0) + expect(currentDefault).to.equal(ethers.ZeroAddress) + + // Should succeed with evenIfDistributionPending=true + await issuanceAllocator + .connect(accounts.governor) + ['setDefaultAllocationAddress(address,bool)'](addresses.target3, true) + + const newDefault = await issuanceAllocator.getTargetAt(0) + expect(newDefault).to.equal(addresses.target3) + }) + it('should revert when trying to set default to a normally allocated target', async () => { // Set target1 as a normal allocation await issuanceAllocator @@ -335,10 +366,10 @@ describe('IssuanceAllocator - Default Allocation', () => { // Change default to target1 await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) - // Should now be able to set allocation for old default (zero address would fail for other reasons, use target2) + // Change default to target2 (target1 is no longer the default) await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target2) - // Now target1 is no longer default, should be able to allocate to it + // Now target1 can receive a normal allocation since it's no longer the default await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) @@ -385,57 +416,50 @@ describe('IssuanceAllocator - Default Allocation', () => { }) it('should mint to default address when it is set', async () => { - // Distribute any pending issuance first to start fresh - await issuanceAllocator.distributeIssuance() - // Change default to target3 await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) - // Set target1 allocation using evenIfDistributionPending to avoid premature distribution + // Set target1 allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300_000n, 0n, true) + ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) - // Distribute once (exactly 1 block with the new allocations) + // Distribute to settle issuance await issuanceAllocator.distributeIssuance() - // Target1 should receive 30% for 1 block (from last distributeIssuance call) + // Target1 should receive 30% for 1 block const target1Balance = await graphToken.balanceOf(addresses.target1) const expectedTarget1 = (issuancePerBlock * 300_000n) / MILLION expect(target1Balance).to.equal(expectedTarget1) // Target3 (default) should receive: - // - 100% for 2 blocks (from initial distributeIssuance to setTargetAllocation) - // - 70% for 1 block (from setTargetAllocation to final distributeIssuance) + // - 100% for 1 block (from setDefaultAllocationAddress to setTargetAllocation) + // - 70% for 1 block (from setTargetAllocation to distributeIssuance) const target3Balance = await graphToken.balanceOf(addresses.target3) - const expectedTarget3 = issuancePerBlock * 2n + (issuancePerBlock * 700_000n) / MILLION + const expectedTarget3 = issuancePerBlock + (issuancePerBlock * 700_000n) / MILLION expect(target3Balance).to.equal(expectedTarget3) }) it('should distribute correctly with multiple targets and default', async () => { - // Distribute any pending issuance first to start fresh - await issuanceAllocator.distributeIssuance() - // Set default to target3 await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) - // Set allocations using evenIfDistributionPending to avoid premature distributions + // Set allocations (target3 gets remaining 50% as default) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 200_000n, 0n, true) // 20% + ['setTargetAllocation(address,uint256)'](addresses.target1, 200_000n) // 20% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 300_000n, 0n, true) // 30% - // Default (target3) gets 50% + ['setTargetAllocation(address,uint256)'](addresses.target2, 300_000n) // 30% - // Distribute once (exactly 1 block with the final allocations) + // Distribute to settle issuance await issuanceAllocator.distributeIssuance() - // Check all balances accounting for block accumulation: + // Check balances: // - target1 gets 20% for 2 blocks (from first setTargetAllocation onwards) // - target2 gets 30% for 1 block (from second setTargetAllocation onwards) - // - target3 (default) gets 100% for 2 blocks + 80% for 1 block + 50% for 1 block + // - target3 (default) gets 100% for 1 block + 80% for 1 block + 50% for 1 block const target1Balance = await graphToken.balanceOf(addresses.target1) const target2Balance = await graphToken.balanceOf(addresses.target2) const target3Balance = await graphToken.balanceOf(addresses.target3) @@ -443,23 +467,19 @@ describe('IssuanceAllocator - Default Allocation', () => { const expectedTarget1 = (issuancePerBlock * 200_000n * 2n) / MILLION const expectedTarget2 = (issuancePerBlock * 300_000n) / MILLION const expectedTarget3 = - issuancePerBlock * 2n + (issuancePerBlock * 800_000n) / MILLION + (issuancePerBlock * 500_000n) / MILLION + issuancePerBlock + (issuancePerBlock * 800_000n) / MILLION + (issuancePerBlock * 500_000n) / MILLION expect(target1Balance).to.equal(expectedTarget1) expect(target2Balance).to.equal(expectedTarget2) expect(target3Balance).to.equal(expectedTarget3) - // Total minted should equal 4 blocks of issuance + // Total minted should equal 3 blocks of issuance const totalMinted = target1Balance + target2Balance + target3Balance - expect(totalMinted).to.equal(issuancePerBlock * 4n) + expect(totalMinted).to.equal(issuancePerBlock * 3n) }) it('should handle distribution when default allocation is 0%', async () => { - // Distribute any pending issuance first to start fresh - await issuanceAllocator.distributeIssuance() - - // Default is address(0), which doesn't receive minting - // Allocate 100% to explicit targets + // Allocate 100% to explicit targets (default gets 0%) await issuanceAllocator .connect(accounts.governor) ['setTargetAllocation(address,uint256)'](addresses.target1, 600_000n) @@ -492,6 +512,132 @@ describe('IssuanceAllocator - Default Allocation', () => { const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) expect(defaultAllocation.totalAllocationPPM).to.equal(0n) }) + + it('should distribute during setDefaultAllocationAddress when using default behavior', async () => { + // Change default to target3 WITHOUT evenIfDistributionPending flag (uses default false) + // This should distribute issuance up to current block before changing the default + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) + + // Set target1 allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300_000n, 0n, true) + + // Distribute once more + await issuanceAllocator.distributeIssuance() + + // Target3 (default) should receive: + // - 0% for 1 block (setDefaultAllocationAddress distributes to old default (zero address) before changing) + // - 100% for 1 block (from setDefaultAllocationAddress to setTargetAllocation) + // - 70% for 1 block (from setTargetAllocation to final distributeIssuance) + const target3Balance = await graphToken.balanceOf(addresses.target3) + const expectedTarget3 = issuancePerBlock + (issuancePerBlock * 700_000n) / MILLION + expect(target3Balance).to.equal(expectedTarget3) + + // Target1 should receive 30% for 1 block + const target1Balance = await graphToken.balanceOf(addresses.target1) + const expectedTarget1 = (issuancePerBlock * 300_000n) / MILLION + expect(target1Balance).to.equal(expectedTarget1) + }) + + it('should inherit lastChangeNotifiedBlock when changing default address', async () => { + // This test verifies the comment at IssuanceAllocator.sol:477-478 + // "Note this will also copy the lastChangeNotifiedBlock from old to new, which is relevant if + // forceTargetNoChangeNotificationBlock was used to set a future block for the default address." + + // Set default to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + + // Force a future notification block on target1 (the current default) + const currentBlock = await ethers.provider.getBlockNumber() + const futureBlock = currentBlock + 100 + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(addresses.target1, futureBlock) + + // Verify target1 has the future block set + const target1Data = await issuanceAllocator.getTargetData(addresses.target1) + expect(target1Data.lastChangeNotifiedBlock).to.equal(futureBlock) + + // Change default from target1 to target2 + await issuanceAllocator + .connect(accounts.governor) + ['setDefaultAllocationAddress(address,bool)'](addresses.target2, true) + + // Verify target2 (new default) inherited the lastChangeNotifiedBlock from target1 + const target2Data = await issuanceAllocator.getTargetData(addresses.target2) + expect(target2Data.lastChangeNotifiedBlock).to.equal(futureBlock) + + // Verify old default (target1) no longer has data + const oldDefaultData = await issuanceAllocator.getTargetData(addresses.target1) + expect(oldDefaultData.lastChangeNotifiedBlock).to.equal(0) + }) + + it('should handle changing default to address that previously had normal allocation', async () => { + // Scenario: target1 has normal allocation → removed (0%) → set as default + // This tests for stale data issues + + // Set target1 as normal allocation with 30% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + + let allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(allocation.totalAllocationPPM).to.equal(300_000n) + + // Remove target1's allocation (set to 0%) + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256)'](addresses.target1, 0n) + + // Verify target1 is no longer in targetAddresses (except if it's at index 0, which it's not) + const targetCount = await issuanceAllocator.getTargetCount() + const targets = [] + for (let i = 0; i < targetCount; i++) { + targets.push(await issuanceAllocator.getTargetAt(i)) + } + expect(targets).to.not.include(addresses.target1) // Should not be in list anymore + + // Now set target1 as default - should work and not have stale allocation data + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + + // Verify target1 is now default with 100% allocation (since no other targets) + const defaultAddress = await issuanceAllocator.getTargetAt(0) + expect(defaultAddress).to.equal(addresses.target1) + + allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + expect(allocation.totalAllocationPPM).to.equal(MILLION) // Should have full allocation as default + }) + + it('should handle changing default when default has 0% allocation', async () => { + // Allocate 100% to other targets so default has 0% + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 600_000n) + + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, 400_000n) + + // Default should now have 0% + const defaultAddress = await issuanceAllocator.getTargetAt(0) + const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) + expect(defaultAllocation.totalAllocationPPM).to.equal(0n) + + // Change default to target3 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) + + // New default should have 0% (same as old default) + const newDefaultAddress = await issuanceAllocator.getTargetAt(0) + expect(newDefaultAddress).to.equal(addresses.target3) + + const newDefaultAllocation = await issuanceAllocator.getTargetAllocation(addresses.target3) + expect(newDefaultAllocation.totalAllocationPPM).to.equal(0n) + + // Other allocations should be maintained + const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) + const target2Allocation = await issuanceAllocator.getTargetAllocation(addresses.target2) + expect(target1Allocation.totalAllocationPPM).to.equal(600_000n) + expect(target2Allocation.totalAllocationPPM).to.equal(400_000n) + }) }) describe('View functions', () => { diff --git a/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts index fcb07ea85..ea29a2ea9 100644 --- a/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts +++ b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts @@ -26,7 +26,7 @@ describe('Allocate Interface ID Stability', () => { }) it('IIssuanceAllocationAdministration should have stable interface ID', () => { - expect(IIssuanceAllocationAdministration__factory.interfaceId).to.equal('0x069d5a27') + expect(IIssuanceAllocationAdministration__factory.interfaceId).to.equal('0x1110962a') }) it('IIssuanceAllocationStatus should have stable interface ID', () => { From cc652ae3cb4d6c0e4c183923f6f3c94ccb6a0517 Mon Sep 17 00:00:00 2001 From: Rembrandt Kuipers <50174308+RembrandtK@users.noreply.github.com> Date: Mon, 15 Dec 2025 18:53:23 +0000 Subject: [PATCH 8/9] refactor: improve IssuanceAllocator target removal Refactor _removeTargetFromList to _removeTarget to logically group target removal operations (array removal and mapping deletion) in a single function. Loop starts at index 1 to protect default allocation at index 0. Also fixes MILLION/PPM terminology, adds missing natspec, and adds test for address(0) default allocation. --- .../contracts/allocate/IssuanceAllocator.sol | 28 +++++++------ .../tests/allocate/DefaultAllocation.test.ts | 42 +++++++++++++++++++ 2 files changed, 57 insertions(+), 13 deletions(-) diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.sol b/packages/issuance/contracts/allocate/IssuanceAllocator.sol index 713e9635a..0550e7594 100644 --- a/packages/issuance/contracts/allocate/IssuanceAllocator.sol +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.sol @@ -28,7 +28,7 @@ import { ERC165Upgradeable } from "@openzeppelin/contracts-upgradeable/utils/int * @dev The contract maintains a 100% allocation invariant through a default allocation mechanism: * - A default allocation target exists at targetAddresses[0] (initialized to address(0)) * - The default allocation automatically receives any unallocated portion of issuance - * - Total allocation across all targets always equals 100% (MILLION PPM) + * - Total allocation across all targets always equals 100% (tracked in parts per MILLION) * - The default allocation address can be changed via setDefaultAllocationAddress() * - When the default address is address(0), the unallocated portion is not minted * - Regular targets cannot be set as the default allocation address @@ -123,7 +123,7 @@ contract IssuanceAllocator is /// @notice Thrown when attempting to add a target with zero address error TargetAddressCannotBeZero(); - /// @notice Thrown when the total allocation would exceed 100% (PPM) + /// @notice Thrown when the total allocation would exceed 100% (MILLION) error InsufficientAllocationAvailable(); /// @notice Thrown when a target does not support the IIssuanceTarget interface @@ -400,7 +400,7 @@ contract IssuanceAllocator is * - If both allocations are 0 and the target doesn't exist, this function is a no-op * - If both allocations are 0 and the target exists, the target will be removed * - If any allocation is non-zero and the target doesn't exist, the target will be added - * - Will revert if the total allocation would exceed PPM, or if attempting to add a target that doesn't support IIssuanceTarget + * - Will revert if the total allocation would exceed 100% (MILLION), or if attempting to add a target that doesn't support IIssuanceTarget * * Self-minting allocation is a special case for backwards compatibility with * existing contracts like the RewardsManager. The IssuanceAllocator calculates @@ -439,16 +439,16 @@ contract IssuanceAllocator is /** * @notice Internal implementation for setting default allocation address + * @param newAddress The address to set as the new default allocation target + * @param evenIfDistributionPending Whether to force the change even if issuance distribution is behind + * @return True if the value is applied (including if already the case), false if not applied due to paused state * @dev The default allocation automatically receives the portion of issuance not allocated to other targets * @dev This maintains the invariant that total allocation is always 100% * @dev Reverts if attempting to set to an address that has a normal (non-default) allocation * @dev Allocation data is copied from the old default to the new default, including lastChangeNotifiedBlock * @dev No-op if setting to the same address */ - function _setDefaultAllocationAddress( - address newAddress, - bool evenIfDistributionPending - ) internal returns (bool) { + function _setDefaultAllocationAddress(address newAddress, bool evenIfDistributionPending) internal returns (bool) { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); address oldAddress = $.targetAddresses[0]; @@ -640,23 +640,25 @@ contract IssuanceAllocator is targetData.allocatorMintingPPM = allocatorMintingPPM; targetData.selfMintingPPM = selfMintingPPM; } else { - // Remove from list and delete mapping - _removeTargetFromList(target); - delete $.allocationTargets[target]; + // Remove target completely (from list and mapping) + _removeTarget(target); } } /** - * @notice Removes target from targetAddresses array using swap-and-pop for gas efficiency + * @notice Removes target from targetAddresses array and deletes its allocation data * @param target Address of the target to remove + * @dev Starts at index 1 since index 0 is always the default allocation and should never be removed + * @dev Uses swap-and-pop for gas efficiency */ - function _removeTargetFromList(address target) private { + function _removeTarget(address target) private { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); - for (uint256 i = 0; i < $.targetAddresses.length; ++i) { + for (uint256 i = 1; i < $.targetAddresses.length; ++i) { if ($.targetAddresses[i] == target) { $.targetAddresses[i] = $.targetAddresses[$.targetAddresses.length - 1]; $.targetAddresses.pop(); + delete $.allocationTargets[target]; break; } } diff --git a/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts b/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts index 5a02ec0e6..549d3386b 100644 --- a/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts +++ b/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts @@ -638,6 +638,48 @@ describe('IssuanceAllocator - Default Allocation', () => { expect(target1Allocation.totalAllocationPPM).to.equal(600_000n) expect(target2Allocation.totalAllocationPPM).to.equal(400_000n) }) + + it('should handle changing from initial address(0) default without errors', async () => { + // Verify initial state: default is address(0) + const initialDefault = await issuanceAllocator.getTargetAt(0) + expect(initialDefault).to.equal(ethers.ZeroAddress) + + // Add a normal allocation so there's pending issuance to distribute + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, 400_000n) + + // Mine a few blocks to accumulate issuance + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + + // Change default from address(0) to target2 + // This should: + // 1. Call _handleDistributionBeforeAllocation(address(0), ...) - should not revert + // 2. Call _notifyTarget(address(0)) - should return early safely + // 3. Delete allocationTargets[address(0)] - should not cause issues + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target2) + + // Verify the change succeeded + const newDefault = await issuanceAllocator.getTargetAt(0) + expect(newDefault).to.equal(addresses.target2) + + // Verify address(0) received no tokens (can't mint to zero address) + const zeroAddressBalance = await graphToken.balanceOf(ethers.ZeroAddress) + expect(zeroAddressBalance).to.equal(0n) + + // Distribute and verify target2 (new default) receives correct allocation + await issuanceAllocator.distributeIssuance() + + // Target2 should have received 60% for 1 block (from setDefaultAllocationAddress to distributeIssuance) + const target2Balance = await graphToken.balanceOf(addresses.target2) + const expectedTarget2 = (issuancePerBlock * 600_000n) / MILLION + expect(target2Balance).to.equal(expectedTarget2) + + // Target1 should have accumulated tokens across multiple blocks + const target1Balance = await graphToken.balanceOf(addresses.target1) + expect(target1Balance).to.be.gt(0n) // Should have received something + }) }) describe('View functions', () => { From 88ce412963add2e65259b5cc8e749e0c15c2fc78 Mon Sep 17 00:00:00 2001 From: Rembrandt Kuipers <50174308+RembrandtK@users.noreply.github.com> Date: Mon, 15 Dec 2025 19:32:13 +0000 Subject: [PATCH 9/9] fix: preserve notification state when changing default allocation When changing default allocation address, allocation data is copied from oldAddress to newAddress, which was overwriting newAddress's lastChangeNotifiedBlock. - If newAddress was just notified, the notification record was lost - If newAddress had a future block set (via forceTargetNoChangeNotificationBlock), that restriction was lost - When changing from address(0) (which is never notified), newAddress's lastChangeNotifiedBlock was incorrectly set to 0 Fix: preserve newAddress's lastChangeNotifiedBlock when copying allocation data. Also clarifies that notification blocks are target-specific, not about the default in general. --- .../contracts/allocate/IssuanceAllocator.sol | 9 ++- .../tests/allocate/DefaultAllocation.test.ts | 71 ++++++++++--------- 2 files changed, 45 insertions(+), 35 deletions(-) diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.sol b/packages/issuance/contracts/allocate/IssuanceAllocator.sol index 0550e7594..ac0714622 100644 --- a/packages/issuance/contracts/allocate/IssuanceAllocator.sol +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.sol @@ -473,13 +473,18 @@ contract IssuanceAllocator is _notifyTarget(oldAddress); _notifyTarget(newAddress); + // Preserve the notification block of newAddress before copying old address data + uint256 newAddressNotificationBlock = $.allocationTargets[newAddress].lastChangeNotifiedBlock; + // Update the default allocation address at index 0 - // Note this will also copy the lastChangeNotifiedBlock from old to new, which is relevant if - // forceTargetNoChangeNotificationBlock was used to set a future block for the default address. + // This copies allocation data from old to new, including allocatorMintingPPM and selfMintingPPM $.targetAddresses[0] = newAddress; $.allocationTargets[newAddress] = $.allocationTargets[oldAddress]; delete $.allocationTargets[oldAddress]; + // Restore the notification block for newAddress (regard as target-specific, not about default) + $.allocationTargets[newAddress].lastChangeNotifiedBlock = newAddressNotificationBlock; + emit DefaultAllocationAddressUpdated(oldAddress, newAddress); return true; } diff --git a/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts b/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts index 549d3386b..5b7937ecc 100644 --- a/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts +++ b/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts @@ -540,39 +540,6 @@ describe('IssuanceAllocator - Default Allocation', () => { expect(target1Balance).to.equal(expectedTarget1) }) - it('should inherit lastChangeNotifiedBlock when changing default address', async () => { - // This test verifies the comment at IssuanceAllocator.sol:477-478 - // "Note this will also copy the lastChangeNotifiedBlock from old to new, which is relevant if - // forceTargetNoChangeNotificationBlock was used to set a future block for the default address." - - // Set default to target1 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) - - // Force a future notification block on target1 (the current default) - const currentBlock = await ethers.provider.getBlockNumber() - const futureBlock = currentBlock + 100 - await issuanceAllocator - .connect(accounts.governor) - .forceTargetNoChangeNotificationBlock(addresses.target1, futureBlock) - - // Verify target1 has the future block set - const target1Data = await issuanceAllocator.getTargetData(addresses.target1) - expect(target1Data.lastChangeNotifiedBlock).to.equal(futureBlock) - - // Change default from target1 to target2 - await issuanceAllocator - .connect(accounts.governor) - ['setDefaultAllocationAddress(address,bool)'](addresses.target2, true) - - // Verify target2 (new default) inherited the lastChangeNotifiedBlock from target1 - const target2Data = await issuanceAllocator.getTargetData(addresses.target2) - expect(target2Data.lastChangeNotifiedBlock).to.equal(futureBlock) - - // Verify old default (target1) no longer has data - const oldDefaultData = await issuanceAllocator.getTargetData(addresses.target1) - expect(oldDefaultData.lastChangeNotifiedBlock).to.equal(0) - }) - it('should handle changing default to address that previously had normal allocation', async () => { // Scenario: target1 has normal allocation → removed (0%) → set as default // This tests for stale data issues @@ -679,6 +646,44 @@ describe('IssuanceAllocator - Default Allocation', () => { // Target1 should have accumulated tokens across multiple blocks const target1Balance = await graphToken.balanceOf(addresses.target1) expect(target1Balance).to.be.gt(0n) // Should have received something + + // Verify lastChangeNotifiedBlock was preserved for the new default (not overwritten to 0 from address(0)) + const target2Data = await issuanceAllocator.getTargetData(addresses.target2) + const currentBlock = await ethers.provider.getBlockNumber() + expect(target2Data.lastChangeNotifiedBlock).to.be.gt(0n) + expect(target2Data.lastChangeNotifiedBlock).to.be.lte(currentBlock) + }) + + it('should not transfer future notification block from old default to new default', async () => { + // Set initial default to target1 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + + // Force a future notification block on target1 (the current default) + const currentBlock = await ethers.provider.getBlockNumber() + const futureBlock = currentBlock + 100 + await issuanceAllocator + .connect(accounts.governor) + .forceTargetNoChangeNotificationBlock(addresses.target1, futureBlock) + + // Verify target1 has the future block set + const target1DataBefore = await issuanceAllocator.getTargetData(addresses.target1) + expect(target1DataBefore.lastChangeNotifiedBlock).to.equal(futureBlock) + + // Change default from target1 to target2 + await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target2) + + // Verify target2 (new default) has its own notification block (current block), not the future block from target1 + const target2Data = await issuanceAllocator.getTargetData(addresses.target2) + const blockAfterChange = await ethers.provider.getBlockNumber() + + // target2 should have been notified at the current block, not inherit the future block + expect(target2Data.lastChangeNotifiedBlock).to.equal(blockAfterChange) + expect(target2Data.lastChangeNotifiedBlock).to.not.equal(futureBlock) + expect(target2Data.lastChangeNotifiedBlock).to.be.lt(futureBlock) + + // Old default (target1) should no longer have data (it was removed) + const target1DataAfter = await issuanceAllocator.getTargetData(addresses.target1) + expect(target1DataAfter.lastChangeNotifiedBlock).to.equal(0) }) })