From 6c51afd4de0a1e925486cd080f8bc0969920b538 Mon Sep 17 00:00:00 2001 From: Aneta Tsvetkova Date: Thu, 24 Jul 2025 10:39:47 +0300 Subject: [PATCH 1/3] feat(contracts): Change blocknumber in ADFS to history accumulator --- libs/ts/contracts/README.md | 2 +- .../contracts/AggregatedDataFeedStore.sol | 55 ++++----- .../test/AggregatedDataFeedStoreGeneric.sol | 13 ++- .../contracts/tasks/test-multichain-deploy.ts | 21 +++- .../test/AggregatedDataFeedStore.test.ts | 102 +++++++++++------ .../test/CLAggregatorAdapter.test.ts | 105 +++++++++++++----- .../test/CLFeedRegistryAdapter.test.ts | 45 +++++++- libs/ts/contracts/test/Oracle.test.ts | 37 +++++- libs/ts/contracts/test/Registry.test.ts | 10 +- .../test/UpgradeableProxyADFS.test.ts | 35 ++++-- .../test/examples/ADFSConsumer.test.ts | 15 ++- .../CLFeedRegistryAdapterConsumer.test.ts | 9 +- .../ts/contracts/test/utils/helpers/common.ts | 32 +++++- .../helpers/compareGasWithExperiments.ts | 8 +- .../test/utils/wrappers/adfs/ADFSBase.ts | 62 ++++++++--- .../utils/wrappers/adfs/ADFSBaseGeneric.ts | 61 +++++++--- .../test/utils/wrappers/chainlink/Base.ts | 13 ++- .../utils/wrappers/interfaces/IADFSWrapper.ts | 11 +- 18 files changed, 464 insertions(+), 172 deletions(-) diff --git a/libs/ts/contracts/README.md b/libs/ts/contracts/README.md index 9ecb3d947a..d645bee5d3 100644 --- a/libs/ts/contracts/README.md +++ b/libs/ts/contracts/README.md @@ -84,7 +84,7 @@ The slots between 2\*\*128 and 2\*\*160 - 1 are considered Data feed slots. Here ### Events -When an update is posted to the contract, an event is emitted - “DataFeedsUpdated(uint256 blockNumber)” (topic: 0xe64378c8d8a289137204264780c7669f3860a703795c6f0574d925d473a4a2a7). Block number is an internal counter for the Blocksense system. Through this event off-chain programs can subscribe to is and trigger on-chain actions when needed. 
+When an update is posted to the contract, an event is emitted - “DataFeedsUpdated(bytes32 newHistoryAccumulator)” (topic: 0x6f6892f1e8eab8687f7b5f3c3bc0d046cd783c6059310be0bef4e18eb0662789). History accumulator is an internal hash for the Blocksense system. Through this event off-chain programs can subscribe to it and trigger on-chain actions when needed. ## Development diff --git a/libs/ts/contracts/contracts/AggregatedDataFeedStore.sol b/libs/ts/contracts/contracts/AggregatedDataFeedStore.sol index 96c23d38c8..53e5bb13f0 100644 --- a/libs/ts/contracts/contracts/AggregatedDataFeedStore.sol +++ b/libs/ts/contracts/contracts/AggregatedDataFeedStore.sol @@ -25,14 +25,14 @@ contract AggregatedDataFeedStore { address internal immutable ACCESS_CONTROL; /// @notice Topic to be emitted on update - /// @dev keccak256("DataFeedsUpdated(uint256)") + /// @dev keccak256("DataFeedsUpdated(bytes32)") bytes32 internal constant DATA_FEEDS_UPDATE_EVENT_TOPIC = - 0xe64378c8d8a289137204264780c7669f3860a703795c6f0574d925d473a4a2a7; + 0x6f6892f1e8eab8687f7b5f3c3bc0d046cd783c6059310be0bef4e18eb0662789; /* Storage layout: Management space: [0 to 2**128-2**116) - 0x0000 - latest blocknumber + 0x0000 - latest history accumulator 0x0001 - implementation slot (UpgradeableProxy) 0x0002 - admin slot (UpgradeableProxy) Ring buffer index table: [2**128-2**116 to 2**128) @@ -237,17 +237,17 @@ contract AggregatedDataFeedStore { address accessControl = ACCESS_CONTROL; /* - ┌--------------------- index table data --------------------┐ - │ │ - ┌---------------------- feed 1 ----------------------------┬-- feed 2 .. feed N --┼-------------- row 1 --------------┬---- row 2 .. row N ---┤ - ┌────────┬───────────┬──────────┬──────┬────────────┬──────────────┬────────────┬─────┬────┬──────────────────────┬────────────┬─────┬────────────────┬───────────────────────┐ - │selector│blocknumber│# of feeds│stride│index length│feedId + index│bytes length│bytes│data│ .. 
│ - ├────────┼───────────┼──────────┼──────┼────────────┼──────────────┼────────────┼─────┼────┼──────────────────────┼────────────┼─────┼────────────────┼───────────────────────┤ - │ 1b │ 8b │ 4b │ 1b │ 1b │ Xb │ 1b │ Yb │ Zb │ .. │ 1b │ Xb │ 32b │ .. │ - └────────┴───────────┴──────────┴──────┴────────────┴──────────────┴────────────┴─────┴────┴──────────────────────┴────────────┴─────┴────────────────┴───────────────────────┘ - │ ▲ │ ▲ │ ▲ │ ▲ - └-------------┘ └-----------┘ └----┘ └---------┘ - X=index length Y=bytes length Z=bytes X=index length + / ┌--------------------- round table data ---------------------┐ + / │ │ + / ┌---------------------- feed 1 ----------------------------┬-- feed 2 ... feed N --┼-------------- row 1 --------------┬---- row 2 ... row N ---┤ + / ┌────────┬──────────┬───────────────┬──────────┬──────┬────────────┬──────────────┬────────────┬─────┬────┬───────────────────────┬────────────┬─────┬────────────────┬────────────────────────┐ + / │selector│source acc│destination acc│# of feeds│stride│index length│feedId + round│bytes length│bytes│data│ ... │index length│index│round table data│ ... │ + / ├────────┼──────────┼───────────────┼──────────┼──────┼────────────┼──────────────┼────────────┼─────┼────┼───────────────────────┼────────────┼─────┼────────────────┼────────────────────────┤ + / │ 1b │ 32b │ 32b │ 4b │ 1b │ 1b │ Xb │ 1b │ Yb │ Zb │ ... │ 1b │ Xb │ 32b │ ... 
│ + / └────────┴──────────┴───────────────┴──────────┴──────┴────────────┴──────────────┴────────────┴─────┴────┴───────────────────────┴────────────┴─────┴────────────────┴────────────────────────┘ + / │ ▲ │ ▲ │ ▲ │ ▲ + / └-------------┘ └-----------┘ └----┘ └---------┘ + / X=index length Y=bytes length Z=bytes X=index length */ assembly { let ptr := mload(0x40) @@ -267,27 +267,28 @@ contract AggregatedDataFeedStore { // setFeeds(bytes) if eq(byte(0, data), 0x01) { - /////////////////////////////////// - // Update Blocksense blocknumber // - /////////////////////////////////// - let newBlockNumber := shr(192, shl(8, data)) - let prevBlockNumber := sload(0x00) + /////////////////////////////////////////// + // Update Blocksense history accumulator // + /////////////////////////////////////////// + let sourceAccumulator := calldataload(1) + let destinationAccumulator := calldataload(33) + let historyAccumulator := sload(0x00) - // ensure it is strictly increasing + // ensure the provided source accumulator matches the stored history accumulator - if eq(gt(newBlockNumber, prevBlockNumber), 0) { + if iszero(eq(sourceAccumulator, historyAccumulator)) { revert(0x00, 0x00) } - sstore(0x00, newBlockNumber) + sstore(0x00, destinationAccumulator) /////////////////////////////////// // Update feeds // /////////////////////////////////// let len := calldatasize() - let feedsCount := shr(224, shl(72, data)) - // selector (1b) + blocknumber (8b) + feeds count (4b) = 13b + let feedsCount := shr(224, calldataload(65)) + // selector (1b) + source acc (32b) + dest acc (32b) + feeds count (4b) = 69b // points to the start of the feeds data - let pointer := 13 + let pointer := 69 /* ┌───────────────────────────────┐ ......... 
┌───────────────────────────────┐ @@ -433,10 +434,10 @@ contract AggregatedDataFeedStore { // Emit DataFeedsUpdated event // /////////////////////////////////// - // store blocknumber at slot 0 in memory - mstore(0x00, newBlockNumber) + // store new history accumulator at slot 0 in memory + mstore(0x00, destinationAccumulator) - // Emit event with new block number + // Emit event with new history accumulator log1(0x00, 0x20, DATA_FEEDS_UPDATE_EVENT_TOPIC) return(0x00, 0x00) diff --git a/libs/ts/contracts/contracts/test/AggregatedDataFeedStoreGeneric.sol b/libs/ts/contracts/contracts/test/AggregatedDataFeedStoreGeneric.sol index 6c6e21ab38..a745c401eb 100644 --- a/libs/ts/contracts/contracts/test/AggregatedDataFeedStoreGeneric.sol +++ b/libs/ts/contracts/contracts/test/AggregatedDataFeedStoreGeneric.sol @@ -8,9 +8,9 @@ contract AggregatedDataFeedStoreGeneric { 0x0000000000000000000000000000000000001000; address internal immutable ACCESS_CONTROL; - event DataFeedsUpdated(uint256 blocknumber); + event DataFeedsUpdated(bytes32 destinationAccumulator); - uint256 internal blocknumber; + bytes32 internal historyAccumulator; constructor(address accessControl) { ACCESS_CONTROL = accessControl; @@ -106,7 +106,8 @@ contract AggregatedDataFeedStoreGeneric { } function write( - uint256 blocknumber_, + bytes32 sourceAccumulator, + bytes32 destinationAccumulator, uint256[] calldata strides, uint256[] calldata indices, bytes[] calldata data, @@ -119,8 +120,8 @@ contract AggregatedDataFeedStoreGeneric { bool isAdmin = abi.decode(res, (bool)); require(success && isAdmin); - require(blocknumber < blocknumber_); - blocknumber = blocknumber_; + require(historyAccumulator == sourceAccumulator); + historyAccumulator = destinationAccumulator; for (uint256 i = 0; i < indices.length; i++) { uint256 index = indices[i]; @@ -149,7 +150,7 @@ contract AggregatedDataFeedStoreGeneric { } } - emit DataFeedsUpdated(blocknumber_); + emit DataFeedsUpdated(destinationAccumulator); } function 
_bytesToBytes32Array( diff --git a/libs/ts/contracts/tasks/test-multichain-deploy.ts b/libs/ts/contracts/tasks/test-multichain-deploy.ts index 41ef4b3d8f..eb01e1bfd0 100644 --- a/libs/ts/contracts/tasks/test-multichain-deploy.ts +++ b/libs/ts/contracts/tasks/test-multichain-deploy.ts @@ -342,11 +342,24 @@ task( const resolveBool = (value: Promise) => value.then(x => Boolean(Number(x))); -const encodeDataWrite = (feeds: Feed[], blockNumber?: number) => { - blockNumber ??= Date.now() + 100; +const encodeDataWrite = ( + feeds: Feed[], + sourceAccumulator?: string, + destinationAccumulator?: string, +) => { + sourceAccumulator ??= ethers.toBeHex(0, 32); + destinationAccumulator ??= ethers.toBeHex( + (Math.random() * 10000).toFixed(0), + 32, + ); const prefix = solidityPacked( - ['bytes1', 'uint64', 'uint32'], - [toBeHex(WriteOp.SetFeeds), blockNumber, feeds.length], + ['bytes1', 'bytes32', 'bytes32', 'uint32'], + [ + toBeHex(WriteOp.SetFeeds), + sourceAccumulator, + destinationAccumulator, + feeds.length, + ], ); const data = feeds.map(feed => { diff --git a/libs/ts/contracts/test/AggregatedDataFeedStore.test.ts b/libs/ts/contracts/test/AggregatedDataFeedStore.test.ts index 2aa4f8eaee..d73525cd5c 100644 --- a/libs/ts/contracts/test/AggregatedDataFeedStore.test.ts +++ b/libs/ts/contracts/test/AggregatedDataFeedStore.test.ts @@ -73,12 +73,12 @@ describe('AggregatedDataFeedStore', () => { }); it('Should emit event when data feeds updated', async () => { - const blockNumber = 1234; - const tx = await contract.setFeeds(sequencer, feeds, { - blockNumber, + const destinationAccumulator = ethers.toBeHex(1, 32); + const res = await contract.setFeeds(sequencer, feeds, { + destinationAccumulator, }); - const receipt = await tx.wait(); - contract.checkEvent(receipt!, blockNumber); + const receipt = await res.tx.wait(); + contract.checkEvent(receipt!, destinationAccumulator); }); it('Should get latest index', async () => { @@ -104,7 +104,7 @@ describe('AggregatedDataFeedStore', () 
=> { }); it('Should get historical feed at index', async () => { - await contract.setFeeds(sequencer, feeds); + const res = await contract.setFeeds(sequencer, feeds); const updatedFeeds = feeds.map(feed => { return { @@ -114,7 +114,10 @@ describe('AggregatedDataFeedStore', () => { }; }); - await contract.setFeeds(sequencer, updatedFeeds); + await contract.setFeeds(sequencer, updatedFeeds, { + sourceAccumulator: res.destinationAccumulator, + destinationAccumulator: ethers.toBeHex(101, 32), + }); await contract.checkDataAtIndex(sequencer, feeds); await contract.checkDataAtIndex(sequencer, updatedFeeds); @@ -122,7 +125,7 @@ describe('AggregatedDataFeedStore', () => { it('Should get latest single feed and index after update', async () => { const stride0Feeds = feeds.filter(feed => feed.stride === 0n); - await contract.setFeeds(sequencer, stride0Feeds); + const res = await contract.setFeeds(sequencer, stride0Feeds); const updatedFeeds = stride0Feeds.map(feed => { return { @@ -132,13 +135,16 @@ describe('AggregatedDataFeedStore', () => { }; }); - await contract.setFeeds(sequencer, updatedFeeds); - const res = await contract.getValues(sequencer, stride0Feeds, { + await contract.setFeeds(sequencer, updatedFeeds, { + sourceAccumulator: res.destinationAccumulator, + destinationAccumulator: ethers.toBeHex(101, 32), + }); + const values = await contract.getValues(sequencer, stride0Feeds, { operations: stride0Feeds.map(() => ReadOp.GetLatestSingleDataAndIndex), }); for (const [i, feed] of updatedFeeds.entries()) { - expect(res[i]).to.equal( + expect(values[i]).to.equal( ethers .toBeHex(feed.index, 32) .concat(contract.formatData(feed).slice(2)), @@ -147,7 +153,7 @@ describe('AggregatedDataFeedStore', () => { }); it('Should get latest feed and index after update', async () => { - await contract.setFeeds(sequencer, feeds); + const res = await contract.setFeeds(sequencer, feeds); const updatedFeeds = feeds.map(feed => { return { @@ -157,7 +163,10 @@ 
describe('AggregatedDataFeedStore', () => { }; }); - await contract.setFeeds(sequencer, updatedFeeds); + await contract.setFeeds(sequencer, updatedFeeds, { + sourceAccumulator: res.destinationAccumulator, + destinationAccumulator: ethers.toBeHex(101, 32), + }); await contract.checkLatestDataAndIndex(sequencer, updatedFeeds); }); @@ -165,20 +174,17 @@ describe('AggregatedDataFeedStore', () => { await expect(contract.setFeeds(signers[2], feeds)).to.be.reverted; }); - it('Should revert if blockNumber same as previous block', async () => { - const blockNumber = 1; - await contract.setFeeds(sequencer, feeds, { blockNumber }); - - await expect(contract.setFeeds(sequencer, feeds, { blockNumber })).to.be - .reverted; - }); - - it('Should revert if blockNumber lower than previous block', async () => { - const blockNumber = 1; - await contract.setFeeds(sequencer, feeds, { blockNumber }); + it('Should revert if source history accumulator is different from the stored', async () => { + const sourceAccumulator = ethers.toBeHex(100, 32); + // set history accumulator for the first time (0 -> 100) + await contract.setFeeds(sequencer, feeds, { + destinationAccumulator: sourceAccumulator, + }); await expect( - contract.setFeeds(sequencer, feeds, { blockNumber: blockNumber - 1 }), + contract.setFeeds(sequencer, feeds, { + sourceAccumulator: ethers.toBeHex(101, 32), + }), ).to.be.reverted; }); @@ -402,13 +408,14 @@ describe('AggregatedDataFeedStore', () => { data: '0x12343267643573', }; - let data = contract.encodeDataWrite([feed]); + let res = contract.encodeDataWrite([feed]); + let data = res.data; const indexTableIndex = ethers.toBeHex( (2n ** 115n * feed.stride + feed.id) / 16n, ); - const maxindexTableIndex = ethers.toBeHex(2n ** 116n - 1n); - data = data.replace(indexTableIndex.slice(2), maxindexTableIndex.slice(2)); + const maxIndexTableIndex = ethers.toBeHex(2n ** 116n - 1n); + data = data.replace(indexTableIndex.slice(2), maxIndexTableIndex.slice(2)); await expect( 
sequencer.sendTransaction({ to: contract.contract.target, @@ -416,15 +423,21 @@ describe('AggregatedDataFeedStore', () => { }), ).to.not.be.reverted; - const overflowindexTableIndex = ethers.toBeHex(2n ** 116n); + const overflowIndexTableIndex = ethers.toBeHex(2n ** 116n); data = data.replace( - maxindexTableIndex.slice(2), - overflowindexTableIndex.slice(2), + maxIndexTableIndex.slice(2), + overflowIndexTableIndex.slice(2), ); - // change blocknumber - const newPrefix = contract.encodeDataWrite([]); - data = data.replace(data.slice(2, 20), newPrefix.slice(2, 20)); + // change history accumulator + data = data.replace( + res.destinationAccumulator.slice(2), + ethers.toBeHex(0, 32).slice(2), + ); + data = data.replace( + res.sourceAccumulator.slice(2), + res.destinationAccumulator.slice(2), + ); await expect( sequencer.sendTransaction({ @@ -484,6 +497,9 @@ describe('AggregatedDataFeedStore', () => { let genericContract: ADFSGenericWrapper; + const historyAccumulator = ethers.toBeHex(1234, 32); + const newHistoryAccumulator = ethers.toBeHex(12345, 32); + beforeEach(async function () { contractWrappers = []; genericContractWrappers = []; @@ -513,9 +529,13 @@ describe('AggregatedDataFeedStore', () => { [true], ); - // store no data first time in ADFS to avoid first sstore of blocknumber - await contract.setFeeds(sequencer, []); - await genericContract.setFeeds(sequencer, []); + // store no data first time in ADFS to avoid first sstore of history accumulator + await contract.setFeeds(sequencer, [], { + destinationAccumulator: historyAccumulator, + }); + await genericContract.setFeeds(sequencer, [], { + destinationAccumulator: historyAccumulator, + }); }); for (let i = 1; i <= 100; i *= 10) { @@ -527,6 +547,8 @@ describe('AggregatedDataFeedStore', () => { [contract], [genericContract], i, + historyAccumulator, + newHistoryAccumulator, { index: 1n, }, @@ -539,6 +561,8 @@ describe('AggregatedDataFeedStore', () => { [contract], [genericContract], i, + newHistoryAccumulator, 
+ ethers.toBeHex(123456, 32), { index: 2n, }, @@ -553,6 +577,8 @@ describe('AggregatedDataFeedStore', () => { [contract], [genericContract], i, + historyAccumulator, + newHistoryAccumulator, { skip: 16, index: 1n, @@ -566,6 +592,8 @@ describe('AggregatedDataFeedStore', () => { [contract], [genericContract], i, + newHistoryAccumulator, + ethers.toBeHex(123456, 32), { skip: 16, index: 2n, diff --git a/libs/ts/contracts/test/CLAggregatorAdapter.test.ts b/libs/ts/contracts/test/CLAggregatorAdapter.test.ts index 9704e6e99c..a52209cec3 100644 --- a/libs/ts/contracts/test/CLAggregatorAdapter.test.ts +++ b/libs/ts/contracts/test/CLAggregatorAdapter.test.ts @@ -75,20 +75,27 @@ describe('CLAggregatorAdapter', function () { for (const [i, data] of aggregatorData.entries()) { it(`Should get latest answer for ${data.description}`, async function () { const data1 = encodeDataAndTimestamp(1234); - await contractWrappers[i].setFeed(sequencer, data1, 1n); + const res = await contractWrappers[i].setFeed(sequencer, data1, 1n); await contractWrappers[i].checkSetValue(caller, data1); await contractWrappers[i].checkLatestAnswer(caller, data1); const data2 = encodeDataAndTimestamp(2345); - await contractWrappers[i].proxy.proxyCall('setFeeds', sequencer, [ + await contractWrappers[i].proxy.proxyCall( + 'setFeeds', + sequencer, + [ + { + id: BigInt(data.id), + index: 2n, + data: data2, + stride: 0n, + }, + ], { - id: BigInt(data.id), - index: 2n, - data: data2, - stride: 0n, + sourceAccumulator: res.destinationAccumulator, }, - ]); + ); await contractWrappers[i].checkSetValue(caller, data2); await contractWrappers[i].checkLatestAnswer(caller, data2); @@ -97,20 +104,27 @@ describe('CLAggregatorAdapter', function () { it(`Should get latest round id for ${data.description}`, async function () { const data1 = encodeDataAndTimestamp(1234); - await contractWrappers[i].setFeed(sequencer, data1, 1n); + const res = await contractWrappers[i].setFeed(sequencer, data1, 1n); await 
contractWrappers[i].checkSetValue(caller, data1); await contractWrappers[i].checkLatestRoundId(caller, 1n); const data2 = encodeDataAndTimestamp(2345); - await contractWrappers[i].proxy.proxyCall('setFeeds', sequencer, [ + await contractWrappers[i].proxy.proxyCall( + 'setFeeds', + sequencer, + [ + { + id: BigInt(data.id), + index: 2n, + data: data2, + stride: 0n, + }, + ], { - id: BigInt(data.id), - index: 2n, - data: data2, - stride: 0n, + sourceAccumulator: res.destinationAccumulator, }, - ]); + ); await contractWrappers[i].checkSetValue(caller, data2); await contractWrappers[i].checkLatestRoundId(caller, 2n); @@ -119,20 +133,27 @@ describe('CLAggregatorAdapter', function () { it(`Should get latest round data for ${aggregatorData[i].description}`, async function () { const data1 = encodeDataAndTimestamp(1234); - await contractWrappers[i].setFeed(sequencer, data1, 1n); + const res = await contractWrappers[i].setFeed(sequencer, data1, 1n); await contractWrappers[i].checkSetValue(caller, data1); await contractWrappers[i].checkLatestRoundData(caller, data1, 1n); const data2 = encodeDataAndTimestamp(2345); - await contractWrappers[i].proxy.proxyCall('setFeeds', sequencer, [ + await contractWrappers[i].proxy.proxyCall( + 'setFeeds', + sequencer, + [ + { + id: BigInt(data.id), + index: 2n, + data: data2, + stride: 0n, + }, + ], { - id: BigInt(data.id), - index: 2n, - data: data2, - stride: 0n, + sourceAccumulator: res.destinationAccumulator, }, - ]); + ); await contractWrappers[i].checkSetValue(caller, data2); await contractWrappers[i].checkLatestRoundData(caller, data2, 2n); @@ -154,23 +175,47 @@ describe('CLAggregatorAdapter', function () { const data1 = encodeDataAndTimestamp(3132); const data2 = encodeDataAndTimestamp(2345); const data3 = encodeDataAndTimestamp(12348747364); - - await contractWrappers[i].setFeed(sequencer, data1, 1n, 1); - await contractWrappers[i].setFeed(sequencer, data2, 2n, 2); + const historyAccumulator1 = ethers.toBeHex(0, 32); + const 
historyAccumulator2 = ethers.toBeHex(1, 32); + const historyAccumulator3 = ethers.toBeHex(2, 32); + const historyAccumulator4 = ethers.toBeHex(3, 32); + + await contractWrappers[i].setFeed( + sequencer, + data1, + 1n, + historyAccumulator1, + historyAccumulator2, + ); + await contractWrappers[i].setFeed( + sequencer, + data2, + 2n, + historyAccumulator2, + historyAccumulator3, + ); await contractWrappers[i].checkSetValue(caller, data2); await contractWrappers[i].checkRoundData(caller, data1, 1n); await contractWrappers[i].checkRoundData(caller, data2, 2n); - await contractWrappers[i].proxy.proxyCall('setFeeds', sequencer, [ + await contractWrappers[i].proxy.proxyCall( + 'setFeeds', + sequencer, + [ + { + id: BigInt(data.id), + index: 3n, + data: data3, + stride: 0n, + }, + ], { - id: BigInt(data.id), - index: 3n, - data: data3, - stride: 0n, + sourceAccumulator: historyAccumulator3, + destinationAccumulator: historyAccumulator4, }, - ]); + ); await contractWrappers[i].checkSetValue(caller, data3); await contractWrappers[i].checkRoundData(caller, data3, 3n); diff --git a/libs/ts/contracts/test/CLFeedRegistryAdapter.test.ts b/libs/ts/contracts/test/CLFeedRegistryAdapter.test.ts index 6e97149ee6..8626b2a237 100644 --- a/libs/ts/contracts/test/CLFeedRegistryAdapter.test.ts +++ b/libs/ts/contracts/test/CLFeedRegistryAdapter.test.ts @@ -37,12 +37,17 @@ describe('CLFeedRegistryAdapter', async () => { let caller: HardhatEthersSigner; let registryOwner: HardhatEthersSigner; + let sourceAccumulator: string = ethers.toBeHex(0, 32); + beforeEach(async function () { admin = (await ethers.getSigners())[9]; sequencer = (await ethers.getSigners())[10]; accessControlAdmin = (await ethers.getSigners())[5]; caller = (await ethers.getSigners())[6]; registryOwner = (await ethers.getSigners())[11]; + clAdapters = []; + + sourceAccumulator = ethers.toBeHex(0, 32); proxy = new UpgradeableProxyADFSWrapper(); await proxy.init(admin, accessControlAdmin); @@ -94,10 +99,18 @@ 
describe('CLFeedRegistryAdapter', async () => { await clRegistry.checkDecimals(d.base, d.quote, d.decimals); } }); + it('Should return the correct latest answer', async () => { const value = encodeDataAndTimestamp(3132); for (const clAdapter of clAdapters) { - await clAdapter.setFeed(sequencer, value, 1n); + const res = await clAdapter.setFeed( + sequencer, + value, + 1n, + sourceAccumulator, + ); + sourceAccumulator = res.destinationAccumulator; + await clAdapter.checkLatestAnswer(caller, value); } @@ -108,8 +121,16 @@ describe('CLFeedRegistryAdapter', async () => { it('Should return the correct latest round id', async () => { const value = encodeDataAndTimestamp(3132); + for (const clAdapter of clAdapters) { - await clAdapter.setFeed(sequencer, value, 1n); + const res = await clAdapter.setFeed( + sequencer, + value, + 1n, + sourceAccumulator, + ); + sourceAccumulator = res.destinationAccumulator; + await clAdapter.checkLatestRoundId(caller, 1n); } @@ -120,8 +141,16 @@ describe('CLFeedRegistryAdapter', async () => { it('Should return the correct latest round data', async () => { const value = encodeDataAndTimestamp(3132); + for (const clAdapter of clAdapters) { - await clAdapter.setFeed(sequencer, value, 1n); + const res = await clAdapter.setFeed( + sequencer, + value, + 1n, + sourceAccumulator, + ); + sourceAccumulator = res.destinationAccumulator; + await clAdapter.checkLatestRoundData(caller, value, 1n); } @@ -132,8 +161,16 @@ describe('CLFeedRegistryAdapter', async () => { it('Should return the correct round data', async () => { const value = encodeDataAndTimestamp(3132); + for (const clAdapter of clAdapters) { - await clAdapter.setFeed(sequencer, value, 1n); + const res = await clAdapter.setFeed( + sequencer, + value, + 1n, + sourceAccumulator, + ); + sourceAccumulator = res.destinationAccumulator; + await clAdapter.checkRoundData(caller, value, 1n); } diff --git a/libs/ts/contracts/test/Oracle.test.ts b/libs/ts/contracts/test/Oracle.test.ts index 
0c91ae34f5..0c44e41214 100644 --- a/libs/ts/contracts/test/Oracle.test.ts +++ b/libs/ts/contracts/test/Oracle.test.ts @@ -30,6 +30,8 @@ describe('Gas usage comparison between Chainlink and Blocksense @fork', async fu let proxy: UpgradeableProxyADFSWrapper; let caller: HardhatEthersSigner; + let sourceAccumulator: string = ethers.toBeHex(0, 32); + before(async function () { if (process.env['FORKING'] !== 'true') { this.skip(); @@ -52,7 +54,8 @@ describe('Gas usage comparison between Chainlink and Blocksense @fork', async fu clAdapter = new CLAdapterWrapper(); await clAdapter.init(data.description, data.decimals, data.id, proxy); const value = encodeDataAndTimestamp(312343354, Date.now() - 1234); - await clAdapter.setFeed(sequencer, value, 1n); + const res = await clAdapter.setFeed(sequencer, value, 1n); + sourceAccumulator = res.destinationAccumulator; const signer = (await ethers.getSigners())[5]; @@ -198,28 +201,52 @@ describe('Gas usage comparison between Chainlink and Blocksense @fork', async fu await proxyV2.setFeed(value1); const clAdapterValue1 = encodeDataAndTimestamp(312343); - await clAdapter.setFeed(sequencer, clAdapterValue1, 2n); + let res = await clAdapter.setFeed( + sequencer, + clAdapterValue1, + 2n, + sourceAccumulator, + ); + sourceAccumulator = res.destinationAccumulator; const value2 = encodeData(1312343); await proxyV1.setFeed(value2); await proxyV2.setFeed(value2); const clAdapterValue2 = encodeDataAndTimestamp(1312343); - await clAdapter.setFeed(sequencer, clAdapterValue2, 3n); + res = await clAdapter.setFeed( + sequencer, + clAdapterValue2, + 3n, + sourceAccumulator, + ); + sourceAccumulator = res.destinationAccumulator; const value3 = encodeData(13123433); await proxyV1.setFeed(value3); await proxyV2.setFeed(value3); const clAdapterValue3 = encodeDataAndTimestamp(13123433); - await clAdapter.setFeed(sequencer, clAdapterValue3, 4n); + res = await clAdapter.setFeed( + sequencer, + clAdapterValue3, + 4n, + sourceAccumulator, + ); + 
sourceAccumulator = res.destinationAccumulator; const value4 = encodeData(13142343); await proxyV1.setFeed(value4); await proxyV2.setFeed(value4); const clAdapterValue4 = encodeDataAndTimestamp(13142343); - await clAdapter.setFeed(sequencer, clAdapterValue4, 5n); + res = await clAdapter.setFeed( + sequencer, + clAdapterValue4, + 5n, + sourceAccumulator, + ); + sourceAccumulator = res.destinationAccumulator; await callAndCompareOracles( oracles, diff --git a/libs/ts/contracts/test/Registry.test.ts b/libs/ts/contracts/test/Registry.test.ts index 4131447fbb..329f117825 100644 --- a/libs/ts/contracts/test/Registry.test.ts +++ b/libs/ts/contracts/test/Registry.test.ts @@ -110,10 +110,18 @@ describe('Gas usage comparison between Chainlink and Blocksense registry @fork', const valueETH = encodeDataAndTimestamp(312343354); const valueBTC = encodeDataAndTimestamp(3123434); + + let sourceAccumulator = ethers.toBeHex(0, 32); for (const [i, value] of [valueETH, valueBTC].entries()) { await aggregatorWrappersV1[i].setFeed(value); await aggregatorWrappersV2[i].setFeed(value); - await clAdapters[i].setFeed(sequencer, value, 1n); + const res = await clAdapters[i].setFeed( + sequencer, + value, + 1n, + sourceAccumulator, + ); + sourceAccumulator = res.destinationAccumulator; } registryWrapperV1 = new CLRegistryBaseWrapperExp( diff --git a/libs/ts/contracts/test/UpgradeableProxyADFS.test.ts b/libs/ts/contracts/test/UpgradeableProxyADFS.test.ts index cf83d8cbc1..11e0463d23 100644 --- a/libs/ts/contracts/test/UpgradeableProxyADFS.test.ts +++ b/libs/ts/contracts/test/UpgradeableProxyADFS.test.ts @@ -100,8 +100,9 @@ describe('UpgradeableProxyADFS', function () { }); it('Should preserve storage when implementation is changed and call implementation with calldata', async function () { + const historyAccumulator = ethers.toBeHex(1234, 32); await proxy.proxyCall('setFeeds', sequencer, [feed], { - blockNumber: 1, + destinationAccumulator: historyAccumulator, }); const valueV1 = (await 
proxy.proxyCall('getValues', sequencer, [feed]))[0]; @@ -130,8 +131,12 @@ describe('UpgradeableProxyADFS', function () { }; // set feed at round 2 when upgrading to new implementation - const blocknumber = 1234; - const callData = newImplementation.encodeDataWrite([newFeed], blocknumber); + const newHistoryAccumulator = ethers.toBeHex(12345, 32); + const callData = newImplementation.encodeDataWrite( + [newFeed], + historyAccumulator, + newHistoryAccumulator, + ).data; const tx = await proxy.upgradeImplementationAndCall( newImplementation, @@ -146,7 +151,7 @@ describe('UpgradeableProxyADFS', function () { await expect(tx) .to.emit(proxy.contract, 'Upgraded') .withArgs(newImplementation.contract); - newImplementation.checkEvent(receipt!, blocknumber); + newImplementation.checkEvent(receipt!, newHistoryAccumulator); // get old value at round 1 and assert that is hasn't changed after upgrade const valueV2 = ( @@ -257,6 +262,9 @@ describe('UpgradeableProxyADFS', function () { let genericProxy: UpgradeableProxyADFSGenericWrapper; + const historyAccumulator = ethers.toBeHex(1234, 32); + const newHistoryAccumulator = ethers.toBeHex(12345, 32); + beforeEach(async function () { historicalContractWrappers = []; historicalContractGenericWrappers = []; @@ -294,10 +302,13 @@ describe('UpgradeableProxyADFS', function () { [true], ); - // store no data first time in ADFS to avoid first sstore of blocknumber - await proxy.proxyCall('setFeeds', sequencer, []); - - await genericProxy.proxyCall('setFeeds', sequencer, []); + // store no data first time in ADFS to avoid first sstore of history accumulator + await proxy.proxyCall('setFeeds', sequencer, [], { + destinationAccumulator: historyAccumulator, + }); + await genericProxy.proxyCall('setFeeds', sequencer, [], { + destinationAccumulator: historyAccumulator, + }); }); for (let i = 1; i <= 100; i *= 10) { @@ -309,6 +320,8 @@ describe('UpgradeableProxyADFS', function () { [proxy], [genericProxy], i, + historyAccumulator, + 
newHistoryAccumulator, { index: 1n, }, @@ -321,6 +334,8 @@ describe('UpgradeableProxyADFS', function () { [proxy], [genericProxy], i, + newHistoryAccumulator, + ethers.toBeHex(123456, 32), { index: 2n, }, @@ -335,6 +350,8 @@ describe('UpgradeableProxyADFS', function () { [proxy], [genericProxy], i, + historyAccumulator, + newHistoryAccumulator, { skip: 16, index: 1n, @@ -348,6 +365,8 @@ describe('UpgradeableProxyADFS', function () { [proxy], [genericProxy], i, + newHistoryAccumulator, + ethers.toBeHex(123456, 32), { skip: 16, index: 2n, diff --git a/libs/ts/contracts/test/examples/ADFSConsumer.test.ts b/libs/ts/contracts/test/examples/ADFSConsumer.test.ts index 3b25892a78..f2a30443bb 100644 --- a/libs/ts/contracts/test/examples/ADFSConsumer.test.ts +++ b/libs/ts/contracts/test/examples/ADFSConsumer.test.ts @@ -46,6 +46,7 @@ describe('Example: ADFSConsumer', function () { const id = 1n; const index = 1n; + const destinationAccumulator = ethers.toBeHex(1, 32); beforeEach(async function () { sequencer = (await ethers.getSigners())[0]; @@ -59,7 +60,9 @@ describe('Example: ADFSConsumer', function () { [true], ); - await dataFeedStore.setFeeds(sequencer, feeds); + await dataFeedStore.setFeeds(sequencer, feeds, { + destinationAccumulator, + }); adfsConsumer = await deployContract( 'ADFSConsumer', @@ -168,7 +171,10 @@ describe('Example: ADFSConsumer', function () { stride: 0n, data: feedData, }; - await dataFeedStore.setFeeds(sequencer, [feed]); + await dataFeedStore.setFeeds(sequencer, [feed], { + sourceAccumulator: destinationAccumulator, + destinationAccumulator: ethers.toBeHex(2, 32), + }); const timestamp = await adfsConsumer.getEpochSeconds(feed.id); expect(timestamp).to.be.equal(Math.floor(timestampNow / 1000)); @@ -183,7 +189,10 @@ describe('Example: ADFSConsumer', function () { stride: 0n, data: feedData, }; - await dataFeedStore.setFeeds(sequencer, [feed]); + await dataFeedStore.setFeeds(sequencer, [feed], { + sourceAccumulator: destinationAccumulator, + 
destinationAccumulator: ethers.toBeHex(2, 32), + }); const timestamp = await adfsConsumer.getEpochMilliseconds(feed.id); expect(timestamp).to.be.equal(timestampNow); diff --git a/libs/ts/contracts/test/examples/CLFeedRegistryAdapterConsumer.test.ts b/libs/ts/contracts/test/examples/CLFeedRegistryAdapterConsumer.test.ts index 1ae70d56dd..8a53fdfdc4 100644 --- a/libs/ts/contracts/test/examples/CLFeedRegistryAdapterConsumer.test.ts +++ b/libs/ts/contracts/test/examples/CLFeedRegistryAdapterConsumer.test.ts @@ -51,13 +51,20 @@ describe('Example: CLFeedRegistryAdapterConsumer', function () { [true], ); + let sourceAccumulator = ethers.toBeHex(0, 32); for (const data of aggregatorData) { const newAdapter = new CLAdapterWrapper(); await newAdapter.init(data.description, data.decimals, data.id, proxy); aggregators.push(newAdapter); const value = encodeDataAndTimestamp(data.id * 1000, Date.now()); - await newAdapter.setFeed(sequencer, value, 1n); + const res = await newAdapter.setFeed( + sequencer, + value, + 1n, + sourceAccumulator, + ); + sourceAccumulator = res.destinationAccumulator; } feedRegistry = new CLRegistryBaseWrapper('CLRegistryV2', proxy.contract); diff --git a/libs/ts/contracts/test/utils/helpers/common.ts b/libs/ts/contracts/test/utils/helpers/common.ts index 2c0e92f847..96afd0fce8 100644 --- a/libs/ts/contracts/test/utils/helpers/common.ts +++ b/libs/ts/contracts/test/utils/helpers/common.ts @@ -9,6 +9,8 @@ export const setFeeds = async ( genericContractWrappers: IADFSWrapper[] | IUpgradeableProxyADFSWrapper[], contractWrappers: IADFSWrapper[] | IUpgradeableProxyADFSWrapper[], valuesCount: number, + sourceAccumulator: string, + destinationAccumulator: string, adfsData?: { skip?: number; // used to skip feeds so to make testing ring buffer index table write index?: bigint; @@ -31,10 +33,22 @@ export const setFeeds = async ( for (const contract of contractWrappers) { if (isUpgradeableProxy(contract)) { receipts.push( - await (await 
contract.proxyCall('setFeeds', sequencer, feeds)).wait(), + await ( + await contract.proxyCall('setFeeds', sequencer, feeds, { + sourceAccumulator, + destinationAccumulator, + }) + ).tx.wait(), ); } else { - receipts.push(await (await contract.setFeeds(sequencer, feeds)).wait()); + receipts.push( + await ( + await contract.setFeeds(sequencer, feeds, { + sourceAccumulator, + destinationAccumulator, + }) + ).tx.wait(), + ); } } @@ -42,11 +56,21 @@ export const setFeeds = async ( for (const contract of genericContractWrappers) { if (isUpgradeableProxy(contract)) { receiptsGeneric.push( - await (await contract.proxyCall('setFeeds', sequencer, feeds)).wait(), + await ( + await contract.proxyCall('setFeeds', sequencer, feeds, { + sourceAccumulator, + destinationAccumulator, + }) + ).tx.wait(), ); } else { receiptsGeneric.push( - await (await contract.setFeeds(sequencer, feeds)).wait(), + await ( + await contract.setFeeds(sequencer, feeds, { + sourceAccumulator, + destinationAccumulator, + }) + ).tx.wait(), ); } } diff --git a/libs/ts/contracts/test/utils/helpers/compareGasWithExperiments.ts b/libs/ts/contracts/test/utils/helpers/compareGasWithExperiments.ts index db491e9813..77e971455f 100644 --- a/libs/ts/contracts/test/utils/helpers/compareGasWithExperiments.ts +++ b/libs/ts/contracts/test/utils/helpers/compareGasWithExperiments.ts @@ -20,6 +20,8 @@ export const compareGasUsed = async < adfsContractWrappers: IADFSWrapper[] | IUpgradeableProxyADFSWrapper[], adfsGenericContractWrappers: IADFSWrapper[] | IUpgradeableProxyADFSWrapper[], valuesCount: number, + sourceAccumulator: string, + destinationAccumulator: string, adfsData?: { skip?: number; // used to skip feeds so to make testing ring buffer index table write index?: bigint; @@ -38,6 +40,8 @@ export const compareGasUsed = async < adfsGenericContractWrappers, adfsContractWrappers, valuesCount, + sourceAccumulator, + destinationAccumulator, adfsData, start, ); @@ -60,9 +64,9 @@ export const compareGasUsed = async < 
for (const receipt of data.receipts) { for (const wrapper of adfsContractWrappers) { const tx = await ethers.provider.getTransaction(receipt?.hash!); - const blockNumberInReceipt = parseInt('0x' + tx!.data.slice(4, 20), 16); + const destinationAccumulatorInReceipt = '0x' + tx!.data.slice(68, 132); if (!isUpgradeableProxy(wrapper)) { - wrapper.checkEvent(receipt!, blockNumberInReceipt); + wrapper.checkEvent(receipt!, destinationAccumulatorInReceipt); } } } diff --git a/libs/ts/contracts/test/utils/wrappers/adfs/ADFSBase.ts b/libs/ts/contracts/test/utils/wrappers/adfs/ADFSBase.ts index b46033600a..211b72fa99 100644 --- a/libs/ts/contracts/test/utils/wrappers/adfs/ADFSBase.ts +++ b/libs/ts/contracts/test/utils/wrappers/adfs/ADFSBase.ts @@ -15,18 +15,32 @@ export abstract class ADFSBaseWrapper implements IADFSWrapper { sequencer: HardhatEthersSigner, feeds: Feed[], opts: { - blockNumber?: number; + sourceAccumulator?: string; + destinationAccumulator?: string; txData?: any; } = {}, ) { - return sequencer.sendTransaction({ - to: this.contract.target, - data: this.encodeDataWrite(feeds, opts.blockNumber), - ...opts.txData, - }); + const { data, sourceAccumulator, destinationAccumulator } = + this.encodeDataWrite( + feeds, + opts.sourceAccumulator, + opts.destinationAccumulator, + ); + return { + tx: await sequencer.sendTransaction({ + to: this.contract.target, + data, + ...opts.txData, + }), + sourceAccumulator, + destinationAccumulator, + }; } - public checkEvent(receipt: TransactionReceipt, newBlockNumber: number): void { + public checkEvent( + receipt: TransactionReceipt, + destinationAccumulator: string, + ): void { const fragment = this.getEventFragment(); let log: Log = Object.assign({}); @@ -46,7 +60,7 @@ export abstract class ADFSBaseWrapper implements IADFSWrapper { log.data, ); - expect(parsedEvent[0]).to.be.eq(newBlockNumber); + expect(parsedEvent[0]).to.be.eq(destinationAccumulator); } public getEventFragment(): EventFragment { @@ -54,8 +68,8 @@ export 
abstract class ADFSBaseWrapper implements IADFSWrapper { name: 'DataFeedsUpdated', inputs: [ { - type: 'uint256', - name: 'newBlockNumber', + type: 'bytes32', + name: 'newHistoryAccumulator', }, ], }); @@ -160,11 +174,25 @@ export abstract class ADFSBaseWrapper implements IADFSWrapper { return results; } - public encodeDataWrite = (feeds: Feed[], blockNumber?: number) => { - blockNumber ??= Date.now() + 100; + public encodeDataWrite = ( + feeds: Feed[], + sourceAccumulator?: string, + destinationAccumulator?: string, + ) => { + sourceAccumulator ??= ethers.toBeHex(0, 32); + destinationAccumulator ??= ethers.toBeHex( + (Math.random() * 10000).toFixed(0), + 32, + ); + const prefix = ethers.solidityPacked( - ['bytes1', 'uint64', 'uint32'], - [ethers.toBeHex(WriteOp.SetFeeds), blockNumber, feeds.length], + ['bytes1', 'bytes32', 'bytes32', 'uint32'], + [ + ethers.toBeHex(WriteOp.SetFeeds), + sourceAccumulator, + destinationAccumulator, + feeds.length, + ], ); const data = feeds.map(feed => { @@ -234,7 +262,11 @@ export abstract class ADFSBaseWrapper implements IADFSWrapper { }) .join(''); - return prefix.concat(data.join('')).concat(roundData); + return { + data: prefix.concat(data.join('')).concat(roundData), + sourceAccumulator, + destinationAccumulator, + }; }; public encodeDataRead = (operation: ReadOp, feed: ReadFeed) => { diff --git a/libs/ts/contracts/test/utils/wrappers/adfs/ADFSBaseGeneric.ts b/libs/ts/contracts/test/utils/wrappers/adfs/ADFSBaseGeneric.ts index 91bb760343..6bc44c12cd 100644 --- a/libs/ts/contracts/test/utils/wrappers/adfs/ADFSBaseGeneric.ts +++ b/libs/ts/contracts/test/utils/wrappers/adfs/ADFSBaseGeneric.ts @@ -15,25 +15,36 @@ export abstract class ADFSBaseGenericWrapper implements IADFSWrapper { sequencer: HardhatEthersSigner, feeds: Feed[], opts: { - blockNumber?: number; + sourceAccumulator?: string; + destinationAccumulator?: string; txData?: any; } = {}, ) { - return sequencer.sendTransaction({ - to: this.contract.target, - data: 
this.encodeDataWrite(feeds, opts.blockNumber), - ...opts.txData, - }); + const { data, sourceAccumulator, destinationAccumulator } = + this.encodeDataWrite( + feeds, + opts.sourceAccumulator, + opts.destinationAccumulator, + ); + return { + tx: await sequencer.sendTransaction({ + to: this.contract.target, + data, + ...opts.txData, + }), + sourceAccumulator, + destinationAccumulator, + }; } - public checkEvent(receipt: any, newBlockNumber: number): void { + public checkEvent(receipt: any, destinationAccumulator: string): void { const fragment = this.getEventFragment(); const parsedEvent = this.contract.interface.decodeEventLog( fragment, receipt.logs[0].data, ); - expect(parsedEvent[0]).to.be.eq(newBlockNumber); + expect(parsedEvent[0]).to.be.eq(destinationAccumulator); } public getEventFragment(): EventFragment { @@ -147,8 +158,17 @@ export abstract class ADFSBaseGenericWrapper implements IADFSWrapper { return results; } - public encodeDataWrite = (feeds: Feed[], blockNumber?: number) => { - blockNumber ??= Date.now() + 100; + public encodeDataWrite = ( + feeds: Feed[], + sourceAccumulator?: string, + destinationAccumulator?: string, + ) => { + sourceAccumulator ??= ethers.toBeHex(0, 32); + destinationAccumulator ??= ethers.toBeHex( + (Math.random() * 10000).toFixed(0), + 32, + ); + const indices = feeds.map( feed => (feed.id * 2n ** 13n + feed.index) * 2n ** feed.stride, ); @@ -183,14 +203,19 @@ export abstract class ADFSBaseGenericWrapper implements IADFSWrapper { const indexTableData = indexTableIndices.map(key => batchFeeds[key]); - return this.contract.interface.encodeFunctionData('write', [ - blockNumber, - feeds.map(feed => feed.stride), - indices, - feeds.map(feed => feed.data), - indexTableIndices, - indexTableData, - ]); + return { + data: this.contract.interface.encodeFunctionData('write', [ + sourceAccumulator, + destinationAccumulator, + feeds.map(feed => feed.stride), + indices, + feeds.map(feed => feed.data), + indexTableIndices, + indexTableData, + 
]), + sourceAccumulator, + destinationAccumulator, + }; }; public encodeDataRead = (operation: ReadOp, feed: ReadFeed) => { diff --git a/libs/ts/contracts/test/utils/wrappers/chainlink/Base.ts b/libs/ts/contracts/test/utils/wrappers/chainlink/Base.ts index 17ce8a0e11..2041b9b345 100644 --- a/libs/ts/contracts/test/utils/wrappers/chainlink/Base.ts +++ b/libs/ts/contracts/test/utils/wrappers/chainlink/Base.ts @@ -3,6 +3,7 @@ import { UpgradeableProxyADFSBaseWrapper } from '../adfs/UpgradeableProxyBase'; import { CLAggregatorAdapter } from '@blocksense/contracts/typechain'; import { HardhatEthersSigner } from '@nomicfoundation/hardhat-ethers/signers'; import { Feed, ReadOp } from '../types'; +import { TransactionResponse } from 'ethers'; export abstract class CLBaseWrapper { public contract!: CLAggregatorAdapter; @@ -14,8 +15,13 @@ export abstract class CLBaseWrapper { sequencer: HardhatEthersSigner, data: string, index: bigint, - blockNumber?: number, - ): Promise { + sourceAccumulator?: string, + destinationAccumulator?: string, + ): Promise<{ + tx: TransactionResponse; + sourceAccumulator: string; + destinationAccumulator: string; + }> { return this.proxy.proxyCall( 'setFeeds', sequencer, @@ -28,7 +34,8 @@ export abstract class CLBaseWrapper { }, ], { - blockNumber, + sourceAccumulator, + destinationAccumulator, }, ); } diff --git a/libs/ts/contracts/test/utils/wrappers/interfaces/IADFSWrapper.ts b/libs/ts/contracts/test/utils/wrappers/interfaces/IADFSWrapper.ts index ceb14842ec..285d413476 100644 --- a/libs/ts/contracts/test/utils/wrappers/interfaces/IADFSWrapper.ts +++ b/libs/ts/contracts/test/utils/wrappers/interfaces/IADFSWrapper.ts @@ -13,12 +13,17 @@ export interface IADFSWrapper extends IBaseWrapper { sequencer: HardhatEthersSigner, feeds: Feed[], opts?: { - blockNumber?: number; + sourceAccumulator?: string; + destinationAccumulator?: string; txData?: any; }, - ): Promise; + ): Promise<{ + tx: TransactionResponse; + sourceAccumulator: string; + 
destinationAccumulator: string; + }>; - checkEvent(receipt: TransactionReceipt, newBlockNumber: number): void; + checkEvent(receipt: TransactionReceipt, destinationAccumulator: string): void; getEventFragment(): EventFragment; From a19d631d784b70e5cea758fa7359fd081f429074 Mon Sep 17 00:00:00 2001 From: Hristo Staykov Date: Mon, 28 Jul 2025 19:47:38 +0300 Subject: [PATCH 2/3] refactor(sequencer/state): Use Zcash's Frontier to maintain pruned Merkle Tree over the calldatas per network --- Cargo.lock | 27 ++++++ apps/sequencer/Cargo.toml | 2 + .../sequencer/src/providers/eth_send_utils.rs | 96 +++++++++++++++++-- apps/sequencer/src/providers/provider.rs | 41 ++++++-- .../feeds_processing/src/adfs_gen_calldata.rs | 6 +- 5 files changed, 156 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2528b010e9..08cc1d5998 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2335,6 +2335,14 @@ dependencies = [ "zeroize", ] +[[package]] +name = "bridgetree" +version = "0.7.0" +source = "git+https://github.com/zcash/incrementalmerkletree#8bab449ebd6c3ce610b827afaa3b30584a36f3c9" +dependencies = [ + "incrementalmerkletree 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "brotli" version = "6.0.0" @@ -5135,6 +5143,23 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "incrementalmerkletree" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30821f91f0fa8660edca547918dc59812893b497d07c1144f326f07fdd94aba9" +dependencies = [ + "either", +] + +[[package]] +name = "incrementalmerkletree" +version = "0.8.2" +source = "git+https://github.com/zcash/incrementalmerkletree#8bab449ebd6c3ce610b827afaa3b30584a36f3c9" +dependencies = [ + "either", +] + [[package]] name = "indenter" version = "0.3.3" @@ -9063,12 +9088,14 @@ dependencies = [ "blocksense-metrics", "blocksense-registry", "blocksense-utils", + "bridgetree", "bytes", "chrono", "console-subscriber", "eyre", "futures", 
"futures-util", + "incrementalmerkletree 0.8.2 (git+https://github.com/zcash/incrementalmerkletree)", "once_cell", "paste", "pprof", diff --git a/apps/sequencer/Cargo.toml b/apps/sequencer/Cargo.toml index c25836ae70..16a4629e56 100644 --- a/apps/sequencer/Cargo.toml +++ b/apps/sequencer/Cargo.toml @@ -48,12 +48,14 @@ alloy-primitives = { workspace = true } alloy-u256-literal = { workspace = true } anyhow = { workspace = true } +bridgetree = { git = "https://github.com/zcash/incrementalmerkletree", package = "bridgetree" } bytes = { workspace = true } chrono = { workspace = true } console-subscriber = { workspace = true } eyre = { workspace = true } futures = { workspace = true } futures-util = { workspace = true } +incrementalmerkletree = { git = "https://github.com/zcash/incrementalmerkletree", package = "incrementalmerkletree" } once_cell = { workspace = true } paste = { workspace = true } pprof = { version = "0.11", features = ["flamegraph"] } diff --git a/apps/sequencer/src/providers/eth_send_utils.rs b/apps/sequencer/src/providers/eth_send_utils.rs index c6a0b465b1..e514ebae93 100644 --- a/apps/sequencer/src/providers/eth_send_utils.rs +++ b/apps/sequencer/src/providers/eth_send_utils.rs @@ -2,11 +2,12 @@ use actix_web::{rt::time::interval, web::Data}; use alloy::{ hex, network::TransactionBuilder, - primitives::{Address, Bytes, U256}, + primitives::{Address, Bytes}, providers::{Provider, ProviderBuilder}, rpc::types::{eth::TransactionRequest, TransactionReceipt}, }; -use alloy_primitives::{FixedBytes, TxHash}; + +use alloy_primitives::{keccak256, FixedBytes, TxHash, U256}; use blocksense_config::{FeedStrideAndDecimals, GNOSIS_SAFE_CONTRACT_NAME}; use blocksense_data_feeds::feeds_processing::{BatchedAggregatesToSend, VotedFeedUpdate}; use blocksense_registry::config::FeedConfig; @@ -20,7 +21,7 @@ use tokio::{ use crate::{ providers::provider::{ - parse_eth_address, ProviderStatus, ProviderType, ProvidersMetrics, RpcProvider, + parse_eth_address, HashValue, 
ProviderStatus, ProviderType, ProvidersMetrics, RpcProvider, SharedRpcProviders, }, sequencer_state::SequencerState, @@ -396,7 +397,29 @@ pub async fn eth_batch_send_to_contract( let provider_metrics = &provider.provider_metrics; let rpc_handle = &provider.provider; - let input = Bytes::from(serialized_updates); + let mut input = Bytes::from(serialized_updates.clone()); + + let latest_call_data_hash = keccak256(input.as_ref()); + + let mut next_calldata_merkle_tree = provider.calldata_merkle_tree_frontier.clone(); + next_calldata_merkle_tree.append(HashValue(latest_call_data_hash)); + + let prev_calldata_merkle_tree_root = match &provider.merkle_root_in_contract { + Some(stored_hash) => stored_hash.clone(), + None => provider.calldata_merkle_tree_frontier.root(), + }; + let next_calldata_merkle_tree_root = next_calldata_merkle_tree.root(); + + // Merkle tree over all call data management for ADFS contracts (version 0 is legacy). + let serialized_updates = [ + vec![1], + prev_calldata_merkle_tree_root.0.to_vec(), + next_calldata_merkle_tree_root.0.to_vec(), + serialized_updates, + ] + .concat(); + + input = Bytes::from(serialized_updates); let receipt; let tx_time = Instant::now(); @@ -465,7 +488,7 @@ pub async fn eth_batch_send_to_contract( return Ok(("timeout".to_string(), feeds_to_update_ids)); } - match get_nonce( + let latest_nonce = match get_nonce( &net, rpc_handle, &sender_address, @@ -489,8 +512,21 @@ pub async fn eth_batch_send_to_contract( "Detected previously submitted transaction included on-chain: {included_tx_hash:?} in network `{net}` block height {block_height}" ); } + + try_to_sync( + net.as_str(), + &mut provider, + &contract_address, + block_height, + &next_calldata_merkle_tree_root, + latest_nonce, + nonce, + ) + .await; + return Ok(("true".to_string(), feeds_to_update_ids)); } + latest_nonce } Err(err) => { warn!("{err}"); @@ -643,6 +679,16 @@ pub async fn eth_batch_send_to_contract( Err(err) => { warn!("Error while submitting transaction in 
network `{net}` block height {block_height} and address {sender_address} due to {err}"); if err.to_string().contains("execution revert") { + try_to_sync( + net.as_str(), + &mut provider, + &contract_address, + block_height, + &next_calldata_merkle_tree_root, + latest_nonce, + nonce, + ) + .await; return Ok(("false".to_string(), feeds_to_update_ids)); } else { inc_retries_with_backoff( @@ -722,10 +768,18 @@ pub async fn eth_batch_send_to_contract( log_gas_used(&net, &receipt, transaction_time, provider_metrics).await; provider.update_history(&updates.updates); + let result = receipt.status().to_string(); + if result == "true" { + //Transaction was successfully confirmed therefore we update the latest state hash + let root = next_calldata_merkle_tree.root(); + provider.calldata_merkle_tree_frontier = next_calldata_merkle_tree; + provider.merkle_root_in_contract = None; + debug!("Successfully updated contract in network `{net}` block height {block_height} Merkle root {root:?}"); + } // TODO: Reread round counters + latest state hash from contract drop(provider); debug!("Released a read/write lock on provider state in network `{net}` block height {block_height}"); - Ok((receipt.status().to_string(), feeds_to_update_ids)) + Ok((result, feeds_to_update_ids)) } #[allow(clippy::too_many_arguments)] @@ -824,6 +878,36 @@ pub async fn get_gas_limit( } } +async fn try_to_sync( + net: &str, + provider: &mut RpcProvider, + contract_address: &Address, + block_height: u64, + next_calldata_merkle_tree_root: &HashValue, + latest_nonce: u64, + previous_nonce: u64, +) { + let rpc_handle = &provider.provider; + match rpc_handle + .get_storage_at(*contract_address, U256::from(0)) + .await + { + Ok(root) => { + if root != next_calldata_merkle_tree_root.0.into() { + // If the nonce in the contract increased and the next state root hash is not as we expect, + // another sequencer was able to post updates for the current block height before this one. 
+ // We need to take this into account and reread the round counters of the feeds. + info!("Updates to contract already posted, network {net}, block_height {block_height}, latest_nonce {latest_nonce}, previous_nonce {previous_nonce}, merkle_root in contract {root}"); + provider.merkle_root_in_contract = Some(HashValue(root.into())); + // TODO: Read round counters from contract + } + } + Err(e) => { + warn!("Failed to read root from network {net} with contract address {contract_address} : {e}"); + } + } +} + pub async fn get_nonce( net: &str, rpc_handle: &ProviderType, diff --git a/apps/sequencer/src/providers/provider.rs b/apps/sequencer/src/providers/provider.rs index 6dd145ced3..e1bdfcd501 100644 --- a/apps/sequencer/src/providers/provider.rs +++ b/apps/sequencer/src/providers/provider.rs @@ -11,7 +11,7 @@ use alloy::{ }, signers::local::PrivateKeySigner, }; -use alloy_primitives::U256; +use alloy_primitives::{keccak256, B256, U256}; use alloy_u256_literal::u256; use blocksense_feeds_processing::adfs_gen_calldata::{ calc_row_index, RoundBufferIndices, MAX_HISTORY_ELEMENTS_PER_FEED, @@ -19,6 +19,7 @@ use blocksense_feeds_processing::adfs_gen_calldata::{ }; use blocksense_utils::{EncodedFeedId, FeedId}; use futures::future::join_all; +use incrementalmerkletree::{frontier::Frontier, Hashable, Level}; use reqwest::Url; // TODO @ymadzhunkov include URL directly from url crate use blocksense_config::{ @@ -86,7 +87,25 @@ impl Contract { } } +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct HashValue(pub B256); + +impl Hashable for HashValue { + fn combine(_level: Level, a: &Self, b: &Self) -> Self { + HashValue(keccak256([a.0.to_vec(), b.0.to_vec()].concat())) + } + fn empty_root(_level: Level) -> Self { + HashValue(B256::ZERO) + } + + fn empty_leaf() -> Self { + HashValue(B256::ZERO) + } +} + pub struct RpcProvider { + pub calldata_merkle_tree_frontier: Frontier, + pub merkle_root_in_contract: Option, pub network: String, pub provider: ProviderType, 
pub signer: PrivateKeySigner, @@ -233,12 +252,12 @@ async fn load_data_from_chain( } } -async fn log_if_contract_exists(rpc_provider: Arc>, contract_name: String) { - rpc_provider +async fn log_if_contract_exists(provider_mutex: Arc>, contract_name: String) { + provider_mutex .lock() .await - .log_if_contract_exists(&contract_name) - .await + .log_if_contract_exists_and_get_latest_root(contract_name.as_str()) + .await; } #[derive(Debug, Clone, Copy)] @@ -317,6 +336,8 @@ impl RpcProvider { } } RpcProvider { + calldata_merkle_tree_frontier: Frontier::::empty(), + merkle_root_in_contract: None, network: network.to_string(), provider, signer: signer.clone(), @@ -764,7 +785,7 @@ impl RpcProvider { self.rpc_url.clone() } - pub async fn log_if_contract_exists(&self, contract_name: &str) { + pub async fn log_if_contract_exists_and_get_latest_root(&mut self, contract_name: &str) { let address = self.get_contract_address(contract_name).ok(); let network = self.network.as_str(); if let Some(addr) = address { @@ -783,6 +804,14 @@ impl RpcProvider { let same_byte_code = expected_byte_code.eq(&a); if same_byte_code { info!("Contract {contract_name} exists in network {network} on {addr} matching byte code!"); + match self.provider.get_storage_at(addr, U256::from(0)).await { + Ok(root) => { + self.merkle_root_in_contract = Some(HashValue(root.into())); + } + Err(e) => { + warn!("Failed to read root from network {network} with contract address {addr} : {e}"); + } + }; } else { warn!("Contract {contract_name} exists in network {network} on {addr} but bytecode differs! 
Found {byte_code:?} expected {expected_byte_code:?}"); } diff --git a/libs/feeds_processing/src/adfs_gen_calldata.rs b/libs/feeds_processing/src/adfs_gen_calldata.rs index f8d5dd6ff4..d70d4facbc 100644 --- a/libs/feeds_processing/src/adfs_gen_calldata.rs +++ b/libs/feeds_processing/src/adfs_gen_calldata.rs @@ -78,8 +78,6 @@ pub async fn adfs_serialize_updates( let updates = &feed_updates.updates; info!("Preparing a batch of ADFS feeds for network `{net}`"); - result.push(0x01); - result.append(&mut feed_updates.block_height.to_be_bytes().to_vec()); result.append(&mut (updates.len() as u32).to_be_bytes().to_vec()); let mut feeds_info = HashMap::new(); @@ -354,7 +352,7 @@ pub mod tests { let (updates, rb_indices, config) = setup_updates_rb_indices_and_config(&updates_init, &round_counters_init, config_init); - let expected_result = "0100000000499602d2000000050102400c0107123432676435730002400501022456000260040102367800028003010248900002a00201025abc010000000000000500040003000200000000000000000000000000000000000000000e80000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000"; + let expected_result = "000000050102400c0107123432676435730002400501022456000260040102367800028003010248900002a00201025abc010000000000000500040003000200000000000000000000000000000000000000000e80000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000"; let mut feeds_rb_indexes = HashMap::new(); @@ -409,7 +407,7 @@ pub mod tests { setup_updates_rb_indices_and_config(&updates_init, &round_counters_init, config_init); rb_indices.insert(encoded_feed_id_for_stride_zero(6), 5); - let expected_result = "0100000000499602d2000000050102400c0107123432676435730002400501022456000260040102367800028003010248900002a00201025abc010000000000000500040003000200040000000000000000000000000000000000000e80000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000"; + let expected_result = 
"000000050102400c0107123432676435730002400501022456000260040102367800028003010248900002a00201025abc010000000000000500040003000200040000000000000000000000000000000000000e80000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000"; let mut feeds_rb_indexes = HashMap::new(); From caee75b876f70989ee2fc5a20cd063b68dfcd3e1 Mon Sep 17 00:00:00 2001 From: Hristo Staykov Date: Wed, 27 Aug 2025 19:59:57 +0300 Subject: [PATCH 3/3] fix(sequencer/tests): Take into consideration changes to serialization regarding source/target hashes Co-authored-by: Aneta Tsvetkova --- .../sequencer/src/providers/eth_send_utils.rs | 42 ++++++++++++------- apps/sequencer/src/providers/provider.rs | 2 +- libs/config/src/lib.rs | 8 ++-- .../feeds_processing/src/adfs_gen_calldata.rs | 2 +- 4 files changed, 32 insertions(+), 22 deletions(-) diff --git a/apps/sequencer/src/providers/eth_send_utils.rs b/apps/sequencer/src/providers/eth_send_utils.rs index e514ebae93..d8dd0a346c 100644 --- a/apps/sequencer/src/providers/eth_send_utils.rs +++ b/apps/sequencer/src/providers/eth_send_utils.rs @@ -397,9 +397,7 @@ pub async fn eth_batch_send_to_contract( let provider_metrics = &provider.provider_metrics; let rpc_handle = &provider.provider; - let mut input = Bytes::from(serialized_updates.clone()); - - let latest_call_data_hash = keccak256(input.as_ref()); + let latest_call_data_hash = keccak256(&serialized_updates); let mut next_calldata_merkle_tree = provider.calldata_merkle_tree_frontier.clone(); next_calldata_merkle_tree.append(HashValue(latest_call_data_hash)); @@ -410,16 +408,26 @@ pub async fn eth_batch_send_to_contract( }; let next_calldata_merkle_tree_root = next_calldata_merkle_tree.root(); - // Merkle tree over all call data management for ADFS contracts (version 0 is legacy). 
- let serialized_updates = [ - vec![1], - prev_calldata_merkle_tree_root.0.to_vec(), - next_calldata_merkle_tree_root.0.to_vec(), - serialized_updates, - ] - .concat(); + // Merkle tree over all call data management for ADFS contracts + let prev_root_bytes = prev_calldata_merkle_tree_root.0.as_slice(); + let next_root_bytes = next_calldata_merkle_tree_root.0.as_slice(); + + // Compute (with just one allocation) the new input bytes + // from the serialized updates with roots + let mut serialized_updates_with_roots = Vec::with_capacity( + 1 + prev_root_bytes.len() + next_root_bytes.len() + serialized_updates.len(), + ); + + // - Command index (`1` is for writing) + serialized_updates_with_roots.push(1); + // - Previous merkle root bytes + serialized_updates_with_roots.extend_from_slice(prev_root_bytes); + // - Next merkle root bytes + serialized_updates_with_roots.extend_from_slice(next_root_bytes); + // - Original serialized updates + serialized_updates_with_roots.extend_from_slice(&serialized_updates); - input = Bytes::from(serialized_updates); + let input = Bytes::from(serialized_updates_with_roots); let receipt; let tx_time = Instant::now(); @@ -513,6 +521,8 @@ pub async fn eth_batch_send_to_contract( ); } + // TODO: maybe move into an else clause of the `if` above, + // i.e. 
only do it when there were no included transactions found try_to_sync( net.as_str(), &mut provider, @@ -1678,7 +1688,7 @@ mod tests { .await .expect("Could not serialize updates!"); - assert_eq!(serialized_updates.to_bytes().encode_hex(), "01000000000000000000000002000303e00701026869000401ffe00801036279650101000000000000000000000000000000000000000000000000000000000000000701ff0000000000000000000000000000000000000000000000000000000000000008"); + assert_eq!(serialized_updates.to_bytes().encode_hex(), "00000002000303e00701026869000401ffe00801036279650101000000000000000000000000000000000000000000000000000000000000000701ff0000000000000000000000000000000000000000000000000000000000000008"); } use blocksense_feed_registry::types::FeedType; @@ -1742,7 +1752,7 @@ mod tests { // Note: bye is filtered out: assert_eq!( serialized_updates.to_bytes().encode_hex(), - "01000000000000000000000001000303e0070102686901010000000000000000000000000000000000000000000000000000000000000007" + "00000001000303e0070102686901010000000000000000000000000000000000000000000000000000000000000007" ); // Berachain @@ -1773,7 +1783,7 @@ mod tests { assert_eq!( serialized_updates.to_bytes().encode_hex(), - "01000000000000000000000001000303e0070102686901010000000000000000000000000000000000000000000000000000000000000007" + "00000001000303e0070102686901010000000000000000000000000000000000000000000000000000000000000007" ); // Manta @@ -1803,7 +1813,7 @@ mod tests { assert_eq!( serialized_updates.to_bytes().encode_hex(), - "01000000000000000000000001000303e0070102686901010000000000000000000000000000000000000000000000000000000000000007" + "00000001000303e0070102686901010000000000000000000000000000000000000000000000000000000000000007" ); } diff --git a/apps/sequencer/src/providers/provider.rs b/apps/sequencer/src/providers/provider.rs index e1bdfcd501..d9ef447031 100644 --- a/apps/sequencer/src/providers/provider.rs +++ b/apps/sequencer/src/providers/provider.rs @@ -1391,7 +1391,7 @@ mod tests { { use 
alloy::hex::FromHex; // Provided calldata to pre-populate counters/values - let calldata_hex = "0x01000000000000000100000001000303e3970120000000000000000000000000000015d41642f71aa02900000000003647f7d78101010000000000000000000000000000000000000000000000000000000000002397"; + let calldata_hex = "0x010000000000000000000000000000000000000000000000000000000000000000908e112c5ba35c399906f7e029ec927d3390b62188d7c51208f3d30e1283da4e00000001000303e3970120000000000000000000000000000015d41642f71aa02900000000003647f7d78101010000000000000000000000000000000000000000000000000000000000002397"; let pre_data: Bytes = Bytes::from_hex(calldata_hex).expect("Invalid calldata hex"); diff --git a/libs/config/src/lib.rs b/libs/config/src/lib.rs index ec9661a15f..7ea9660f60 100644 --- a/libs/config/src/lib.rs +++ b/libs/config/src/lib.rs @@ -534,7 +534,7 @@ pub fn test_feeds_config(id: FeedId, stride: u8) -> HashMap } pub fn test_adfs_byte_code() -> String { - "0x60a0604052348015600f57600080fd5b506040516104a23803806104a2833981016040819052602c91603c565b6001600160a01b0316608052606a565b600060208284031215604d57600080fd5b81516001600160a01b0381168114606357600080fd5b9392505050565b60805161041d610085600039600061020d015261041d6000f3fe6080604052600035600160ff1b811615610201578060011a8160101b60881c601f82116e07ffffffffffffffffffffffffffff8211171561003f57600080fd5b6011358360f81c93506086840361010b576001831b8160f01c611fff81111561006757600080fd5b841b600d84901b851b0160133611156100bc5763ffffffff60b084901c8116925060d084901c160160018401600d1b851b60001901826100a8576001861b92505b8060018403830111156100ba57600080fd5b505b80600160801b861b019050600060405183600181146100fc5760005b858110156100f65784810154838501526020909301926001016100d8565b50508181f35b83548383015260209250508181f35b600f821660041b6001600160f01b0319811c838560731b0160041c610fff60741b0154168160f0031c90506000604051600187161561014c57602091508281525b600287161561017057600160801b861b600d86901b84010154818301526020820181f35b60048716156101fd576001861b83871b86600d1b881b0
160133611156101d05763ffffffff60c087901c16915060e086901c0160001960018801600d1b891b01826101bc576001891b92505b8060018403830111156101ce57600080fd5b505b600160801b881b0160005b828110156101f95781810154848601526020909401936001016101db565b5050505b8181f35b506040513360601b81527f000000000000000000000000000000000000000000000000000000000000000090602081601481855afa8151811661024357600080fd5b50600081525060003560018160001a036103e2578060081b60c01c60005460008183110361027057600080fd5b5080600055368260481b60e01c600d60005b8281101561035e5781358060001a601f81111561029e57600080fd5b600160801b811b8260011a8060031b8460101b81610100031c85836002011a86836018011b8160031b610100031c965080840160030189019850508560051c9250601f86169550600184600160801b600188011b03036001600088118501038201111561030a57600080fd5b600094505b8285101561033057873585820185015560018501945060208801975061030f565b851561034d5787358660031b610100031c83820185015585880197505b505050505050600181019050610282565b505b828110156103b257803591508160001a8260081b8160031b610100031c92506e0fffffffffffffffffffffffffffff83111561039b57600080fd5b016001810135610fff60741b830155602101610360565b50505080600052507fe64378c8d8a289137204264780c7669f3860a703795c6f0574d925d473a4a2a760206000a1005b600080fdfea2646970667358221220e8068dbe395db7fb6a5aae9a33faf6c8f50c1363a8668f99a6f96c132303734264736f6c634300081c0033".to_string() + 
"0x60a0604052348015600f57600080fd5b5060405161049f38038061049f833981016040819052602c91603c565b6001600160a01b0316608052606a565b600060208284031215604d57600080fd5b81516001600160a01b0381168114606357600080fd5b9392505050565b60805161041a610085600039600061020d015261041a6000f3fe6080604052600035600160ff1b811615610201578060011a8160101b60881c601f82116e07ffffffffffffffffffffffffffff8211171561003f57600080fd5b6011358360f81c93506086840361010b576001831b8160f01c611fff81111561006757600080fd5b841b600d84901b851b0160133611156100bc5763ffffffff60b084901c8116925060d084901c160160018401600d1b851b60001901826100a8576001861b92505b8060018403830111156100ba57600080fd5b505b80600160801b861b019050600060405183600181146100fc5760005b858110156100f65784810154838501526020909301926001016100d8565b50508181f35b83548383015260209250508181f35b600f821660041b6001600160f01b0319811c838560731b0160041c610fff60741b0154168160f0031c90506000604051600187161561014c57602091508281525b600287161561017057600160801b861b600d86901b84010154818301526020820181f35b60048716156101fd576001861b83871b86600d1b881b0160133611156101d05763ffffffff60c087901c16915060e086901c0160001960018801600d1b891b01826101bc576001891b92505b8060018403830111156101ce57600080fd5b505b600160801b881b0160005b828110156101f95781810154848601526020909401936001016101db565b5050505b8181f35b506040513360601b81527f000000000000000000000000000000000000000000000000000000000000000090602081601481855afa8151811661024357600080fd5b50600081525060003560018160001a036103df5760013560213560005480831461026c57600080fd5b508060005536915060413560e01c604560005b8281101561035b5781358060001a601f81111561029b57600080fd5b600160801b811b8260011a8060031b8460101b81610100031c85836002011a86836018011b8160031b610100031c965080840160030189019850508560051c9250601f86169550600184600160801b600188011b03036001600088118501038201111561030757600080fd5b600094505b8285101561032d57873585820185015560018501945060208801975061030c565b851561034a5787358660031b610100031c83820185015585880197505b50505050505060018101905061027f565b505b8381101
56103af57803591508160001a8260081b8160031b610100031c92506e0fffffffffffffffffffffffffffff83111561039857600080fd5b016001810135610fff60741b83015560210161035d565b50508060005250507f6f6892f1e8eab8687f7b5f3c3bc0d046cd783c6059310be0bef4e18eb066278960206000a1005b600080fdfea2646970667358221220ee3f07b7b495b735f559d6a460639ca14de0d378e683627ebbad45eda75c331364736f6c634300081c0033".to_string() } pub fn get_test_config_with_single_provider( @@ -609,7 +609,7 @@ pub fn get_test_config_with_multiple_providers( ContractConfig { name: ADFS_CONTRACT_NAME.to_string(), address: None, - creation_byte_code: Some("0x60a0604052348015600f57600080fd5b506040516104a23803806104a2833981016040819052602c91603c565b6001600160a01b0316608052606a565b600060208284031215604d57600080fd5b81516001600160a01b0381168114606357600080fd5b9392505050565b60805161041d610085600039600061020d015261041d6000f3fe6080604052600035600160ff1b811615610201578060011a8160101b60881c601f82116e07ffffffffffffffffffffffffffff8211171561003f57600080fd5b6011358360f81c93506086840361010b576001831b8160f01c611fff81111561006757600080fd5b841b600d84901b851b0160133611156100bc5763ffffffff60b084901c8116925060d084901c160160018401600d1b851b60001901826100a8576001861b92505b8060018403830111156100ba57600080fd5b505b80600160801b861b019050600060405183600181146100fc5760005b858110156100f65784810154838501526020909301926001016100d8565b50508181f35b83548383015260209250508181f35b600f821660041b6001600160f01b0319811c838560731b0160041c610fff60741b0154168160f0031c90506000604051600187161561014c57602091508281525b600287161561017057600160801b861b600d86901b84010154818301526020820181f35b60048716156101fd576001861b83871b86600d1b881b0160133611156101d05763ffffffff60c087901c16915060e086901c0160001960018801600d1b891b01826101bc576001891b92505b8060018403830111156101ce57600080fd5b505b600160801b881b0160005b828110156101f95781810154848601526020909401936001016101db565b5050505b8181f35b506040513360601b81527f000000000000000000000000000000000000000000000000000000000000000090602081601481855afa81
51811661024357600080fd5b50600081525060003560018160001a036103e2578060081b60c01c60005460008183110361027057600080fd5b5080600055368260481b60e01c600d60005b8281101561035e5781358060001a601f81111561029e57600080fd5b600160801b811b8260011a8060031b8460101b81610100031c85836002011a86836018011b8160031b610100031c965080840160030189019850508560051c9250601f86169550600184600160801b600188011b03036001600088118501038201111561030a57600080fd5b600094505b8285101561033057873585820185015560018501945060208801975061030f565b851561034d5787358660031b610100031c83820185015585880197505b505050505050600181019050610282565b505b828110156103b257803591508160001a8260081b8160031b610100031c92506e0fffffffffffffffffffffffffffff83111561039b57600080fd5b016001810135610fff60741b830155602101610360565b50505080600052507fe64378c8d8a289137204264780c7669f3860a703795c6f0574d925d473a4a2a760206000a1005b600080fdfea2646970667358221220e8068dbe395db7fb6a5aae9a33faf6c8f50c1363a8668f99a6f96c132303734264736f6c634300081c0033".to_string()), + creation_byte_code: 
Some("0x60a0604052348015600f57600080fd5b5060405161049f38038061049f833981016040819052602c91603c565b6001600160a01b0316608052606a565b600060208284031215604d57600080fd5b81516001600160a01b0381168114606357600080fd5b9392505050565b60805161041a610085600039600061020d015261041a6000f3fe6080604052600035600160ff1b811615610201578060011a8160101b60881c601f82116e07ffffffffffffffffffffffffffff8211171561003f57600080fd5b6011358360f81c93506086840361010b576001831b8160f01c611fff81111561006757600080fd5b841b600d84901b851b0160133611156100bc5763ffffffff60b084901c8116925060d084901c160160018401600d1b851b60001901826100a8576001861b92505b8060018403830111156100ba57600080fd5b505b80600160801b861b019050600060405183600181146100fc5760005b858110156100f65784810154838501526020909301926001016100d8565b50508181f35b83548383015260209250508181f35b600f821660041b6001600160f01b0319811c838560731b0160041c610fff60741b0154168160f0031c90506000604051600187161561014c57602091508281525b600287161561017057600160801b861b600d86901b84010154818301526020820181f35b60048716156101fd576001861b83871b86600d1b881b0160133611156101d05763ffffffff60c087901c16915060e086901c0160001960018801600d1b891b01826101bc576001891b92505b8060018403830111156101ce57600080fd5b505b600160801b881b0160005b828110156101f95781810154848601526020909401936001016101db565b5050505b8181f35b506040513360601b81527f000000000000000000000000000000000000000000000000000000000000000090602081601481855afa8151811661024357600080fd5b50600081525060003560018160001a036103df5760013560213560005480831461026c57600080fd5b508060005536915060413560e01c604560005b8281101561035b5781358060001a601f81111561029b57600080fd5b600160801b811b8260011a8060031b8460101b81610100031c85836002011a86836018011b8160031b610100031c965080840160030189019850508560051c9250601f86169550600184600160801b600188011b03036001600088118501038201111561030757600080fd5b600094505b8285101561032d57873585820185015560018501945060208801975061030c565b851561034a5787358660031b610100031c83820185015585880197505b50505050505060018101905061027f565b505b83
8110156103af57803591508160001a8260081b8160031b610100031c92506e0fffffffffffffffffffffffffffff83111561039857600080fd5b016001810135610fff60741b83015560210161035d565b50508060005250507f6f6892f1e8eab8687f7b5f3c3bc0d046cd783c6059310be0bef4e18eb066278960206000a1005b600080fdfea2646970667358221220ee3f07b7b495b735f559d6a460639ca14de0d378e683627ebbad45eda75c331364736f6c634300081c0033".to_string()), deployed_byte_code: None, min_quorum: None, }, @@ -667,7 +667,7 @@ mod tests { { "name": "AggregatedDataFeedStore", "address": "0xADF5aab875E6f7F39bA76478B477007c6c934D97", - "byte_code": "0x60a0604052348015600f57600080fd5b506040516104a23803806104a2833981016040819052602c91603c565b6001600160a01b0316608052606a565b600060208284031215604d57600080fd5b81516001600160a01b0381168114606357600080fd5b9392505050565b60805161041d610085600039600061020d015261041d6000f3fe6080604052600035600160ff1b811615610201578060011a8160101b60881c601f82116e07ffffffffffffffffffffffffffff8211171561003f57600080fd5b6011358360f81c93506086840361010b576001831b8160f01c611fff81111561006757600080fd5b841b600d84901b851b0160133611156100bc5763ffffffff60b084901c8116925060d084901c160160018401600d1b851b60001901826100a8576001861b92505b8060018403830111156100ba57600080fd5b505b80600160801b861b019050600060405183600181146100fc5760005b858110156100f65784810154838501526020909301926001016100d8565b50508181f35b83548383015260209250508181f35b600f821660041b6001600160f01b0319811c838560731b0160041c610fff60741b0154168160f0031c90506000604051600187161561014c57602091508281525b600287161561017057600160801b861b600d86901b84010154818301526020820181f35b60048716156101fd576001861b83871b86600d1b881b0160133611156101d05763ffffffff60c087901c16915060e086901c0160001960018801600d1b891b01826101bc576001891b92505b8060018403830111156101ce57600080fd5b505b600160801b881b0160005b828110156101f95781810154848601526020909401936001016101db565b5050505b8181f35b506040513360601b81527f000000000000000000000000000000000000000000000000000000000000000090602081601481855afa8151811661024357600
080fd5b50600081525060003560018160001a036103e2578060081b60c01c60005460008183110361027057600080fd5b5080600055368260481b60e01c600d60005b8281101561035e5781358060001a601f81111561029e57600080fd5b600160801b811b8260011a8060031b8460101b81610100031c85836002011a86836018011b8160031b610100031c965080840160030189019850508560051c9250601f86169550600184600160801b600188011b03036001600088118501038201111561030a57600080fd5b600094505b8285101561033057873585820185015560018501945060208801975061030f565b851561034d5787358660031b610100031c83820185015585880197505b505050505050600181019050610282565b505b828110156103b257803591508160001a8260081b8160031b610100031c92506e0fffffffffffffffffffffffffffff83111561039b57600080fd5b016001810135610fff60741b830155602101610360565b50505080600052507fe64378c8d8a289137204264780c7669f3860a703795c6f0574d925d473a4a2a760206000a1005b600080fdfea2646970667358221220e8068dbe395db7fb6a5aae9a33faf6c8f50c1363a8668f99a6f96c132303734264736f6c634300081c0033" + "byte_code": "0x60a0604052348015600f57600080fd5b5060405161049f38038061049f833981016040819052602c91603c565b6001600160a01b0316608052606a565b600060208284031215604d57600080fd5b81516001600160a01b0381168114606357600080fd5b9392505050565b60805161041a610085600039600061020d015261041a6000f3fe6080604052600035600160ff1b811615610201578060011a8160101b60881c601f82116e07ffffffffffffffffffffffffffff8211171561003f57600080fd5b6011358360f81c93506086840361010b576001831b8160f01c611fff81111561006757600080fd5b841b600d84901b851b0160133611156100bc5763ffffffff60b084901c8116925060d084901c160160018401600d1b851b60001901826100a8576001861b92505b8060018403830111156100ba57600080fd5b505b80600160801b861b019050600060405183600181146100fc5760005b858110156100f65784810154838501526020909301926001016100d8565b50508181f35b83548383015260209250508181f35b600f821660041b6001600160f01b0319811c838560731b0160041c610fff60741b0154168160f0031c90506000604051600187161561014c57602091508281525b600287161561017057600160801b861b600d86901b84010154818301526020820181f35b60048716156101fd5760018
61b83871b86600d1b881b0160133611156101d05763ffffffff60c087901c16915060e086901c0160001960018801600d1b891b01826101bc576001891b92505b8060018403830111156101ce57600080fd5b505b600160801b881b0160005b828110156101f95781810154848601526020909401936001016101db565b5050505b8181f35b506040513360601b81527f000000000000000000000000000000000000000000000000000000000000000090602081601481855afa8151811661024357600080fd5b50600081525060003560018160001a036103df5760013560213560005480831461026c57600080fd5b508060005536915060413560e01c604560005b8281101561035b5781358060001a601f81111561029b57600080fd5b600160801b811b8260011a8060031b8460101b81610100031c85836002011a86836018011b8160031b610100031c965080840160030189019850508560051c9250601f86169550600184600160801b600188011b03036001600088118501038201111561030757600080fd5b600094505b8285101561032d57873585820185015560018501945060208801975061030c565b851561034a5787358660031b610100031c83820185015585880197505b50505050505060018101905061027f565b505b838110156103af57803591508160001a8260081b8160031b610100031c92506e0fffffffffffffffffffffffffffff83111561039857600080fd5b016001810135610fff60741b83015560210161035d565b50508060005250507f6f6892f1e8eab8687f7b5f3c3bc0d046cd783c6059310be0bef4e18eb066278960206000a1005b600080fdfea2646970667358221220ee3f07b7b495b735f559d6a460639ca14de0d378e683627ebbad45eda75c331364736f6c634300081c0033" } ] }"#, @@ -759,7 +759,7 @@ mod tests { { "name": "AggregatedDataFeedStore", "address": "0xADF5aab875E6f7F39bA76478B477007c6c934D97", - "byte_code": 
"0x60a0604052348015600f57600080fd5b506040516104a23803806104a2833981016040819052602c91603c565b6001600160a01b0316608052606a565b600060208284031215604d57600080fd5b81516001600160a01b0381168114606357600080fd5b9392505050565b60805161041d610085600039600061020d015261041d6000f3fe6080604052600035600160ff1b811615610201578060011a8160101b60881c601f82116e07ffffffffffffffffffffffffffff8211171561003f57600080fd5b6011358360f81c93506086840361010b576001831b8160f01c611fff81111561006757600080fd5b841b600d84901b851b0160133611156100bc5763ffffffff60b084901c8116925060d084901c160160018401600d1b851b60001901826100a8576001861b92505b8060018403830111156100ba57600080fd5b505b80600160801b861b019050600060405183600181146100fc5760005b858110156100f65784810154838501526020909301926001016100d8565b50508181f35b83548383015260209250508181f35b600f821660041b6001600160f01b0319811c838560731b0160041c610fff60741b0154168160f0031c90506000604051600187161561014c57602091508281525b600287161561017057600160801b861b600d86901b84010154818301526020820181f35b60048716156101fd576001861b83871b86600d1b881b0160133611156101d05763ffffffff60c087901c16915060e086901c0160001960018801600d1b891b01826101bc576001891b92505b8060018403830111156101ce57600080fd5b505b600160801b881b0160005b828110156101f95781810154848601526020909401936001016101db565b5050505b8181f35b506040513360601b81527f000000000000000000000000000000000000000000000000000000000000000090602081601481855afa8151811661024357600080fd5b50600081525060003560018160001a036103e2578060081b60c01c60005460008183110361027057600080fd5b5080600055368260481b60e01c600d60005b8281101561035e5781358060001a601f81111561029e57600080fd5b600160801b811b8260011a8060031b8460101b81610100031c85836002011a86836018011b8160031b610100031c965080840160030189019850508560051c9250601f86169550600184600160801b600188011b03036001600088118501038201111561030a57600080fd5b600094505b8285101561033057873585820185015560018501945060208801975061030f565b851561034d5787358660031b610100031c83820185015585880197505b505050505050600181019050610282565b505b8
28110156103b257803591508160001a8260081b8160031b610100031c92506e0fffffffffffffffffffffffffffff83111561039b57600080fd5b016001810135610fff60741b830155602101610360565b50505080600052507fe64378c8d8a289137204264780c7669f3860a703795c6f0574d925d473a4a2a760206000a1005b600080fdfea2646970667358221220e8068dbe395db7fb6a5aae9a33faf6c8f50c1363a8668f99a6f96c132303734264736f6c634300081c0033" + "byte_code": "0x60a0604052348015600f57600080fd5b5060405161049f38038061049f833981016040819052602c91603c565b6001600160a01b0316608052606a565b600060208284031215604d57600080fd5b81516001600160a01b0381168114606357600080fd5b9392505050565b60805161041a610085600039600061020d015261041a6000f3fe6080604052600035600160ff1b811615610201578060011a8160101b60881c601f82116e07ffffffffffffffffffffffffffff8211171561003f57600080fd5b6011358360f81c93506086840361010b576001831b8160f01c611fff81111561006757600080fd5b841b600d84901b851b0160133611156100bc5763ffffffff60b084901c8116925060d084901c160160018401600d1b851b60001901826100a8576001861b92505b8060018403830111156100ba57600080fd5b505b80600160801b861b019050600060405183600181146100fc5760005b858110156100f65784810154838501526020909301926001016100d8565b50508181f35b83548383015260209250508181f35b600f821660041b6001600160f01b0319811c838560731b0160041c610fff60741b0154168160f0031c90506000604051600187161561014c57602091508281525b600287161561017057600160801b861b600d86901b84010154818301526020820181f35b60048716156101fd576001861b83871b86600d1b881b0160133611156101d05763ffffffff60c087901c16915060e086901c0160001960018801600d1b891b01826101bc576001891b92505b8060018403830111156101ce57600080fd5b505b600160801b881b0160005b828110156101f95781810154848601526020909401936001016101db565b5050505b8181f35b506040513360601b81527f000000000000000000000000000000000000000000000000000000000000000090602081601481855afa8151811661024357600080fd5b50600081525060003560018160001a036103df5760013560213560005480831461026c57600080fd5b508060005536915060413560e01c604560005b8281101561035b5781358060001a601f81111561029b57600080fd5b600
160801b811b8260011a8060031b8460101b81610100031c85836002011a86836018011b8160031b610100031c965080840160030189019850508560051c9250601f86169550600184600160801b600188011b03036001600088118501038201111561030757600080fd5b600094505b8285101561032d57873585820185015560018501945060208801975061030c565b851561034a5787358660031b610100031c83820185015585880197505b50505050505060018101905061027f565b505b838110156103af57803591508160001a8260081b8160031b610100031c92506e0fffffffffffffffffffffffffffff83111561039857600080fd5b016001810135610fff60741b83015560210161035d565b50508060005250507f6f6892f1e8eab8687f7b5f3c3bc0d046cd783c6059310be0bef4e18eb066278960206000a1005b600080fdfea2646970667358221220ee3f07b7b495b735f559d6a460639ca14de0d378e683627ebbad45eda75c331364736f6c634300081c0033" } ] }"#, diff --git a/libs/feeds_processing/src/adfs_gen_calldata.rs b/libs/feeds_processing/src/adfs_gen_calldata.rs index d70d4facbc..5345fb776d 100644 --- a/libs/feeds_processing/src/adfs_gen_calldata.rs +++ b/libs/feeds_processing/src/adfs_gen_calldata.rs @@ -462,7 +462,7 @@ pub mod tests { setup_updates_rb_indices_and_config(&updates_init, &round_counters_init, config_init); round_counters.insert(encoded_feed_id_for_stride_zero(6), 5); - let expected_result = "0100000000499602d2000000050102400c0107123432676435730002400501022456000260040102367800028328010248900002a00201025abc010000000000000500040328000200040000000000000000000000000000000000000e80000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000"; + let expected_result = "000000050102400c0107123432676435730002400501022456000260040102367800028328010248900002a00201025abc010000000000000500040328000200040000000000000000000000000000000000000e80000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000"; let mut feeds_rounds = HashMap::new();