Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
56 changes: 44 additions & 12 deletions yarn-project/archiver/src/archiver/archiver.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ import { makeBlockAttestationFromBlock } from '@aztec/stdlib/testing';
import { getTelemetryClient } from '@aztec/telemetry-client';

import { jest } from '@jest/globals';
import assert from 'assert';
import { type MockProxy, mock } from 'jest-mock-extended';
import { type FormattedBlock, type Log, type Transaction, encodeFunctionData, multicall3Abi, toHex } from 'viem';

Expand Down Expand Up @@ -380,7 +381,7 @@ describe('Archiver', () => {
});
}, 10_000);

it('ignores block 2 because it had invalid attestations', async () => {
it('ignores blocks because of invalid attestations', async () => {
let latestBlockNum = await archiver.getBlockNumber();
expect(latestBlockNum).toEqual(0);

Expand All @@ -395,21 +396,27 @@ describe('Archiver', () => {
const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHashes));
const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobsFromBlock(b)));

// And define a bad block 2 with attestations from random signers
const badBlock2 = await makeBlock(2);
badBlock2.archive.root = new Fr(0x1002);
const badBlock2RollupTx = await makeRollupTx(badBlock2, times(3, Secp256k1Signer.random));
const badBlock2BlobHashes = await makeVersionedBlobHashes(badBlock2);
const badBlock2Blobs = await makeBlobsFromBlock(badBlock2);
// And define bad blocks with attestations from random signers
/**
 * Builds a block for the given number whose rollup tx is attested by random (non-committee)
 * signers, so the archiver must reject it for invalid attestations.
 * Returns the block, its rollup tx, its versioned blob hashes, and its blobs.
 */
const makeBadBlock = async (blockNumber: number) => {
  const invalidBlock = await makeBlock(blockNumber);
  // Give the bad block a distinctive archive root so tests can tell it apart from the valid one
  invalidBlock.archive.root = new Fr(0x1000 + blockNumber);
  // Sign with three random keys that are not part of the committee
  const invalidRollupTx = await makeRollupTx(invalidBlock, times(3, Secp256k1Signer.random));
  const invalidBlobHashes = await makeVersionedBlobHashes(invalidBlock);
  const invalidBlobs = await makeBlobsFromBlock(invalidBlock);
  return [invalidBlock, invalidRollupTx, invalidBlobHashes, invalidBlobs] as const;
};

const [badBlock2, badBlock2RollupTx, badBlock2BlobHashes, badBlock2Blobs] = await makeBadBlock(2);
const [badBlock3, badBlock3RollupTx, badBlock3BlobHashes, badBlock3Blobs] = await makeBadBlock(3);

// Return the archive root for the bad block 2 when L1 is queried
mockRollupRead.archiveAt.mockImplementation((args: readonly [bigint]) =>
Promise.resolve((args[0] === 2n ? badBlock2 : blocks[Number(args[0] - 1n)]).archive.root.toString()),
);

logger.warn(`Created 3 valid blocks`);
blocks.forEach(block => logger.warn(`Block ${block.number} with root ${block.archive.root.toString()}`));
blocks.forEach(b => logger.warn(`Created valid block ${b.number} with root ${b.archive.root.toString()}`));
logger.warn(`Created invalid block 2 with root ${badBlock2.archive.root.toString()}`);
logger.warn(`Created invalid block 3 with root ${badBlock3.archive.root.toString()}`);

// During the first archiver loop, we fetch block 1 and block 2 with bad attestations
publicClient.getBlockNumber.mockResolvedValue(85n);
Expand All @@ -432,11 +439,35 @@ describe('Archiver', () => {
}),
);

// Now we go for another loop, where a proper block 2 is proposed with correct attestations
// Now another loop, where we propose a block 3 with bad attestations
logger.warn(`Adding new block 3 with bad attestations`);
publicClient.getBlockNumber.mockResolvedValue(90n);
makeL2BlockProposedEvent(85n, 3n, badBlock3.archive.root.toString(), badBlock3BlobHashes);
mockRollup.read.status.mockResolvedValue([
0n,
GENESIS_ROOT,
3n,
badBlock3.archive.root.toString(),
blocks[0].archive.root.toString(),
]);
publicClient.getTransaction.mockResolvedValueOnce(badBlock3RollupTx);
blobSinkClient.getBlobSidecar.mockResolvedValueOnce(badBlock3Blobs);

// We should still be at block 1, and the pending chain validation status should still be invalid and point to block 2
// since we want the archiver to always return the earliest block with invalid attestations
await archiver.syncImmediate();
latestBlockNum = await archiver.getBlockNumber();
expect(latestBlockNum).toEqual(1);
const validationStatus = await archiver.getPendingChainValidationStatus();
assert(!validationStatus.valid);
expect(validationStatus.block.block.number).toEqual(2);
expect(validationStatus.block.block.archive.root.toString()).toEqual(badBlock2.archive.root.toString());

// Now we go for another loop, where proper blocks 2 and 3 are proposed with correct attestations
// IRL there would be an "Invalidated" event, but we are not currently relying on it
logger.warn(`Adding new block 2 with correct attestations and a block 3`);
logger.warn(`Adding new blocks 2 and 3 with correct attestations`);
publicClient.getBlockNumber.mockResolvedValue(100n);
makeL2BlockProposedEvent(90n, 2n, blocks[1].archive.root.toString(), blobHashes[1]);
makeL2BlockProposedEvent(94n, 2n, blocks[1].archive.root.toString(), blobHashes[1]);
makeL2BlockProposedEvent(95n, 3n, blocks[2].archive.root.toString(), blobHashes[2]);
mockRollup.read.status.mockResolvedValue([
0n,
Expand All @@ -452,6 +483,7 @@ describe('Archiver', () => {
);

// Now we should move to block 3
await archiver.syncImmediate();
await waitUntilArchiverBlock(3);
latestBlockNum = await archiver.getBlockNumber();
expect(latestBlockNum).toEqual(3);
Expand Down
33 changes: 24 additions & 9 deletions yarn-project/archiver/src/archiver/archiver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -354,15 +354,25 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
currentL1BlockNumber,
currentL1Timestamp,
);

// Update the pending chain validation status with the last block validation result.
// Again, we only update if validation status changed, so in a sequence of invalid blocks
// we keep track of the first invalid block so we can invalidate that one if needed.
if (
rollupStatus.validationResult &&
rollupStatus.validationResult?.valid !== this.pendingChainValidationStatus.valid
) {
this.pendingChainValidationStatus = rollupStatus.validationResult;
}

// And lastly we check if we are missing any L2 blocks behind us due to a possible L1 reorg.
// We only do this if the rollup can't prune on the next submission. Otherwise we will end up
// re-syncing the blocks we have just unwound above. We also don't do this if the last block is invalid,
// since the archiver will rightfully refuse to sync up to it.
if (!rollupCanPrune && rollupStatus.lastBlockValidationResult.valid) {
if (!rollupCanPrune && this.pendingChainValidationStatus.valid) {
await this.checkForNewBlocksBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
}

this.pendingChainValidationStatus = rollupStatus.lastBlockValidationResult;
this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
}

Expand Down Expand Up @@ -620,7 +630,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
provenArchive,
pendingBlockNumber: Number(pendingBlockNumber),
pendingArchive,
lastBlockValidationResult: { valid: true } as ValidateBlockResult,
validationResult: undefined as ValidateBlockResult | undefined,
};
this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
localPendingBlockNumber,
Expand Down Expand Up @@ -795,17 +805,22 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem
const validBlocks: PublishedL2Block[] = [];

for (const block of publishedBlocks) {
const isProven = block.block.number <= provenBlockNumber;
rollupStatus.lastBlockValidationResult = isProven
? { valid: true }
: await validateBlockAttestations(block, this.epochCache, this.l1constants, this.log);
const validationResult = await validateBlockAttestations(block, this.epochCache, this.l1constants, this.log);

// Only update the validation result if it has changed, so we can keep track of the first invalid block
// in case there is a sequence of more than one invalid block, as we need to invalidate the first one.
if (rollupStatus.validationResult?.valid !== validationResult.valid) {
rollupStatus.validationResult = validationResult;
}

if (!rollupStatus.lastBlockValidationResult.valid) {
if (!validationResult.valid) {
this.log.warn(`Skipping block ${block.block.number} due to invalid attestations`, {
blockHash: block.block.hash(),
l1BlockNumber: block.l1.blockNumber,
...pick(rollupStatus.lastBlockValidationResult, 'reason'),
...pick(validationResult, 'reason'),
});
// We keep consuming blocks if we find an invalid one, since we do not listen for BlockInvalidated events
// We just pretend the invalid ones are not there and keep consuming the next blocks
continue;
}

Expand Down
3 changes: 0 additions & 3 deletions yarn-project/archiver/src/archiver/validation.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@ describe('validateBlockAttestations', () => {
const result = await validateBlockAttestations(block, epochCache, constants, logger);

expect(result.valid).toBe(true);
expect(result.block).toBe(block);
expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(0n);
});

Expand All @@ -55,7 +54,6 @@ describe('validateBlockAttestations', () => {
const result = await validateBlockAttestations(block, epochCache, constants, logger);

expect(result.valid).toBe(true);
expect(result.block).toBe(block);
expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(0n);
});
});
Expand Down Expand Up @@ -101,7 +99,6 @@ describe('validateBlockAttestations', () => {
const block = await makeBlock(signers.slice(0, 4), committee);
const result = await validateBlockAttestations(block, epochCache, constants, logger);
expect(result.valid).toBe(true);
expect(result.block).toBe(block);
});
});
});
5 changes: 2 additions & 3 deletions yarn-project/archiver/src/archiver/validation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,8 @@ export async function validateBlockAttestations(
});

if (!committee || committee.length === 0) {
// Q: Should we accept blocks with no committee?
logger?.warn(`No committee found for epoch ${epoch} at slot ${slot}. Accepting block without validation.`, logData);
return { valid: true, block: publishedBlock };
return { valid: true };
}

const committeeSet = new Set(committee.map(member => member.toString()));
Expand All @@ -64,5 +63,5 @@ export async function validateBlockAttestations(
}

logger?.debug(`Block attestations validated successfully for block ${block.number} at slot ${slot}`, logData);
return { valid: true, block: publishedBlock };
return { valid: true };
}
123 changes: 121 additions & 2 deletions yarn-project/end-to-end/src/e2e_epochs/epochs_invalidate_block.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => {
let logger: Logger;
let l1Client: ExtendedViemWalletClient;
let rollupContract: RollupContract;
let anvilPort = 8545;

let test: EpochsTestContext;
let validators: (Operator & { privateKey: `0x${string}` })[];
Expand All @@ -50,6 +51,8 @@ describe('e2e_epochs/epochs_invalidate_block', () => {
aztecProofSubmissionEpochs: 1024,
startProverNode: false,
aztecTargetCommitteeSize: VALIDATOR_COUNT,
archiverPollingIntervalMS: 200,
anvilPort: ++anvilPort,
});

({ context, logger, l1Client } = test);
Expand Down Expand Up @@ -77,7 +80,7 @@ describe('e2e_epochs/epochs_invalidate_block', () => {
await test.teardown();
});

it('invalidates a block published without sufficient attestations', async () => {
it('proposer invalidates previous block while posting its own', async () => {
const sequencers = nodes.map(node => node.getSequencer()!);
const initialBlockNumber = await nodes[0].getBlockNumber();

Expand Down Expand Up @@ -126,10 +129,11 @@ describe('e2e_epochs/epochs_invalidate_block', () => {
0.1,
);

// Verify the BlockInvalidated event was emitted
// Verify the BlockInvalidated event was emitted and that the block was removed
const [event] = blockInvalidatedEvents;
logger.warn(`BlockInvalidated event emitted`, { event });
expect(event.args.blockNumber).toBeGreaterThan(initialBlockNumber);
expect(test.rollup.address).toEqual(event.address);

// Wait for all nodes to sync the new block
logger.warn('Waiting for all nodes to sync');
Expand All @@ -149,4 +153,119 @@ describe('e2e_epochs/epochs_invalidate_block', () => {
expect(receipt.status).toBe('success');
logger.warn(`Transaction included in block ${receipt.blockNumber}`);
});

// Scenario: the first block is published without attestations; the next proposer invalidates it
// on L1 but does NOT publish a replacement block (block production is suppressed via minTxsPerBlock).
it('proposer invalidates previous block without publishing its own', async () => {
  const sequencers = nodes.map(node => node.getSequencer()!);
  const initialBlockNumber = await nodes[0].getBlockNumber();

  // Configure all sequencers to skip collecting attestations before starting,
  // so the first block they publish will have invalid/missing attestations
  logger.warn('Configuring all sequencers to skip attestation collection and always publish blocks');
  sequencers.forEach(sequencer => {
    sequencer.updateSequencerConfig({ skipCollectingAttestations: true, minTxsPerBlock: 0 });
  });

  // Disable skipCollectingAttestations after the first block is mined and prevent sequencers from
  // publishing any more blocks (minTxsPerBlock: 100 effectively stops block production in this test)
  test.monitor.once('l2-block', ({ l2BlockNumber }) => {
    logger.warn(`Disabling skipCollectingAttestations after L2 block ${l2BlockNumber} has been mined`);
    sequencers.forEach(sequencer => {
      sequencer.updateSequencerConfig({ skipCollectingAttestations: false, minTxsPerBlock: 100 });
    });
  });

  // Start all sequencers
  await Promise.all(sequencers.map(s => s.start()));
  logger.warn(`Started all sequencers with skipCollectingAttestations=true`);

  // Create a filter for BlockInvalidated events emitted by the rollup contract
  const blockInvalidatedFilter = await l1Client.createContractEventFilter({
    address: rollupContract.address,
    abi: RollupAbi,
    eventName: 'BlockInvalidated',
    fromBlock: 1n,
    toBlock: 'latest',
  });

  // The next proposer should invalidate the previous block, but publish no block of its own
  // (block production was suppressed above)
  logger.warn('Waiting for next proposer to invalidate the previous block');

  // Wait (polling every 0.1s, up to 5 L2 slots) for the BlockInvalidated event
  const blockInvalidatedEvents = await retryUntil(
    async () => {
      const events = await l1Client.getFilterLogs({ filter: blockInvalidatedFilter });
      return events.length > 0 ? events : undefined;
    },
    'BlockInvalidated event',
    test.L2_SLOT_DURATION_IN_S * 5,
    0.1,
  );

  // Verify the BlockInvalidated event was emitted and that the block was removed:
  // with no replacement published, the rollup's pending chain tip must be back at the initial block
  const [event] = blockInvalidatedEvents;
  logger.warn(`BlockInvalidated event emitted`, { event });
  expect(event.args.blockNumber).toBeGreaterThan(initialBlockNumber);
  expect(await test.rollup.getBlockNumber()).toEqual(BigInt(initialBlockNumber));
});

// Scenario: proposers are configured NOT to invalidate (skipInvalidateBlockAsProposer), so a
// committee member must step in and invalidate the bad block after the configured delay elapses.
it('committee member invalidates a block if proposer does not come through', async () => {
  const sequencers = nodes.map(node => node.getSequencer()!);
  const initialBlockNumber = await nodes[0].getBlockNumber();

  // Configure all sequencers to skip collecting attestations before starting, and to never
  // invalidate as proposer — only as committee member, after `invalidationDelay` seconds
  logger.warn('Configuring all sequencers to skip attestation collection and invalidation as proposer');
  const invalidationDelay = test.L1_BLOCK_TIME_IN_S * 4;
  sequencers.forEach(sequencer => {
    sequencer.updateSequencerConfig({
      skipCollectingAttestations: true,
      minTxsPerBlock: 0,
      skipInvalidateBlockAsProposer: true,
      secondsBeforeInvalidatingBlockAsCommitteeMember: invalidationDelay,
    });
  });

  // Disable skipCollectingAttestations after the first (bad) block is mined,
  // recording its L1 timestamp so we can later assert the invalidation delay was honored
  let invalidBlockTimestamp: bigint | undefined;
  test.monitor.once('l2-block', ({ l2BlockNumber, timestamp }) => {
    logger.warn(`Disabling skipCollectingAttestations after L2 block ${l2BlockNumber} has been mined`);
    invalidBlockTimestamp = timestamp;
    sequencers.forEach(sequencer => {
      sequencer.updateSequencerConfig({ skipCollectingAttestations: false });
    });
  });

  // Start all sequencers
  await Promise.all(sequencers.map(s => s.start()));
  logger.warn(`Started all sequencers with skipCollectingAttestations=true`);

  // Create a filter for BlockInvalidated events emitted by the rollup contract
  const blockInvalidatedFilter = await l1Client.createContractEventFilter({
    address: rollupContract.address,
    abi: RollupAbi,
    eventName: 'BlockInvalidated',
    fromBlock: 1n,
    toBlock: 'latest',
  });

  // Some committee member should invalidate the previous block (proposers will not)
  logger.warn('Waiting for committee member to invalidate the previous block');

  // Wait (polling every 0.1s, up to 5 L2 slots) for the BlockInvalidated event
  const blockInvalidatedEvents = await retryUntil(
    async () => {
      const events = await l1Client.getFilterLogs({ filter: blockInvalidatedFilter });
      return events.length > 0 ? events : undefined;
    },
    'BlockInvalidated event',
    test.L2_SLOT_DURATION_IN_S * 5,
    0.1,
  );

  // Verify the BlockInvalidated event was emitted
  const [event] = blockInvalidatedEvents;
  logger.warn(`BlockInvalidated event emitted`, { event });
  expect(event.args.blockNumber).toBeGreaterThan(initialBlockNumber);

  // And check that the invalidation happened at least after the specified timeout,
  // i.e. the committee member respected secondsBeforeInvalidatingBlockAsCommitteeMember
  const { timestamp: invalidationTimestamp } = await l1Client.getBlock({ blockNumber: event.blockNumber });
  expect(invalidationTimestamp).toBeGreaterThanOrEqual(invalidBlockTimestamp! + BigInt(invalidationDelay));
});
});
8 changes: 7 additions & 1 deletion yarn-project/end-to-end/src/fixtures/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -313,6 +313,8 @@ export type SetupOptions = {
automineL1Setup?: boolean;
/** How many accounts to seed and unlock in anvil. */
anvilAccounts?: number;
/** Port to start anvil (defaults to 8545) */
anvilPort?: number;
} & Partial<AztecNodeConfig>;

/** Context for an end-to-end test as returned by the `setup` function */
Expand Down Expand Up @@ -404,7 +406,11 @@ export async function setup(
);
}

const res = await startAnvil({ l1BlockTime: opts.ethereumSlotDuration, accounts: opts.anvilAccounts });
const res = await startAnvil({
l1BlockTime: opts.ethereumSlotDuration,
accounts: opts.anvilAccounts,
port: opts.anvilPort,
});
anvil = res.anvil;
config.l1RpcUrls = [res.rpcUrl];
}
Expand Down
2 changes: 2 additions & 0 deletions yarn-project/foundation/src/config/env_var.ts
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,8 @@ export type EnvVar =
| 'SEQ_ENFORCE_TIME_TABLE'
| 'SEQ_MAX_L1_TX_INCLUSION_TIME_INTO_SLOT'
| 'SEQ_ATTESTATION_PROPAGATION_TIME'
| 'SEQ_SECONDS_BEFORE_INVALIDATING_BLOCK_AS_COMMITTEE_MEMBER'
| 'SEQ_SECONDS_BEFORE_INVALIDATING_BLOCK_AS_NON_COMMITTEE_MEMBER'
| 'SLASH_FACTORY_CONTRACT_ADDRESS'
| 'SLASH_PRUNE_ENABLED'
| 'SLASH_PRUNE_PENALTY'
Expand Down
Loading
Loading