@@ -36,6 +36,10 @@ defmodule AlignedProofAggregationService do
   end
 
   def get_aggregated_proof_event(%{from_block: fromBlock, to_block: toBlock}) do
+    Logger.debug(
+      "[Aggregated Proofs] Querying contract #{@contract_address} for events from block #{fromBlock} to #{toBlock}"
+    )
+
     events =
       AlignedProofAggregationService.EventFilters.aggregated_proof_verified(nil)
       |> Ethers.get_logs(fromBlock: fromBlock, toBlock: toBlock)
@@ -45,6 +49,8 @@ defmodule AlignedProofAggregationService do
         {:ok, []}
 
       {:ok, list} ->
+        Logger.debug("[Aggregated Proofs] Raw events received: #{length(list)}")
+
         {:ok,
          Enum.map(list, fn x ->
            data = x |> Map.get(:data)
@@ -64,6 +70,10 @@ defmodule AlignedProofAggregationService do
         end)}
 
       {:error, reason} ->
+        Logger.error(
+          "[Aggregated Proofs] RPC error querying events from contract #{@contract_address}: #{inspect(reason)}"
+        )
+
         {:error, reason}
     end
   end
@@ -78,11 +88,20 @@ defmodule AlignedProofAggregationService do
     # Note: first two characters are the 0x
     function_signature = String.slice(input, 0..9)
 
-    case function_signature do
-      @verifyRisc0_solidity_signature -> :risc0
-      @verifySp1_solidity_signature -> :sp1
-      _ -> nil
+    aggregator =
+      case function_signature do
+        @verifyRisc0_solidity_signature -> :risc0
+        @verifySp1_solidity_signature -> :sp1
+        _ -> nil
+      end
+
+    if is_nil(aggregator) do
+      Logger.warning(
+        "[Aggregated Proofs] Unknown aggregator function signature: #{function_signature} for tx_hash: #{tx_hash}"
+      )
     end
+
+    aggregator
   end
 
   def get_block_timestamp(block_number) do
@@ -93,25 +112,47 @@ defmodule AlignedProofAggregationService do
   end
 
   def get_blob_data!(aggregated_proof) do
+    Logger.debug(
+      "[Aggregated Proofs] Fetching blob data for block #{aggregated_proof.block_number}, versioned_hash: #{aggregated_proof.blob_versioned_hash}"
+    )
+
     {:ok, block} =
       Explorer.EthClient.get_block_by_number(
         Explorer.Utils.decimal_to_hex(aggregated_proof.block_number)
       )
 
     parent_beacon_block_hash = Map.get(block, "parentBeaconBlockRoot")
 
+    Logger.debug(
+      "[Aggregated Proofs] Got parent beacon block hash: #{parent_beacon_block_hash}"
+    )
+
     {:ok, beacon_block} =
       Explorer.BeaconClient.get_block_header_by_parent_hash(parent_beacon_block_hash)
 
     slot = Explorer.BeaconClient.get_block_slot(beacon_block)
 
+    Logger.debug("[Aggregated Proofs] Fetching blob from beacon slot: #{slot}")
+
     data =
       Explorer.BeaconClient.fetch_blob_by_versioned_hash!(
         slot,
         aggregated_proof.blob_versioned_hash
       )
 
-    Map.get(data, "blob")
+    blob = Map.get(data, "blob")
+
+    if is_nil(blob) do
+      Logger.error(
+        "[Aggregated Proofs] No blob data returned for versioned_hash: #{aggregated_proof.blob_versioned_hash} at slot #{slot}"
+      )
+    else
+      Logger.debug(
+        "[Aggregated Proofs] Successfully fetched blob data (#{String.length(blob)} chars)"
+      )
+    end
+
+    blob
   end
 
   @doc """
143 changes: 100 additions & 43 deletions explorer/lib/explorer/periodically.ex
Original file line number Diff line number Diff line change
Expand Up @@ -88,59 +88,116 @@ defmodule Explorer.Periodically do
   end
 
   def process_aggregated_proofs(from_block, to_block) do
-    "Processing aggregated proofs from #{from_block} to #{to_block}" |> Logger.debug()
-
-    {:ok, proofs} =
-      AlignedProofAggregationService.get_aggregated_proof_event(%{
-        from_block: from_block,
-        to_block: to_block
-      })
-
-    blob_data =
-      proofs
-      |> Enum.map(&AlignedProofAggregationService.get_blob_data!/1)
-
-    proof_hashes =
-      blob_data
-      |> Enum.map(fn x ->
-        AlignedProofAggregationService.decode_blob(
-          to_charlist(String.replace_prefix(x, "0x", ""))
+    Logger.info("[Aggregated Proofs] Starting fetch from block #{from_block} to #{to_block}")
+
+    case AlignedProofAggregationService.get_aggregated_proof_event(%{
+           from_block: from_block,
+           to_block: to_block
+         }) do
+      {:ok, []} ->
+        Logger.info("[Aggregated Proofs] No events found in block range #{from_block}-#{to_block}")
+
+      {:ok, proofs} ->
+        Logger.info(
+          "[Aggregated Proofs] Found #{length(proofs)} events in block range #{from_block}-#{to_block}"
         )
-      end)
 
-    # Store aggregated proofs to db
-    proofs =
-      proofs
-      |> Enum.zip(proof_hashes)
-      |> Enum.map(fn {agg_proof, hashes} ->
-        aggregator = AlignedProofAggregationService.get_aggregator!(agg_proof)
+        process_aggregated_proof_events(proofs)
 
+      {:error, reason} ->
+        Logger.error(
+          "[Aggregated Proofs] Failed to fetch events from block #{from_block} to #{to_block}: #{inspect(reason)}"
+        )
+    end
+  end
+
+  defp process_aggregated_proof_events(proofs) do
+    proofs
+    |> Enum.each(fn proof ->
+      Logger.info(
+        "[Aggregated Proofs] Processing proof at block #{proof.block_number}, merkle_root: #{proof.merkle_root}"
+      )
+
+      try do
+        # Fetch blob data
+        Logger.debug(
+          "[Aggregated Proofs] Fetching blob data for versioned_hash: #{proof.blob_versioned_hash}"
+        )
+
+        blob_data = AlignedProofAggregationService.get_blob_data!(proof)
+
+        # Decode blob to get proof hashes
+        proof_hashes =
+          AlignedProofAggregationService.decode_blob(
+            to_charlist(String.replace_prefix(blob_data, "0x", ""))
+          )
+
+        Logger.info(
+          "[Aggregated Proofs] Decoded #{length(proof_hashes)} proof hashes from blob"
+        )
+
+        # Get aggregator type
+        aggregator = AlignedProofAggregationService.get_aggregator!(proof)
+
+        Logger.debug(
+          "[Aggregated Proofs] Aggregator type: #{inspect(aggregator)} for merkle_root: #{proof.merkle_root}"
+        )
+
+        # Store aggregated proof to db
         agg_proof =
-          agg_proof
+          proof
           |> Map.merge(%{aggregator: aggregator})
-          |> Map.merge(%{number_of_proofs: length(hashes)})
+          |> Map.merge(%{number_of_proofs: length(proof_hashes)})
 
-        {:ok, %{id: id}} = AggregatedProofs.insert_or_update(agg_proof)
+        case AggregatedProofs.insert_or_update(agg_proof) do
+          {:ok, %{id: id}} ->
+            Logger.info(
+              "[Aggregated Proofs] Stored aggregated proof id=#{id}, merkle_root: #{proof.merkle_root}, proofs_count: #{length(proof_hashes)}"
            )
 
-        Map.merge(agg_proof, %{id: id})
-      end)
+            # Store each individual proof hash
+            store_individual_proofs(id, proof_hashes, proof.merkle_root)
 
-    # Store each individual proof
-    proofs
-    |> Enum.zip(proof_hashes)
-    |> Enum.each(fn {agg_proof, hashes} ->
-      hashes
-      |> Enum.with_index()
-      |> Enum.each(fn {hash, index} ->
-        AggregationModeProof.insert_or_update(%{
-          agg_proof_id: agg_proof.id,
-          proof_hash: "0x" <> List.to_string(hash),
-          index: index
-        })
-      end)
+          {:error, reason} ->
+            Logger.error(
+              "[Aggregated Proofs] Failed to store aggregated proof merkle_root: #{proof.merkle_root}: #{inspect(reason)}"
+            )
+        end
+      rescue
+        error ->
+          Logger.error(
+            "[Aggregated Proofs] Error processing proof at block #{proof.block_number}, merkle_root: #{proof.merkle_root}: #{Exception.message(error)}"
+          )
+
+          Logger.debug(
+            "[Aggregated Proofs] Stacktrace: #{Exception.format_stacktrace(__STACKTRACE__)}"
+          )
+      end
     end)
 
-    "Done processing aggregated proofs from #{from_block} to #{to_block}" |> Logger.debug()
+    Logger.info("[Aggregated Proofs] Finished processing #{length(proofs)} events")
   end
 
+  defp store_individual_proofs(agg_proof_id, proof_hashes, merkle_root) do
+    proof_hashes
+    |> Enum.with_index()
+    |> Enum.each(fn {hash, index} ->
+      proof_hash = "0x" <> List.to_string(hash)
+
+      case AggregationModeProof.insert_or_update(%{
+             agg_proof_id: agg_proof_id,
+             proof_hash: proof_hash,
+             index: index
+           }) do
+        {:ok, _} ->
+          :ok
+
+        {:error, reason} ->
+          Logger.error(
+            "[Aggregated Proofs] Failed to store individual proof hash #{proof_hash} for merkle_root: #{merkle_root}: #{inspect(reason)}"
+          )
+      end
+    end)
+  end
+
   def process_batches(fromBlock, toBlock) do
22 changes: 5 additions & 17 deletions explorer/lib/explorer_web/components/contracts.ex
Original file line number Diff line number Diff line change
Expand Up @@ -8,25 +8,13 @@ defmodule ContractsComponent do
   def mount(socket) do
     addresses = Helpers.get_aligned_contracts_addresses()
 
-    proof_aggregator_service =
-      case Helpers.is_mainnet() do
-        true ->
-          []
-
-        false ->
-          [
-            %{
-              contract_name: "AlignedProofAggregationService",
-              address: addresses["alignedProofAggregationService"]
-            }
-          ]
-      end
-
     {:ok,
      assign(socket,
-       contracts:
-         proof_aggregator_service ++
-           [
+       contracts: [
+         %{
+           contract_name: "AlignedProofAggregationService",
+           address: addresses["alignedProofAggregationService"]
+         },
          %{
            contract_name: "AlignedServiceManager",
            address: addresses["alignedLayerServiceManager"]
54 changes: 25 additions & 29 deletions explorer/lib/explorer_web/components/nav.ex
Original file line number Diff line number Diff line change
Expand Up @@ -61,19 +61,17 @@ defmodule NavComponent do
         >
           Batches
         </.link>
-        <%= if !ExplorerWeb.Helpers.is_mainnet() do %>
-          <.link
-            class={
-              active_view_class(@socket.view, [
-                ExplorerWeb.AggProofs.Index,
-                ExplorerWeb.AggProof.Index
-              ])
-            }
-            navigate={~p"/aggregated_proofs"}
-          >
-            Aggregation
-          </.link>
-        <% end %>
+        <.link
+          class={
+            active_view_class(@socket.view, [
+              ExplorerWeb.AggProofs.Index,
+              ExplorerWeb.AggProof.Index
+            ])
+          }
+          navigate={~p"/aggregated_proofs"}
+        >
+          Aggregation
+        </.link>
         <.nav_links_dropdown
           title="Restaking"
           class={
@@ -158,22 +156,20 @@
         >
           Batches
         </.link>
-        <%= if !ExplorerWeb.Helpers.is_mainnet() do %>
-          <.link
-            class={
-              classes([
-                active_view_class(assigns.socket.view, [
-                  ExplorerWeb.AggregatedProofs.Index,
-                  ExplorerWeb.AggregatedProof.Index
-                ]),
-                "text-foreground/80 hover:text-foreground font-semibold"
-              ])
-            }
-            navigate={~p"/aggregated_proofs"}
-          >
-            Aggregation
-          </.link>
-        <% end %>
+        <.link
+          class={
+            classes([
+              active_view_class(assigns.socket.view, [
+                ExplorerWeb.AggregatedProofs.Index,
+                ExplorerWeb.AggregatedProof.Index
+              ]),
+              "text-foreground/80 hover:text-foreground font-semibold"
+            ])
+          }
+          navigate={~p"/aggregated_proofs"}
+        >
+          Aggregation
+        </.link>
         <.link
           class="hover:text-foreground"
           target="_blank"