Skip to content

Commit

Permalink
Verify packing in the verify tool
Browse files Browse the repository at this point in the history
  • Loading branch information
Lev Berman committed Jan 30, 2025
1 parent aef4225 commit ce24a16
Showing 1 changed file with 39 additions and 0 deletions.
39 changes: 39 additions & 0 deletions apps/arweave/src/ar_verify_chunks.erl
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,8 @@ verify_chunk({ok, _Key, Metadata}, Intervals, State) ->

State3 = verify_proof(Metadata, State2),

State4 = verify_packing(Metadata, State3),

%% NOTE(review): State4 is bound but never used — the function returns
%% State3, discarding verify_packing's result. Today verify_packing
%% returns its input State unchanged, so behavior is the same, but this
%% emits an "unused variable" compile warning and is a latent bug if
%% verify_packing ever starts updating the state. Presumably this should
%% be State4#state{ cursor = PaddedOffset } — confirm with the author.
State3#state{ cursor = PaddedOffset }.

verify_proof(Metadata, State) ->
Expand All @@ -148,6 +150,43 @@ verify_proof(Metadata, State) ->
read_data_path_error, AbsoluteOffset, ChunkSize, [{reason, Error}], State)
end.

%% @doc Verify that the packing recorded in the ar_data_sync sync record
%% for the chunk at AbsoluteOffset matches the packing this storage module
%% is configured with. Chunks whose padded offset / size combination is not
%% supported by ar_chunk_storage are expected to be stored unpacked instead.
%% On a mismatch or missing packing record, log a warning and invalidate
%% the bad data record so the chunk gets re-synced. Returns State unchanged.
verify_packing(Metadata, State) ->
	#state{ packing = Packing, store_id = StoreID } = State,
	%% Only the absolute offset and chunk size are used here; the remaining
	%% fields are underscored to avoid "unused variable" compiler warnings.
	{AbsoluteOffset, _ChunkDataKey, _TXRoot, _DataRoot, _TXPath,
			_TXRelativeOffset, ChunkSize} = Metadata,
	PaddedOffset = ar_block:get_chunk_padded_offset(AbsoluteOffset),
	StoredPackingCheck = ar_sync_record:is_recorded(AbsoluteOffset, ar_data_sync, StoreID),
	ExpectedPacking =
		case ar_chunk_storage:is_storage_supported(PaddedOffset, ChunkSize, Packing) of
			true ->
				Packing;
			false ->
				%% Chunks that cannot live in chunk storage are kept unpacked.
				unpacked
		end,
	case {StoredPackingCheck, ExpectedPacking} of
		{{true, ExpectedPacking}, _} ->
			%% Stored packing matches the expectation.
			ok;
		{{true, unpacked_padded}, {replica_2_9, _}} ->
			%% The module is in the process of entropy generation but has already
			%% synced some chunks.
			ok;
		%% Fix: the previous pattern {true, StoredPacking} only matched the
		%% bare atom 'true' (binding StoredPacking to ExpectedPacking), so an
		%% actual packing mismatch fell through to the catch-all clause and
		%% was misreported as missing_packing_info. Match the {true, Packing}
		%% reply shape like the clauses above.
		{{true, StoredPacking}, _} ->
			?LOG_WARNING([{event, verify_chunk_storage_unexpected_packing},
				{expected_packing, ar_storage_module:packing_label(Packing)},
				{stored_packing, ar_storage_module:packing_label(StoredPacking)},
				{offset, AbsoluteOffset}]),
			ar_data_sync:invalidate_bad_data_record(AbsoluteOffset, ChunkSize, StoreID,
				unexpected_packing);
		Reply ->
			%% No usable packing information recorded (e.g. 'false').
			?LOG_WARNING([{event, verify_chunk_storage_missing_packing_info},
				{expected_packing, ar_storage_module:packing_label(Packing)},
				{packing_reply, io_lib:format("~p", [Reply])},
				{offset, AbsoluteOffset}]),
			ar_data_sync:invalidate_bad_data_record(AbsoluteOffset, ChunkSize, StoreID,
				missing_packing_info)
	end,
	State.

verify_chunk_storage(AbsoluteOffset, PaddedOffset, ChunkSize, {End, Start}, State)
when PaddedOffset - ?DATA_CHUNK_SIZE >= Start andalso PaddedOffset =< End ->
#state{store_id = StoreID} = State,
Expand Down

0 comments on commit ce24a16

Please sign in to comment.