Merge pull request #15523 from MinaProtocol/dkijania/expose_precomputed_and_checkpoint_output_folders_dev

Port expose_precomputed_and_checkpoint_output_folders to develop
dkijania authored Apr 16, 2024
2 parents 10465f5 + bc16448 commit ba4c7f6
Showing 9 changed files with 103 additions and 174 deletions.
40 changes: 2 additions & 38 deletions buildkite/scripts/export-git-env-vars.sh
@@ -27,6 +27,7 @@ set -u
export MINA_DEB_CODENAME=${MINA_DEB_CODENAME:=bullseye}
[[ -n "$BUILDKITE_BRANCH" ]] && export GITBRANCH=$(echo "$BUILDKITE_BRANCH" | sed 's!/!-!g; s!_!-!g; s!#!-!g')

export RELEASE=unstable

if [ "${BUILDKITE_REPO}" != "${MINA_REPO}" ]; then
# Abort if `BUILDKITE_REPO` doesn't have the expected format
@@ -41,47 +42,10 @@ if [ "${BUILDKITE_REPO}" != "${MINA_REPO}" ]; then
# For example: for the given repo 'https://github.com/dkijania/mina.git' we convert it to 'dkijania-mina'
export GITTAG=1.0.0$(echo ${BUILDKITE_REPO} | sed -e 's/^.*github.com[:\/]\(.*\)\.git$/\1/' -e 's/\//-/')
export THIS_COMMIT_TAG=""
RELEASE=unstable

else
# GITTAG is the closest tagged commit to this commit, while THIS_COMMIT_TAG only has a value when the current commit is tagged
export GITTAG=$(find_most_recent_numeric_tag HEAD)

# Determine deb repo to use
case $GITBRANCH in
berkeley|rampup|compatible|master|release/*) # whitelist of branches that can be tagged
case "${THIS_COMMIT_TAG}" in
*alpha*) # any tag including the string `alpha`
RELEASE=alpha ;;
*beta*) # any tag including the string `beta`
RELEASE=beta ;;
*berkeley*) # any tag including the string `berkeley`
RELEASE=berkeley ;;
*rampup*) # any tag including the string `rampup`
RELEASE=rampup ;;
*devnet*)
RELEASE=devnet ;;
?*)
# if the tag is a version number sans any suffix, then it's a stable release
if grep -qP '^\d+\.\d+\.\d+$' <<< "${THIS_COMMIT_TAG}"; then
RELEASE=stable
else
RELEASE=unstable
fi ;;
"") # No tag
RELEASE="unstable" ;;
# real soon now:
# RELEASE="${GITHASH}" ;; # avoids deb-s3 concurrency issues between PRs
*) # The above set of cases should be exhaustive, if they're not then still set RELEASE=unstable
RELEASE=unstable
echo "git tag --points-at HEAD may have failed, falling back to unstable. Value: \"$(git tag --points-at HEAD)\""
;;
esac ;;
release-automation-testing/*) # whitelist of branches that can be tagged
RELEASE=prerelease ;;
*)
RELEASE=unstable ;;
esac
fi

if [[ -n "${THIS_COMMIT_TAG}" ]]; then # If the commit is tagged
@@ -100,6 +64,6 @@ case $GITBRANCH in
MINA_BUILD_MAINNET=false ;;
esac

echo "Publishing on release channel \"${RELEASE}\" based on branch \"${GITBRANCH}\" and tag \"${THIS_COMMIT_TAG}\""
echo "Publishing on release channel \"${RELEASE}\""
[[ -n ${THIS_COMMIT_TAG} ]] && export MINA_COMMIT_TAG="${THIS_COMMIT_TAG}"
export MINA_DEB_RELEASE="${RELEASE}"
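
As an aside, the GITBRANCH sanitization kept above rewrites '/', '_' and '#' to '-' so branch names are safe to embed in package and tag names. A quick sketch with a made-up branch name:

    # Hypothetical branch name; each of '/', '_' and '#' becomes '-'.
    $ echo "release/2.0.0_rc1#test" | sed 's!/!-!g; s!_!-!g; s!#!-!g'
    release-2.0.0-rc1-test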
2 changes: 1 addition & 1 deletion dockerfiles/scripts/daemon-entrypoint.sh
@@ -12,7 +12,7 @@ INPUT_ARGS="$@"
declare -a VERBOSE_LOG_FILES=('mina-stderr.log' '.mina-config/mina-prover.log' '.mina-config/mina-verifier.log')

# Attempt to execute or source custom entrypoint scripts accordingly
for script in /entrypoint.d/* /entrypoint.d/.*; do
for script in /entrypoint.d/*; do
if [[ "$( basename "${script}")" == *mina-env ]]; then
source "${script}"
elif [[ -f "${script}" ]] && [[ ! -x "${script}" ]]; then
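
A likely motivation for narrowing the glob: in bash, a ".*" pattern also expands to the "." and ".." directory entries, so the old loop iterated over entries that are not scripts at all. A minimal demonstration against a throwaway directory (paths here are hypothetical):

    $ mkdir -p /tmp/entrypoint.d && touch /tmp/entrypoint.d/10-custom.sh
    $ for script in /tmp/entrypoint.d/* /tmp/entrypoint.d/.*; do echo "$script"; done
    /tmp/entrypoint.d/10-custom.sh
    /tmp/entrypoint.d/.
    /tmp/entrypoint.d/..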
21 changes: 16 additions & 5 deletions scripts/archive/migration/mina-berkeley-migration-script
@@ -20,6 +20,7 @@ declare -r CLI_NAME="$0";
declare -r PS4='debug($LINENO) ${FUNCNAME[0]:+${FUNCNAME[0]}}(): ';

CHECKPOINT_PREFIX=migration
CHECKPOINT_INTERVAL=1000

################################################################################
# functions
@@ -87,6 +88,7 @@ function initial_help(){
printf " %-25s %s\n" "-n | --network" "[string] network name when determining precomputed blocks. NOTICE: there is an assumption that precomputed blocks are named with format: {network}-{height}-{state_hash}.json";
printf " %-25s %s\n" "-d | --delete-blocks" "[flag] delete blocks after they are processed (saves space with -sb)"
printf " %-25s %s\n" "-p | --prefetch-blocks" "[flag] downloads all blocks at once instead of incrementally"
printf " %-25s %s\n" "-i | --checkpoint-interval" "[int] replayer checkpoint interval. Default: 1000"
printf " %-25s %s\n" "-c | --checkpoint-output-path" "[file] output folder for replayer checkpoints"
printf " %-25s %s\n" "-l | --precomputed-blocks-local-path" "[file] on-disk precomputed blocks location"
echo ""
@@ -119,7 +121,8 @@ function initial(){
local __network=''
local __checkpoint_output_path='.'
local __precomputed_blocks_local_path='.'

local __checkpoint_interval=$CHECKPOINT_INTERVAL

while [ ${#} -gt 0 ]; do
error_message="Error: a value is needed for '$1'";
case $1 in
@@ -162,6 +165,10 @@ function initial(){
__checkpoint_output_path=${2:?$error_message}
shift 2;
;;
-i | --checkpoint-interval )
__checkpoint_interval=${2:?$error_message}
shift 2;
;;
-l | --precomputed-blocks-local-path )
__precomputed_blocks_local_path=${2:?$error_message}
shift 2;
@@ -217,7 +224,8 @@ function initial(){
"$__stream_blocks" \
"$__network" \
"$__checkpoint_output_path" \
"$__precomputed_blocks_local_path"
"$__precomputed_blocks_local_path" \
"$__checkpoint_interval"
}

function check_log_for_error() {
@@ -263,6 +271,7 @@ function run_initial_migration() {
local __network=$8
local __checkpoint_output_path=$9
local __precomputed_blocks_local_path=${10}
local __checkpoint_interval=${11}

local __date=$(date '+%Y-%m-%d_%H_%M_%S')
local __berkely_migration_log="berkeley_migration_$__date.log"
@@ -295,7 +304,7 @@ function run_initial_migration() {
--migration-mode \
--archive-uri "$__migrated_archive_uri" \
--input-file "$__config_file" \
--checkpoint-interval 1000 \
--checkpoint-interval "$__checkpoint_interval" \
--checkpoint-file-prefix "$CHECKPOINT_PREFIX" \
--checkpoint-output-folder "$__checkpoint_output_path" \
--log-file "$__replayer_log"
@@ -331,6 +340,7 @@ function incremental_help(){
printf " %-25s %s\n" "-d | --delete-blocks" "delete blocks after they are processed (saves space with -sb)"
printf " %-25s %s\n" "-p | --prefetch-blocks" "downloads all blocks at once instead of incrementally"
printf " %-25s %s\n" "-c | --checkpoint-output-path" "[file] output folder for replayer checkpoints"
printf " %-25s %s\n" "-i | --checkpoint-interval" "[int] replayer checkpoint interval. Default: 1000"
printf " %-25s %s\n" "-l | --precomputed-blocks-local-path" "[file] on-disk precomputed blocks location"
echo ""
echo "Example:"
@@ -388,7 +398,7 @@ function incremental(){
local __keep_precomputed_blocks=true
local __stream_blocks=true
local __network=''
local __checkpoint_interval=1000
local __checkpoint_interval=$CHECKPOINT_INTERVAL
local __checkpoint_output_path='.'
local __precomputed_blocks_local_path='.'

@@ -584,6 +594,7 @@ function final_help(){
printf " %-25s %s\n" "-d | --delete-blocks" "delete blocks after they are processed (saves space with -sb)"
printf " %-25s %s\n" "-p | --prefetch-blocks" "downloads all blocks at once instead of incrementally"
printf " %-25s %s\n" "-c | --checkpoint-output-path" "[file] output folder for replayer checkpoints"
printf " %-25s %s\n" "-i | --checkpoint-interval" "[int] replayer checkpoint interval. Default: 1000"
printf " %-25s %s\n" "-l | --precomputed-blocks-local-path" "[file] on-disk precomputed blocks location"
echo ""
echo "Example:"
@@ -612,7 +623,7 @@ function final(){
local __keep_precomputed_blocks=true
local __stream_blocks=true
local __network=''
local __checkpoint_interval=1000
local __checkpoint_interval=$CHECKPOINT_INTERVAL
local __fork_genesis_config=''
local __checkpoint_output_path='.'
local __precomputed_blocks_local_path='.'
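
The help text above documents the new -i | --checkpoint-interval flag for all three phases (initial, incremental, final). A hypothetical invocation sketch, assuming the script dispatches on the phase name as the function names suggest; the values and paths are illustrative and the remaining required arguments (archive URIs, etc.) are elided:

    # Hypothetical incremental run with the newly exposed knobs.
    mina-berkeley-migration-script incremental \
      --network mainnet \
      --checkpoint-interval 500 \
      --checkpoint-output-path /data/checkpoints \
      --precomputed-blocks-local-path /data/precomputed-blocks \
      ...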
11 changes: 0 additions & 11 deletions scripts/release-docker.sh
@@ -175,15 +175,4 @@ if [[ -z "$NOUPLOAD" ]] || [[ "$NOUPLOAD" -eq 0 ]]; then
docker tag "${TAG}" "${HASHTAG}"
docker push "${HASHTAG}"

echo "Release Env Var: ${DEB_RELEASE}"
echo "Release: ${DEB_RELEASE##*=}"

if [[ "${DEB_RELEASE##*=}" = "unstable" ]]; then
echo "Release is unstable: not pushing to docker hub"
else
echo "Release is public (alpha, beta, berkeley, or stable): pushing image to docker hub"
# tag and push to dockerhub
docker tag "${TAG}" "minaprotocol/${SERVICE}:${VERSION}"
docker push "minaprotocol/${SERVICE}:${VERSION}"
fi
fi
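
For context on the removed gate: "${DEB_RELEASE##*=}" is standard bash parameter expansion that deletes the longest prefix matching "*=", leaving only the text after the last '=' sign. A sketch with a hypothetical value:

    # Hypothetical value; the expansion keeps only the channel name.
    $ DEB_RELEASE="deb_release=unstable"
    $ echo "${DEB_RELEASE##*=}"
    unstable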
115 changes: 51 additions & 64 deletions src/app/berkeley_migration/berkeley_migration.ml
@@ -507,70 +507,54 @@ let main ~mainnet_archive_uri ~migrated_archive_uri ~runtime_config_file
[%log info] "Prefetching all required precomputed blocks" ;
fetch_precomputed_blocks_for mainnet_blocks_to_migrate )
in
(* 3 * because we want to report download, migration, and reupload separately *)
let total_reports_expected = 3 * List.length mainnet_blocks_to_migrate in
[%log info] "Migrating mainnet blocks" ;
let%bind () =
Progress.with_reporter
Progress.Line.(
list
[ const "Migrating blocks"
; spinner ()
; bar total_reports_expected
; eta total_reports_expected
; percentage_of total_reports_expected
])
(fun f ->
List.chunks_of ~length:batch_size mainnet_blocks_to_migrate
|> Deferred.List.iter ~f:(fun (blocks : Sql.Mainnet.Block.t list) ->
[%log debug]
"Migrating %d blocks starting at height %Ld (%s..%s)"
(List.length blocks) (List.hd_exn blocks).height
(List.hd_exn blocks).state_hash
(List.last_exn blocks).state_hash ;
let%bind precomputed_blocks =
if stream_precomputed_blocks then (
[%log debug] "Fetching batch of precomputed blocks" ;
fetch_precomputed_blocks_for blocks )
else return prefetched_precomputed_blocks
in
f (Map.length precomputed_blocks) ;
[%log debug] "Converting blocks to extensional format..." ;
let%bind extensional_blocks =
mainnet_block_to_extensional_batch ~logger ~mainnet_pool
~genesis_block ~precomputed_blocks blocks
in
f (List.length extensional_blocks) ;
[%log debug] "Adding blocks to migrated database..." ;
let%bind () =
query_migrated_db ~f:(fun db ->
match%map
Archive_lib.Processor.Block
.add_from_extensional_batch db extensional_blocks
~v1_transaction_hash:true
with
| Ok _id ->
f (List.length extensional_blocks) ;
Ok ()
| Error (`Congested _) ->
failwith
"Could not archive extensional block batch: \
congested"
| Error (`Decode_rejected _ as err)
| Error (`Encode_failed _ as err)
| Error (`Encode_rejected _ as err)
| Error (`Request_failed _ as err)
| Error (`Request_rejected _ as err)
| Error (`Response_failed _ as err)
| Error (`Response_rejected _ as err) ->
failwithf
"Could not archive extensional block batch: %s"
(Caqti_error.show err) () )
in
if stream_precomputed_blocks && not keep_precomputed_blocks
then
Precomputed_block.delete_fetched_concrete ~network
(required_precomputed_blocks blocks)
else return () ) )
List.chunks_of ~length:batch_size mainnet_blocks_to_migrate
|> Deferred.List.iter ~f:(fun (blocks : Sql.Mainnet.Block.t list) ->
[%log info] "Migrating %d blocks starting at height %Ld (%s..%s)"
(List.length blocks) (List.hd_exn blocks).height
(List.hd_exn blocks).state_hash
(List.last_exn blocks).state_hash ;
let%bind precomputed_blocks =
if stream_precomputed_blocks then (
[%log info] "Fetching batch of precomputed blocks" ;
fetch_precomputed_blocks_for blocks )
else return prefetched_precomputed_blocks
in
[%log info] "Converting blocks to extensional format..." ;
let%bind extensional_blocks =
mainnet_block_to_extensional_batch ~logger ~mainnet_pool
~genesis_block ~precomputed_blocks blocks
in
[%log info] "Adding blocks to migrated database..." ;
let%bind () =
query_migrated_db ~f:(fun db ->
match%map
Archive_lib.Processor.Block.add_from_extensional_batch db
extensional_blocks ~v1_transaction_hash:true
with
| Ok _id ->
Ok ()
| Error (`Congested _) ->
failwith
"Could not archive extensional block batch: \
congested"
| Error (`Decode_rejected _ as err)
| Error (`Encode_failed _ as err)
| Error (`Encode_rejected _ as err)
| Error (`Request_failed _ as err)
| Error (`Request_rejected _ as err)
| Error (`Response_failed _ as err)
| Error (`Response_rejected _ as err) ->
failwithf
"Could not archive extensional block batch: %s"
(Caqti_error.show err) () )
in
if stream_precomputed_blocks && not keep_precomputed_blocks then
Precomputed_block.delete_fetched_concrete ~network
~local_path:precomputed_blocks_local_path
(required_precomputed_blocks blocks)
else return () )
in
let%bind () =
(* this will still run even if we are downloading precomputed blocks in batches, to handle any leftover blocks from prior runs *)
@@ -643,12 +627,15 @@ let () =
and precomputed_blocks_local_path =
Param.flag "--precomputed-blocks-local-path"
~aliases:[ "-precomputed-blocks-local-path" ]
Param.(required string)
Param.(optional string)
~doc:"PATH the precomputed blocks on-disk location"
and log_json = Cli_lib.Flag.Log.json
and log_level = Cli_lib.Flag.Log.level
and file_log_level = Cli_lib.Flag.Log.file_log_level
and log_filename = Cli_lib.Flag.Log.file in
let precomputed_blocks_local_path =
Option.value precomputed_blocks_local_path ~default:"."
in
main ~mainnet_archive_uri ~migrated_archive_uri ~runtime_config_file
~fork_state_hash ~mina_network_blocks_bucket ~batch_size ~network
~stream_precomputed_blocks ~keep_precomputed_blocks ~log_json
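
Net effect of the last hunk: --precomputed-blocks-local-path changes from a required flag to an optional one that falls back to the current directory. A hypothetical command line (the binary name and elided flags are illustrative only):

    # Explicit on-disk location for precomputed blocks.
    berkeley_migration ... --precomputed-blocks-local-path /data/precomputed-blocks
    # Flag omitted: the tool now defaults to ".".
    berkeley_migration ...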