Skip to content

Commit

Permalink
Merge branch 'zkevm/v0.5.3' into v0.2.1-split-basezk0.4.4
Browse files Browse the repository at this point in the history
  • Loading branch information
zjg555543 committed Feb 4, 2024
2 parents 0060ae5 + 291ac85 commit 88e95ce
Show file tree
Hide file tree
Showing 465 changed files with 88,659 additions and 26,581 deletions.
4 changes: 2 additions & 2 deletions .github/ISSUE_TEMPLATE/bug.md
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
---
name: Report a bug
about: Something with X1 is not working as expected
about: Something with Polygon zkEVM is not working as expected
title: ''
labels: 'type:bug'
assignees: ''
---

#### System information

X1 Node version: `v0.0.X-RCXX`
zkEVM Node version: `v0.0.X-RCXX`
OS & Version: `Windows/Linux/OSX`
Commit hash : (if `develop`)
Network: `Mainnet/Testnet`
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/codeql.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/jsonschema.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3
# https://github.com/actions/checkout#Checkout-pull-request-HEAD-commit-instead-of-merge-commit
# Checkout pull request HEAD commit instead of merge commit
with:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/lint.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
with:
go-version: 1.21.x
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3
- name: Lint
run: |
make install-linter
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/push-docker-develop.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3

- name: Set up QEMU
uses: docker/setup-qemu-action@v3
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/push-docker-tagged.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3

- name: Set up QEMU
uses: docker/setup-qemu-action@v3
Expand Down
18 changes: 14 additions & 4 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0

Expand All @@ -33,8 +33,18 @@ jobs:
uses: olegtarasov/[email protected]
id: tagName

- name: Put testnet and mainnet artifacts into a single zip
- name: Put cardona, testnet and mainnet artifacts into a single zip
run: |
# CARDONA
mkdir -p cardona/config/environments/cardona
mkdir -p cardona/db/scripts
cp config/environments/cardona/* cardona/config/environments/cardona
cp docker-compose.yml cardona
sed -i 's/\/config\/environments\/${ZKEVM_NETWORK}/\/config\/environments\/cardona/g' cardona/docker-compose.yml
cp db/scripts/init_prover_db.sql cardona/db/scripts
mv cardona/config/environments/cardona/example.env cardona
sed -i -e "s/image: zkevm-node/image: hermeznetwork\/zkevm-node:$GIT_TAG_NAME/g" cardona/docker-compose.yml
zip -r cardona.zip cardona
# TESTNET
mkdir -p testnet/config/environments/testnet
mkdir -p testnet/db/scripts
Expand All @@ -56,10 +66,10 @@ jobs:
sed -i -e "s/image: zkevm-node/image: hermeznetwork\/zkevm-node:$GIT_TAG_NAME/g" mainnet/docker-compose.yml
zip -r mainnet.zip mainnet
- name: Publish testnet and mainnet zip into release
- name: Publish cardona, testnet and mainnet zip into release
uses: AButler/[email protected]
with:
files: 'testnet.zip;mainnet.zip'
files: 'cardona.zip;testnet.zip;mainnet.zip'
repo-token: ${{ secrets.TOKEN_RELEASE }}
release-tag: ${{ steps.tagName.outputs.tag }}

2 changes: 1 addition & 1 deletion .github/workflows/sonarqube.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ jobs:
sonarqube:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3
with:
# Disabling shallow clone is recommended for improving relevancy of reporting.
fetch-depth: 0
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/test-e2e.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744

- name: Install Go
uses: actions/setup-go@v3
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/test-from-prover.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ jobs:

steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3
with:
repository: okx/x1-node

Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/test-full-non-e2e.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744

- name: Install Go
uses: actions/setup-go@v3
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/updatedeps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744

- name: Install Go
uses: actions/setup-go@v3
Expand Down
1 change: 1 addition & 0 deletions .golangci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ linters:
- gofmt
- goimports
- revive
- unconvert

linters-settings:
revive:
Expand Down
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ Note that non of this is a hard rule, but suggestions / guidelines. Although eve
- Offer clarification, explain the decisions you made to reach a solution in question.
- Try to respond to every comment.
- If there is growing confusion or debate, ask yourself if the written word is still the best form of communication. Talk (virtually) face-to-face, then mutually consider posting a follow-up to summarize any offline discussion (useful for others who may be following along, now or later).
- If concensus is still not reached, involve someone else in the discussion. As a last resource the lead of the project could take the decision
- If consensus is still not reached, involve someone else in the discussion. As a last resource the lead of the project could take the decision

## Links and credits

Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ RUN cd /src/db && packr2
RUN cd /src && make build

# CONTAINER FOR RUNNING BINARY
FROM alpine:3.18.0
FROM alpine:3.18.4
COPY --from=build /src/dist/x1-node /app/x1-node
COPY --from=build /src/config/environments/testnet/node.config.toml /app/example.config.toml
RUN apk update && apk add postgresql15-client
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ venv: $(VENV_PYTHON)
.PHONY: install-config-doc-gen
$(GENERATE_SCHEMA_DOC): $(VENV_PYTHON)
$(PYTHON) -m pip install --upgrade pip
$(PYTHON) -m pip install json-schema-for-humans
$(PYTHON) -m pip install json-schema-for-humans==0.47

.PHONY: config-doc-gen
config-doc-gen: config-doc-node config-doc-custom_network ## Generate config file's json-schema for node and custom_network and documentation
Expand Down
134 changes: 121 additions & 13 deletions aggregator/aggregator.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import (
"github.com/0xPolygonHermez/zkevm-node/encoding"
ethmanTypes "github.com/0xPolygonHermez/zkevm-node/etherman/types"
"github.com/0xPolygonHermez/zkevm-node/ethtxmanager"
"github.com/0xPolygonHermez/zkevm-node/l1infotree"
"github.com/0xPolygonHermez/zkevm-node/log"
"github.com/0xPolygonHermez/zkevm-node/state"
"github.com/ethereum/go-ethereum/common"
Expand Down Expand Up @@ -268,7 +269,7 @@ func (a *Aggregator) sendFinalProof() {

// add batch verification to be monitored
sender := common.HexToAddress(a.cfg.SenderAddress)
to, data, err := a.Ethman.BuildTrustedVerifyBatchesTxData(proof.BatchNumber-1, proof.BatchNumberFinal, &inputs)
to, data, err := a.Ethman.BuildTrustedVerifyBatchesTxData(proof.BatchNumber-1, proof.BatchNumberFinal, &inputs, sender)
if err != nil {
log.Errorf("Error estimating batch verification to add to eth tx manager: %v", err)
a.handleFailureToAddVerifyBatchToBeMonitored(ctx, proof)
Expand Down Expand Up @@ -766,15 +767,15 @@ func (a *Aggregator) getAndLockBatchToProve(ctx context.Context, prover proverIn

log.Info("Checking profitability to aggregate batch")

// pass matic collateral as zero here, bcs in smart contract fee for aggregator is not defined yet
// pass pol collateral as zero here, bcs in smart contract fee for aggregator is not defined yet
isProfitable, err := a.ProfitabilityChecker.IsProfitable(ctx, big.NewInt(0))
if err != nil {
log.Errorf("Failed to check aggregator profitability, err: %v", err)
return nil, nil, err
}

if !isProfitable {
log.Infof("Batch is not profitable, matic collateral %d", big.NewInt(0))
log.Infof("Batch is not profitable, pol collateral %d", big.NewInt(0))
return nil, nil, err
}

Expand Down Expand Up @@ -977,26 +978,133 @@ func (a *Aggregator) buildInputProver(ctx context.Context, batchToVerify *state.
return nil, fmt.Errorf("failed to get previous batch, err: %v", err)
}

isForcedBatch := false
batchRawData := &state.BatchRawV2{}
if batchToVerify.BatchNumber == 1 || batchToVerify.ForcedBatchNum != nil || batchToVerify.BatchNumber == a.cfg.UpgradeEtrogBatchNumber {
isForcedBatch = true
} else {
batchRawData, err = state.DecodeBatchV2(batchToVerify.BatchL2Data)
if err != nil {
log.Errorf("Failed to decode batch data, err: %v", err)
return nil, err
}
}

l1InfoTreeData := map[uint32]*prover.L1Data{}
vb, err := a.State.GetVirtualBatch(ctx, batchToVerify.BatchNumber, nil)
if err != nil {
log.Errorf("Failed getting virtualBatch %d, err: %v", batchToVerify.BatchNumber, err)
return nil, err
}
l1InfoRoot := vb.L1InfoRoot
forcedBlockhashL1 := common.Hash{}

if !isForcedBatch {
tree, err := l1infotree.NewL1InfoTree(32, [][32]byte{}) // nolint:gomnd
if err != nil {
return nil, err
}
leaves, err := a.State.GetLeafsByL1InfoRoot(ctx, *l1InfoRoot, nil)
if err != nil {
return nil, err
}

aLeaves := make([][32]byte, len(leaves))
for i, leaf := range leaves {
aLeaves[i] = l1infotree.HashLeafData(leaf.GlobalExitRoot.GlobalExitRoot, leaf.PreviousBlockHash, uint64(leaf.Timestamp.Unix()))
}

for _, l2blockRaw := range batchRawData.Blocks {
_, contained := l1InfoTreeData[l2blockRaw.IndexL1InfoTree]
if !contained && l2blockRaw.IndexL1InfoTree != 0 {
l1InfoTreeExitRootStorageEntry, err := a.State.GetL1InfoRootLeafByIndex(ctx, l2blockRaw.IndexL1InfoTree, nil)
if err != nil {
return nil, err
}

// Calculate smt proof
smtProof, calculatedL1InfoRoot, err := tree.ComputeMerkleProof(l2blockRaw.IndexL1InfoTree, aLeaves)
if err != nil {
return nil, err
}
if l1InfoRoot != nil && *l1InfoRoot != calculatedL1InfoRoot {
for i, l := range aLeaves {
log.Info("AllLeaves[%d]: %s", i, common.Bytes2Hex(l[:]))
}
for i, s := range smtProof {
log.Info("smtProof[%d]: %s", i, common.Bytes2Hex(s[:]))
}
return nil, fmt.Errorf("error: l1InfoRoot mismatch. L1InfoRoot: %s, calculatedL1InfoRoot: %s. l1InfoTreeIndex: %d", l1InfoRoot.String(), calculatedL1InfoRoot.String(), l2blockRaw.IndexL1InfoTree)
}

protoProof := make([][]byte, len(smtProof))
for i, proof := range smtProof {
tmpProof := proof
protoProof[i] = tmpProof[:]
}

l1InfoTreeData[l2blockRaw.IndexL1InfoTree] = &prover.L1Data{
GlobalExitRoot: l1InfoTreeExitRootStorageEntry.L1InfoTreeLeaf.GlobalExitRoot.GlobalExitRoot.Bytes(),
BlockhashL1: l1InfoTreeExitRootStorageEntry.L1InfoTreeLeaf.PreviousBlockHash.Bytes(),
MinTimestamp: uint32(l1InfoTreeExitRootStorageEntry.L1InfoTreeLeaf.GlobalExitRoot.Timestamp.Unix()),
SmtProof: protoProof,
}
}
}
} else {
// Initial batch must be handled differently
if batchToVerify.BatchNumber == 1 || batchToVerify.BatchNumber == a.cfg.UpgradeEtrogBatchNumber {
forcedBlockhashL1, err = a.State.GetVirtualBatchParentHash(ctx, batchToVerify.BatchNumber, nil)
if err != nil {
return nil, err
}
} else {
forcedBlockhashL1, err = a.State.GetForcedBatchParentHash(ctx, *batchToVerify.ForcedBatchNum, nil)
if err != nil {
return nil, err
}
}
}

inputProver := &prover.InputProver{
PublicInputs: &prover.PublicInputs{
OldStateRoot: previousBatch.StateRoot.Bytes(),
OldAccInputHash: previousBatch.AccInputHash.Bytes(),
OldBatchNum: previousBatch.BatchNumber,
ChainId: a.cfg.ChainID,
ForkId: a.cfg.ForkId,
BatchL2Data: batchToVerify.BatchL2Data,
GlobalExitRoot: batchToVerify.GlobalExitRoot.Bytes(),
EthTimestamp: uint64(batchToVerify.Timestamp.Unix()),
SequencerAddr: batchToVerify.Coinbase.String(),
AggregatorAddr: a.cfg.SenderAddress,
OldStateRoot: previousBatch.StateRoot.Bytes(),
OldAccInputHash: previousBatch.AccInputHash.Bytes(),
OldBatchNum: previousBatch.BatchNumber,
ChainId: a.cfg.ChainID,
ForkId: a.cfg.ForkId,
BatchL2Data: batchToVerify.BatchL2Data,
L1InfoRoot: l1InfoRoot.Bytes(),
TimestampLimit: uint64(batchToVerify.Timestamp.Unix()),
SequencerAddr: batchToVerify.Coinbase.String(),
AggregatorAddr: a.cfg.SenderAddress,
L1InfoTreeData: l1InfoTreeData,
ForcedBlockhashL1: forcedBlockhashL1.Bytes(),
},
Db: map[string]string{},
ContractsBytecode: map[string]string{},
}

printInputProver(inputProver)

return inputProver, nil
}

// printInputProver dumps every public input of the prover request at debug
// level, hex-encoding the byte-slice fields so the log output is readable.
func printInputProver(inputProver *prover.InputProver) {
	pi := inputProver.PublicInputs
	log.Debugf("OldStateRoot: %v", common.BytesToHash(pi.OldStateRoot))
	log.Debugf("OldAccInputHash: %v", common.BytesToHash(pi.OldAccInputHash))
	log.Debugf("OldBatchNum: %v", pi.OldBatchNum)
	log.Debugf("ChainId: %v", pi.ChainId)
	log.Debugf("ForkId: %v", pi.ForkId)
	log.Debugf("BatchL2Data: %v", common.Bytes2Hex(pi.BatchL2Data))
	log.Debugf("L1InfoRoot: %v", common.BytesToHash(pi.L1InfoRoot))
	log.Debugf("TimestampLimit: %v", pi.TimestampLimit)
	log.Debugf("SequencerAddr: %v", pi.SequencerAddr)
	log.Debugf("AggregatorAddr: %v", pi.AggregatorAddr)
	log.Debugf("L1InfoTreeData: %+v", pi.L1InfoTreeData)
	log.Debugf("ForcedBlockhashL1: %v", common.Bytes2Hex(pi.ForcedBlockhashL1))
}

// healthChecker will provide an implementation of the HealthCheck interface.
type healthChecker struct{}

Expand Down
Loading

0 comments on commit 88e95ce

Please sign in to comment.