feat: openvm euclid v1 #1633

Closed · wants to merge 24 commits

589 changes: 327 additions & 262 deletions common/libzkp/impl/Cargo.lock

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions common/libzkp/impl/Cargo.toml
@@ -14,8 +14,8 @@ ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.12.3" }
tiny-keccak = { git = "https://github.com/scroll-tech/tiny-keccak", branch = "scroll-patch-v2.0.2-openvm-v1.0.0-rc.1" }

[dependencies]
euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-prover" }
euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-verifier" }
euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.10", package = "scroll-zkvm-prover" }
euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.10", package = "scroll-zkvm-verifier" }

base64 = "0.13.0"
env_logger = "0.9.0"
119 changes: 91 additions & 28 deletions common/types/message/message.go
@@ -4,12 +4,14 @@ import (
"encoding/json"
"errors"
"fmt"
"math/big"

"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
)

const (
euclidFork = "euclid"
EuclidFork = "euclid"
)

// ProofType represents the type of task.
@@ -41,36 +43,94 @@ const (

// ChunkTaskDetail is a type containing ChunkTask detail.
type ChunkTaskDetail struct {
BlockHashes []common.Hash `json:"block_hashes"`
BlockHashes []common.Hash `json:"block_hashes"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
}

// Byte48 is a hex-encoded big integer with a fixed length of 48 bytes.
type Byte48 struct {
hexutil.Big
}

func (e Byte48) MarshalText() ([]byte, error) {
i := e.ToInt()
// override hexutil.Big's encoding: pad to a fixed width of 96 hex characters (48 bytes)
if sign := i.Sign(); sign < 0 {
// sanity check
return nil, fmt.Errorf("Byte48 must be positive integer")
} else {
s := i.Text(16)
if len(s) > 96 {
return nil, fmt.Errorf("integer Exceed 384bit")
}
return []byte(fmt.Sprintf("0x%0*s", 96, s)), nil
}
}

func isString(input []byte) bool {
return len(input) >= 2 && input[0] == '"' && input[len(input)-1] == '"'
}

// hexutil.Big is limited to 256 bits, so we override UnmarshalJSON here.
func (e *Byte48) UnmarshalJSON(input []byte) error {
if !isString(input) {
return fmt.Errorf("not hex string")
}

b, err := hexutil.Decode(string(input[1 : len(input)-1]))
if err != nil {
return err
}
if len(b) != 48 {
return fmt.Errorf("not a 48 bytes hex string: %d", len(b))
}
var dec big.Int
dec.SetBytes(b)
*e = Byte48{(hexutil.Big)(dec)}
return nil
}
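
For reference, a minimal sketch of how the new Byte48 type round-trips through encoding/json: the marshaled form is always zero-padded to 96 hex characters, and unmarshaling rejects anything that is not exactly 48 bytes. The `scroll-tech/common/types/message` import path is an assumption; adjust it to the actual module layout.

```go
package main

import (
	"encoding/json"
	"fmt"
	"math/big"

	"github.com/scroll-tech/go-ethereum/common/hexutil"

	// assumed import path for the message package
	"scroll-tech/common/types/message"
)

func main() {
	// Marshal: output is 0x-prefixed and zero-padded to 96 hex characters (48 bytes).
	v := message.Byte48{Big: hexutil.Big(*big.NewInt(0xabcdef))}
	out, err := json.Marshal(v)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // "0x000...000abcdef" (96 hex chars)

	// Unmarshal: the hex payload must decode to exactly 48 bytes, otherwise an error is returned.
	var back message.Byte48
	if err := json.Unmarshal(out, &back); err != nil {
		panic(err)
	}
	fmt.Println(back.ToInt()) // 11259375
}
```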

// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []ChunkProof `json:"chunk_proofs"`
BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
KzgProof []byte `json:"kzg_proof"`
KzgCommitment []byte `json:"kzg_commitment"`
Challenge common.Hash `json:"challenge"`
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []ChunkProof `json:"chunk_proofs"`
BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
KzgProof Byte48 `json:"kzg_proof,omitempty"`
KzgCommitment Byte48 `json:"kzg_commitment,omitempty"`
ChallengeDigest *common.Hash `json:"challenge_digest,omitempty"`
}

// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.
type BundleTaskDetail struct {
BatchProofs []BatchProof `json:"batch_proofs"`
BatchProofs []BatchProof `json:"batch_proofs"`
BundleInfo *OpenVMBundleInfo `json:"bundle_info,omitempty"`
}

// ChunkInfo is for calculating pi_hash for chunk
type ChunkInfo struct {
ChainID uint64 `json:"chain_id"`
PrevStateRoot common.Hash `json:"prev_state_root"`
PostStateRoot common.Hash `json:"post_state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
DataHash common.Hash `json:"data_hash"`
IsPadding bool `json:"is_padding"`
TxBytes []byte `json:"tx_bytes"`
TxBytesHash common.Hash `json:"tx_data_digest"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
ChainID uint64 `json:"chain_id"`
PrevStateRoot common.Hash `json:"prev_state_root"`
PostStateRoot common.Hash `json:"post_state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
DataHash common.Hash `json:"data_hash"`
IsPadding bool `json:"is_padding"`
TxBytes []byte `json:"tx_bytes"`
TxBytesHash common.Hash `json:"tx_data_digest"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
PostMsgQueueHash common.Hash `json:"post_msg_queue_hash"`
TxDataLength uint64 `json:"tx_data_length"`
InitialBlockNumber uint64 `json:"initial_block_number"`
BlockCtxs []BlockContextV2 `json:"block_ctxs"`
}

// BlockContextV2 is the block context for euclid v2
type BlockContextV2 struct {
Timestamp uint64 `json:"timestamp"`
BaseFee hexutil.Big `json:"base_fee"`
GasLimit uint64 `json:"gas_limit"`
NumTxs uint16 `json:"num_txs"`
NumL1Msgs uint16 `json:"num_l1_msgs"`
}
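
As a quick illustration of the serialized form of BlockContextV2 (using a mirrored struct here, since the real type is the one defined above): base_fee is hex-encoded by hexutil.Big, while the remaining fields stay plain integers.

```go
package main

import (
	"encoding/json"
	"fmt"
	"math/big"

	"github.com/scroll-tech/go-ethereum/common/hexutil"
)

// blockContextV2 mirrors message.BlockContextV2 for illustration only.
type blockContextV2 struct {
	Timestamp uint64      `json:"timestamp"`
	BaseFee   hexutil.Big `json:"base_fee"`
	GasLimit  uint64      `json:"gas_limit"`
	NumTxs    uint16      `json:"num_txs"`
	NumL1Msgs uint16      `json:"num_l1_msgs"`
}

func main() {
	ctx := blockContextV2{
		Timestamp: 1717000000,
		BaseFee:   hexutil.Big(*big.NewInt(1_000_000_000)), // 1 gwei, encoded as "0x3b9aca00"
		GasLimit:  30_000_000,
		NumTxs:    12,
		NumL1Msgs: 2,
	}
	out, _ := json.Marshal(ctx)
	fmt.Println(string(out))
	// {"timestamp":1717000000,"base_fee":"0x3b9aca00","gas_limit":30000000,"num_txs":12,"num_l1_msgs":2}
}
```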

// SubCircuitRowUsage tracing info added in v0.11.0rc8
@@ -87,7 +147,7 @@ type ChunkProof interface {
// NewChunkProof creates a new ChunkProof instance.
func NewChunkProof(hardForkName string) ChunkProof {
switch hardForkName {
case euclidFork:
case EuclidFork:
return &OpenVMChunkProof{}
default:
return &Halo2ChunkProof{}
@@ -121,7 +181,7 @@ type BatchProof interface {
// NewBatchProof creates a new BatchProof instance.
func NewBatchProof(hardForkName string) BatchProof {
switch hardForkName {
case euclidFork:
case EuclidFork:
return &OpenVMBatchProof{}
default:
return &Halo2BatchProof{}
@@ -178,7 +238,7 @@ type BundleProof interface {
// NewBundleProof creates a new BundleProof instance.
func NewBundleProof(hardForkName string) BundleProof {
switch hardForkName {
case euclidFork:
case EuclidFork:
return &OpenVMBundleProof{}
default:
return &Halo2BundleProof{}
@@ -258,12 +318,14 @@ func (p *OpenVMChunkProof) Proof() []byte {

// OpenVMBatchInfo is for calculating pi_hash for batch header
type OpenVMBatchInfo struct {
ParentBatchHash common.Hash `json:"parent_batch_hash"`
ParentStateRoot common.Hash `json:"parent_state_root"`
StateRoot common.Hash `json:"state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
BatchHash common.Hash `json:"batch_hash"`
ChainID uint64 `json:"chain_id"`
ParentBatchHash common.Hash `json:"parent_batch_hash"`
ParentStateRoot common.Hash `json:"parent_state_root"`
StateRoot common.Hash `json:"state_root"`
WithdrawRoot common.Hash `json:"withdraw_root"`
BatchHash common.Hash `json:"batch_hash"`
ChainID uint64 `json:"chain_id"`
PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
PostMsgQueueHash common.Hash `json:"post_msg_queue_hash"`
}

// BatchProof includes the proof info that are required for batch verification and rollup.
@@ -323,6 +385,7 @@ type OpenVMBundleInfo struct {
NumBatches uint32 `json:"num_batches"`
PrevBatchHash common.Hash `json:"prev_batch_hash"`
BatchHash common.Hash `json:"batch_hash"`
MsgQueueHash common.Hash `json:"msg_queue_hash"`
}

// OpenVMBundleProof includes the proof info that are required for verification of a bundle of batch proofs.
24 changes: 18 additions & 6 deletions coordinator/internal/logic/provertask/batch_prover_task.go
@@ -4,13 +4,15 @@ import (
"context"
"encoding/json"
"fmt"
"math/big"
"time"

"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -211,13 +213,19 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
WithdrawRoot: common.HexToHash(chunk.WithdrawRoot),
DataHash: common.HexToHash(chunk.Hash),
PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
PostMsgQueueHash: common.HexToHash(chunk.PostL1MessageQueueHash),
IsPadding: false,
}
if haloProot, ok := proof.(*message.Halo2ChunkProof); ok {
if haloProot.ChunkInfo != nil {
chunkInfo.TxBytes = haloProot.ChunkInfo.TxBytes
if halo2Proof, ok := proof.(*message.Halo2ChunkProof); ok {
if halo2Proof.ChunkInfo != nil {
chunkInfo.TxBytes = halo2Proof.ChunkInfo.TxBytes
}
}
if openvmProof, ok := proof.(*message.OpenVMChunkProof); ok {
chunkInfo.InitialBlockNumber = openvmProof.MetaData.ChunkInfo.InitialBlockNumber
chunkInfo.BlockCtxs = openvmProof.MetaData.ChunkInfo.BlockCtxs
chunkInfo.TxDataLength = openvmProof.MetaData.ChunkInfo.TxDataLength
}
chunkInfos = append(chunkInfos, &chunkInfo)
}

@@ -280,8 +288,12 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*
// | z | y | kzg_commitment | kzg_proof |
// |---------|---------|----------------|-----------|
// | bytes32 | bytes32 | bytes48 | bytes48 |
taskDetail.KzgProof = dbBatch.BlobDataProof[112:160]
taskDetail.KzgCommitment = dbBatch.BlobDataProof[64:112]
taskDetail.Challenge = common.Hash(dbBatch.BlobDataProof[0:32])
taskDetail.KzgProof = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[112:160]))}
taskDetail.KzgCommitment = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[64:112]))}
// FIXME: what we have here is Challenge = ChallengeDigest % BLS_MODULUS; the prover needs the original ChallengeDigest.
// Omit the field for now so the sanity check on the prover side is skipped.
// Restore it later, once the FIXME is resolved or the prover side relaxes its check.
// taskDetail.ChallengeDigest = new(common.Hash)
// *taskDetail.ChallengeDigest = common.BytesToHash(dbBatch.BlobDataProof[0:32])
return taskDetail, nil
}
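
The layout table above can also be read as a standalone slicing helper. A sketch (not part of this PR; the helper name is illustrative) of how the kzg_commitment and kzg_proof segments are extracted before being wrapped into message.Byte48:

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/scroll-tech/go-ethereum/common/hexutil"
)

// Offsets into the 160-byte blob data proof, per the layout in getBatchTaskDetail:
// | z (32) | y (32) | kzg_commitment (48) | kzg_proof (48) |
const (
	zEnd          = 32
	yEnd          = 64
	commitmentEnd = 112
	proofEnd      = 160
)

// splitBlobDataProof returns the kzg_commitment and kzg_proof segments as big
// integers, the same values the coordinator wraps into message.Byte48.
func splitBlobDataProof(proof []byte) (commitment, kzgProof *big.Int, err error) {
	if len(proof) < proofEnd {
		return nil, nil, fmt.Errorf("blob data proof too short: %d bytes", len(proof))
	}
	commitment = new(big.Int).SetBytes(proof[yEnd:commitmentEnd])
	kzgProof = new(big.Int).SetBytes(proof[commitmentEnd:proofEnd])
	return commitment, kzgProof, nil
}

func main() {
	blobDataProof := make([]byte, 160)
	blobDataProof[64] = 0x01 // first byte of the kzg_commitment segment
	c, p, err := splitBlobDataProof(blobDataProof)
	if err != nil {
		panic(err)
	}
	fmt.Println(hexutil.EncodeBig(c), p.Sign()) // commitment != 0, proof == 0
}
```
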
17 changes: 17 additions & 0 deletions coordinator/internal/logic/provertask/bundle_prover_task.go
@@ -9,6 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -194,6 +195,11 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
return nil, fmt.Errorf("failed to get batch proofs for bundle task id:%s, no batch found", task.TaskID)
}

parentBatch, err := bp.batchOrm.GetBatchByHash(ctx, batches[0].ParentBatchHash)
if err != nil {
return nil, fmt.Errorf("failed to get parent batch for batch task id:%s err:%w", task.TaskID, err)
}

var batchProofs []message.BatchProof
for _, batch := range batches {
proof := message.NewBatchProof(hardForkName)
@@ -207,6 +213,17 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
BatchProofs: batchProofs,
}

taskDetail.BundleInfo = &message.OpenVMBundleInfo{
ChainID: bp.cfg.L2.ChainID,
PrevStateRoot: common.HexToHash(parentBatch.StateRoot),
PostStateRoot: common.HexToHash(batches[len(batches)-1].StateRoot),
WithdrawRoot: common.HexToHash(batches[len(batches)-1].WithdrawRoot),
NumBatches: uint32(len(batches)),
PrevBatchHash: common.HexToHash(batches[0].ParentBatchHash),
BatchHash: common.HexToHash(batches[len(batches)-1].Hash),
// MsgQueueHash can be omitted here because it is only introduced in phase 2.
}

batchProofsBytes, err := json.Marshal(taskDetail)
if err != nil {
return nil, fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", task.TaskID, err)
8 changes: 5 additions & 3 deletions coordinator/internal/logic/provertask/chunk_prover_task.go
@@ -9,6 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -162,7 +163,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure
}

taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, hardForkName)
taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, chunkTask, hardForkName)
if err != nil {
cp.recoverActiveAttempts(ctx, chunkTask)
log.Error("format prover task failure", "task_id", chunkTask.Hash, "err", err)
@@ -179,15 +180,16 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return taskMsg, nil
}

func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, chunk *orm.Chunk, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
// Get block hashes.
blockHashes, dbErr := cp.blockOrm.GetL2BlockHashesByChunkHash(ctx, task.TaskID)
if dbErr != nil || len(blockHashes) == 0 {
return nil, fmt.Errorf("failed to fetch block hashes of a chunk, chunk hash:%s err:%w", task.TaskID, dbErr)
}

taskDetail := message.ChunkTaskDetail{
BlockHashes: blockHashes,
BlockHashes: blockHashes,
PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
}
blockHashesBytes, err := json.Marshal(taskDetail)
if err != nil {