diff --git a/.github/workflows/ccip-ocr3-build-lint-test.yml b/.github/workflows/ccip-ocr3-build-lint-test.yml index 085367c5d..47ad84128 100644 --- a/.github/workflows/ccip-ocr3-build-lint-test.yml +++ b/.github/workflows/ccip-ocr3-build-lint-test.yml @@ -24,14 +24,14 @@ jobs: - name: Display Go version run: go version - name: Build - run: go build -v ./... + run: make - name: Install linter run: | curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.59.0 - name: Run linter - run: golangci-lint run -c .golangci.yml + run: make lint - name: Run tests - run: go test -race -fullpath -shuffle on -count 20 -coverprofile=coverage.out ./... + run: TEST_COUNT=20 COVERAGE_FILE=coverage.out make test - name: Generate coverage report if: github.event_name == 'pull_request' run: | diff --git a/.github/workflows/codegen.yml b/.github/workflows/codegen.yml new file mode 100644 index 000000000..9a19fd602 --- /dev/null +++ b/.github/workflows/codegen.yml @@ -0,0 +1,42 @@ +# All code generation should be run prior to pull request. Running it again should not produce a diff. +name: "Codegen Verifier" + +on: + pull_request: + push: + branches: + - 'ccip-develop' + +jobs: + codegen-verifier: + runs-on: ubuntu-20.04 + strategy: + matrix: + go-version: ['1.21'] + defaults: + run: + working-directory: . + steps: + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + - name: Setup Go ${{ matrix.go-version }} + uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 + with: + go-version: ${{ matrix.go-version }} + - name: Display Go version + run: go version + - name: Re-Generate mocks + run: | + rm -rf mocks # delete all mocks to ensure any deprecated files have been removed. + make generate + - name: Tidy + run: go mod tidy + - name: ensure no changes + run: | + set -e + git_status=$(git status --porcelain=v1) + if [ ! -z "$git_status" ]; then + git status + git diff + echo "Error: modified files detected, run 'make generate' / 'go mod tidy'." + exit 1 + fi diff --git a/.golangci.yml b/.golangci.yml index af254a793..9f30e7850 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,6 +1,7 @@ run: timeout: 60s linters: + verbose: true enable: - exhaustive - exportloopref @@ -86,6 +87,8 @@ linters-settings: # Default: 120. line-length: 120 issues: + max-issues-per-linter: 0 + max-same-issues: 0 exclude-rules: - path: test text: "^G404:" diff --git a/.mockery.yaml b/.mockery.yaml new file mode 100644 index 000000000..0e7b60e53 --- /dev/null +++ b/.mockery.yaml @@ -0,0 +1,13 @@ +with-expecter: true +filename: "{{.InterfaceName | snakecase}}.go" +dir: mocks/{{ replaceAll .InterfaceDirRelative "internal" "internal_" }} +packages: + github.com/smartcontractkit/chainlink-ccip/execute/types: + interfaces: + TokenDataReader: + github.com/smartcontractkit/chainlink-ccip/execute/internal/gen: + interfaces: + ExecutePluginCodec: + github.com/smartcontractkit/chainlink-ccip/internal/reader: + interfaces: + HomeChain: diff --git a/Makefile b/Makefile index 358b75feb..abb0ac382 100644 --- a/Makefile +++ b/Makefile @@ -1,12 +1,22 @@ +TEST_COUNT ?= 10 +COVERAGE_FILE ?= coverage.out -ensure_go_1_21: +build: ensure_go_version + go build -v ./... + +generate: ensure_go_version + go install github.com/vektra/mockery/v2@v2.43.0 + mockery + +test: ensure_go_version + go test -race -fullpath -shuffle on -count $(TEST_COUNT) -coverprofile=$(COVERAGE_FILE) ./... 
+ +lint: ensure_go_version + golangci-lint run -c .golangci.yml + +ensure_go_version: @go version | grep -q 'go1.21' || (echo "Please use go1.21" && exit 1) ensure_golangcilint_1_59: @golangci-lint --version | grep -q '1.59' || (echo "Please use golangci-lint 1.59" && exit 1) -test: ensure_go_1_21 - go test -race -fullpath -shuffle on -count 10 ./... - -lint: ensure_go_1_21 - golangci-lint run -c .golangci.yml diff --git a/commit/plugin.go b/commit/plugin.go index a98453eaa..08ea40da0 100644 --- a/commit/plugin.go +++ b/commit/plugin.go @@ -5,11 +5,11 @@ import ( "errors" "fmt" "sort" - "sync" "time" mapset "github.com/deckarep/golang-set/v2" + "github.com/smartcontractkit/chainlink-ccip/internal/plugincommon" "github.com/smartcontractkit/chainlink-ccip/internal/reader" "github.com/smartcontractkit/chainlink-ccip/pluginconfig" "github.com/smartcontractkit/chainlink-ccip/plugintypes" @@ -34,14 +34,13 @@ type Plugin struct { oracleIDToP2pID map[commontypes.OracleID]libocrtypes.PeerID cfg pluginconfig.CommitPluginConfig ccipReader reader.CCIP + readerSyncer *plugincommon.BackgroundReaderSyncer tokenPricesReader reader.TokenPrices reportCodec cciptypes.CommitPluginCodec msgHasher cciptypes.MessageHasher lggr logger.Logger - homeChain reader.HomeChain - bgSyncCancelFunc context.CancelFunc - bgSyncWG *sync.WaitGroup + homeChain reader.HomeChain } func NewPlugin( @@ -56,32 +55,28 @@ func NewPlugin( lggr logger.Logger, homeChain reader.HomeChain, ) *Plugin { - p := &Plugin{ + readerSyncer := plugincommon.NewBackgroundReaderSyncer( + lggr, + ccipReader, + syncTimeout(cfg.SyncTimeout), + syncFrequency(cfg.SyncFrequency), + ) + if err := readerSyncer.Start(context.Background()); err != nil { + lggr.Errorw("error starting background reader syncer", "err", err) + } + + return &Plugin{ nodeID: nodeID, oracleIDToP2pID: oracleIDToP2pID, cfg: cfg, ccipReader: ccipReader, + readerSyncer: readerSyncer, tokenPricesReader: tokenPricesReader, reportCodec: reportCodec, msgHasher: msgHasher, lggr: lggr, homeChain: homeChain, } - - bgSyncCtx, bgSyncCf := context.WithCancel(context.Background()) - p.bgSyncCancelFunc = bgSyncCf - p.bgSyncWG = &sync.WaitGroup{} - p.bgSyncWG.Add(1) - backgroundReaderSync( - bgSyncCtx, - p.bgSyncWG, - lggr, - ccipReader, - syncTimeout(cfg.SyncTimeout), - time.NewTicker(syncFrequency(p.cfg.SyncFrequency)).C, - ) - - return p } // Query phase is not used. 
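The hunk above is the core of this change for the commit plugin: the hand-rolled backgroundReaderSync goroutine, with its cancel func and WaitGroup bookkeeping, is replaced by a shared plugincommon.BackgroundReaderSyncer that NewPlugin starts and Close stops. The syncer's implementation is not part of this diff; the sketch below is an assumed reconstruction based on the backgroundReaderSync/syncReader helpers removed from commit/plugin_functions.go and on the Start/Close calls shown here, so the field names and internals are illustrative only.

// Hypothetical sketch of internal/plugincommon.BackgroundReaderSyncer.
// Reconstructed from the removed backgroundReaderSync/syncReader helpers;
// the real type in the repository may differ in structure and error handling.
package plugincommon

import (
	"context"
	"sync"
	"time"

	"github.com/smartcontractkit/chainlink-common/pkg/logger"

	"github.com/smartcontractkit/chainlink-ccip/internal/reader"
)

type BackgroundReaderSyncer struct {
	lggr          logger.Logger
	reader        reader.CCIP
	syncTimeout   time.Duration
	syncFrequency time.Duration

	cancel context.CancelFunc
	wg     sync.WaitGroup
}

func NewBackgroundReaderSyncer(
	lggr logger.Logger,
	rdr reader.CCIP,
	syncTimeout time.Duration,
	syncFrequency time.Duration,
) *BackgroundReaderSyncer {
	return &BackgroundReaderSyncer{
		lggr:          lggr,
		reader:        rdr,
		syncTimeout:   syncTimeout,
		syncFrequency: syncFrequency,
	}
}

// Start runs the ticker-driven sync loop in a goroutine until Close is called
// (or the parent context is cancelled).
func (s *BackgroundReaderSyncer) Start(ctx context.Context) error {
	ctx, s.cancel = context.WithCancel(ctx)
	ticker := time.NewTicker(s.syncFrequency)

	s.wg.Add(1)
	go func() {
		defer s.wg.Done()
		defer ticker.Stop()
		for {
			select {
			case <-ctx.Done():
				return
			case <-ticker.C:
				// Bound each sync attempt by syncTimeout, mirroring the removed syncReader helper.
				syncCtx, cancel := context.WithTimeout(ctx, s.syncTimeout)
				updated, err := s.reader.Sync(syncCtx)
				cancel()
				switch {
				case err != nil:
					s.lggr.Errorw("background reader sync failed", "err", err)
				case updated:
					s.lggr.Info("ccip reader sync success")
				default:
					s.lggr.Debug("no updates found after trying to sync")
				}
			}
		}
	}()
	return nil
}

// Close stops the loop and waits for the goroutine to exit.
func (s *BackgroundReaderSyncer) Close() error {
	if s.cancel != nil {
		s.cancel()
	}
	s.wg.Wait()
	return nil
}

Both plugins also guard against zero-valued config with the syncFrequency/syncTimeout helpers added further down in execute/plugin.go, which fall back to 10s and 3s respectively.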
@@ -200,14 +195,22 @@ func (p *Plugin) Observation( } -func (p *Plugin) ValidateObservation(_ ocr3types.OutcomeContext, _ types.Query, ao types.AttributedObservation) error { +func (p *Plugin) ValidateObservation( + outCtx ocr3types.OutcomeContext, _ types.Query, ao types.AttributedObservation) error { obs, err := plugintypes.DecodeCommitPluginObservation(ao.Observation) if err != nil { return fmt.Errorf("decode commit plugin observation: %w", err) } - if err := validateObservedSequenceNumbers(obs.NewMsgs, obs.MaxSeqNums); err != nil { - return fmt.Errorf("validate sequence numbers: %w", err) + if outCtx.PreviousOutcome != nil { + prevOutcome, err := plugintypes.DecodeCommitPluginOutcome(outCtx.PreviousOutcome) + if err != nil { + return fmt.Errorf("decode commit plugin previous outcome: %w", err) + } + + if err := validateObservedSequenceNumbers(obs.NewMsgs, prevOutcome.MaxSeqNums); err != nil { + return fmt.Errorf("validate sequence numbers: %w", err) + } } observerSupportedChains, err := p.supportedChains(ao.Observer) @@ -358,16 +361,18 @@ func (p *Plugin) ShouldTransmitAcceptedReport( } func (p *Plugin) Close() error { - timeout := 10 * time.Second + timeout := 10 * time.Second // todo: cfg ctx, cf := context.WithTimeout(context.Background(), timeout) defer cf() + if err := p.readerSyncer.Close(); err != nil { + p.lggr.Errorw("error closing reader syncer", "err", err) + } + if err := p.ccipReader.Close(ctx); err != nil { return fmt.Errorf("close ccip reader: %w", err) } - p.bgSyncCancelFunc() - p.bgSyncWG.Wait() return nil } diff --git a/commit/plugin_functions.go b/commit/plugin_functions.go index 53b9cb60c..cca9751c0 100644 --- a/commit/plugin_functions.go +++ b/commit/plugin_functions.go @@ -4,8 +4,6 @@ import ( "context" "fmt" "sort" - "sync" - "time" mapset "github.com/deckarep/golang-set/v2" "github.com/smartcontractkit/libocr/offchainreporting2plus/types" @@ -636,53 +634,6 @@ func validateMerkleRootsState( return true, nil } -func backgroundReaderSync( - ctx context.Context, - wg *sync.WaitGroup, - lggr logger.Logger, - reader reader.CCIP, - syncTimeout time.Duration, - ticker <-chan time.Time, -) { - go func() { - defer wg.Done() - - for { - select { - case <-ctx.Done(): - return - case <-ticker: - if err := syncReader(ctx, lggr, reader, syncTimeout); err != nil { - lggr.Errorw("runBackgroundReaderSync failed", "err", err) - } - } - } - }() -} - -func syncReader( - ctx context.Context, - lggr logger.Logger, - reader reader.CCIP, - syncTimeout time.Duration, -) error { - timeoutCtx, cf := context.WithTimeout(ctx, syncTimeout) - defer cf() - - updated, err := reader.Sync(timeoutCtx) - if err != nil { - return err - } - - if !updated { - lggr.Debug("no updates found after trying to sync") - } else { - lggr.Info("ccip reader sync success") - } - - return nil -} - type observedMsgsConsensus struct { seqNumRange cciptypes.SeqNumRange merkleRoot [32]byte diff --git a/commit/plugin_functions_test.go b/commit/plugin_functions_test.go index 8c6df0aa5..5fc2c84f7 100644 --- a/commit/plugin_functions_test.go +++ b/commit/plugin_functions_test.go @@ -3,10 +3,8 @@ package commit import ( "context" "encoding/binary" - "fmt" "math/big" "slices" - "sync" "testing" "time" @@ -1482,50 +1480,6 @@ func Test_gasPricesConsensus(t *testing.T) { } } -func Test_backgroundReaderSync(t *testing.T) { - ctx, cf := context.WithCancel(context.Background()) - lggr := logger.Test(t) - reader := mocks.NewCCIPReader() - syncTimeout := 50 * time.Millisecond - ticker := make(chan time.Time) - wg := &sync.WaitGroup{} 
- wg.Add(1) - - // start background syncing - backgroundReaderSync(ctx, wg, lggr, reader, syncTimeout, ticker) - - // send a tick to trigger the first sync that errors - reader.On("Sync", mock.Anything).Return(false, fmt.Errorf("some err")).Once() - ticker <- time.Now() - - // send a tick to trigger the second sync that succeeds without changes - reader.On("Sync", mock.Anything).Return(false, nil).Once() - ticker <- time.Now() - - // make sync hang to see the context timeout - reader.On("Sync", mock.Anything).Run(func(args mock.Arguments) { - ctx := args.Get(0).(context.Context) - for { // simulate endless work until context times out - select { - case <-ctx.Done(): - t.Log("context cancelled as expected") - return - default: - time.Sleep(time.Millisecond) // sleep to not block the CPU - } - } - }).Return(false, nil).Once() - ticker <- time.Now() - - // send a tick to trigger the fourth sync that succeeds with changes - reader.On("Sync", mock.Anything).Return(true, nil).Once() - ticker <- time.Now() - - cf() // trigger bg sync to stop - wg.Wait() // wait for it to stop - reader.AssertExpectations(t) -} - func Test_validateMerkleRootsState(t *testing.T) { testCases := []struct { name string diff --git a/execute/internal/gen/codec.go b/execute/internal/gen/codec.go new file mode 100644 index 000000000..d5c2fcb91 --- /dev/null +++ b/execute/internal/gen/codec.go @@ -0,0 +1,9 @@ +// Package gen wraps an external type to generate a mock object. +package gen + +import cciptypes "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" + +// ExecutePluginCodec is defined in chainlink-common. +type ExecutePluginCodec interface { + cciptypes.ExecutePluginCodec +} diff --git a/execute/plugin.go b/execute/plugin.go index 32ec6297d..65e12c9a4 100644 --- a/execute/plugin.go +++ b/execute/plugin.go @@ -9,13 +9,16 @@ import ( mapset "github.com/deckarep/golang-set/v2" - "github.com/smartcontractkit/chainlink-common/pkg/logger" - cciptypes "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" "github.com/smartcontractkit/libocr/commontypes" "github.com/smartcontractkit/libocr/offchainreporting2plus/ocr3types" "github.com/smartcontractkit/libocr/offchainreporting2plus/types" libocrtypes "github.com/smartcontractkit/libocr/ragep2p/types" + "github.com/smartcontractkit/chainlink-ccip/internal/plugincommon" + + "github.com/smartcontractkit/chainlink-common/pkg/logger" + cciptypes "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" + "github.com/smartcontractkit/chainlink-ccip/execute/report" types2 "github.com/smartcontractkit/chainlink-ccip/execute/types" "github.com/smartcontractkit/chainlink-ccip/internal/reader" @@ -32,10 +35,11 @@ type Plugin struct { cfg pluginconfig.ExecutePluginConfig // providers - ccipReader reader.CCIP - reportCodec cciptypes.ExecutePluginCodec - msgHasher cciptypes.MessageHasher - homeChain reader.HomeChain + ccipReader reader.CCIP + readerSyncer *plugincommon.BackgroundReaderSyncer + reportCodec cciptypes.ExecutePluginCodec + msgHasher cciptypes.MessageHasher + homeChain reader.HomeChain oracleIDToP2pID map[commontypes.OracleID]libocrtypes.PeerID tokenDataReader types2.TokenDataReader @@ -59,11 +63,22 @@ func NewPlugin( // TODO: initialize tokenDataReader. 
+ readerSyncer := plugincommon.NewBackgroundReaderSyncer( + lggr, + ccipReader, + syncTimeout(cfg.SyncTimeout), + syncFrequency(cfg.SyncFrequency), + ) + if err := readerSyncer.Start(context.Background()); err != nil { + lggr.Errorw("error starting background reader syncer", "err", err) + } + return &Plugin{ reportingCfg: reportingCfg, cfg: cfg, oracleIDToP2pID: oracleIDToP2pID, ccipReader: ccipReader, + readerSyncer: readerSyncer, reportCodec: reportCodec, msgHasher: msgHasher, homeChain: homeChain, @@ -85,7 +100,6 @@ func getPendingExecutedReports( if err != nil { return nil, time.Time{}, err } - // TODO: this could be more efficient. commitReports is also traversed in 'groupByChainSelector'. for _, report := range commitReports { if report.Timestamp.After(latestReportTS) { @@ -177,7 +191,7 @@ func (p *Plugin) Observation( // No reports to execute. // This is expected after a cold start. } else { - commitReportCache := make(map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages) + commitReportCache := make(map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData) for _, report := range previousOutcome.PendingCommitReports { commitReportCache[report.SourceChain] = append(commitReportCache[report.SourceChain], report) } @@ -254,14 +268,14 @@ func selectReport( hasher cciptypes.MessageHasher, encoder cciptypes.ExecutePluginCodec, tokenDataReader types2.TokenDataReader, - commitReports []plugintypes.ExecutePluginCommitDataWithMessages, + commitReports []plugintypes.ExecutePluginCommitData, maxReportSizeBytes int, -) ([]cciptypes.ExecutePluginReportSingleChain, []plugintypes.ExecutePluginCommitDataWithMessages, error) { +) ([]cciptypes.ExecutePluginReportSingleChain, []plugintypes.ExecutePluginCommitData, error) { // TODO: It may be desirable for this entire function to be an interface so that // different selection algorithms can be used. builder := report.NewBuilder(ctx, lggr, hasher, tokenDataReader, encoder, uint64(maxReportSizeBytes), 99) - var stillPendingReports []plugintypes.ExecutePluginCommitDataWithMessages + var stillPendingReports []plugintypes.ExecutePluginCommitData for i, report := range commitReports { // Reports at the end may not have messages yet. if len(report.Messages) == 0 { @@ -323,7 +337,7 @@ func (p *Plugin) Outcome( mergedMessageObservations) // flatten commit reports and sort by timestamp. - var commitReports []plugintypes.ExecutePluginCommitDataWithMessages + var commitReports []plugintypes.ExecutePluginCommitData for _, report := range observation.CommitReports { commitReports = append(commitReports, report...) 
} @@ -418,6 +432,18 @@ func (p *Plugin) ShouldTransmitAcceptedReport( } func (p *Plugin) Close() error { + timeout := 10 * time.Second // todo: cfg + ctx, cf := context.WithTimeout(context.Background(), timeout) + defer cf() + + if err := p.readerSyncer.Close(); err != nil { + p.lggr.Errorw("error closing reader syncer", "err", err) + } + + if err := p.ccipReader.Close(ctx); err != nil { + return fmt.Errorf("close ccip reader: %w", err) + } + return nil } @@ -443,5 +469,19 @@ func (p *Plugin) supportsDestChain() (bool, error) { return chains.Contains(p.cfg.DestChain), nil } +func syncFrequency(configuredValue time.Duration) time.Duration { + if configuredValue.Milliseconds() == 0 { + return 10 * time.Second + } + return configuredValue +} + +func syncTimeout(configuredValue time.Duration) time.Duration { + if configuredValue.Milliseconds() == 0 { + return 3 * time.Second + } + return configuredValue +} + // Interface compatibility checks. var _ ocr3types.ReportingPlugin[[]byte] = &Plugin{} diff --git a/execute/plugin_e2e_test.go b/execute/plugin_e2e_test.go index 105959cc8..8af1731ad 100644 --- a/execute/plugin_e2e_test.go +++ b/execute/plugin_e2e_test.go @@ -22,6 +22,7 @@ import ( "github.com/smartcontractkit/chainlink-ccip/internal/mocks" "github.com/smartcontractkit/chainlink-ccip/internal/mocks/inmem" "github.com/smartcontractkit/chainlink-ccip/internal/reader" + mock_types "github.com/smartcontractkit/chainlink-ccip/mocks/execute/types" "github.com/smartcontractkit/chainlink-ccip/pkg/consts" "github.com/smartcontractkit/chainlink-ccip/pluginconfig" "github.com/smartcontractkit/chainlink-ccip/plugintypes" @@ -71,9 +72,9 @@ func TestPlugin(t *testing.T) { type nodeSetup struct { node *Plugin - reportCodec *mocks.ExecutePluginJSONReportCodec + reportCodec cciptypes.ExecutePluginCodec msgHasher cciptypes.MessageHasher - TokenDataReader *mocks.TokenDataReader + TokenDataReader *mock_types.MockTokenDataReader } func setupHomeChainPoller(lggr logger.Logger, chainConfigInfos []reader.ChainConfigInfo) reader.HomeChain { @@ -130,12 +131,11 @@ func setupSimpleTest( makeMsg(105, srcSelector, dstSelector, false), } - reportData := plugintypes.ExecutePluginCommitDataWithMessages{ - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SourceChain: srcSelector, - SequenceNumberRange: cciptypes.NewSeqNumRange(100, 105), - }, - Messages: slicelib.Map(messages, func(m inmem.MessagesWithMetadata) cciptypes.Message { return m.Message }), + mapped := slicelib.Map(messages, func(m inmem.MessagesWithMetadata) cciptypes.Message { return m.Message }) + reportData := plugintypes.ExecutePluginCommitData{ + SourceChain: srcSelector, + SequenceNumberRange: cciptypes.NewSeqNumRange(100, 105), + Messages: mapped, } tree, err := report.ConstructMerkleTree(context.Background(), msgHasher, reportData) @@ -201,7 +201,7 @@ func setupSimpleTest( err = homeChain.Start(ctx) require.NoError(t, err, "failed to start home chain poller") - tokenDataReader := mocks.NewTokenDataReader(t) + tokenDataReader := mock_types.NewMockTokenDataReader(t) tokenDataReader.On("ReadTokenData", mock.Anything, mock.Anything, mock.Anything).Return([][]byte{}, nil) oracleIDToP2pID := GetP2pIDs(1, 2, 3) diff --git a/execute/plugin_functions.go b/execute/plugin_functions.go index b088f11bf..9b8b3cbc1 100644 --- a/execute/plugin_functions.go +++ b/execute/plugin_functions.go @@ -38,7 +38,7 @@ func validateObserverReadingEligibility( // validateObservedSequenceNumbers checks if the sequence numbers of the provided messages are unique for 
each chain // and that they match the observed max sequence numbers. func validateObservedSequenceNumbers( - observedData map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages, + observedData map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData, ) error { for _, commitData := range observedData { // observed commitData must not contain duplicates @@ -77,7 +77,7 @@ var errOverlappingRanges = errors.New("overlapping sequence numbers in reports") // computeRanges takes a slice of reports and computes the smallest number of contiguous ranges // that cover all the sequence numbers in the reports. // Note: reports need all messages to create a proof even if some are already executed. -func computeRanges(reports []plugintypes.ExecutePluginCommitDataWithMessages) ([]cciptypes.SeqNumRange, error) { +func computeRanges(reports []plugintypes.ExecutePluginCommitData) ([]cciptypes.SeqNumRange, error) { var ranges []cciptypes.SeqNumRange if len(reports) == 0 { @@ -109,19 +109,17 @@ func computeRanges(reports []plugintypes.ExecutePluginCommitDataWithMessages) ([ func groupByChainSelector( reports []plugintypes.CommitPluginReportWithMeta) plugintypes.ExecutePluginCommitObservations { - commitReportCache := make(map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages) + commitReportCache := make(map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData) for _, report := range reports { for _, singleReport := range report.Report.MerkleRoots { commitReportCache[singleReport.ChainSel] = append(commitReportCache[singleReport.ChainSel], - plugintypes.ExecutePluginCommitDataWithMessages{ - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SourceChain: singleReport.ChainSel, - Timestamp: report.Timestamp, - BlockNum: report.BlockNum, - MerkleRoot: singleReport.MerkleRoot, - SequenceNumberRange: singleReport.SeqNumsRange, - ExecutedMessages: nil, - }}) + plugintypes.ExecutePluginCommitData{ + SourceChain: singleReport.ChainSel, + Timestamp: report.Timestamp, + BlockNum: report.BlockNum, + MerkleRoot: singleReport.MerkleRoot, + SequenceNumberRange: singleReport.SeqNumsRange, + }) } } return commitReportCache @@ -130,8 +128,8 @@ func groupByChainSelector( // filterOutExecutedMessages returns a new reports slice with fully executed messages removed. // Unordered inputs are supported. 
func filterOutExecutedMessages( - reports []plugintypes.ExecutePluginCommitDataWithMessages, executedMessages []cciptypes.SeqNumRange, -) ([]plugintypes.ExecutePluginCommitDataWithMessages, error) { + reports []plugintypes.ExecutePluginCommitData, executedMessages []cciptypes.SeqNumRange, +) ([]plugintypes.ExecutePluginCommitData, error) { sort.Slice(reports, func(i, j int) bool { return reports[i].SequenceNumberRange.Start() < reports[j].SequenceNumberRange.Start() }) @@ -154,7 +152,7 @@ func filterOutExecutedMessages( previousMax = seqRange.End() } - var filtered []plugintypes.ExecutePluginCommitDataWithMessages + var filtered []plugintypes.ExecutePluginCommitData reportIdx := 0 for _, executed := range executedMessages { @@ -269,13 +267,13 @@ func mergeCommitObservations( ) (plugintypes.ExecutePluginCommitObservations, error) { // Create a validator for each chain validators := - make(map[cciptypes.ChainSelector]validation.MinObservationFilter[plugintypes.ExecutePluginCommitDataWithMessages]) - idFunc := func(data plugintypes.ExecutePluginCommitDataWithMessages) [32]byte { + make(map[cciptypes.ChainSelector]validation.MinObservationFilter[plugintypes.ExecutePluginCommitData]) + idFunc := func(data plugintypes.ExecutePluginCommitData) [32]byte { return sha3.Sum256([]byte(fmt.Sprintf("%v", data))) } for selector, f := range fChain { validators[selector] = - validation.NewMinObservationValidator[plugintypes.ExecutePluginCommitDataWithMessages](f+1, idFunc) + validation.NewMinObservationValidator[plugintypes.ExecutePluginCommitData](f+1, idFunc) } // Add reports to the validator for each chain selector. diff --git a/execute/plugin_functions_test.go b/execute/plugin_functions_test.go index 3d23eff94..b544ecb50 100644 --- a/execute/plugin_functions_test.go +++ b/execute/plugin_functions_test.go @@ -77,49 +77,41 @@ func Test_validateObserverReadingEligibility(t *testing.T) { func Test_validateObservedSequenceNumbers(t *testing.T) { testCases := []struct { name string - observedData map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages + observedData map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData expErr bool }{ { name: "ValidData", - observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages{ + observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData{ 1: { { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: cciptypes.Bytes32{1}, - SequenceNumberRange: cciptypes.SeqNumRange{1, 10}, - ExecutedMessages: []cciptypes.SeqNum{1, 2, 3}, - }, + MerkleRoot: cciptypes.Bytes32{1}, + SequenceNumberRange: cciptypes.SeqNumRange{1, 10}, + ExecutedMessages: []cciptypes.SeqNum{1, 2, 3}, }, }, 2: { { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: cciptypes.Bytes32{2}, - SequenceNumberRange: cciptypes.SeqNumRange{11, 20}, - ExecutedMessages: []cciptypes.SeqNum{11, 12, 13}, - }, + MerkleRoot: cciptypes.Bytes32{2}, + SequenceNumberRange: cciptypes.SeqNumRange{11, 20}, + ExecutedMessages: []cciptypes.SeqNum{11, 12, 13}, }, }, }, }, { name: "DuplicateMerkleRoot", - observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages{ + observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData{ 1: { { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: cciptypes.Bytes32{1}, - SequenceNumberRange: cciptypes.SeqNumRange{1, 10}, - ExecutedMessages: []cciptypes.SeqNum{1, 2, 3}, - }, + MerkleRoot: 
cciptypes.Bytes32{1}, + SequenceNumberRange: cciptypes.SeqNumRange{1, 10}, + ExecutedMessages: []cciptypes.SeqNum{1, 2, 3}, }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: cciptypes.Bytes32{1}, - SequenceNumberRange: cciptypes.SeqNumRange{11, 20}, - ExecutedMessages: []cciptypes.SeqNum{11, 12, 13}, - }, + MerkleRoot: cciptypes.Bytes32{1}, + SequenceNumberRange: cciptypes.SeqNumRange{11, 20}, + ExecutedMessages: []cciptypes.SeqNum{11, 12, 13}, }, }, }, @@ -127,21 +119,17 @@ func Test_validateObservedSequenceNumbers(t *testing.T) { }, { name: "OverlappingSequenceNumberRange", - observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages{ + observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData{ 1: { { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: cciptypes.Bytes32{1}, - SequenceNumberRange: cciptypes.SeqNumRange{1, 10}, - ExecutedMessages: []cciptypes.SeqNum{1, 2, 3}, - }, + MerkleRoot: cciptypes.Bytes32{1}, + SequenceNumberRange: cciptypes.SeqNumRange{1, 10}, + ExecutedMessages: []cciptypes.SeqNum{1, 2, 3}, }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: cciptypes.Bytes32{2}, - SequenceNumberRange: cciptypes.SeqNumRange{5, 15}, - ExecutedMessages: []cciptypes.SeqNum{6, 7, 8}, - }, + MerkleRoot: cciptypes.Bytes32{2}, + SequenceNumberRange: cciptypes.SeqNumRange{5, 15}, + ExecutedMessages: []cciptypes.SeqNum{6, 7, 8}, }, }, }, @@ -149,14 +137,12 @@ func Test_validateObservedSequenceNumbers(t *testing.T) { }, { name: "ExecutedMessageOutsideObservedRange", - observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages{ + observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData{ 1: { { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: cciptypes.Bytes32{1}, - SequenceNumberRange: cciptypes.SeqNumRange{1, 10}, - ExecutedMessages: []cciptypes.SeqNum{1, 2, 11}, - }, + MerkleRoot: cciptypes.Bytes32{1}, + SequenceNumberRange: cciptypes.SeqNumRange{1, 10}, + ExecutedMessages: []cciptypes.SeqNum{1, 2, 11}, }, }, }, @@ -164,13 +150,13 @@ func Test_validateObservedSequenceNumbers(t *testing.T) { }, { name: "NoCommitData", - observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages{ + observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData{ 1: {}, }, }, { name: "EmptyObservedData", - observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages{}, + observedData: map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData{}, }, } @@ -188,7 +174,7 @@ func Test_validateObservedSequenceNumbers(t *testing.T) { func Test_computeRanges(t *testing.T) { type args struct { - reports []plugintypes.ExecutePluginCommitDataWithMessages + reports []plugintypes.ExecutePluginCommitData } tests := []struct { @@ -199,89 +185,63 @@ func Test_computeRanges(t *testing.T) { }{ { name: "empty", - args: args{reports: []plugintypes.ExecutePluginCommitDataWithMessages{}}, + args: args{reports: []plugintypes.ExecutePluginCommitData{}}, want: nil, }, { name: "overlapping ranges", - args: args{reports: []plugintypes.ExecutePluginCommitDataWithMessages{ + args: args{reports: []plugintypes.ExecutePluginCommitData{ { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), }, { - 
ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(15, 25), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(15, 25), }, - }, - }, + }}, err: errOverlappingRanges, }, { name: "simple ranges collapsed", - args: args{reports: []plugintypes.ExecutePluginCommitDataWithMessages{ + args: args{reports: []plugintypes.ExecutePluginCommitData{ { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(21, 40), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(21, 40), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(41, 60), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(41, 60), }, - }, - }, + }}, want: []cciptypes.SeqNumRange{{10, 60}}, }, { name: "non-contiguous ranges", - args: args{reports: []plugintypes.ExecutePluginCommitDataWithMessages{ + args: args{reports: []plugintypes.ExecutePluginCommitData{ { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, - }, - }, - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, + }}, want: []cciptypes.SeqNumRange{{10, 20}, {30, 40}, {50, 60}}, }, { name: "contiguous and non-contiguous ranges", - args: args{reports: []plugintypes.ExecutePluginCommitDataWithMessages{ + args: args{reports: []plugintypes.ExecutePluginCommitData{ { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(21, 40), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(21, 40), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), }, - }, - }, + }}, want: []cciptypes.SeqNumRange{{10, 40}, {50, 60}}, }, } @@ -325,22 +285,16 @@ func Test_groupByChainSelector(t *testing.T) { want: plugintypes.ExecutePluginCommitObservations{ 1: { { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SourceChain: 1, - MerkleRoot: cciptypes.Bytes32{1}, - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - ExecutedMessages: nil, - }, + SourceChain: 1, + MerkleRoot: cciptypes.Bytes32{1}, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), }, }, 2: { { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SourceChain: 2, - MerkleRoot: cciptypes.Bytes32{2}, - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - ExecutedMessages: nil, - }, + SourceChain: 2, + MerkleRoot: cciptypes.Bytes32{2}, + SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), }, }, }, @@ -357,13 +311,13 @@ func Test_groupByChainSelector(t *testing.T) { func Test_filterOutFullyExecutedMessages(t 
*testing.T) { type args struct { - reports []plugintypes.ExecutePluginCommitDataWithMessages + reports []plugintypes.ExecutePluginCommitData executedMessages []cciptypes.SeqNumRange } tests := []struct { name string args args - want []plugintypes.ExecutePluginCommitDataWithMessages + want []plugintypes.ExecutePluginCommitData wantErr assert.ErrorAssertionFunc }{ { @@ -378,60 +332,36 @@ func Test_filterOutFullyExecutedMessages(t *testing.T) { { name: "empty2", args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{}, + reports: []plugintypes.ExecutePluginCommitData{}, executedMessages: nil, }, - want: []plugintypes.ExecutePluginCommitDataWithMessages{}, + want: []plugintypes.ExecutePluginCommitData{}, wantErr: assert.NoError, }, { name: "no executed messages", args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), - }, - }, + reports: []plugintypes.ExecutePluginCommitData{ + {SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, }, executedMessages: nil, }, - want: []plugintypes.ExecutePluginCommitDataWithMessages{ - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}}, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}}, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}}, + want: []plugintypes.ExecutePluginCommitData{ + {SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, }, wantErr: assert.NoError, }, { name: "executed messages", args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}}, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}}, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}}, + reports: []plugintypes.ExecutePluginCommitData{ + {SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, }, executedMessages: []cciptypes.SeqNumRange{ cciptypes.NewSeqNumRange(0, 100), @@ -443,41 +373,26 @@ func Test_filterOutFullyExecutedMessages(t *testing.T) { { name: "2 partially executed", args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: 
cciptypes.NewSeqNumRange(50, 60)}, - }, + reports: []plugintypes.ExecutePluginCommitData{ + {SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, }, executedMessages: []cciptypes.SeqNumRange{ cciptypes.NewSeqNumRange(15, 35), }, }, - want: []plugintypes.ExecutePluginCommitDataWithMessages{ + want: []plugintypes.ExecutePluginCommitData{ { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - ExecutedMessages: []cciptypes.SeqNum{15, 16, 17, 18, 19, 20}, - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), + ExecutedMessages: []cciptypes.SeqNum{15, 16, 17, 18, 19, 20}, }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - ExecutedMessages: []cciptypes.SeqNum{30, 31, 32, 33, 34, 35}, - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), + ExecutedMessages: []cciptypes.SeqNum{30, 31, 32, 33, 34, 35}, }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), }, }, wantErr: assert.NoError, @@ -485,39 +400,23 @@ func Test_filterOutFullyExecutedMessages(t *testing.T) { { name: "2 partially executed 1 fully executed", args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), - }, - }, + reports: []plugintypes.ExecutePluginCommitData{ + {SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, }, executedMessages: []cciptypes.SeqNumRange{ cciptypes.NewSeqNumRange(15, 55), }, }, - want: []plugintypes.ExecutePluginCommitDataWithMessages{ + want: []plugintypes.ExecutePluginCommitData{ { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - ExecutedMessages: []cciptypes.SeqNum{15, 16, 17, 18, 19, 20}, - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), + ExecutedMessages: []cciptypes.SeqNum{15, 16, 17, 18, 19, 20}, }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), - ExecutedMessages: []cciptypes.SeqNum{50, 51, 52, 53, 54, 55}, - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), + ExecutedMessages: []cciptypes.SeqNum{50, 51, 52, 53, 54, 55}, }, }, wantErr: assert.NoError, @@ -525,116 +424,64 @@ func Test_filterOutFullyExecutedMessages(t *testing.T) { { name: "first report executed", args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: 
cciptypes.NewSeqNumRange(50, 60), - }, - }, + reports: []plugintypes.ExecutePluginCommitData{ + {SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, }, executedMessages: []cciptypes.SeqNumRange{ cciptypes.NewSeqNumRange(10, 20), }, }, - want: []plugintypes.ExecutePluginCommitDataWithMessages{ - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), - }, - }, + want: []plugintypes.ExecutePluginCommitData{ + {SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, }, wantErr: assert.NoError, }, { name: "last report executed", args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), - }, - }, + reports: []plugintypes.ExecutePluginCommitData{ + {SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60)}, }, executedMessages: []cciptypes.SeqNumRange{ cciptypes.NewSeqNumRange(50, 60), }, }, - want: []plugintypes.ExecutePluginCommitDataWithMessages{ - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, - }, + want: []plugintypes.ExecutePluginCommitData{ + {SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20)}, + {SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40)}, }, wantErr: assert.NoError, }, { name: "sort-report", args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ + reports: []plugintypes.ExecutePluginCommitData{ { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), }, }, executedMessages: nil, }, - want: []plugintypes.ExecutePluginCommitDataWithMessages{ + want: []plugintypes.ExecutePluginCommitData{ { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: 
cciptypes.NewSeqNumRange(50, 60), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), }, }, wantErr: assert.NoError, @@ -642,21 +489,15 @@ func Test_filterOutFullyExecutedMessages(t *testing.T) { { name: "sort-executed", args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ + reports: []plugintypes.ExecutePluginCommitData{ { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(10, 20), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(30, 40), }, { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), - }, + SequenceNumberRange: cciptypes.NewSeqNumRange(50, 60), }, }, executedMessages: []cciptypes.SeqNumRange{ @@ -708,7 +549,7 @@ func Test_decodeAttributedObservations(t *testing.T) { Observer: commontypes.OracleID(1), Observation: mustEncode(plugintypes.ExecutePluginObservation{ CommitReports: plugintypes.ExecutePluginCommitObservations{ - 1: {{ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{MerkleRoot: cciptypes.Bytes32{1}}}}, + 1: {{MerkleRoot: cciptypes.Bytes32{1}}}, }, }), }, @@ -718,7 +559,7 @@ func Test_decodeAttributedObservations(t *testing.T) { Observer: commontypes.OracleID(1), Observation: plugintypes.ExecutePluginObservation{ CommitReports: plugintypes.ExecutePluginCommitObservations{ - 1: {{ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{MerkleRoot: cciptypes.Bytes32{1}}}}, + 1: {{MerkleRoot: cciptypes.Bytes32{1}}}, }, }, }, @@ -732,7 +573,7 @@ func Test_decodeAttributedObservations(t *testing.T) { Observer: commontypes.OracleID(1), Observation: mustEncode(plugintypes.ExecutePluginObservation{ CommitReports: plugintypes.ExecutePluginCommitObservations{ - 1: {{ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{MerkleRoot: cciptypes.Bytes32{1}}}}, + 1: {{MerkleRoot: cciptypes.Bytes32{1}}}, }, }), }, @@ -740,7 +581,7 @@ func Test_decodeAttributedObservations(t *testing.T) { Observer: commontypes.OracleID(2), Observation: mustEncode(plugintypes.ExecutePluginObservation{ CommitReports: plugintypes.ExecutePluginCommitObservations{ - 2: {{ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{MerkleRoot: cciptypes.Bytes32{2}}}}, + 2: {{MerkleRoot: cciptypes.Bytes32{2}}}, }, }), }, @@ -750,7 +591,7 @@ func Test_decodeAttributedObservations(t *testing.T) { Observer: commontypes.OracleID(1), Observation: plugintypes.ExecutePluginObservation{ CommitReports: plugintypes.ExecutePluginCommitObservations{ - 1: {{ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{MerkleRoot: cciptypes.Bytes32{1}}}}, + 1: {{MerkleRoot: cciptypes.Bytes32{1}}}, }, }, }, @@ -758,7 +599,7 @@ func Test_decodeAttributedObservations(t *testing.T) { Observer: commontypes.OracleID(2), Observation: plugintypes.ExecutePluginObservation{ CommitReports: plugintypes.ExecutePluginCommitObservations{ - 2: {{ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{MerkleRoot: cciptypes.Bytes32{2}}}}, + 2: {{MerkleRoot: cciptypes.Bytes32{2}}}, }, }, }, diff --git a/execute/plugin_test.go b/execute/plugin_test.go index 34d466b82..c056a0235 100644 --- a/execute/plugin_test.go +++ b/execute/plugin_test.go @@ -3,7 +3,6 @@ package execute import ( "context" "fmt" - "math/rand" "testing" "time" @@ -19,64 +18,19 @@ import ( 
"github.com/smartcontractkit/libocr/offchainreporting2plus/types" libocrtypes "github.com/smartcontractkit/libocr/ragep2p/types" - "github.com/smartcontractkit/chainlink-common/pkg/hashutil" "github.com/smartcontractkit/chainlink-common/pkg/logger" - "github.com/smartcontractkit/chainlink-common/pkg/merklemulti" cciptypes "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" - "github.com/smartcontractkit/chainlink-ccip/execute/report" "github.com/smartcontractkit/chainlink-ccip/internal/libs/slicelib" "github.com/smartcontractkit/chainlink-ccip/internal/mocks" + "github.com/smartcontractkit/chainlink-ccip/internal/plugincommon" "github.com/smartcontractkit/chainlink-ccip/internal/reader" - reader_mock "github.com/smartcontractkit/chainlink-ccip/internal/reader/mocks" + codec_mocks "github.com/smartcontractkit/chainlink-ccip/mocks/execute/internal_/gen" + reader_mock "github.com/smartcontractkit/chainlink-ccip/mocks/internal_/reader" "github.com/smartcontractkit/chainlink-ccip/pluginconfig" "github.com/smartcontractkit/chainlink-ccip/plugintypes" ) -// makeMessage creates a message deterministically derived from the given inputs. -func makeMessage(src cciptypes.ChainSelector, num cciptypes.SeqNum, nonce uint64) cciptypes.Message { - var placeholderID cciptypes.Bytes32 - n, err := rand.New(rand.NewSource(int64(src) * int64(num) * int64(nonce))).Read(placeholderID[:]) - if n != 32 { - panic(fmt.Sprintf("Unexpected number of bytes read for placeholder id: want 32, got %d", n)) - } - if err != nil { - panic(fmt.Sprintf("Error reading random bytes: %v", err)) - } - - return cciptypes.Message{ - Header: cciptypes.RampMessageHeader{ - MessageID: placeholderID, - SourceChainSelector: src, - SequenceNumber: num, - MsgHash: cciptypes.Bytes32{}, - Nonce: nonce, - }, - } -} - -// mustParseByteStr parses a given string into a byte array, any error causes a panic. Pass in an empty string for a -// random byte array. -// nolint:unparam // surly this will be useful at some point... 
-func mustParseByteStr(byteStr string) cciptypes.Bytes32 { - if byteStr == "" { - var randomBytes cciptypes.Bytes32 - n, err := rand.New(rand.NewSource(0)).Read(randomBytes[:]) - if n != 32 { - panic(fmt.Sprintf("Unexpected number of bytes read for placeholder id: want 32, got %d", n)) - } - if err != nil { - panic(fmt.Sprintf("Error reading random bytes: %v", err)) - } - return randomBytes - } - b, err := cciptypes.NewBytes32FromString(byteStr) - if err != nil { - panic(err) - } - return b -} - func Test_getPendingExecutedReports(t *testing.T) { tests := []struct { name string @@ -114,14 +68,13 @@ func Test_getPendingExecutedReports(t *testing.T) { 1: nil, }, want: plugintypes.ExecutePluginCommitObservations{ - 1: []plugintypes.ExecutePluginCommitDataWithMessages{ - {ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ + 1: []plugintypes.ExecutePluginCommitData{ + { SourceChain: 1, SequenceNumberRange: cciptypes.NewSeqNumRange(1, 10), - ExecutedMessages: nil, Timestamp: time.UnixMilli(10101010101), BlockNum: 999, - }}, + }, }, }, want1: time.UnixMilli(10101010101), @@ -150,14 +103,14 @@ func Test_getPendingExecutedReports(t *testing.T) { }, }, want: plugintypes.ExecutePluginCommitObservations{ - 1: []plugintypes.ExecutePluginCommitDataWithMessages{ - {ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ + 1: []plugintypes.ExecutePluginCommitData{ + { SourceChain: 1, SequenceNumberRange: cciptypes.NewSeqNumRange(1, 10), Timestamp: time.UnixMilli(10101010101), BlockNum: 999, ExecutedMessages: []cciptypes.SeqNum{1, 2, 3, 7, 8}, - }}, + }, }, }, want1: time.UnixMilli(10101010101), @@ -201,7 +154,6 @@ func Test_getPendingExecutedReports(t *testing.T) { // CommitReportsGTETimestamp(ctx, dest, ts, 1000) -> ([]cciptypes.CommitPluginReportWithMeta, error) // for each chain selector: // ExecutedMessageRanges(ctx, selector, dest, seqRange) -> ([]cciptypes.SeqNumRange, error) - got, got1, err := getPendingExecutedReports(context.Background(), mockReader, 123, time.Now()) if !tt.wantErr(t, err, "getPendingExecutedReports(...)") { return @@ -212,346 +164,13 @@ func Test_getPendingExecutedReports(t *testing.T) { } } -// TODO: better than this -type tdr struct{} - -func (t tdr) ReadTokenData( - ctx context.Context, srcChain cciptypes.ChainSelector, num cciptypes.SeqNum) ([][]byte, error, -) { - return nil, nil -} - -// breakCommitReport by adding an extra message. This causes the report to have an unexpected number of messages. -func breakCommitReport( - commitReport plugintypes.ExecutePluginCommitDataWithMessages, -) plugintypes.ExecutePluginCommitDataWithMessages { - commitReport.Messages = append(commitReport.Messages, cciptypes.Message{}) - return commitReport -} - -// makeTestCommitReport creates a basic commit report with messages given different parameters. This function -// will panic if the input parameters are inconsistent. 
-func makeTestCommitReport( - hasher cciptypes.MessageHasher, - numMessages, - srcChain, - firstSeqNum, - block int, - timestamp int64, - rootOverride cciptypes.Bytes32, - executed []cciptypes.SeqNum, -) plugintypes.ExecutePluginCommitDataWithMessages { - sequenceNumberRange := - cciptypes.NewSeqNumRange(cciptypes.SeqNum(firstSeqNum), cciptypes.SeqNum(firstSeqNum+numMessages-1)) - - for _, e := range executed { - if !sequenceNumberRange.Contains(e) { - panic("executed message out of range") - } - } - var messages []cciptypes.Message - for i := 0; i < numMessages; i++ { - messages = append(messages, makeMessage( - cciptypes.ChainSelector(srcChain), - cciptypes.SeqNum(i+firstSeqNum), - uint64(i))) - } - - commitReport := plugintypes.ExecutePluginCommitDataWithMessages{ - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - //MerkleRoot: root, - SourceChain: cciptypes.ChainSelector(srcChain), - SequenceNumberRange: sequenceNumberRange, - Timestamp: time.UnixMilli(timestamp), - BlockNum: uint64(block), - ExecutedMessages: executed, - }, - Messages: messages, - } - - // calculate merkle root - root := rootOverride - if root.IsEmpty() { - tree, err := report.ConstructMerkleTree(context.Background(), hasher, commitReport) - if err != nil { - panic(fmt.Sprintf("unable to construct merkle tree: %s", err)) - } - commitReport.MerkleRoot = tree.Root() - } - - return commitReport -} - -// assertMerkleRoot computes the source messages merkle root, then computes a verification with the proof, then compares -// the roots. -func assertMerkleRoot( - t *testing.T, - hasher cciptypes.MessageHasher, - execReport cciptypes.ExecutePluginReportSingleChain, - commitReport plugintypes.ExecutePluginCommitDataWithMessages, -) { - keccak := hashutil.NewKeccak() - // Generate merkle root from commit report messages - var leafHashes [][32]byte - for _, msg := range commitReport.Messages { - hash, err := hasher.Hash(context.Background(), msg) - require.NoError(t, err) - leafHashes = append(leafHashes, hash) - } - tree, err := merklemulti.NewTree(keccak, leafHashes) - require.NoError(t, err) - merkleRoot := tree.Root() - - // Generate merkle root from exec report messages and proof - ctx := context.Background() - var leaves [][32]byte - for _, msg := range execReport.Messages { - hash, err := hasher.Hash(ctx, msg) - require.NoError(t, err) - leaves = append(leaves, hash) - } - proofCast := make([][32]byte, len(execReport.Proofs)) - for i, p := range execReport.Proofs { - copy(proofCast[i][:], p[:32]) - } - var proof merklemulti.Proof[[32]byte] - proof.Hashes = proofCast - proof.SourceFlags = slicelib.BitFlagsToBools(execReport.ProofFlagBits.Int, len(leaves)+len(proofCast)-1) - recomputedMerkleRoot, err := merklemulti.VerifyComputeRoot(hashutil.NewKeccak(), - leaves, - proof) - assert.NoError(t, err) - assert.NotNil(t, recomputedMerkleRoot) - - // Compare them - assert.Equal(t, merkleRoot, recomputedMerkleRoot) -} +func TestPlugin_Close(t *testing.T) { + mockReader := mocks.NewCCIPReader() + mockReader.On("Close", mock.Anything).Return(nil) -func Test_selectReport(t *testing.T) { - hasher := mocks.NewMessageHasher() - codec := mocks.NewExecutePluginJSONReportCodec() lggr := logger.Test(t) - var tokenDataReader tdr - - type args struct { - reports []plugintypes.ExecutePluginCommitDataWithMessages - maxReportSize int - } - tests := []struct { - name string - args args - expectedExecReports int - expectedCommitReports int - expectedExecThings []int - lastReportExecuted []cciptypes.SeqNum - wantErr string - }{ - { - 
name: "empty report", - args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{}, - }, - expectedExecReports: 0, - expectedCommitReports: 0, - }, - { - name: "half report", - args: args{ - maxReportSize: 2300, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - nil), - }, - }, - expectedExecReports: 1, - expectedCommitReports: 1, - expectedExecThings: []int{5}, - lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104}, - }, - { - name: "full report", - args: args{ - maxReportSize: 10000, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - nil), - }, - }, - expectedExecReports: 1, - expectedCommitReports: 0, - expectedExecThings: []int{10}, - }, - { - name: "two reports", - args: args{ - maxReportSize: 15000, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - nil), - makeTestCommitReport(hasher, 20, 2, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - nil), - }, - }, - expectedExecReports: 2, - expectedCommitReports: 0, - expectedExecThings: []int{10, 20}, - }, - { - name: "one and half reports", - args: args{ - maxReportSize: 8500, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - nil), - makeTestCommitReport(hasher, 20, 2, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - nil), - }, - }, - expectedExecReports: 2, - expectedCommitReports: 1, - expectedExecThings: []int{10, 10}, - lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104, 105, 106, 107, 108, 109}, - }, - { - name: "exactly one report", - args: args{ - maxReportSize: 4200, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - nil), - makeTestCommitReport(hasher, 20, 2, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - nil), - }, - }, - expectedExecReports: 1, - expectedCommitReports: 1, - expectedExecThings: []int{10}, - lastReportExecuted: []cciptypes.SeqNum{}, - }, - { - name: "execute remainder of partially executed report", - args: args{ - maxReportSize: 2500, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - []cciptypes.SeqNum{100, 101, 102, 103, 104}), - }, - }, - expectedExecReports: 1, - expectedCommitReports: 0, - expectedExecThings: []int{5}, - }, - { - name: "partially execute remainder of partially executed report", - args: args{ - maxReportSize: 2050, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. 
- []cciptypes.SeqNum{100, 101, 102, 103, 104}), - }, - }, - expectedExecReports: 1, - expectedCommitReports: 1, - expectedExecThings: []int{4}, - lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104, 105, 106, 107, 108}, - }, - { - name: "execute remainder of sparsely executed report", - args: args{ - maxReportSize: 3500, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - []cciptypes.SeqNum{100, 102, 104, 106, 108}), - }, - }, - expectedExecReports: 1, - expectedCommitReports: 0, - expectedExecThings: []int{5}, - }, - { - name: "partially execute remainder of partially executed sparse report", - args: args{ - maxReportSize: 2050, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - cciptypes.Bytes32{}, // generate a correct root. - []cciptypes.SeqNum{100, 102, 104, 106, 108}), - }, - }, - expectedExecReports: 1, - expectedCommitReports: 1, - expectedExecThings: []int{4}, - lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104, 105, 106, 107, 108}, - }, - { - name: "broken report", - args: args{ - maxReportSize: 10000, - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - breakCommitReport(makeTestCommitReport(hasher, 10, 1, 101, 1000, 10101010102, - cciptypes.Bytes32{}, // generate a correct root. - nil)), - }, - }, - wantErr: "unable to build a single chain report", - }, - { - name: "invalid merkle root", - args: args{ - reports: []plugintypes.ExecutePluginCommitDataWithMessages{ - makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, - mustParseByteStr(""), // random root - nil), - }, - }, - wantErr: "merkle root mismatch: expected 0x00000000000000000", - }, - // TODO: A test that requires skipping over a large message because only a smaller message fits in the report. - } - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - ctx := context.Background() - execReports, commitReports, err := - selectReport(ctx, lggr, hasher, codec, tokenDataReader, tt.args.reports, tt.args.maxReportSize) - if tt.wantErr != "" { - assert.Contains(t, err.Error(), tt.wantErr) - return - } - require.NoError(t, err) - require.Len(t, execReports, tt.expectedExecReports) - require.Len(t, commitReports, tt.expectedCommitReports) - for i, execReport := range execReports { - require.Lenf(t, execReport.Messages, tt.expectedExecThings[i], - "Unexpected number of messages, iter %d", i) - require.Lenf(t, execReport.OffchainTokenData, tt.expectedExecThings[i], - "Unexpected number of token data, iter %d", i) - require.NotEmptyf(t, execReport.Proofs, "Proof should not be empty.") - assertMerkleRoot(t, hasher, execReport, tt.args.reports[i]) - } - // If the last report is partially executed, the executed messages can be checked. - if len(execReports) > 0 && len(tt.lastReportExecuted) > 0 { - lastReport := commitReports[len(commitReports)-1] - require.ElementsMatch(t, tt.lastReportExecuted, lastReport.ExecutedMessages) - } - }) - } -} - -func TestPlugin_Close(t *testing.T) { - p := &Plugin{} + readerSyncer := plugincommon.NewBackgroundReaderSyncer(lggr, mockReader, 50*time.Millisecond, 100*time.Millisecond) + p := &Plugin{lggr: lggr, ccipReader: mockReader, readerSyncer: readerSyncer} require.NoError(t, p.Close()) } @@ -636,19 +255,11 @@ func TestPlugin_ValidateObservation_ValidateObservedSeqNum_Error(t *testing.T) { } // Reports with duplicate roots. 
- root := mustParseByteStr("") - commitReports := map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages{ + root := cciptypes.Bytes32{} + commitReports := map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData{ 1: { - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: root, - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: root, - }, - }, + {MerkleRoot: root}, + {MerkleRoot: root}, }, } observation := plugintypes.NewExecutePluginObservation(commitReports, nil) @@ -684,7 +295,7 @@ func TestPlugin_Observation_EligibilityCheckFailure(t *testing.T) { } func TestPlugin_Outcome_BadObservationEncoding(t *testing.T) { - p := &Plugin{} + p := &Plugin{lggr: logger.Test(t)} _, err := p.Outcome(ocr3types.OutcomeContext{}, nil, []types.AttributedObservation{ { @@ -701,6 +312,7 @@ func TestPlugin_Outcome_BelowF(t *testing.T) { reportingCfg: ocr3types.ReportingPluginConfig{ F: 1, }, + lggr: logger.Test(t), } _, err := p.Outcome(ocr3types.OutcomeContext{}, nil, []types.AttributedObservation{}) @@ -709,7 +321,7 @@ func TestPlugin_Outcome_BelowF(t *testing.T) { } func TestPlugin_Outcome_HomeChainError(t *testing.T) { - homeChain := reader_mock.NewHomeChain(t) + homeChain := reader_mock.NewMockHomeChain(t) homeChain.On("GetFChain", mock.Anything).Return(nil, fmt.Errorf("test error")) p := &Plugin{ @@ -721,7 +333,7 @@ func TestPlugin_Outcome_HomeChainError(t *testing.T) { } func TestPlugin_Outcome_CommitReportsMergeError(t *testing.T) { - homeChain := reader_mock.NewHomeChain(t) + homeChain := reader_mock.NewMockHomeChain(t) fChainMap := map[cciptypes.ChainSelector]int{ 10: 20, } @@ -729,21 +341,11 @@ func TestPlugin_Outcome_CommitReportsMergeError(t *testing.T) { p := &Plugin{ homeChain: homeChain, + lggr: logger.Test(t), } - commitReports := map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitDataWithMessages{ - 1: { - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: mustParseByteStr(""), - }, - }, - { - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - MerkleRoot: mustParseByteStr(""), - }, - }, - }, + commitReports := map[cciptypes.ChainSelector][]plugintypes.ExecutePluginCommitData{ + 1: {}, } observation, err := plugintypes.NewExecutePluginObservation(commitReports, nil).Encode() require.NoError(t, err) @@ -757,7 +359,7 @@ func TestPlugin_Outcome_CommitReportsMergeError(t *testing.T) { } func TestPlugin_Outcome_MessagesMergeError(t *testing.T) { - homeChain := reader_mock.NewHomeChain(t) + homeChain := reader_mock.NewMockHomeChain(t) fChainMap := map[cciptypes.ChainSelector]int{ 10: 20, } @@ -767,7 +369,7 @@ func TestPlugin_Outcome_MessagesMergeError(t *testing.T) { homeChain: homeChain, } - //map[cciptypes.ChainSelector]map[cciptypes.SeqNum]cciptypes.Message + // map[cciptypes.ChainSelector]map[cciptypes.SeqNum]cciptypes.Message messages := map[cciptypes.ChainSelector]map[cciptypes.SeqNum]cciptypes.Message{ 1: { 1: { @@ -796,7 +398,7 @@ func TestPlugin_Reports_UnableToParse(t *testing.T) { } func TestPlugin_Reports_UnableToEncode(t *testing.T) { - codec := mocks.NewExecutePluginCodec(t) + codec := codec_mocks.NewMockExecutePluginCodec(t) codec.On("Encode", mock.Anything, mock.Anything). 
Return(nil, fmt.Errorf("test error")) p := &Plugin{reportCodec: codec} @@ -809,7 +411,7 @@ func TestPlugin_Reports_UnableToEncode(t *testing.T) { } func TestPlugin_ShouldAcceptAttestedReport_DoesNotDecode(t *testing.T) { - codec := mocks.NewExecutePluginCodec(t) + codec := codec_mocks.NewMockExecutePluginCodec(t) codec.On("Decode", mock.Anything, mock.Anything). Return(cciptypes.ExecutePluginReport{}, fmt.Errorf("test error")) p := &Plugin{ @@ -821,7 +423,7 @@ func TestPlugin_ShouldAcceptAttestedReport_DoesNotDecode(t *testing.T) { } func TestPlugin_ShouldAcceptAttestedReport_NoReports(t *testing.T) { - codec := mocks.NewExecutePluginCodec(t) + codec := codec_mocks.NewMockExecutePluginCodec(t) codec.On("Decode", mock.Anything, mock.Anything). Return(cciptypes.ExecutePluginReport{}, nil) p := &Plugin{ @@ -834,7 +436,7 @@ func TestPlugin_ShouldAcceptAttestedReport_NoReports(t *testing.T) { } func TestPlugin_ShouldAcceptAttestedReport_ShouldAccept(t *testing.T) { - codec := mocks.NewExecutePluginCodec(t) + codec := codec_mocks.NewMockExecutePluginCodec(t) codec.On("Decode", mock.Anything, mock.Anything). Return(cciptypes.ExecutePluginReport{ ChainReports: []cciptypes.ExecutePluginReportSingleChain{ @@ -886,9 +488,9 @@ func TestPlugin_ShouldTransmitAcceptReport_Ineligible(t *testing.T) { } func TestPlugin_ShouldTransmitAcceptReport_DecodeFailure(t *testing.T) { - homeChain := reader_mock.NewHomeChain(t) + homeChain := reader_mock.NewMockHomeChain(t) homeChain.On("GetSupportedChainsForPeer", mock.Anything).Return(mapset.NewSet(cciptypes.ChainSelector(1)), nil) - codec := mocks.NewExecutePluginCodec(t) + codec := codec_mocks.NewMockExecutePluginCodec(t) codec.On("Decode", mock.Anything, mock.Anything). Return(cciptypes.ExecutePluginReport{}, fmt.Errorf("test error")) @@ -910,9 +512,9 @@ func TestPlugin_ShouldTransmitAcceptReport_DecodeFailure(t *testing.T) { func TestPlugin_ShouldTransmitAcceptReport_Success(t *testing.T) { lggr, logs := logger.TestObserved(t, zapcore.DebugLevel) - homeChain := reader_mock.NewHomeChain(t) + homeChain := reader_mock.NewMockHomeChain(t) homeChain.On("GetSupportedChainsForPeer", mock.Anything).Return(mapset.NewSet(cciptypes.ChainSelector(1)), nil) - codec := mocks.NewExecutePluginCodec(t) + codec := codec_mocks.NewMockExecutePluginCodec(t) codec.On("Decode", mock.Anything, mock.Anything). Return(cciptypes.ExecutePluginReport{}, nil) diff --git a/execute/report/builder.go b/execute/report/builder.go index be5decc62..bca6c3f88 100644 --- a/execute/report/builder.go +++ b/execute/report/builder.go @@ -15,7 +15,7 @@ import ( var _ ExecReportBuilder = &execReportBuilder{} type ExecReportBuilder interface { - Add(report plugintypes.ExecutePluginCommitDataWithMessages) (plugintypes.ExecutePluginCommitDataWithMessages, error) + Add(report plugintypes.ExecutePluginCommitData) (plugintypes.ExecutePluginCommitData, error) Build() ([]cciptypes.ExecutePluginReportSingleChain, error) } @@ -41,6 +41,14 @@ func NewBuilder( } } +// validationMetadata contains all metadata needed to accumulate results across multiple reports and messages. +type validationMetadata struct { + encodedSizeBytes uint64 + + // TODO: gas limit + //gas uint64 +} + type execReportBuilder struct { ctx context.Context // TODO: remove context from builder so that it can be pure? 
lggr logger.Logger @@ -55,32 +63,25 @@ type execReportBuilder struct { maxGas uint64 // State - reportSizeBytes uint64 - // TODO: gas limit - //gas uint64 + accumulated validationMetadata // Result execReports []cciptypes.ExecutePluginReportSingleChain } func (b *execReportBuilder) Add( - commitReport plugintypes.ExecutePluginCommitDataWithMessages, -) (plugintypes.ExecutePluginCommitDataWithMessages, error) { - // TODO: buildSingleChainReportMaxSize needs to be part of the builder in order to access state. - execReport, encodedSize, updatedReport, err := - buildSingleChainReportMaxSize(b.ctx, b.lggr, b.hasher, b.tokenDataReader, b.encoder, - commitReport, - int(b.maxReportSizeBytes-b.reportSizeBytes)) + commitReport plugintypes.ExecutePluginCommitData, +) (plugintypes.ExecutePluginCommitData, error) { + execReport, updatedReport, err := b.buildSingleChainReport(b.ctx, commitReport) // No messages fit into the report, move to next report if errors.Is(err, ErrEmptyReport) { return commitReport, nil } if err != nil { - return commitReport, fmt.Errorf("unable to build single chain report: %w", err) + return commitReport, fmt.Errorf("unable to add a single chain report: %w", err) } - b.reportSizeBytes += uint64(encodedSize) b.execReports = append(b.execReports, execReport) return updatedReport, nil @@ -90,7 +91,7 @@ func (b *execReportBuilder) Build() ([]cciptypes.ExecutePluginReportSingleChain, b.lggr.Infow( "selected commit reports for execution report", "numReports", len(b.execReports), - "size", b.reportSizeBytes, + "sizeBytes", b.accumulated.encodedSizeBytes, "maxSize", b.maxReportSizeBytes) return b.execReports, nil } diff --git a/execute/report/data.go b/execute/report/data.go index 5e5cc6c52..35d82187f 100644 --- a/execute/report/data.go +++ b/execute/report/data.go @@ -11,8 +11,8 @@ import ( // markNewMessagesExecuted compares an execute plugin report with the commit report metadata and marks the new messages // as executed. func markNewMessagesExecuted( - execReport cciptypes.ExecutePluginReportSingleChain, report plugintypes.ExecutePluginCommitDataWithMessages, -) plugintypes.ExecutePluginCommitDataWithMessages { + execReport cciptypes.ExecutePluginReportSingleChain, report plugintypes.ExecutePluginCommitData, +) plugintypes.ExecutePluginCommitData { // Mark new messages executed. for i := 0; i < len(execReport.Messages); i++ { report.ExecutedMessages = diff --git a/execute/report/report.go b/execute/report/report.go index a5d8aa0da..4eb789aaa 100644 --- a/execute/report/report.go +++ b/execute/report/report.go @@ -5,9 +5,10 @@ import ( "context" "encoding/hex" "fmt" - "sort" + "slices" "github.com/smartcontractkit/chainlink-common/pkg/logger" + cciptypes "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" "github.com/smartcontractkit/chainlink-ccip/execute/types" @@ -15,25 +16,29 @@ import ( "github.com/smartcontractkit/chainlink-ccip/plugintypes" ) -// buildSingleChainReport converts the on-chain event data stored in cciptypes.ExecutePluginCommitDataWithMessages into -// the final on-chain report format. +// buildSingleChainReportHelper converts the on-chain event data stored in +// cciptypes.ExecutePluginCommitDataWithMessages into the final on-chain report format. // // The hasher and encoding codec are provided as arguments to allow for chain-specific formats to be used. // // The messages argument indicates which messages should be included in the report. If messages is empty // all messages will be included. 
This allows the caller to create smaller reports if needed. Executed messages // are skipped automatically. -func buildSingleChainReport( +func buildSingleChainReportHelper( ctx context.Context, lggr logger.Logger, hasher cciptypes.MessageHasher, tokenDataReader types.TokenDataReader, - encoder cciptypes.ExecutePluginCodec, - report plugintypes.ExecutePluginCommitDataWithMessages, - maxMessages int, -) (cciptypes.ExecutePluginReportSingleChain, int, error) { - if maxMessages == 0 { - maxMessages = len(report.Messages) + report plugintypes.ExecutePluginCommitData, + messages map[int]struct{}, +) (cciptypes.ExecutePluginReportSingleChain, error) { + if len(messages) == 0 { + if messages == nil { + messages = make(map[int]struct{}) + } + for i := 0; i < len(report.Messages); i++ { + messages[i] = struct{}{} + } } lggr.Debugw( @@ -44,7 +49,7 @@ func buildSingleChainReport( tree, err := ConstructMerkleTree(ctx, hasher, report) if err != nil { - return cciptypes.ExecutePluginReportSingleChain{}, 0, + return cciptypes.ExecutePluginReportSingleChain{}, fmt.Errorf("unable to construct merkle tree from messages for report (%s): %w", report.MerkleRoot.String(), err) } @@ -52,7 +57,7 @@ func buildSingleChainReport( hash := tree.Root() if !bytes.Equal(hash[:], report.MerkleRoot[:]) { actualStr := "0x" + hex.EncodeToString(hash[:]) - return cciptypes.ExecutePluginReportSingleChain{}, 0, + return cciptypes.ExecutePluginReportSingleChain{}, fmt.Errorf("merkle root mismatch: expected %s, got %s", report.MerkleRoot.String(), actualStr) } @@ -62,13 +67,12 @@ func buildSingleChainReport( var offchainTokenData [][][]byte var msgInRoot []cciptypes.Message executedIdx := 0 - for i := 0; i < numMsgs && len(toExecute) <= maxMessages; i++ { + for i, msg := range report.Messages { seqNum := report.SequenceNumberRange.Start() + cciptypes.SeqNum(i) // Skip messages which are already executed if executedIdx < len(report.ExecutedMessages) && report.ExecutedMessages[executedIdx] == seqNum { executedIdx++ - } else { - msg := report.Messages[i] + } else if _, ok := messages[i]; ok { tokenData, err := tokenDataReader.ReadTokenData(context.Background(), report.SourceChain, msg.Header.SequenceNumber) if err != nil { // TODO: skip message instead of failing the whole thing. @@ -78,7 +82,7 @@ func buildSingleChainReport( "sourceChain", report.SourceChain, "seqNum", msg.Header.SequenceNumber, "error", err) - return cciptypes.ExecutePluginReportSingleChain{}, 0, + return cciptypes.ExecutePluginReportSingleChain{}, fmt.Errorf("unable to read token data for message %d: %w", msg.Header.SequenceNumber, err) } @@ -101,7 +105,7 @@ func buildSingleChainReport( "toExecute", len(toExecute)) proof, err := tree.Prove(toExecute) if err != nil { - return cciptypes.ExecutePluginReportSingleChain{}, 0, + return cciptypes.ExecutePluginReportSingleChain{}, fmt.Errorf("unable to prove messages for report %s: %w", report.MerkleRoot.String(), err) } @@ -118,81 +122,144 @@ func buildSingleChainReport( ProofFlagBits: cciptypes.BigInt{Int: slicelib.BoolsToBitFlags(proof.SourceFlags)}, } - // Note: ExecutePluginReport is a strict array of data, so wrapping the final report - // does not add any additional overhead to the size being computed here. 
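// (Editor's note, illustrative only; not part of the patch.) The messages argument is
// keyed by index into report.Messages, not by sequence number. Assuming a report whose
// SequenceNumberRange starts at 100, a caller could request just the first and third
// messages like this:
//
//	msgs := map[int]struct{}{0: {}, 2: {}} // seq nums 100 and 102
//	chainReport, err := buildSingleChainReportHelper(ctx, lggr, hasher, tokenDataReader, report, msgs)
//
// Passing nil or an empty map selects every message, and indices whose sequence numbers
// already appear in report.ExecutedMessages are skipped in either case.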
+ return finalReport, nil +} + +type messageStatus string + +const ( + ReadyToExecute messageStatus = "ready_to_execute" + AlreadyExecuted messageStatus = "already_executed" + /* + SenderAlreadySkipped messageStatus = "sender_already_skipped" + MessageMaxGasCalcError messageStatus = "message_max_gas_calc_error" + InsufficientRemainingBatchDataLength messageStatus = "insufficient_remaining_batch_data_length" + InsufficientRemainingBatchGas messageStatus = "insufficient_remaining_batch_gas" + MissingNonce messageStatus = "missing_nonce" + InvalidNonce messageStatus = "invalid_nonce" + AggregateTokenValueComputeError messageStatus = "aggregate_token_value_compute_error" + AggregateTokenLimitExceeded messageStatus = "aggregate_token_limit_exceeded" + TokenDataNotReady messageStatus = "token_data_not_ready" + TokenDataFetchError messageStatus = "token_data_fetch_error" + TokenNotInDestTokenPrices messageStatus = "token_not_in_dest_token_prices" + TokenNotInSrcTokenPrices messageStatus = "token_not_in_src_token_prices" + InsufficientRemainingFee messageStatus = "insufficient_remaining_fee" + AddedToBatch messageStatus = "added_to_batch" + */ +) + +func (b *execReportBuilder) checkMessage( + _ context.Context, idx int, execReport plugintypes.ExecutePluginCommitData, + // TODO: get rid of the nolint when the error is used +) (messageStatus, error) { // nolint this will use the error eventually + if slices.Contains(execReport.ExecutedMessages, execReport.Messages[idx].Header.SequenceNumber) { + return AlreadyExecuted, nil + } + + return ReadyToExecute, nil +} +func (b *execReportBuilder) verifyReport( + ctx context.Context, execReport cciptypes.ExecutePluginReportSingleChain, +) (bool, validationMetadata, error) { // Compute the size of the encoded report. - encoded, err := encoder.Encode( + // Note: ExecutePluginReport is a strict array of data, so wrapping the final report + // does not add any additional overhead to the size being computed here. + encoded, err := b.encoder.Encode( ctx, cciptypes.ExecutePluginReport{ - ChainReports: []cciptypes.ExecutePluginReportSingleChain{finalReport}, + ChainReports: []cciptypes.ExecutePluginReportSingleChain{execReport}, }, ) if err != nil { - lggr.Errorw("unable to encode report", "err", err, "report", finalReport) - return cciptypes.ExecutePluginReportSingleChain{}, 0, fmt.Errorf("unable to encode report: %w", err) + b.lggr.Errorw("unable to encode report", "err", err, "report", execReport) + return false, validationMetadata{}, fmt.Errorf("unable to encode report: %w", err) + } + + maxSizeBytes := int(b.maxReportSizeBytes - b.accumulated.encodedSizeBytes) + if len(encoded) > maxSizeBytes { + b.lggr.Debugw("invalid report, report size exceeds limit", "size", len(encoded), "maxSize", maxSizeBytes) + return false, validationMetadata{}, nil } - return finalReport, len(encoded), nil + return true, validationMetadata{ + encodedSizeBytes: uint64(len(encoded)), + }, nil } -// buildSingleChainReportMaxSize generates the largest report which fits into maxSizeBytes. +// buildSingleChainReport generates the largest report which fits into maxSizeBytes. // See buildSingleChainReport for more details about how a report is built. 
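// (Editor's note, illustrative only; not part of the patch.) verifyReport charges every
// accepted chain report against a single shared byte budget. For example, with
// maxReportSizeBytes = 10_000, if a first chain report encodes to 6_000 bytes then
// accumulated.encodedSizeBytes becomes 6_000 once that report is finalized, and a second
// report is only accepted while it encodes to at most 10_000 - 6_000 = 4_000 bytes:
//
//	headroom := int(b.maxReportSizeBytes - b.accumulated.encodedSizeBytes) // 4_000
//	fits := len(encoded) <= headroom
//
// The accumulator only advances for reports that pass this check (see finalize in
// buildSingleChainReport below), so later Add calls see a smaller remaining budget.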
-func buildSingleChainReportMaxSize( +func (b *execReportBuilder) buildSingleChainReport( ctx context.Context, - lggr logger.Logger, - hasher cciptypes.MessageHasher, - tokenDataReader types.TokenDataReader, - encoder cciptypes.ExecutePluginCodec, - report plugintypes.ExecutePluginCommitDataWithMessages, - maxSizeBytes int, -) (cciptypes.ExecutePluginReportSingleChain, int, plugintypes.ExecutePluginCommitDataWithMessages, error) { - finalReport, encodedSize, err := - buildSingleChainReport(ctx, lggr, hasher, tokenDataReader, encoder, report, 0) + report plugintypes.ExecutePluginCommitData, +) (cciptypes.ExecutePluginReportSingleChain, plugintypes.ExecutePluginCommitData, error) { + finalize := func( + execReport cciptypes.ExecutePluginReportSingleChain, + commitReport plugintypes.ExecutePluginCommitData, + meta validationMetadata, + ) (cciptypes.ExecutePluginReportSingleChain, plugintypes.ExecutePluginCommitData, error) { + b.accumulated.encodedSizeBytes += meta.encodedSizeBytes + commitReport = markNewMessagesExecuted(execReport, commitReport) + return execReport, commitReport, nil + } + // Attempt to include all messages in the report. + finalReport, err := + buildSingleChainReportHelper(b.ctx, b.lggr, b.hasher, b.tokenDataReader, report, nil) if err != nil { return cciptypes.ExecutePluginReportSingleChain{}, - 0, - plugintypes.ExecutePluginCommitDataWithMessages{}, + plugintypes.ExecutePluginCommitData{}, fmt.Errorf("unable to build a single chain report (max): %w", err) } - // return fully executed report - if encodedSize <= maxSizeBytes { - report = markNewMessagesExecuted(finalReport, report) - return finalReport, encodedSize, report, nil + validReport, meta, err := b.verifyReport(ctx, finalReport) + if err != nil { + return cciptypes.ExecutePluginReportSingleChain{}, + plugintypes.ExecutePluginCommitData{}, + fmt.Errorf("unable to verify report: %w", err) + } else if validReport { + return finalize(finalReport, report, meta) } - var searchErr error - idx := sort.Search(len(report.Messages), func(mid int) bool { - if searchErr != nil { - return false + finalReport = cciptypes.ExecutePluginReportSingleChain{} + msgs := make(map[int]struct{}) + for i := range report.Messages { + status, err := b.checkMessage(ctx, i, report) + if err != nil { + return cciptypes.ExecutePluginReportSingleChain{}, + plugintypes.ExecutePluginCommitData{}, + fmt.Errorf("unable to check message: %w", err) + } + if status != ReadyToExecute { + continue } - finalReport2, encodedSize2, err := - buildSingleChainReport(ctx, lggr, hasher, tokenDataReader, encoder, report, mid) - if searchErr != nil { - searchErr = fmt.Errorf("unable to build a single chain report (messages %d): %w", mid, err) + + msgs[i] = struct{}{} + + finalReport2, err := + buildSingleChainReportHelper(b.ctx, b.lggr, b.hasher, b.tokenDataReader, report, msgs) + if err != nil { + return cciptypes.ExecutePluginReportSingleChain{}, + plugintypes.ExecutePluginCommitData{}, + fmt.Errorf("unable to build a single chain report (messages %d): %w", len(msgs), err) } - if (encodedSize2) <= maxSizeBytes { - // mid is a valid report size, try something bigger next iteration. 
+ validReport, meta2, err := b.verifyReport(ctx, finalReport2) + if err != nil { + return cciptypes.ExecutePluginReportSingleChain{}, + plugintypes.ExecutePluginCommitData{}, + fmt.Errorf("unable to verify report: %w", err) + } else if validReport { finalReport = finalReport2 - encodedSize = encodedSize2 - return false // not full + meta = meta2 + } else { + // this message didn't work, continue to the next one + delete(msgs, i) } - return true // full - }) - if searchErr != nil { - return cciptypes.ExecutePluginReportSingleChain{}, 0, plugintypes.ExecutePluginCommitDataWithMessages{}, searchErr } - // No messages fit into the report. - if idx <= 0 { - return cciptypes.ExecutePluginReportSingleChain{}, - 0, - plugintypes.ExecutePluginCommitDataWithMessages{}, - ErrEmptyReport + if len(msgs) == 0 { + return cciptypes.ExecutePluginReportSingleChain{}, report, ErrEmptyReport } - report = markNewMessagesExecuted(finalReport, report) - return finalReport, encodedSize, report, nil + return finalize(finalReport, report, meta) } diff --git a/execute/report/report_test.go b/execute/report/report_test.go index b44ee2544..d67fde3ba 100644 --- a/execute/report/report_test.go +++ b/execute/report/report_test.go @@ -3,19 +3,243 @@ package report import ( "context" "fmt" + "math/rand" + "strings" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/smartcontractkit/chainlink-common/pkg/hashutil" "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/merklemulti" cciptypes "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" "github.com/smartcontractkit/chainlink-ccip/execute/types" + "github.com/smartcontractkit/chainlink-ccip/internal/libs/slicelib" "github.com/smartcontractkit/chainlink-ccip/internal/mocks" "github.com/smartcontractkit/chainlink-ccip/plugintypes" ) +// mustMakeBytes parses a given string into a byte array, any error causes a panic. Pass in an empty string for a +// random byte array. +// nolint:unparam // surly this will be useful at some point... 
+func mustMakeBytes(byteStr string) cciptypes.Bytes32 { + if byteStr == "" { + var randomBytes cciptypes.Bytes32 + n, err := rand.New(rand.NewSource(0)).Read(randomBytes[:]) + if n != 32 { + panic(fmt.Sprintf("Unexpected number of bytes read for placeholder id: want 32, got %d", n)) + } + if err != nil { + panic(fmt.Sprintf("Error reading random bytes: %v", err)) + } + return randomBytes + } + b, err := cciptypes.NewBytes32FromString(byteStr) + if err != nil { + panic(err) + } + return b +} + +func TestMustMakeBytes(t *testing.T) { + type args struct { + byteStr string + } + tests := []struct { + name string + args args + want cciptypes.Bytes32 + willPanic bool + }{ + { + name: "empty - deterministic random", + args: args{byteStr: ""}, + want: cciptypes.Bytes32{ + 0x01, 0x94, 0xfd, 0xc2, 0xfa, 0x2f, 0xfc, 0xc0, 0x41, 0xd3, 0xff, 0x12, 0x4, 0x5b, 0x73, 0xc8, + 0x6e, 0x4f, 0xf9, 0x5f, 0xf6, 0x62, 0xa5, 0xee, 0xe8, 0x2a, 0xbd, 0xf4, 0x4a, 0x2d, 0xb, 0x75, + }, + }, + { + name: "constant", + args: args{byteStr: "0x0000000000000000000000000000000000000000000000000000000000000000"}, + want: cciptypes.Bytes32{}, + }, + { + name: "constant2", + args: args{byteStr: "0x0102030405060708090102030405060708090102030405060708090102030405"}, + want: cciptypes.Bytes32{ + 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, + 0x8, 0x9, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0x1, 0x2, 0x3, 0x4, 0x5, + }, + }, + { + name: "panic - wrong size", + args: args{byteStr: "0x00000"}, + willPanic: true, + }, + { + name: "panic - wrong format", + args: args{byteStr: "lorem ipsum"}, + willPanic: true, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.willPanic { + assert.Panics(t, func() { + mustMakeBytes(tt.args.byteStr) + }) + return + } + + got := mustMakeBytes(tt.args.byteStr) + assert.Equal(t, tt.want, got, "mustMakeBytes() = %v, want %v", got, tt.want) + fmt.Println(got) + }) + } +} + +// assertMerkleRoot computes the source messages merkle root, then computes a verification with the proof, then compares +// the roots. 
+func assertMerkleRoot( + t *testing.T, + hasher cciptypes.MessageHasher, + execReport cciptypes.ExecutePluginReportSingleChain, + commitReport plugintypes.ExecutePluginCommitData, +) { + keccak := hashutil.NewKeccak() + // Generate merkle root from commit report messages + var leafHashes [][32]byte + for _, msg := range commitReport.Messages { + hash, err := hasher.Hash(context.Background(), msg) + require.NoError(t, err) + leafHashes = append(leafHashes, hash) + } + tree, err := merklemulti.NewTree(keccak, leafHashes) + require.NoError(t, err) + merkleRoot := tree.Root() + + // Generate merkle root from exec report messages and proof + ctx := context.Background() + var leaves [][32]byte + for _, msg := range execReport.Messages { + hash, err := hasher.Hash(ctx, msg) + require.NoError(t, err) + leaves = append(leaves, hash) + } + proofCast := make([][32]byte, len(execReport.Proofs)) + for i, p := range execReport.Proofs { + copy(proofCast[i][:], p[:32]) + } + var proof merklemulti.Proof[[32]byte] + proof.Hashes = proofCast + proof.SourceFlags = slicelib.BitFlagsToBools(execReport.ProofFlagBits.Int, len(leaves)+len(proofCast)-1) + recomputedMerkleRoot, err := merklemulti.VerifyComputeRoot(hashutil.NewKeccak(), + leaves, + proof) + assert.NoError(t, err) + assert.NotNil(t, recomputedMerkleRoot) + + // Compare them + assert.Equal(t, merkleRoot, recomputedMerkleRoot) +} + +// makeMessage creates a message deterministically derived from the given inputs. +func makeMessage(src cciptypes.ChainSelector, num cciptypes.SeqNum, nonce uint64) cciptypes.Message { + var placeholderID cciptypes.Bytes32 + n, err := rand.New(rand.NewSource(int64(src) * int64(num) * int64(nonce))).Read(placeholderID[:]) + if n != 32 { + panic(fmt.Sprintf("Unexpected number of bytes read for placeholder id: want 32, got %d", n)) + } + if err != nil { + panic(fmt.Sprintf("Error reading random bytes: %v", err)) + } + + return cciptypes.Message{ + Header: cciptypes.RampMessageHeader{ + MessageID: placeholderID, + SourceChainSelector: src, + SequenceNumber: num, + MsgHash: cciptypes.Bytes32{}, + }, + } +} + +// makeTestCommitReport creates a basic commit report with messages given different parameters. This function +// will panic if the input parameters are inconsistent. 
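// For example (editor's illustration, mirroring one of the cases in Test_Builder_Build
// below): a report of 10 messages from source chain 1, starting at sequence number 100,
// observed at block 999, with the first five messages already executed:
//
//	makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101,
//		cciptypes.Bytes32{}, // zero override: the helper computes the real merkle root
//		[]cciptypes.SeqNum{100, 101, 102, 103, 104})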
+func makeTestCommitReport( + hasher cciptypes.MessageHasher, + numMessages, + srcChain, + firstSeqNum, + block int, + timestamp int64, + rootOverride cciptypes.Bytes32, + executed []cciptypes.SeqNum, +) plugintypes.ExecutePluginCommitData { + sequenceNumberRange := + cciptypes.NewSeqNumRange(cciptypes.SeqNum(firstSeqNum), cciptypes.SeqNum(firstSeqNum+numMessages-1)) + + for _, e := range executed { + if !sequenceNumberRange.Contains(e) { + panic("executed message out of range") + } + } + var messages []cciptypes.Message + for i := 0; i < numMessages; i++ { + messages = append(messages, makeMessage( + cciptypes.ChainSelector(srcChain), + cciptypes.SeqNum(i+firstSeqNum), + uint64(i))) + } + + commitReport := plugintypes.ExecutePluginCommitData{ + //MerkleRoot: root, + SourceChain: cciptypes.ChainSelector(srcChain), + SequenceNumberRange: sequenceNumberRange, + Timestamp: time.UnixMilli(timestamp), + BlockNum: uint64(block), + Messages: messages, + ExecutedMessages: executed, + } + + // calculate merkle root + root := rootOverride + if root.IsEmpty() { + tree, err := ConstructMerkleTree(context.Background(), hasher, commitReport) + if err != nil { + panic(fmt.Sprintf("unable to construct merkle tree: %s", err)) + } + commitReport.MerkleRoot = tree.Root() + } + + return commitReport +} + +// breakCommitReport by adding an extra message. This causes the report to have an unexpected number of messages. +func breakCommitReport( + commitReport plugintypes.ExecutePluginCommitData, +) plugintypes.ExecutePluginCommitData { + commitReport.Messages = append(commitReport.Messages, cciptypes.Message{}) + return commitReport +} + +// setMessageData at the given index to the given size. This function will panic if the index is out of range. +func setMessageData( + idx int, size uint64, commitReport plugintypes.ExecutePluginCommitData, +) plugintypes.ExecutePluginCommitData { + if len(commitReport.Messages) < idx { + panic("message index out of range") + } + commitReport.Messages[idx].Data = make([]byte, size) + return commitReport +} + // TODO: better than this type tdr struct{} @@ -39,6 +263,8 @@ func (btdr badTokenDataReader) ReadTokenData( return nil, fmt.Errorf("bad token data reader") } +/* +// TODO: Use this to test the verifyReport function. 
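// Editor's sketch of how that future test might look, illustrative only and not part of
// the patch; it assumes the execReportBuilder field names used above (lggr, encoder,
// maxReportSizeBytes) and constructs the builder directly, since this test file is in the
// same package:
//
//	b := &execReportBuilder{lggr: logger.Test(t), encoder: badCodec{}, maxReportSizeBytes: 1000}
//	_, _, err := b.verifyReport(context.Background(), cciptypes.ExecutePluginReportSingleChain{})
//	assert.Error(t, err) // expected: "unable to encode report: bad codec"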
type badCodec struct{} func (bc badCodec) Encode(ctx context.Context, report cciptypes.ExecutePluginReport) ([]byte, error) { @@ -48,15 +274,15 @@ func (bc badCodec) Encode(ctx context.Context, report cciptypes.ExecutePluginRep func (bc badCodec) Decode(ctx context.Context, bytes []byte) (cciptypes.ExecutePluginReport, error) { return cciptypes.ExecutePluginReport{}, fmt.Errorf("bad codec") } +*/ func Test_buildSingleChainReport_Errors(t *testing.T) { lggr := logger.Test(t) type args struct { - report plugintypes.ExecutePluginCommitDataWithMessages + report plugintypes.ExecutePluginCommitData hasher cciptypes.MessageHasher tokenDataReader types.TokenDataReader - codec cciptypes.ExecutePluginCodec } tests := []struct { name string @@ -67,10 +293,8 @@ func Test_buildSingleChainReport_Errors(t *testing.T) { name: "wrong number of messages", wantErr: "unexpected number of messages: expected 1, got 2", args: args{ - report: plugintypes.ExecutePluginCommitDataWithMessages{ - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(100)), - }, + report: plugintypes.ExecutePluginCommitData{ + SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(100)), Messages: []cciptypes.Message{ {Header: cciptypes.RampMessageHeader{}}, {Header: cciptypes.RampMessageHeader{}}, @@ -82,10 +306,8 @@ func Test_buildSingleChainReport_Errors(t *testing.T) { name: "wrong sequence numbers", wantErr: "sequence number 102 outside of report range [100 -> 101]", args: args{ - report: plugintypes.ExecutePluginCommitDataWithMessages{ - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(101)), - }, + report: plugintypes.ExecutePluginCommitData{ + SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(101)), Messages: []cciptypes.Message{ { Header: cciptypes.RampMessageHeader{ @@ -105,11 +327,9 @@ func Test_buildSingleChainReport_Errors(t *testing.T) { name: "source mismatch", wantErr: "unexpected source chain: expected 1111, got 2222", args: args{ - report: plugintypes.ExecutePluginCommitDataWithMessages{ - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SourceChain: 1111, - SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(100)), - }, + report: plugintypes.ExecutePluginCommitData{ + SourceChain: 1111, + SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(100)), Messages: []cciptypes.Message{ {Header: cciptypes.RampMessageHeader{ SourceChainSelector: 2222, @@ -124,11 +344,9 @@ func Test_buildSingleChainReport_Errors(t *testing.T) { name: "bad hasher", wantErr: "unable to hash message (1234567, 100): bad hasher", args: args{ - report: plugintypes.ExecutePluginCommitDataWithMessages{ - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SourceChain: 1234567, - SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(100)), - }, + report: plugintypes.ExecutePluginCommitData{ + SourceChain: 1234567, + SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(100)), Messages: []cciptypes.Message{ {Header: cciptypes.RampMessageHeader{ SourceChainSelector: 1234567, @@ -143,11 +361,9 @@ func Test_buildSingleChainReport_Errors(t *testing.T) { name: "bad token data reader", wantErr: "unable to read token data for message 100: bad 
token data reader", args: args{ - report: plugintypes.ExecutePluginCommitDataWithMessages{ - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SourceChain: 1234567, - SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(100)), - }, + report: plugintypes.ExecutePluginCommitData{ + SourceChain: 1234567, + SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(100)), Messages: []cciptypes.Message{ {Header: cciptypes.RampMessageHeader{ SourceChainSelector: 1234567, @@ -158,25 +374,6 @@ func Test_buildSingleChainReport_Errors(t *testing.T) { tokenDataReader: badTokenDataReader{}, }, }, - { - name: "bad codec", - wantErr: "unable to encode report: bad codec", - args: args{ - report: plugintypes.ExecutePluginCommitDataWithMessages{ - ExecutePluginCommitData: plugintypes.ExecutePluginCommitData{ - SourceChain: 1234567, - SequenceNumberRange: cciptypes.NewSeqNumRange(cciptypes.SeqNum(100), cciptypes.SeqNum(100)), - }, - Messages: []cciptypes.Message{ - {Header: cciptypes.RampMessageHeader{ - SourceChainSelector: 1234567, - SequenceNumber: cciptypes.SeqNum(100), - }}, - }, - }, - codec: badCodec{}, - }, - }, } for _, tt := range tests { tt := tt @@ -199,24 +396,288 @@ func Test_buildSingleChainReport_Errors(t *testing.T) { resolvedTokenDataReader = tdr{} } - // Select codec mock. - var resolvedCodec cciptypes.ExecutePluginCodec - if tt.args.codec != nil { - resolvedCodec = tt.args.codec - } else { - resolvedCodec = mocks.NewExecutePluginJSONReportCodec() + ctx := context.Background() + msgs := make(map[int]struct{}) + for i := 0; i < len(tt.args.report.Messages); i++ { + msgs[i] = struct{}{} } + _, err := buildSingleChainReportHelper( + ctx, lggr, resolvedHasher, resolvedTokenDataReader, tt.args.report, msgs) + require.Error(t, err) + assert.Contains(t, err.Error(), tt.wantErr) + }) + } +} + +func Test_Builder_Build(t *testing.T) { + hasher := mocks.NewMessageHasher() + codec := mocks.NewExecutePluginJSONReportCodec() + lggr := logger.Test(t) + var tokenDataReader tdr + type args struct { + reports []plugintypes.ExecutePluginCommitData + maxReportSize uint64 + } + tests := []struct { + name string + args args + expectedExecReports int + expectedCommitReports int + expectedExecThings []int + lastReportExecuted []cciptypes.SeqNum + wantErr string + }{ + { + name: "empty report", + args: args{ + reports: []plugintypes.ExecutePluginCommitData{}, + }, + expectedExecReports: 0, + expectedCommitReports: 0, + }, + { + name: "half report", + args: args{ + maxReportSize: 2300, + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + nil), + }, + }, + expectedExecReports: 1, + expectedCommitReports: 1, + expectedExecThings: []int{5}, + lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104}, + }, + { + name: "full report", + args: args{ + maxReportSize: 10000, + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + nil), + }, + }, + expectedExecReports: 1, + expectedCommitReports: 0, + expectedExecThings: []int{10}, + }, + { + name: "two reports", + args: args{ + maxReportSize: 15000, + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. 
+ nil), + makeTestCommitReport(hasher, 20, 2, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + nil), + }, + }, + expectedExecReports: 2, + expectedCommitReports: 0, + expectedExecThings: []int{10, 20}, + }, + { + name: "one and half reports", + args: args{ + maxReportSize: 8500, + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + nil), + makeTestCommitReport(hasher, 20, 2, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + nil), + }, + }, + expectedExecReports: 2, + expectedCommitReports: 1, + expectedExecThings: []int{10, 10}, + lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104, 105, 106, 107, 108, 109}, + }, + { + name: "exactly one report", + args: args{ + maxReportSize: 4200, + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + nil), + makeTestCommitReport(hasher, 20, 2, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + nil), + }, + }, + expectedExecReports: 1, + expectedCommitReports: 1, + expectedExecThings: []int{10}, + lastReportExecuted: []cciptypes.SeqNum{}, + }, + { + name: "execute remainder of partially executed report", + args: args{ + maxReportSize: 2500, + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + []cciptypes.SeqNum{100, 101, 102, 103, 104}), + }, + }, + expectedExecReports: 1, + expectedCommitReports: 0, + expectedExecThings: []int{5}, + }, + { + name: "partially execute remainder of partially executed report", + args: args{ + maxReportSize: 2050, + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + []cciptypes.SeqNum{100, 101, 102, 103, 104}), + }, + }, + expectedExecReports: 1, + expectedCommitReports: 1, + expectedExecThings: []int{4}, + lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104, 105, 106, 107, 108}, + }, + { + name: "execute remainder of sparsely executed report", + args: args{ + maxReportSize: 3500, + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + []cciptypes.SeqNum{100, 102, 104, 106, 108}), + }, + }, + expectedExecReports: 1, + expectedCommitReports: 0, + expectedExecThings: []int{5}, + }, + { + name: "partially execute remainder of partially executed sparse report", + args: args{ + maxReportSize: 2050, + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + []cciptypes.SeqNum{100, 102, 104, 106, 108}), + }, + }, + expectedExecReports: 1, + expectedCommitReports: 1, + expectedExecThings: []int{4}, + lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104, 105, 106, 107, 108}, + }, + { + name: "broken report", + args: args{ + maxReportSize: 10000, + reports: []plugintypes.ExecutePluginCommitData{ + breakCommitReport(makeTestCommitReport(hasher, 10, 1, 101, 1000, 10101010102, + cciptypes.Bytes32{}, // generate a correct root. 
+ nil)), + }, + }, + wantErr: "unable to add a single chain report", + }, + { + name: "invalid merkle root", + args: args{ + reports: []plugintypes.ExecutePluginCommitData{ + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + mustMakeBytes(""), // random root + nil), + }, + }, + wantErr: "merkle root mismatch: expected 0x00000000000000000", + }, + { + name: "skip over one large messages", + args: args{ + maxReportSize: 10000, + reports: []plugintypes.ExecutePluginCommitData{ + setMessageData(5, 20000, + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + nil)), + }, + }, + expectedExecReports: 1, + expectedCommitReports: 0, + expectedExecThings: []int{9}, + lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104, 106, 107, 108, 109}, + }, + { + name: "skip over two large messages", + args: args{ + maxReportSize: 10000, + reports: []plugintypes.ExecutePluginCommitData{ + setMessageData(8, 20000, + setMessageData(5, 20000, + makeTestCommitReport(hasher, 10, 1, 100, 999, 10101010101, + cciptypes.Bytes32{}, // generate a correct root. + nil))), + }, + }, + expectedExecReports: 1, + expectedCommitReports: 0, + expectedExecThings: []int{8}, + lastReportExecuted: []cciptypes.SeqNum{100, 101, 102, 103, 104, 106, 107, 109}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() ctx := context.Background() - execReport, size, err := buildSingleChainReport( - ctx, lggr, resolvedHasher, resolvedTokenDataReader, resolvedCodec, tt.args.report, 0) + // look for error in Add or Build + foundError := false + + builder := NewBuilder(ctx, lggr, hasher, tokenDataReader, codec, tt.args.maxReportSize, 0) + var updatedMessages []plugintypes.ExecutePluginCommitData + for _, report := range tt.args.reports { + updatedMessage, err := builder.Add(report) + if err != nil && tt.wantErr != "" { + if strings.Contains(err.Error(), tt.wantErr) { + foundError = true + break + } + } + require.NoError(t, err) + updatedMessages = append(updatedMessages, updatedMessage) + } + if foundError { + return + } + execReports, err := builder.Build() if tt.wantErr != "" { - require.Error(t, err) assert.Contains(t, err.Error(), tt.wantErr) return } require.NoError(t, err) - fmt.Println(execReport, size, err) + require.Len(t, execReports, tt.expectedExecReports) + //require.Len(t, commitReports, tt.expectedCommitReports) + for i, execReport := range execReports { + require.Lenf(t, execReport.Messages, tt.expectedExecThings[i], + "Unexpected number of messages, iter %d", i) + require.Lenf(t, execReport.OffchainTokenData, tt.expectedExecThings[i], + "Unexpected number of token data, iter %d", i) + require.NotEmptyf(t, execReport.Proofs, "Proof should not be empty.") + assertMerkleRoot(t, hasher, execReport, tt.args.reports[i]) + } + // If the last report is partially executed, the executed messages can be checked. 
+ if len(updatedMessages) > 0 && len(tt.lastReportExecuted) > 0 { + lastReport := updatedMessages[len(updatedMessages)-1] + require.ElementsMatch(t, tt.lastReportExecuted, lastReport.ExecutedMessages) + } }) } } diff --git a/execute/report/roots.go b/execute/report/roots.go index f0a407a6f..47c4be4e8 100644 --- a/execute/report/roots.go +++ b/execute/report/roots.go @@ -15,7 +15,7 @@ import ( func ConstructMerkleTree( ctx context.Context, hasher cciptypes.MessageHasher, - report plugintypes.ExecutePluginCommitDataWithMessages, + report plugintypes.ExecutePluginCommitData, ) (*merklemulti.Tree[[32]byte], error) { // Ensure we have the expected number of messages numMsgs := int(report.SequenceNumberRange.End() - report.SequenceNumberRange.Start() + 1) diff --git a/execute/types/token_data_reader.go b/execute/types/token_data_reader.go index f8ecae524..59f3b0f20 100644 --- a/execute/types/token_data_reader.go +++ b/execute/types/token_data_reader.go @@ -8,8 +8,6 @@ import ( // TokenDataReader is an interface for reading extra token data from an async process. // TODO: Build a token data reading process. -// -//go:generate mockery --quiet --name TokenDataReader --output ../../internal/mocks --case=underscore type TokenDataReader interface { ReadTokenData(ctx context.Context, srcChain cciptypes.ChainSelector, num cciptypes.SeqNum) ([][]byte, error) } diff --git a/internal/libs/typconv/address.go b/internal/libs/typconv/address.go new file mode 100644 index 000000000..3b8c1e640 --- /dev/null +++ b/internal/libs/typconv/address.go @@ -0,0 +1,10 @@ +package typconv + +import ( + "encoding/hex" +) + +// HexEncode converts a byte slice to a hex representation +func HexEncode(addr []byte) string { + return "0x" + hex.EncodeToString(addr) +} diff --git a/internal/libs/typconv/address_test.go b/internal/libs/typconv/address_test.go new file mode 100644 index 000000000..e4015dfbb --- /dev/null +++ b/internal/libs/typconv/address_test.go @@ -0,0 +1,14 @@ +package typconv + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestHexEncode(t *testing.T) { + addr := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F} + want := "0x000102030405060708090a0b0c0d0e0f" + got := HexEncode(addr) + assert.Equal(t, want, got) +} diff --git a/internal/mocks/contract_reader.go b/internal/mocks/contract_reader.go index 0821ec296..4148b2017 100644 --- a/internal/mocks/contract_reader.go +++ b/internal/mocks/contract_reader.go @@ -20,16 +20,14 @@ func NewContractReaderMock() *ContractReaderMock { } } -// GetLatestValue Returns given configs at initialization -func (cr *ContractReaderMock) GetLatestValue( - ctx context.Context, contractName, method string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any, -) error { +func (cr *ContractReaderMock) GetLatestValue(ctx context.Context, contractName, method string, + confidenceLevel primitives.ConfidenceLevel, params, returnVal any) error { args := cr.Called(ctx, contractName, method, confidenceLevel, params, returnVal) return args.Error(0) } -func (cr *ContractReaderMock) BatchGetLatestValues( - ctx context.Context, request types.BatchGetLatestValuesRequest) (types.BatchGetLatestValuesResult, error) { +func (cr *ContractReaderMock) BatchGetLatestValues(ctx context.Context, + request types.BatchGetLatestValuesRequest) (types.BatchGetLatestValuesResult, error) { args := cr.Called(ctx, request) return args.Get(0).(types.BatchGetLatestValuesResult), args.Error(1) } diff --git 
a/internal/mocks/execute_plugin_codec.go b/internal/mocks/execute_plugin_codec.go deleted file mode 100644 index 952a90041..000000000 --- a/internal/mocks/execute_plugin_codec.go +++ /dev/null @@ -1,88 +0,0 @@ -// Code generated by mockery v2.43.0. DO NOT EDIT. - -package mocks - -import ( - "context" - - "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" - - "github.com/stretchr/testify/mock" -) - -// ExecutePluginCodec is an autogenerated mock type for the ExecutePluginCodec type -type ExecutePluginCodec struct { - mock.Mock -} - -// Decode provides a mock function with given fields: _a0, _a1 -func (_m *ExecutePluginCodec) Decode(_a0 context.Context, _a1 []byte) (ccipocr3.ExecutePluginReport, error) { - ret := _m.Called(_a0, _a1) - - if len(ret) == 0 { - panic("no return value specified for Decode") - } - - var r0 ccipocr3.ExecutePluginReport - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []byte) (ccipocr3.ExecutePluginReport, error)); ok { - return rf(_a0, _a1) - } - if rf, ok := ret.Get(0).(func(context.Context, []byte) ccipocr3.ExecutePluginReport); ok { - r0 = rf(_a0, _a1) - } else { - r0 = ret.Get(0).(ccipocr3.ExecutePluginReport) - } - - if rf, ok := ret.Get(1).(func(context.Context, []byte) error); ok { - r1 = rf(_a0, _a1) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Encode provides a mock function with given fields: _a0, _a1 -func (_m *ExecutePluginCodec) Encode(_a0 context.Context, _a1 ccipocr3.ExecutePluginReport) ([]byte, error) { - ret := _m.Called(_a0, _a1) - - if len(ret) == 0 { - panic("no return value specified for Encode") - } - - var r0 []byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, ccipocr3.ExecutePluginReport) ([]byte, error)); ok { - return rf(_a0, _a1) - } - if rf, ok := ret.Get(0).(func(context.Context, ccipocr3.ExecutePluginReport) []byte); ok { - r0 = rf(_a0, _a1) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, ccipocr3.ExecutePluginReport) error); ok { - r1 = rf(_a0, _a1) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// NewExecutePluginCodec creates a new instance of ExecutePluginCodec. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewExecutePluginCodec(t interface { - mock.TestingT - Cleanup(func()) -}) *ExecutePluginCodec { - mock := &ExecutePluginCodec{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/internal/mocks/token_data_reader.go b/internal/mocks/token_data_reader.go deleted file mode 100644 index 190e741ce..000000000 --- a/internal/mocks/token_data_reader.go +++ /dev/null @@ -1,60 +0,0 @@ -// Code generated by mockery v2.43.0. DO NOT EDIT. 
- -package mocks - -import ( - context "context" - - ccipocr3 "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" - - mock "github.com/stretchr/testify/mock" -) - -// TokenDataReader is an autogenerated mock type for the TokenDataReader type -type TokenDataReader struct { - mock.Mock -} - -// ReadTokenData provides a mock function with given fields: ctx, srcChain, num -func (_m *TokenDataReader) ReadTokenData(ctx context.Context, srcChain ccipocr3.ChainSelector, num ccipocr3.SeqNum) ([][]byte, error) { - ret := _m.Called(ctx, srcChain, num) - - if len(ret) == 0 { - panic("no return value specified for ReadTokenData") - } - - var r0 [][]byte - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, ccipocr3.ChainSelector, ccipocr3.SeqNum) ([][]byte, error)); ok { - return rf(ctx, srcChain, num) - } - if rf, ok := ret.Get(0).(func(context.Context, ccipocr3.ChainSelector, ccipocr3.SeqNum) [][]byte); ok { - r0 = rf(ctx, srcChain, num) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([][]byte) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, ccipocr3.ChainSelector, ccipocr3.SeqNum) error); ok { - r1 = rf(ctx, srcChain, num) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// NewTokenDataReader creates a new instance of TokenDataReader. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewTokenDataReader(t interface { - mock.TestingT - Cleanup(func()) -}) *TokenDataReader { - mock := &TokenDataReader{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/internal/plugincommon/ccipreader.go b/internal/plugincommon/ccipreader.go new file mode 100644 index 000000000..f9f350aae --- /dev/null +++ b/internal/plugincommon/ccipreader.go @@ -0,0 +1,136 @@ +package plugincommon + +import ( + "context" + "fmt" + "sync" + "time" + + "github.com/smartcontractkit/chainlink-ccip/internal/reader" + + "github.com/smartcontractkit/chainlink-common/pkg/logger" +) + +type BackgroundReaderSyncer struct { + lggr logger.Logger + reader reader.CCIP + syncTimeout time.Duration + syncFrequency time.Duration + + bgSyncCtx context.Context + bgSyncCf context.CancelFunc + bgSyncWG *sync.WaitGroup + bgSyncTicker *time.Ticker +} + +var syncTimeoutRecommendedRange = [2]time.Duration{500 * time.Millisecond, 15 * time.Second} +var syncFrequencyRecommendedRange = [2]time.Duration{time.Second, time.Hour} + +func NewBackgroundReaderSyncer( + lggr logger.Logger, + reader reader.CCIP, + syncTimeout time.Duration, + syncFrequency time.Duration, +) *BackgroundReaderSyncer { + + if syncTimeout < syncTimeoutRecommendedRange[0] || syncTimeout > syncTimeoutRecommendedRange[1] { + lggr.Warnw("syncTimeout outside recommended range", "syncTimeout", syncTimeout) + } + + if syncFrequency < syncFrequencyRecommendedRange[0] || syncFrequency > syncFrequencyRecommendedRange[1] { + lggr.Warnw("syncFrequency outside recommended range", "syncFrequency", syncFrequency) + } + + return &BackgroundReaderSyncer{ + lggr: lggr, + reader: reader, + syncTimeout: syncTimeout, + syncFrequency: syncFrequency, + } +} + +func (b *BackgroundReaderSyncer) Start(ctx context.Context) error { + if b.bgSyncCtx != nil { + return fmt.Errorf("background syncer already started") + } + + b.bgSyncCtx, b.bgSyncCf = context.WithCancel(ctx) + b.bgSyncWG = &sync.WaitGroup{} + b.bgSyncWG.Add(1) + b.bgSyncTicker = time.NewTicker(b.syncFrequency) + + backgroundReaderSync( 
+ b.bgSyncCtx, + b.bgSyncWG, + b.lggr, + b.reader, + b.syncTimeout, + b.bgSyncTicker.C, + ) + + return nil +} + +func (b *BackgroundReaderSyncer) Close() error { + if b.bgSyncCtx == nil { + return fmt.Errorf("background syncer not started") + } + + if b.bgSyncCf != nil { + b.bgSyncCf() + b.bgSyncWG.Wait() + } + + b.bgSyncTicker.Stop() + + return nil +} + +// backgroundReaderSync runs a background process that periodically syncs the provider CCIP reader. +func backgroundReaderSync( + ctx context.Context, + wg *sync.WaitGroup, + lggr logger.Logger, + reader reader.CCIP, + syncTimeout time.Duration, + ticker <-chan time.Time, +) { + go func() { + defer wg.Done() + + for { + select { + case <-ctx.Done(): + lggr.Debug("backgroundReaderSync context done") + return + case <-ticker: + if err := syncReader(ctx, lggr, reader, syncTimeout); err != nil { + lggr.Errorw("runBackgroundReaderSync failed", "err", err) + } + } + } + }() +} + +func syncReader( + ctx context.Context, + lggr logger.Logger, + reader reader.CCIP, + syncTimeout time.Duration, +) error { + timeoutCtx, cf := context.WithTimeout(ctx, syncTimeout) + defer cf() + + updated, err := reader.Sync(timeoutCtx) + if err != nil { + return err + } + + if !updated { + lggr.Debug("no updates found after trying to sync") + } else { + lggr.Info("ccip reader sync success") + } + + return nil +} diff --git a/internal/plugincommon/ccipreader_test.go b/internal/plugincommon/ccipreader_test.go new file mode 100644 index 000000000..5e52eaabb --- /dev/null +++ b/internal/plugincommon/ccipreader_test.go @@ -0,0 +1,103 @@ +package plugincommon + +import ( + "context" + "fmt" + "sync" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + "github.com/smartcontractkit/chainlink-ccip/internal/mocks" + + "github.com/smartcontractkit/chainlink-common/pkg/logger" +) + +func TestBackgroundReaderSyncer(t *testing.T) { + lggr := logger.Test(t) + mockReader := mocks.NewCCIPReader() + + t.Run("start/stop checks", func(t *testing.T) { + readerSyncer := NewBackgroundReaderSyncer(lggr, mockReader, time.Hour, time.Hour) + + ctx, cf := context.WithCancel(context.Background()) + + err := readerSyncer.Close() + assert.Error(t, err, "closing a non-started syncer") + + err = readerSyncer.Start(ctx) + assert.NoError(t, err, "start success") + + err = readerSyncer.Start(ctx) + assert.Error(t, err, "cannot be started twice") + + err = readerSyncer.Close() + assert.NoError(t, err, "closing a started syncer") + + err = readerSyncer.Start(ctx) + assert.Error(t, err, "restarting") + + cf() + err = readerSyncer.Close() + assert.NoError(t, err, "closing a syncer with expired context") + }) + + t.Run("syncing", func(t *testing.T) { + ctx := context.Background() + mockReader.On("Sync", mock.Anything).Return(false, nil) + readerSyncer := NewBackgroundReaderSyncer(lggr, mockReader, time.Second, time.Millisecond) + err := readerSyncer.Start(ctx) + assert.NoError(t, err, "start success") + assert.Eventually(t, func() bool { + return mockReader.AssertExpectations(t) + }, time.Second, 10*time.Millisecond) + err = readerSyncer.Close() + assert.NoError(t, err, "closing a started syncer") + }) +} + +func Test_backgroundReaderSync(t *testing.T) { + ctx, cf := context.WithCancel(context.Background()) + lggr := logger.Test(t) + reader := mocks.NewCCIPReader() + syncTimeout := 50 * time.Millisecond + ticker := make(chan time.Time) + wg := &sync.WaitGroup{} + wg.Add(1) + + // start background syncing + backgroundReaderSync(ctx, wg, lggr, reader, 
syncTimeout, ticker) + + // send a tick to trigger the first sync that errors + reader.On("Sync", mock.Anything).Return(false, fmt.Errorf("some err")).Once() + ticker <- time.Now() + + // send a tick to trigger the second sync that succeeds without changes + reader.On("Sync", mock.Anything).Return(false, nil).Once() + ticker <- time.Now() + + // make sync hang to see the context timeout + reader.On("Sync", mock.Anything).Run(func(args mock.Arguments) { + ctx := args.Get(0).(context.Context) + for { // simulate endless work until context times out + select { + case <-ctx.Done(): + t.Log("context cancelled as expected") + return + default: + time.Sleep(time.Millisecond) // sleep to not block the CPU + } + } + }).Return(false, nil).Once() + ticker <- time.Now() + + // send a tick to trigger the fourth sync that succeeds with changes + reader.On("Sync", mock.Anything).Return(true, nil).Once() + ticker <- time.Now() + + cf() // trigger bg sync to stop + wg.Wait() // wait for it to stop + reader.AssertExpectations(t) +} diff --git a/internal/reader/ccip.go b/internal/reader/ccip.go index 76664f602..649eb8dbf 100644 --- a/internal/reader/ccip.go +++ b/internal/reader/ccip.go @@ -4,10 +4,12 @@ import ( "context" "errors" "fmt" + "math/big" "strconv" "sync" "time" + types2 "github.com/smartcontractkit/libocr/offchainreporting2plus/types" "golang.org/x/sync/errgroup" "github.com/smartcontractkit/chainlink-common/pkg/logger" @@ -16,6 +18,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/types/query" "github.com/smartcontractkit/chainlink-common/pkg/types/query/primitives" + "github.com/smartcontractkit/chainlink-ccip/internal/libs/typconv" typeconv "github.com/smartcontractkit/chainlink-ccip/internal/libs/typeconv" "github.com/smartcontractkit/chainlink-ccip/pkg/consts" "github.com/smartcontractkit/chainlink-ccip/plugintypes" @@ -99,19 +102,54 @@ func (r *CCIPChainReader) CommitReportsGTETimestamp( return nil, err } - dataTyp := cciptypes.CommitPluginReport{} + // --------------------------------------------------- + // The following types are used to decode the events + // but should be replaced by chain-reader modifiers and use the base cciptypes.CommitReport type. 
+ + type Interval struct { + Min uint64 + Max uint64 + } + + type MerkleRoot struct { + SourceChainSelector uint64 + Interval Interval + MerkleRoot cciptypes.Bytes32 + } + + type TokenPriceUpdate struct { + SourceToken []byte + UsdPerToken *big.Int + } + + type GasPriceUpdate struct { + DestChainSelector uint64 + UsdPerUnitGas *big.Int + } + + type PriceUpdates struct { + TokenPriceUpdates []TokenPriceUpdate + GasPriceUpdates []GasPriceUpdate + } + + type CommitReportAccepted struct { + PriceUpdates PriceUpdates + MerkleRoots []MerkleRoot + } + + type CommitReportAcceptedEvent struct { + Report CommitReportAccepted + } + // --------------------------------------------------- + + ev := CommitReportAcceptedEvent{} + iter, err := r.contractReaders[dest].QueryKey( ctx, consts.ContractNameOffRamp, query.KeyFilter{ Key: consts.EventNameCommitReportAccepted, Expressions: []query.Expression{ - { - Primitive: &primitives.Timestamp{ - Timestamp: uint64(ts.Unix()), - Operator: primitives.Gte, - }, - }, query.Confidence(primitives.Finalized), }, }, @@ -119,7 +157,7 @@ func (r *CCIPChainReader) CommitReportsGTETimestamp( SortBy: []query.SortBy{query.NewSortByTimestamp(query.Asc)}, Limit: query.Limit{Count: uint64(limit)}, }, - &dataTyp, + &ev, ) if err != nil { return nil, fmt.Errorf("failed to query offRamp: %w", err) @@ -127,18 +165,58 @@ func (r *CCIPChainReader) CommitReportsGTETimestamp( reports := make([]plugintypes.CommitPluginReportWithMeta, 0) for _, item := range iter { - report, is := (item.Data).(cciptypes.CommitPluginReport) + ev, is := (item.Data).(*CommitReportAcceptedEvent) if !is { return nil, fmt.Errorf("unexpected type %T while expecting a commit report", item) } + valid := item.Timestamp >= uint64(ts.Unix()) + if !valid { + r.lggr.Debugw("skipping invalid commit report", "report", ev.Report) + continue + } + + merkleRoots := make([]cciptypes.MerkleRootChain, 0, len(ev.Report.MerkleRoots)) + for _, mr := range ev.Report.MerkleRoots { + merkleRoots = append(merkleRoots, cciptypes.MerkleRootChain{ + ChainSel: cciptypes.ChainSelector(mr.SourceChainSelector), + SeqNumsRange: cciptypes.NewSeqNumRange( + cciptypes.SeqNum(mr.Interval.Min), + cciptypes.SeqNum(mr.Interval.Max), + ), + MerkleRoot: mr.MerkleRoot, + }) + } + + priceUpdates := cciptypes.PriceUpdates{ + TokenPriceUpdates: make([]cciptypes.TokenPrice, 0), + GasPriceUpdates: make([]cciptypes.GasPriceChain, 0), + } + + for _, tokenPriceUpdate := range ev.Report.PriceUpdates.TokenPriceUpdates { + priceUpdates.TokenPriceUpdates = append(priceUpdates.TokenPriceUpdates, cciptypes.TokenPrice{ + TokenID: types2.Account(typconv.HexEncode(tokenPriceUpdate.SourceToken)), + Price: cciptypes.NewBigInt(tokenPriceUpdate.UsdPerToken), + }) + } + + for _, gasPriceUpdate := range ev.Report.PriceUpdates.GasPriceUpdates { + priceUpdates.GasPriceUpdates = append(priceUpdates.GasPriceUpdates, cciptypes.GasPriceChain{ + ChainSel: cciptypes.ChainSelector(gasPriceUpdate.DestChainSelector), + GasPrice: cciptypes.NewBigInt(gasPriceUpdate.UsdPerUnitGas), + }) + } + blockNum, err := strconv.ParseUint(item.Head.Identifier, 10, 64) if err != nil { return nil, fmt.Errorf("failed to parse block number %s: %w", item.Head.Identifier, err) } reports = append(reports, plugintypes.CommitPluginReportWithMeta{ - Report: report, + Report: cciptypes.CommitPluginReport{ + MerkleRoots: merkleRoots, + PriceUpdates: priceUpdates, + }, Timestamp: time.Unix(int64(item.Timestamp), 0), BlockNum: blockNum, }) @@ -154,13 +232,13 @@ func (r *CCIPChainReader) ExecutedMessageRanges( 
return nil, err } - type executionStateChangedEvent struct { - sourceChainSelector cciptypes.ChainSelector - sequenceNumber cciptypes.SeqNum - state uint8 + type ExecutionStateChangedEvent struct { + SourceChainSelector cciptypes.ChainSelector + SequenceNumber cciptypes.SeqNum + State uint8 } - dataTyp := executionStateChangedEvent{} + dataTyp := ExecutionStateChangedEvent{} iter, err := r.contractReaders[dest].QueryKey( ctx, @@ -168,34 +246,6 @@ func (r *CCIPChainReader) ExecutedMessageRanges( query.KeyFilter{ Key: consts.EventNameExecutionStateChanged, Expressions: []query.Expression{ - { - // sequence numbers inside the range - Primitive: &primitives.Comparator{ - Name: consts.EventAttributeSequenceNumber, - ValueComparators: []primitives.ValueComparator{ - { - Value: seqNumRange.Start().String(), - Operator: primitives.Gte, - }, - { - Value: seqNumRange.End().String(), - Operator: primitives.Lte, - }, - }, - }, - }, - { - // source chain - Primitive: &primitives.Comparator{ - Name: consts.EventAttributeSourceChain, - ValueComparators: []primitives.ValueComparator{ - { - Value: source.String(), - Operator: primitives.Eq, - }, - }, - }, - }, query.Confidence(primitives.Finalized), }, }, @@ -210,23 +260,22 @@ func (r *CCIPChainReader) ExecutedMessageRanges( executed := make([]cciptypes.SeqNumRange, 0) for _, item := range iter { - stateChange, ok := item.Data.(*executionStateChangedEvent) + stateChange, ok := item.Data.(*ExecutionStateChangedEvent) if !ok { return nil, fmt.Errorf("failed to cast %T to executionStateChangedEvent", item.Data) } - if stateChange.sourceChainSelector != source { - return nil, fmt.Errorf("wrong cr query, unexpected source chain %d", stateChange.sourceChainSelector) - } - if stateChange.sequenceNumber < seqNumRange.Start() || stateChange.sequenceNumber > seqNumRange.End() { - return nil, fmt.Errorf("wrong cr query, unexpected sequence number %d", stateChange.sequenceNumber) - } - if stateChange.state <= 1 { - r.lggr.Debugw("execution state change status is %d, skipped", - "seqNum", stateChange.sequenceNumber, "state", stateChange.state) + + // todo: filter via the query + valid := stateChange.SourceChainSelector == source && + stateChange.SequenceNumber >= seqNumRange.Start() && + stateChange.SequenceNumber <= seqNumRange.End() && + stateChange.State > 0 + if !valid { + r.lggr.Debugw("skipping invalid state change", "stateChange", stateChange) continue } - executed = append(executed, cciptypes.NewSeqNumRange(stateChange.sequenceNumber, stateChange.sequenceNumber)) + executed = append(executed, cciptypes.NewSeqNumRange(stateChange.SequenceNumber, stateChange.SequenceNumber)) } return executed, nil @@ -239,27 +288,17 @@ func (r *CCIPChainReader) MsgsBetweenSeqNums( return nil, err } + type SendRequestedEvent struct { + DestChainSelector cciptypes.ChainSelector + Message cciptypes.Message + } + seq, err := r.contractReaders[sourceChainSelector].QueryKey( ctx, consts.ContractNameOnRamp, query.KeyFilter{ Key: consts.EventNameCCIPSendRequested, Expressions: []query.Expression{ - { - Primitive: &primitives.Comparator{ - Name: consts.EventAttributeSequenceNumber, - ValueComparators: []primitives.ValueComparator{ - { - Value: seqNumRange.Start().String(), - Operator: primitives.Gte, - }, - { - Value: seqNumRange.End().String(), - Operator: primitives.Lte, - }, - }, - }, - }, query.Confidence(primitives.Finalized), }, }, @@ -271,7 +310,7 @@ func (r *CCIPChainReader) MsgsBetweenSeqNums( Count: uint64(seqNumRange.End() - seqNumRange.Start() + 1), }, }, - 
&cciptypes.Message{}, + &SendRequestedEvent{}, ) if err != nil { return nil, fmt.Errorf("failed to query onRamp: %w", err) @@ -285,12 +324,20 @@ func (r *CCIPChainReader) MsgsBetweenSeqNums( msgs := make([]cciptypes.Message, 0) for _, item := range seq { - msg, ok := item.Data.(*cciptypes.Message) + msg, ok := item.Data.(*SendRequestedEvent) if !ok { return nil, fmt.Errorf("failed to cast %v to Message", item.Data) } - msgs = append(msgs, *msg) + // todo: filter via the query + valid := msg.Message.Header.SourceChainSelector == sourceChainSelector && + msg.Message.Header.DestChainSelector == r.destChain && + msg.Message.Header.SequenceNumber >= seqNumRange.Start() && + msg.Message.Header.SequenceNumber <= seqNumRange.End() + + if valid { + msgs = append(msgs, msg.Message) + } } r.lggr.Infow("decoded messages between sequence numbers", "msgs", msgs, @@ -303,7 +350,7 @@ func (r *CCIPChainReader) MsgsBetweenSeqNums( func (r *CCIPChainReader) NextSeqNum( ctx context.Context, chains []cciptypes.ChainSelector, ) ([]cciptypes.SeqNum, error) { - cfgs, err := r.getSourceChainsConfig(ctx) + cfgs, err := r.getSourceChainsConfig(ctx, chains) if err != nil { return nil, fmt.Errorf("get source chains config: %w", err) } @@ -349,7 +396,11 @@ func (r *CCIPChainReader) GasPrices(ctx context.Context, chains []cciptypes.Chai } func (r *CCIPChainReader) Sync(ctx context.Context) (bool, error) { - sourceConfigs, err := r.getSourceChainsConfig(ctx) + chains := make([]cciptypes.ChainSelector, 0, len(r.contractReaders)) + for chain := range r.contractReaders { + chains = append(chains, chain) + } + sourceConfigs, err := r.getSourceChainsConfig(ctx, chains) if err != nil { return false, fmt.Errorf("get onramps: %w", err) } @@ -390,7 +441,7 @@ func (r *CCIPChainReader) Close(ctx context.Context) error { // getSourceChainsConfig returns the offRamp contract's source chain configurations for each supported source chain. 
func (r *CCIPChainReader) getSourceChainsConfig( - ctx context.Context) (map[cciptypes.ChainSelector]sourceChainConfig, error) { + ctx context.Context, chains []cciptypes.ChainSelector) (map[cciptypes.ChainSelector]sourceChainConfig, error) { if err := r.validateReaderExistence(r.destChain); err != nil { return nil, err } @@ -399,7 +450,7 @@ func (r *CCIPChainReader) getSourceChainsConfig( mu := new(sync.Mutex) eg := new(errgroup.Group) - for chainSel := range r.contractReaders { + for _, chainSel := range chains { if chainSel == r.destChain { continue } diff --git a/internal/reader/ccip_test.go b/internal/reader/ccip_test.go index 6731c267b..b8098ccc5 100644 --- a/internal/reader/ccip_test.go +++ b/internal/reader/ccip_test.go @@ -48,7 +48,7 @@ func TestCCIPChainReader_getSourceChainsConfig(t *testing.T) { ) ctx := context.Background() - cfgs, err := ccipReader.getSourceChainsConfig(ctx) + cfgs, err := ccipReader.getSourceChainsConfig(ctx, []cciptypes.ChainSelector{chainA, chainB}) assert.NoError(t, err) assert.Len(t, cfgs, 2) assert.Equal(t, []byte("onramp-1"), cfgs[chainA].OnRamp) diff --git a/internal/reader/home_chain.go b/internal/reader/home_chain.go index 0f63cc5d5..cef027d68 100644 --- a/internal/reader/home_chain.go +++ b/internal/reader/home_chain.go @@ -19,7 +19,6 @@ import ( "github.com/smartcontractkit/chainlink-ccip/pkg/consts" ) -//go:generate mockery --name HomeChain --output ./mocks/ --case underscore type HomeChain interface { GetChainConfig(chainSelector cciptypes.ChainSelector) (ChainConfig, error) GetAllChainConfigs() (map[cciptypes.ChainSelector]ChainConfig, error) diff --git a/internal/reader/mocks/home_chain.go b/internal/reader/mocks/home_chain.go deleted file mode 100644 index 62239f01d..000000000 --- a/internal/reader/mocks/home_chain.go +++ /dev/null @@ -1,306 +0,0 @@ -// Code generated by mockery v2.43.0. DO NOT EDIT. 
- -package mocks - -import ( - context "context" - - ccipocr3 "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" - - mapset "github.com/deckarep/golang-set/v2" - - mock "github.com/stretchr/testify/mock" - - reader "github.com/smartcontractkit/chainlink-ccip/internal/reader" - - types "github.com/smartcontractkit/libocr/ragep2p/types" -) - -// HomeChain is an autogenerated mock type for the HomeChain type -type HomeChain struct { - mock.Mock -} - -// Close provides a mock function with given fields: -func (_m *HomeChain) Close() error { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Close") - } - - var r0 error - if rf, ok := ret.Get(0).(func() error); ok { - r0 = rf() - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// GetAllChainConfigs provides a mock function with given fields: -func (_m *HomeChain) GetAllChainConfigs() (map[ccipocr3.ChainSelector]reader.ChainConfig, error) { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GetAllChainConfigs") - } - - var r0 map[ccipocr3.ChainSelector]reader.ChainConfig - var r1 error - if rf, ok := ret.Get(0).(func() (map[ccipocr3.ChainSelector]reader.ChainConfig, error)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() map[ccipocr3.ChainSelector]reader.ChainConfig); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(map[ccipocr3.ChainSelector]reader.ChainConfig) - } - } - - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetChainConfig provides a mock function with given fields: chainSelector -func (_m *HomeChain) GetChainConfig(chainSelector ccipocr3.ChainSelector) (reader.ChainConfig, error) { - ret := _m.Called(chainSelector) - - if len(ret) == 0 { - panic("no return value specified for GetChainConfig") - } - - var r0 reader.ChainConfig - var r1 error - if rf, ok := ret.Get(0).(func(ccipocr3.ChainSelector) (reader.ChainConfig, error)); ok { - return rf(chainSelector) - } - if rf, ok := ret.Get(0).(func(ccipocr3.ChainSelector) reader.ChainConfig); ok { - r0 = rf(chainSelector) - } else { - r0 = ret.Get(0).(reader.ChainConfig) - } - - if rf, ok := ret.Get(1).(func(ccipocr3.ChainSelector) error); ok { - r1 = rf(chainSelector) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetFChain provides a mock function with given fields: -func (_m *HomeChain) GetFChain() (map[ccipocr3.ChainSelector]int, error) { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GetFChain") - } - - var r0 map[ccipocr3.ChainSelector]int - var r1 error - if rf, ok := ret.Get(0).(func() (map[ccipocr3.ChainSelector]int, error)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() map[ccipocr3.ChainSelector]int); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(map[ccipocr3.ChainSelector]int) - } - } - - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetKnownCCIPChains provides a mock function with given fields: -func (_m *HomeChain) GetKnownCCIPChains() (mapset.Set[ccipocr3.ChainSelector], error) { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for GetKnownCCIPChains") - } - - var r0 mapset.Set[ccipocr3.ChainSelector] - var r1 error - if rf, ok := ret.Get(0).(func() (mapset.Set[ccipocr3.ChainSelector], error)); ok { - return rf() - } - if rf, ok := ret.Get(0).(func() mapset.Set[ccipocr3.ChainSelector]); ok { - r0 = 
rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(mapset.Set[ccipocr3.ChainSelector]) - } - } - - if rf, ok := ret.Get(1).(func() error); ok { - r1 = rf() - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetOCRConfigs provides a mock function with given fields: ctx, donID, pluginType -func (_m *HomeChain) GetOCRConfigs(ctx context.Context, donID uint32, pluginType uint8) ([]reader.OCR3ConfigWithMeta, error) { - ret := _m.Called(ctx, donID, pluginType) - - if len(ret) == 0 { - panic("no return value specified for GetOCRConfigs") - } - - var r0 []reader.OCR3ConfigWithMeta - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint32, uint8) ([]reader.OCR3ConfigWithMeta, error)); ok { - return rf(ctx, donID, pluginType) - } - if rf, ok := ret.Get(0).(func(context.Context, uint32, uint8) []reader.OCR3ConfigWithMeta); ok { - r0 = rf(ctx, donID, pluginType) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]reader.OCR3ConfigWithMeta) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, uint32, uint8) error); ok { - r1 = rf(ctx, donID, pluginType) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetSupportedChainsForPeer provides a mock function with given fields: id -func (_m *HomeChain) GetSupportedChainsForPeer(id types.PeerID) (mapset.Set[ccipocr3.ChainSelector], error) { - ret := _m.Called(id) - - if len(ret) == 0 { - panic("no return value specified for GetSupportedChainsForPeer") - } - - var r0 mapset.Set[ccipocr3.ChainSelector] - var r1 error - if rf, ok := ret.Get(0).(func(types.PeerID) (mapset.Set[ccipocr3.ChainSelector], error)); ok { - return rf(id) - } - if rf, ok := ret.Get(0).(func(types.PeerID) mapset.Set[ccipocr3.ChainSelector]); ok { - r0 = rf(id) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(mapset.Set[ccipocr3.ChainSelector]) - } - } - - if rf, ok := ret.Get(1).(func(types.PeerID) error); ok { - r1 = rf(id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// HealthReport provides a mock function with given fields: -func (_m *HomeChain) HealthReport() map[string]error { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for HealthReport") - } - - var r0 map[string]error - if rf, ok := ret.Get(0).(func() map[string]error); ok { - r0 = rf() - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(map[string]error) - } - } - - return r0 -} - -// Name provides a mock function with given fields: -func (_m *HomeChain) Name() string { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Name") - } - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// Ready provides a mock function with given fields: -func (_m *HomeChain) Ready() error { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for Ready") - } - - var r0 error - if rf, ok := ret.Get(0).(func() error); ok { - r0 = rf() - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Start provides a mock function with given fields: _a0 -func (_m *HomeChain) Start(_a0 context.Context) error { - ret := _m.Called(_a0) - - if len(ret) == 0 { - panic("no return value specified for Start") - } - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context) error); ok { - r0 = rf(_a0) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// NewHomeChain creates a new instance of HomeChain. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. -func NewHomeChain(t interface { - mock.TestingT - Cleanup(func()) -}) *HomeChain { - mock := &HomeChain{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/mocks/execute/internal_/gen/execute_plugin_codec.go b/mocks/execute/internal_/gen/execute_plugin_codec.go new file mode 100644 index 000000000..8fe48e5a9 --- /dev/null +++ b/mocks/execute/internal_/gen/execute_plugin_codec.go @@ -0,0 +1,154 @@ +// Code generated by mockery v2.43.0. DO NOT EDIT. + +package gen + +import ( + context "context" + + ccipocr3 "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" + + mock "github.com/stretchr/testify/mock" +) + +// MockExecutePluginCodec is an autogenerated mock type for the ExecutePluginCodec type +type MockExecutePluginCodec struct { + mock.Mock +} + +type MockExecutePluginCodec_Expecter struct { + mock *mock.Mock +} + +func (_m *MockExecutePluginCodec) EXPECT() *MockExecutePluginCodec_Expecter { + return &MockExecutePluginCodec_Expecter{mock: &_m.Mock} +} + +// Decode provides a mock function with given fields: _a0, _a1 +func (_m *MockExecutePluginCodec) Decode(_a0 context.Context, _a1 []byte) (ccipocr3.ExecutePluginReport, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for Decode") + } + + var r0 ccipocr3.ExecutePluginReport + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []byte) (ccipocr3.ExecutePluginReport, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, []byte) ccipocr3.ExecutePluginReport); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Get(0).(ccipocr3.ExecutePluginReport) + } + + if rf, ok := ret.Get(1).(func(context.Context, []byte) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockExecutePluginCodec_Decode_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Decode' +type MockExecutePluginCodec_Decode_Call struct { + *mock.Call +} + +// Decode is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 []byte +func (_e *MockExecutePluginCodec_Expecter) Decode(_a0 interface{}, _a1 interface{}) *MockExecutePluginCodec_Decode_Call { + return &MockExecutePluginCodec_Decode_Call{Call: _e.mock.On("Decode", _a0, _a1)} +} + +func (_c *MockExecutePluginCodec_Decode_Call) Run(run func(_a0 context.Context, _a1 []byte)) *MockExecutePluginCodec_Decode_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].([]byte)) + }) + return _c +} + +func (_c *MockExecutePluginCodec_Decode_Call) Return(_a0 ccipocr3.ExecutePluginReport, _a1 error) *MockExecutePluginCodec_Decode_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockExecutePluginCodec_Decode_Call) RunAndReturn(run func(context.Context, []byte) (ccipocr3.ExecutePluginReport, error)) *MockExecutePluginCodec_Decode_Call { + _c.Call.Return(run) + return _c +} + +// Encode provides a mock function with given fields: _a0, _a1 +func (_m *MockExecutePluginCodec) Encode(_a0 context.Context, _a1 ccipocr3.ExecutePluginReport) ([]byte, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for Encode") + } + + var r0 []byte + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, ccipocr3.ExecutePluginReport) ([]byte, error)); 
ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, ccipocr3.ExecutePluginReport) []byte); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]byte) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, ccipocr3.ExecutePluginReport) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockExecutePluginCodec_Encode_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Encode' +type MockExecutePluginCodec_Encode_Call struct { + *mock.Call +} + +// Encode is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 ccipocr3.ExecutePluginReport +func (_e *MockExecutePluginCodec_Expecter) Encode(_a0 interface{}, _a1 interface{}) *MockExecutePluginCodec_Encode_Call { + return &MockExecutePluginCodec_Encode_Call{Call: _e.mock.On("Encode", _a0, _a1)} +} + +func (_c *MockExecutePluginCodec_Encode_Call) Run(run func(_a0 context.Context, _a1 ccipocr3.ExecutePluginReport)) *MockExecutePluginCodec_Encode_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(ccipocr3.ExecutePluginReport)) + }) + return _c +} + +func (_c *MockExecutePluginCodec_Encode_Call) Return(_a0 []byte, _a1 error) *MockExecutePluginCodec_Encode_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockExecutePluginCodec_Encode_Call) RunAndReturn(run func(context.Context, ccipocr3.ExecutePluginReport) ([]byte, error)) *MockExecutePluginCodec_Encode_Call { + _c.Call.Return(run) + return _c +} + +// NewMockExecutePluginCodec creates a new instance of MockExecutePluginCodec. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewMockExecutePluginCodec(t interface { + mock.TestingT + Cleanup(func()) +}) *MockExecutePluginCodec { + mock := &MockExecutePluginCodec{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/mocks/execute/types/token_data_reader.go b/mocks/execute/types/token_data_reader.go new file mode 100644 index 000000000..76abb3786 --- /dev/null +++ b/mocks/execute/types/token_data_reader.go @@ -0,0 +1,98 @@ +// Code generated by mockery v2.43.0. DO NOT EDIT. 
+ +package types + +import ( + context "context" + + ccipocr3 "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" + + mock "github.com/stretchr/testify/mock" +) + +// MockTokenDataReader is an autogenerated mock type for the TokenDataReader type +type MockTokenDataReader struct { + mock.Mock +} + +type MockTokenDataReader_Expecter struct { + mock *mock.Mock +} + +func (_m *MockTokenDataReader) EXPECT() *MockTokenDataReader_Expecter { + return &MockTokenDataReader_Expecter{mock: &_m.Mock} +} + +// ReadTokenData provides a mock function with given fields: ctx, srcChain, num +func (_m *MockTokenDataReader) ReadTokenData(ctx context.Context, srcChain ccipocr3.ChainSelector, num ccipocr3.SeqNum) ([][]byte, error) { + ret := _m.Called(ctx, srcChain, num) + + if len(ret) == 0 { + panic("no return value specified for ReadTokenData") + } + + var r0 [][]byte + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, ccipocr3.ChainSelector, ccipocr3.SeqNum) ([][]byte, error)); ok { + return rf(ctx, srcChain, num) + } + if rf, ok := ret.Get(0).(func(context.Context, ccipocr3.ChainSelector, ccipocr3.SeqNum) [][]byte); ok { + r0 = rf(ctx, srcChain, num) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]byte) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, ccipocr3.ChainSelector, ccipocr3.SeqNum) error); ok { + r1 = rf(ctx, srcChain, num) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockTokenDataReader_ReadTokenData_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ReadTokenData' +type MockTokenDataReader_ReadTokenData_Call struct { + *mock.Call +} + +// ReadTokenData is a helper method to define mock.On call +// - ctx context.Context +// - srcChain ccipocr3.ChainSelector +// - num ccipocr3.SeqNum +func (_e *MockTokenDataReader_Expecter) ReadTokenData(ctx interface{}, srcChain interface{}, num interface{}) *MockTokenDataReader_ReadTokenData_Call { + return &MockTokenDataReader_ReadTokenData_Call{Call: _e.mock.On("ReadTokenData", ctx, srcChain, num)} +} + +func (_c *MockTokenDataReader_ReadTokenData_Call) Run(run func(ctx context.Context, srcChain ccipocr3.ChainSelector, num ccipocr3.SeqNum)) *MockTokenDataReader_ReadTokenData_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(ccipocr3.ChainSelector), args[2].(ccipocr3.SeqNum)) + }) + return _c +} + +func (_c *MockTokenDataReader_ReadTokenData_Call) Return(_a0 [][]byte, _a1 error) *MockTokenDataReader_ReadTokenData_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockTokenDataReader_ReadTokenData_Call) RunAndReturn(run func(context.Context, ccipocr3.ChainSelector, ccipocr3.SeqNum) ([][]byte, error)) *MockTokenDataReader_ReadTokenData_Call { + _c.Call.Return(run) + return _c +} + +// NewMockTokenDataReader creates a new instance of MockTokenDataReader. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewMockTokenDataReader(t interface { + mock.TestingT + Cleanup(func()) +}) *MockTokenDataReader { + mock := &MockTokenDataReader{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/mocks/internal_/reader/home_chain.go b/mocks/internal_/reader/home_chain.go new file mode 100644 index 000000000..b497b7888 --- /dev/null +++ b/mocks/internal_/reader/home_chain.go @@ -0,0 +1,617 @@ +// Code generated by mockery v2.43.0. DO NOT EDIT. 
+ +package reader + +import ( + context "context" + + ccipocr3 "github.com/smartcontractkit/chainlink-common/pkg/types/ccipocr3" + + mapset "github.com/deckarep/golang-set/v2" + + mock "github.com/stretchr/testify/mock" + + reader "github.com/smartcontractkit/chainlink-ccip/internal/reader" + + types "github.com/smartcontractkit/libocr/ragep2p/types" +) + +// MockHomeChain is an autogenerated mock type for the HomeChain type +type MockHomeChain struct { + mock.Mock +} + +type MockHomeChain_Expecter struct { + mock *mock.Mock +} + +func (_m *MockHomeChain) EXPECT() *MockHomeChain_Expecter { + return &MockHomeChain_Expecter{mock: &_m.Mock} +} + +// Close provides a mock function with given fields: +func (_m *MockHomeChain) Close() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Close") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockHomeChain_Close_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Close' +type MockHomeChain_Close_Call struct { + *mock.Call +} + +// Close is a helper method to define mock.On call +func (_e *MockHomeChain_Expecter) Close() *MockHomeChain_Close_Call { + return &MockHomeChain_Close_Call{Call: _e.mock.On("Close")} +} + +func (_c *MockHomeChain_Close_Call) Run(run func()) *MockHomeChain_Close_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockHomeChain_Close_Call) Return(_a0 error) *MockHomeChain_Close_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockHomeChain_Close_Call) RunAndReturn(run func() error) *MockHomeChain_Close_Call { + _c.Call.Return(run) + return _c +} + +// GetAllChainConfigs provides a mock function with given fields: +func (_m *MockHomeChain) GetAllChainConfigs() (map[ccipocr3.ChainSelector]reader.ChainConfig, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetAllChainConfigs") + } + + var r0 map[ccipocr3.ChainSelector]reader.ChainConfig + var r1 error + if rf, ok := ret.Get(0).(func() (map[ccipocr3.ChainSelector]reader.ChainConfig, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() map[ccipocr3.ChainSelector]reader.ChainConfig); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[ccipocr3.ChainSelector]reader.ChainConfig) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockHomeChain_GetAllChainConfigs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllChainConfigs' +type MockHomeChain_GetAllChainConfigs_Call struct { + *mock.Call +} + +// GetAllChainConfigs is a helper method to define mock.On call +func (_e *MockHomeChain_Expecter) GetAllChainConfigs() *MockHomeChain_GetAllChainConfigs_Call { + return &MockHomeChain_GetAllChainConfigs_Call{Call: _e.mock.On("GetAllChainConfigs")} +} + +func (_c *MockHomeChain_GetAllChainConfigs_Call) Run(run func()) *MockHomeChain_GetAllChainConfigs_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockHomeChain_GetAllChainConfigs_Call) Return(_a0 map[ccipocr3.ChainSelector]reader.ChainConfig, _a1 error) *MockHomeChain_GetAllChainConfigs_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockHomeChain_GetAllChainConfigs_Call) RunAndReturn(run func() (map[ccipocr3.ChainSelector]reader.ChainConfig, error)) 
*MockHomeChain_GetAllChainConfigs_Call { + _c.Call.Return(run) + return _c +} + +// GetChainConfig provides a mock function with given fields: chainSelector +func (_m *MockHomeChain) GetChainConfig(chainSelector ccipocr3.ChainSelector) (reader.ChainConfig, error) { + ret := _m.Called(chainSelector) + + if len(ret) == 0 { + panic("no return value specified for GetChainConfig") + } + + var r0 reader.ChainConfig + var r1 error + if rf, ok := ret.Get(0).(func(ccipocr3.ChainSelector) (reader.ChainConfig, error)); ok { + return rf(chainSelector) + } + if rf, ok := ret.Get(0).(func(ccipocr3.ChainSelector) reader.ChainConfig); ok { + r0 = rf(chainSelector) + } else { + r0 = ret.Get(0).(reader.ChainConfig) + } + + if rf, ok := ret.Get(1).(func(ccipocr3.ChainSelector) error); ok { + r1 = rf(chainSelector) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockHomeChain_GetChainConfig_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetChainConfig' +type MockHomeChain_GetChainConfig_Call struct { + *mock.Call +} + +// GetChainConfig is a helper method to define mock.On call +// - chainSelector ccipocr3.ChainSelector +func (_e *MockHomeChain_Expecter) GetChainConfig(chainSelector interface{}) *MockHomeChain_GetChainConfig_Call { + return &MockHomeChain_GetChainConfig_Call{Call: _e.mock.On("GetChainConfig", chainSelector)} +} + +func (_c *MockHomeChain_GetChainConfig_Call) Run(run func(chainSelector ccipocr3.ChainSelector)) *MockHomeChain_GetChainConfig_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(ccipocr3.ChainSelector)) + }) + return _c +} + +func (_c *MockHomeChain_GetChainConfig_Call) Return(_a0 reader.ChainConfig, _a1 error) *MockHomeChain_GetChainConfig_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockHomeChain_GetChainConfig_Call) RunAndReturn(run func(ccipocr3.ChainSelector) (reader.ChainConfig, error)) *MockHomeChain_GetChainConfig_Call { + _c.Call.Return(run) + return _c +} + +// GetFChain provides a mock function with given fields: +func (_m *MockHomeChain) GetFChain() (map[ccipocr3.ChainSelector]int, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetFChain") + } + + var r0 map[ccipocr3.ChainSelector]int + var r1 error + if rf, ok := ret.Get(0).(func() (map[ccipocr3.ChainSelector]int, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() map[ccipocr3.ChainSelector]int); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[ccipocr3.ChainSelector]int) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockHomeChain_GetFChain_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetFChain' +type MockHomeChain_GetFChain_Call struct { + *mock.Call +} + +// GetFChain is a helper method to define mock.On call +func (_e *MockHomeChain_Expecter) GetFChain() *MockHomeChain_GetFChain_Call { + return &MockHomeChain_GetFChain_Call{Call: _e.mock.On("GetFChain")} +} + +func (_c *MockHomeChain_GetFChain_Call) Run(run func()) *MockHomeChain_GetFChain_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockHomeChain_GetFChain_Call) Return(_a0 map[ccipocr3.ChainSelector]int, _a1 error) *MockHomeChain_GetFChain_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockHomeChain_GetFChain_Call) RunAndReturn(run func() (map[ccipocr3.ChainSelector]int, error)) 
*MockHomeChain_GetFChain_Call { + _c.Call.Return(run) + return _c +} + +// GetKnownCCIPChains provides a mock function with given fields: +func (_m *MockHomeChain) GetKnownCCIPChains() (mapset.Set[ccipocr3.ChainSelector], error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetKnownCCIPChains") + } + + var r0 mapset.Set[ccipocr3.ChainSelector] + var r1 error + if rf, ok := ret.Get(0).(func() (mapset.Set[ccipocr3.ChainSelector], error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() mapset.Set[ccipocr3.ChainSelector]); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(mapset.Set[ccipocr3.ChainSelector]) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockHomeChain_GetKnownCCIPChains_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetKnownCCIPChains' +type MockHomeChain_GetKnownCCIPChains_Call struct { + *mock.Call +} + +// GetKnownCCIPChains is a helper method to define mock.On call +func (_e *MockHomeChain_Expecter) GetKnownCCIPChains() *MockHomeChain_GetKnownCCIPChains_Call { + return &MockHomeChain_GetKnownCCIPChains_Call{Call: _e.mock.On("GetKnownCCIPChains")} +} + +func (_c *MockHomeChain_GetKnownCCIPChains_Call) Run(run func()) *MockHomeChain_GetKnownCCIPChains_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockHomeChain_GetKnownCCIPChains_Call) Return(_a0 mapset.Set[ccipocr3.ChainSelector], _a1 error) *MockHomeChain_GetKnownCCIPChains_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockHomeChain_GetKnownCCIPChains_Call) RunAndReturn(run func() (mapset.Set[ccipocr3.ChainSelector], error)) *MockHomeChain_GetKnownCCIPChains_Call { + _c.Call.Return(run) + return _c +} + +// GetOCRConfigs provides a mock function with given fields: ctx, donID, pluginType +func (_m *MockHomeChain) GetOCRConfigs(ctx context.Context, donID uint32, pluginType uint8) ([]reader.OCR3ConfigWithMeta, error) { + ret := _m.Called(ctx, donID, pluginType) + + if len(ret) == 0 { + panic("no return value specified for GetOCRConfigs") + } + + var r0 []reader.OCR3ConfigWithMeta + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint32, uint8) ([]reader.OCR3ConfigWithMeta, error)); ok { + return rf(ctx, donID, pluginType) + } + if rf, ok := ret.Get(0).(func(context.Context, uint32, uint8) []reader.OCR3ConfigWithMeta); ok { + r0 = rf(ctx, donID, pluginType) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]reader.OCR3ConfigWithMeta) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, uint32, uint8) error); ok { + r1 = rf(ctx, donID, pluginType) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockHomeChain_GetOCRConfigs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOCRConfigs' +type MockHomeChain_GetOCRConfigs_Call struct { + *mock.Call +} + +// GetOCRConfigs is a helper method to define mock.On call +// - ctx context.Context +// - donID uint32 +// - pluginType uint8 +func (_e *MockHomeChain_Expecter) GetOCRConfigs(ctx interface{}, donID interface{}, pluginType interface{}) *MockHomeChain_GetOCRConfigs_Call { + return &MockHomeChain_GetOCRConfigs_Call{Call: _e.mock.On("GetOCRConfigs", ctx, donID, pluginType)} +} + +func (_c *MockHomeChain_GetOCRConfigs_Call) Run(run func(ctx context.Context, donID uint32, pluginType uint8)) *MockHomeChain_GetOCRConfigs_Call { + _c.Call.Run(func(args 
mock.Arguments) { + run(args[0].(context.Context), args[1].(uint32), args[2].(uint8)) + }) + return _c +} + +func (_c *MockHomeChain_GetOCRConfigs_Call) Return(_a0 []reader.OCR3ConfigWithMeta, _a1 error) *MockHomeChain_GetOCRConfigs_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockHomeChain_GetOCRConfigs_Call) RunAndReturn(run func(context.Context, uint32, uint8) ([]reader.OCR3ConfigWithMeta, error)) *MockHomeChain_GetOCRConfigs_Call { + _c.Call.Return(run) + return _c +} + +// GetSupportedChainsForPeer provides a mock function with given fields: id +func (_m *MockHomeChain) GetSupportedChainsForPeer(id types.PeerID) (mapset.Set[ccipocr3.ChainSelector], error) { + ret := _m.Called(id) + + if len(ret) == 0 { + panic("no return value specified for GetSupportedChainsForPeer") + } + + var r0 mapset.Set[ccipocr3.ChainSelector] + var r1 error + if rf, ok := ret.Get(0).(func(types.PeerID) (mapset.Set[ccipocr3.ChainSelector], error)); ok { + return rf(id) + } + if rf, ok := ret.Get(0).(func(types.PeerID) mapset.Set[ccipocr3.ChainSelector]); ok { + r0 = rf(id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(mapset.Set[ccipocr3.ChainSelector]) + } + } + + if rf, ok := ret.Get(1).(func(types.PeerID) error); ok { + r1 = rf(id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockHomeChain_GetSupportedChainsForPeer_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSupportedChainsForPeer' +type MockHomeChain_GetSupportedChainsForPeer_Call struct { + *mock.Call +} + +// GetSupportedChainsForPeer is a helper method to define mock.On call +// - id types.PeerID +func (_e *MockHomeChain_Expecter) GetSupportedChainsForPeer(id interface{}) *MockHomeChain_GetSupportedChainsForPeer_Call { + return &MockHomeChain_GetSupportedChainsForPeer_Call{Call: _e.mock.On("GetSupportedChainsForPeer", id)} +} + +func (_c *MockHomeChain_GetSupportedChainsForPeer_Call) Run(run func(id types.PeerID)) *MockHomeChain_GetSupportedChainsForPeer_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(types.PeerID)) + }) + return _c +} + +func (_c *MockHomeChain_GetSupportedChainsForPeer_Call) Return(_a0 mapset.Set[ccipocr3.ChainSelector], _a1 error) *MockHomeChain_GetSupportedChainsForPeer_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockHomeChain_GetSupportedChainsForPeer_Call) RunAndReturn(run func(types.PeerID) (mapset.Set[ccipocr3.ChainSelector], error)) *MockHomeChain_GetSupportedChainsForPeer_Call { + _c.Call.Return(run) + return _c +} + +// HealthReport provides a mock function with given fields: +func (_m *MockHomeChain) HealthReport() map[string]error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for HealthReport") + } + + var r0 map[string]error + if rf, ok := ret.Get(0).(func() map[string]error); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]error) + } + } + + return r0 +} + +// MockHomeChain_HealthReport_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HealthReport' +type MockHomeChain_HealthReport_Call struct { + *mock.Call +} + +// HealthReport is a helper method to define mock.On call +func (_e *MockHomeChain_Expecter) HealthReport() *MockHomeChain_HealthReport_Call { + return &MockHomeChain_HealthReport_Call{Call: _e.mock.On("HealthReport")} +} + +func (_c *MockHomeChain_HealthReport_Call) Run(run func()) *MockHomeChain_HealthReport_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + 
return _c +} + +func (_c *MockHomeChain_HealthReport_Call) Return(_a0 map[string]error) *MockHomeChain_HealthReport_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockHomeChain_HealthReport_Call) RunAndReturn(run func() map[string]error) *MockHomeChain_HealthReport_Call { + _c.Call.Return(run) + return _c +} + +// Name provides a mock function with given fields: +func (_m *MockHomeChain) Name() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Name") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// MockHomeChain_Name_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Name' +type MockHomeChain_Name_Call struct { + *mock.Call +} + +// Name is a helper method to define mock.On call +func (_e *MockHomeChain_Expecter) Name() *MockHomeChain_Name_Call { + return &MockHomeChain_Name_Call{Call: _e.mock.On("Name")} +} + +func (_c *MockHomeChain_Name_Call) Run(run func()) *MockHomeChain_Name_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockHomeChain_Name_Call) Return(_a0 string) *MockHomeChain_Name_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockHomeChain_Name_Call) RunAndReturn(run func() string) *MockHomeChain_Name_Call { + _c.Call.Return(run) + return _c +} + +// Ready provides a mock function with given fields: +func (_m *MockHomeChain) Ready() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Ready") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockHomeChain_Ready_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Ready' +type MockHomeChain_Ready_Call struct { + *mock.Call +} + +// Ready is a helper method to define mock.On call +func (_e *MockHomeChain_Expecter) Ready() *MockHomeChain_Ready_Call { + return &MockHomeChain_Ready_Call{Call: _e.mock.On("Ready")} +} + +func (_c *MockHomeChain_Ready_Call) Run(run func()) *MockHomeChain_Ready_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *MockHomeChain_Ready_Call) Return(_a0 error) *MockHomeChain_Ready_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockHomeChain_Ready_Call) RunAndReturn(run func() error) *MockHomeChain_Ready_Call { + _c.Call.Return(run) + return _c +} + +// Start provides a mock function with given fields: _a0 +func (_m *MockHomeChain) Start(_a0 context.Context) error { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for Start") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// MockHomeChain_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start' +type MockHomeChain_Start_Call struct { + *mock.Call +} + +// Start is a helper method to define mock.On call +// - _a0 context.Context +func (_e *MockHomeChain_Expecter) Start(_a0 interface{}) *MockHomeChain_Start_Call { + return &MockHomeChain_Start_Call{Call: _e.mock.On("Start", _a0)} +} + +func (_c *MockHomeChain_Start_Call) Run(run func(_a0 context.Context)) *MockHomeChain_Start_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *MockHomeChain_Start_Call) Return(_a0 error) 
*MockHomeChain_Start_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *MockHomeChain_Start_Call) RunAndReturn(run func(context.Context) error) *MockHomeChain_Start_Call { + _c.Call.Return(run) + return _c +} + +// NewMockHomeChain creates a new instance of MockHomeChain. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewMockHomeChain(t interface { + mock.TestingT + Cleanup(func()) +}) *MockHomeChain { + mock := &MockHomeChain{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/consts/consts.go b/pkg/consts/consts.go index 41a14e720..788dcd9cc 100644 --- a/pkg/consts/consts.go +++ b/pkg/consts/consts.go @@ -12,6 +12,7 @@ const ( ) // Method Names +// TODO: these should be better organized, maybe separate packages. const ( // Offramp methods MethodNameGetSourceChainConfig = "GetSourceChainConfig" @@ -23,12 +24,19 @@ const ( MethodNameGetExecutionState = "GetExecutionState" // Onramp methods - MethodNameGetDestChainConfig = "GetDestChainConfig" MethodNameOnrampGetDynamicConfig = "OnrampGetDynamicConfig" MethodNameOnrampGetStaticConfig = "OnrampGetStaticConfig" MethodNameGetExpectedNextSequenceNumber = "GetExpectedNextSequenceNumber" + + // Price registry view/pure methods + MethodNamePriceRegistryGetStaticConfig = "GetStaticConfig" + MethodNameGetDestChainConfig = "GetDestChainConfig" MethodNameGetPremiumMultiplierWeiPerEth = "GetPremiumMultiplierWeiPerEth" MethodNameGetTokenTransferFeeConfig = "GetTokenTransferFeeConfig" + MethodNameProcessMessageArgs = "ProcessMessageArgs" + MethodNameValidatePoolReturnData = "ValidatePoolReturnData" + MethodNameGetValidatedTokenPrice = "GetValidatedTokenPrice" + MethodNameGetFeeTokens = "GetFeeTokens" /* // On EVM: diff --git a/pkg/contractreader/extended.go b/pkg/contractreader/extended.go new file mode 100644 index 000000000..67588cc14 --- /dev/null +++ b/pkg/contractreader/extended.go @@ -0,0 +1,89 @@ +package contractreader + +import ( + "context" + "fmt" + "sync" + "time" + + "github.com/smartcontractkit/chainlink-ccip/internal/libs/slicelib" + + "github.com/smartcontractkit/chainlink-common/pkg/types" +) + +// Extended version of a ContractReader. +type Extended interface { + types.ContractReader + GetBindings(contractName string) []ExtendedBoundContract +} + +type ExtendedBoundContract struct { + BoundAt time.Time + Binding types.BoundContract +} + +// extendedContractReader is an extended version of the contract reader. 
+type extendedContractReader struct { + types.ContractReader + contractBindingsByName map[string][]ExtendedBoundContract + mu *sync.RWMutex +} + +func NewExtendedContractReader(baseContractReader types.ContractReader) Extended { + return &extendedContractReader{ + ContractReader: baseContractReader, + contractBindingsByName: make(map[string][]ExtendedBoundContract), + mu: &sync.RWMutex{}, + } +} + +func (e *extendedContractReader) Bind(ctx context.Context, allBindings []types.BoundContract) error { + validBindings := slicelib.Filter(allBindings, func(b types.BoundContract) bool { return !e.bindingExists(b) }) + if len(validBindings) == 0 { + return nil + } + + err := e.ContractReader.Bind(ctx, validBindings) + if err != nil { + return fmt.Errorf("bind: %w", err) + } + + e.mu.Lock() + defer e.mu.Unlock() + for _, binding := range validBindings { + e.contractBindingsByName[binding.Name] = append(e.contractBindingsByName[binding.Name], ExtendedBoundContract{ + BoundAt: time.Now(), + Binding: binding, + }) + } + + return nil +} + +func (e *extendedContractReader) GetBindings(contractName string) []ExtendedBoundContract { + e.mu.RLock() + defer e.mu.RUnlock() + + bindings, exists := e.contractBindingsByName[contractName] + if !exists { + return []ExtendedBoundContract{} + } + return bindings +} + +func (e *extendedContractReader) bindingExists(b types.BoundContract) bool { + e.mu.RLock() + defer e.mu.RUnlock() + + for _, boundContracts := range e.contractBindingsByName { + for _, boundContract := range boundContracts { + if boundContract.Binding.Key() == b.Key() { + return true + } + } + } + return false +} + +// Interface compliance check +var _ Extended = (*extendedContractReader)(nil) diff --git a/pkg/contractreader/extended_test.go b/pkg/contractreader/extended_test.go new file mode 100644 index 000000000..8fdba6f93 --- /dev/null +++ b/pkg/contractreader/extended_test.go @@ -0,0 +1,48 @@ +package contractreader + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/smartcontractkit/chainlink-ccip/internal/mocks" + + "github.com/smartcontractkit/chainlink-common/pkg/types" +) + +func TestExtendedContractReader(t *testing.T) { + const contractName = "testContract" + cr := mocks.NewContractReaderMock() + extCr := NewExtendedContractReader(cr) + + bindings := extCr.GetBindings(contractName) + assert.Len(t, bindings, 0) + + cr.On("Bind", context.Background(), + []types.BoundContract{{Name: contractName, Address: "0x123"}}).Return(nil) + cr.On("Bind", context.Background(), + []types.BoundContract{{Name: contractName, Address: "0x124"}}).Return(nil) + cr.On("Bind", context.Background(), + []types.BoundContract{{Name: contractName, Address: "0x125"}}).Return(fmt.Errorf("some err")) + + err := extCr.Bind(context.Background(), []types.BoundContract{{Name: contractName, Address: "0x123"}}) + assert.NoError(t, err) + + // ignored since 0x123 already exists + err = extCr.Bind(context.Background(), []types.BoundContract{{Name: contractName, Address: "0x123"}}) + assert.NoError(t, err) + + err = extCr.Bind(context.Background(), []types.BoundContract{{Name: contractName, Address: "0x124"}}) + assert.NoError(t, err) + + // Bind fails + err = extCr.Bind(context.Background(), []types.BoundContract{{Name: contractName, Address: "0x125"}}) + assert.Error(t, err) + + bindings = extCr.GetBindings(contractName) + assert.Len(t, bindings, 2) + assert.Equal(t, "0x123", bindings[0].Binding.Address) + assert.Equal(t, "0x124", bindings[1].Binding.Address) +} diff --git 
a/pluginconfig/execute.go b/pluginconfig/execute.go index 5bafcd716..dda36f531 100644 --- a/pluginconfig/execute.go +++ b/pluginconfig/execute.go @@ -11,6 +11,12 @@ type ExecutePluginConfig struct { // DestChain is the ccip destination chain configured for the execute DON. DestChain cciptypes.ChainSelector `json:"destChain"` + // SyncTimeout is the timeout for syncing the commit plugin reader. + SyncTimeout time.Duration `json:"syncTimeout"` + + // SyncFrequency is the frequency at which the commit plugin reader should sync. + SyncFrequency time.Duration `json:"syncFrequency"` + // MessageVisibilityInterval is the time interval for which the messages are visible by the plugin. MessageVisibilityInterval time.Duration `json:"messageVisibilityInterval"` } diff --git a/plugintypes/execute.go b/plugintypes/execute.go index cdad1dd76..a3d4b6e03 100644 --- a/plugintypes/execute.go +++ b/plugintypes/execute.go @@ -12,11 +12,6 @@ import ( // Execute Observation // // /////////////////////// -type ExecutePluginCommitDataWithMessages struct { - ExecutePluginCommitData - Messages []cciptypes.Message `json:"messages"` -} - // ExecutePluginCommitData is the data that is committed to the chain. type ExecutePluginCommitData struct { // SourceChain of the chain that contains the commit report. @@ -29,11 +24,19 @@ type ExecutePluginCommitData struct { MerkleRoot cciptypes.Bytes32 `json:"merkleRoot"` // SequenceNumberRange of the messages that are in this commit report. SequenceNumberRange cciptypes.SeqNumRange `json:"sequenceNumberRange"` + + // Messages that are part of the commit report. + Messages []cciptypes.Message `json:"messages"` + // ExecutedMessages are the messages in this report that have already been executed. - ExecutedMessages []cciptypes.SeqNum `json:"executed"` + ExecutedMessages []cciptypes.SeqNum `json:"executedMessages"` + + // TODO: cache for token data. + // TokenData for each message. + //TokenData [][][]byte `json:"-"` } -type ExecutePluginCommitObservations map[cciptypes.ChainSelector][]ExecutePluginCommitDataWithMessages +type ExecutePluginCommitObservations map[cciptypes.ChainSelector][]ExecutePluginCommitData type ExecutePluginMessageObservations map[cciptypes.ChainSelector]map[cciptypes.SeqNum]cciptypes.Message // ExecutePluginObservation is the observation of the ExecutePlugin. @@ -77,14 +80,14 @@ func DecodeExecutePluginObservation(b []byte) (ExecutePluginObservation, error) type ExecutePluginOutcome struct { // PendingCommitReports are the oldest reports with pending commits. The slice is // sorted from oldest to newest. - PendingCommitReports []ExecutePluginCommitDataWithMessages `json:"commitReports"` + PendingCommitReports []ExecutePluginCommitData `json:"commitReports"` // Report is built from the oldest pending commit reports. Report cciptypes.ExecutePluginReport `json:"report"` } func NewExecutePluginOutcome( - pendingCommits []ExecutePluginCommitDataWithMessages, + pendingCommits []ExecutePluginCommitData, report cciptypes.ExecutePluginReport, ) ExecutePluginOutcome { return ExecutePluginOutcome{