2 changes: 1 addition & 1 deletion params/version.go
@@ -24,7 +24,7 @@ import (
const (
VersionMajor = 5 // Major version component of the current release
VersionMinor = 3 // Minor version component of the current release
-	VersionPatch = 12 // Patch version component of the current release
+	VersionPatch = 13 // Patch version component of the current release
VersionMeta = "mainnet" // Version metadata to append to the version string
)

14 changes: 9 additions & 5 deletions rollup/circuitcapacitychecker/impl.go
@@ -10,6 +10,7 @@ package circuitcapacitychecker
import "C" //nolint:typecheck

import (
+	"bytes"
"encoding/json"
"fmt"
"sync"
@@ -29,7 +30,8 @@ func init() {
type CircuitCapacityChecker struct {
// mutex for each CircuitCapacityChecker itself
sync.Mutex
-	ID uint64
+	ID         uint64
+	jsonBuffer bytes.Buffer
}

// NewCircuitCapacityChecker creates a new CircuitCapacityChecker
@@ -65,13 +67,14 @@ func (ccc *CircuitCapacityChecker) ApplyTransaction(traces *types.BlockTrace) (*
return nil, ErrUnknown
}

-	tracesByt, err := json.Marshal(traces)
+	ccc.jsonBuffer.Reset()

Reviewer: What if we encounter an exceptionally large trace, would we just keep the allocated buffer, even though we almost never fully utilize it?

Author: Yeah, that is the trade-off here. Such peaks of memory usage become sticky.

Reply: After disabling the stack, I think we won't have such large traces as before.

(See the sketch after this hunk for one possible way to bound the retained capacity.)
+	err := json.NewEncoder(&ccc.jsonBuffer).Encode(traces)
if err != nil {
log.Error("fail to json marshal traces in ApplyTransaction", "id", ccc.ID, "TxHash", traces.Transactions[0].TxHash, "err", err)
return nil, ErrUnknown
}

-	tracesStr := C.CString(string(tracesByt))
+	tracesStr := C.CString(string(ccc.jsonBuffer.Bytes()))
defer func() {
C.free(unsafe.Pointer(tracesStr))
}()
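The retained-capacity concern raised in the review thread above could, if it ever became a problem, be addressed by releasing the buffer whenever a spike leaves it oversized. Below is a minimal standalone sketch of that idea; it is not part of this PR, and the `checker` type, `resetJSONBuffer` helper, and 4 MiB threshold are illustrative assumptions only.

```go
package main

import "bytes"

// maxRetainedBufferCap is an assumed cap on how much capacity we are willing
// to keep between calls; the 4 MiB value is illustrative, not from the PR.
const maxRetainedBufferCap = 4 << 20

// checker mirrors the jsonBuffer field added to CircuitCapacityChecker above.
type checker struct {
	jsonBuffer bytes.Buffer
}

// resetJSONBuffer behaves like jsonBuffer.Reset(), except that it drops the
// backing array entirely once its capacity has grown past the cap, so a single
// exceptionally large trace does not pin memory for the checker's lifetime.
func (c *checker) resetJSONBuffer() {
	if c.jsonBuffer.Cap() > maxRetainedBufferCap {
		c.jsonBuffer = bytes.Buffer{} // old array becomes eligible for GC
		return
	}
	c.jsonBuffer.Reset()
}

func main() {
	c := &checker{}
	c.jsonBuffer.Grow(8 << 20) // simulate a spike from an unusually large trace
	c.resetJSONBuffer()
	println(c.jsonBuffer.Cap()) // prints 0: the oversized buffer was released
}
```

Whether such a cap is worth the extra code depends on how rare the large traces actually are, which is the open question in the thread above.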
@@ -111,13 +114,14 @@ func (ccc *CircuitCapacityChecker) ApplyBlock(traces *types.BlockTrace) (*types.
ccc.Lock()
defer ccc.Unlock()

-	tracesByt, err := json.Marshal(traces)
+	ccc.jsonBuffer.Reset()
+	err := json.NewEncoder(&ccc.jsonBuffer).Encode(traces)
if err != nil {
log.Error("fail to json marshal traces in ApplyBlock", "id", ccc.ID, "blockNumber", traces.Header.Number, "blockHash", traces.Header.Hash(), "err", err)
return nil, ErrUnknown
}

-	tracesStr := C.CString(string(tracesByt))
+	tracesStr := C.CString(string(ccc.jsonBuffer.Bytes()))
defer func() {
C.free(unsafe.Pointer(tracesStr))
}()
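For reference, the change above swaps a per-call json.Marshal for a json.Encoder writing into a bytes.Buffer that is reused across calls. The following is a minimal self-contained sketch of that pattern; the `encoderExample` type and `encode` method are hypothetical names, not code from this repository. One behavioral difference worth keeping in mind is that Encoder.Encode appends a trailing newline that json.Marshal does not.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// encoderExample reuses one buffer's backing array across serializations
// instead of allocating a fresh []byte with json.Marshal on every call.
type encoderExample struct {
	jsonBuffer bytes.Buffer
}

func (e *encoderExample) encode(v interface{}) ([]byte, error) {
	e.jsonBuffer.Reset()
	// Encoder.Encode writes the JSON value followed by a newline into the buffer.
	if err := json.NewEncoder(&e.jsonBuffer).Encode(v); err != nil {
		return nil, err
	}
	// The returned slice aliases the buffer and is only valid until the next Reset.
	return e.jsonBuffer.Bytes(), nil
}

func main() {
	e := &encoderExample{}
	out, err := e.encode(map[string]int{"x": 1})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%q\n", out) // prints "{\"x\":1}\n" (note the trailing newline)
}
```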