Skip to content

Commit 8b03a22

Browse files
committed
Skip StorableNode/Trie when reading checkpoint
- Merge RebuildTries() with LoadCheckpoint() to deserialize data to nodes without creating intermediate StorableNode/StorableTrie objects.
- Avoid creating 400+ million element slice holding all StorableNodes read from checkpoint file.
- DiskWal.Replay*() APIs are changed: checkpointFn receives []*trie.MTrie instead of FlattenedForest.
- Remove files containing StorableNode/StorableTrie/FlattenedForest etc.:
  * mtrie/flattener/forest.go
  * mtrie/flattener/forest_test.go
  * mtrie/flattener/storables.go
  * mtrie/flattener/trie.go
  * mtrie/flattener/trie_test.go
1 parent f185fd9 commit 8b03a22

File tree

16 files changed

+272
-633
lines changed

16 files changed

+272
-633
lines changed

cmd/util/cmd/checkpoint-list-tries/cmd.go

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,12 +28,12 @@ func init() {
2828

2929
func run(*cobra.Command, []string) {
3030

31-
flattenedForest, err := wal.LoadCheckpoint(flagCheckpoint)
31+
tries, err := wal.LoadCheckpoint(flagCheckpoint)
3232
if err != nil {
3333
log.Fatal().Err(err).Msg("error while loading checkpoint")
3434
}
3535

36-
for _, trie := range flattenedForest.Tries {
37-
fmt.Printf("%x\n", trie.RootHash)
36+
for _, trie := range tries {
37+
fmt.Printf("%x\n", trie.RootHash())
3838
}
3939
}

cmd/util/cmd/read-execution-state/list-wals/cmd.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ import (
1111
"github.com/onflow/flow-go/ledger"
1212
"github.com/onflow/flow-go/ledger/common/pathfinder"
1313
"github.com/onflow/flow-go/ledger/complete"
14-
"github.com/onflow/flow-go/ledger/complete/mtrie/flattener"
14+
"github.com/onflow/flow-go/ledger/complete/mtrie/trie"
1515
"github.com/onflow/flow-go/ledger/complete/wal"
1616
"github.com/onflow/flow-go/module/metrics"
1717
)
@@ -52,7 +52,7 @@ func run(*cobra.Command, []string) {
5252
}()
5353

5454
err = w.ReplayLogsOnly(
55-
func(forestSequencing *flattener.FlattenedForest) error {
55+
func(tries []*trie.MTrie) error {
5656
fmt.Printf("forest sequencing \n")
5757
return nil
5858
},

ledger/complete/mtrie/flattener/encoding.go

Lines changed: 89 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,13 @@
11
package flattener
22

33
import (
4+
"bytes"
45
"fmt"
56
"io"
67

78
"github.com/onflow/flow-go/ledger"
89
"github.com/onflow/flow-go/ledger/common/encoding"
10+
"github.com/onflow/flow-go/ledger/common/hash"
911
"github.com/onflow/flow-go/ledger/common/utils"
1012
"github.com/onflow/flow-go/ledger/complete/mtrie/node"
1113
"github.com/onflow/flow-go/ledger/complete/mtrie/trie"
@@ -59,82 +61,127 @@ func EncodeNode(n *node.Node, lchildIndex uint64, rchildIndex uint64) []byte {
5961
return buf
6062
}
6163

62-
// ReadStorableNode reads a storable node from io
63-
func ReadStorableNode(reader io.Reader) (*StorableNode, error) {
64+
// ReadNode reconstructs a node from data read from reader.
65+
// TODO: reuse read buffer
66+
func ReadNode(reader io.Reader, getNode func(nodeIndex uint64) (*node.Node, error)) (*node.Node, error) {
6467

6568
// reading version
6669
buf := make([]byte, 2)
6770
read, err := io.ReadFull(reader, buf)
6871
if err != nil {
69-
return nil, fmt.Errorf("error reading storable node, cannot read version part: %w", err)
72+
return nil, fmt.Errorf("failed to read serialized node, cannot read version part: %w", err)
7073
}
7174
if read != len(buf) {
72-
return nil, fmt.Errorf("not enough bytes read %d expected %d", read, len(buf))
75+
return nil, fmt.Errorf("failed to read serialized node: not enough bytes read %d expected %d", read, len(buf))
7376
}
7477

7578
version, _, err := utils.ReadUint16(buf)
7679
if err != nil {
77-
return nil, fmt.Errorf("error reading storable node: %w", err)
80+
return nil, fmt.Errorf("failed to read serialized node: %w", err)
7881
}
7982

8083
if version > encodingDecodingVersion {
81-
return nil, fmt.Errorf("error reading storable node: unsuported version %d > %d", version, encodingDecodingVersion)
84+
return nil, fmt.Errorf("failed to read serialized node: unsuported version %d > %d", version, encodingDecodingVersion)
8285
}
8386

8487
// reading fixed-length part
8588
buf = make([]byte, 2+8+8+2+8)
8689

8790
read, err = io.ReadFull(reader, buf)
8891
if err != nil {
89-
return nil, fmt.Errorf("error reading storable node, cannot read fixed-length part: %w", err)
92+
return nil, fmt.Errorf("failed to read serialized node, cannot read fixed-length part: %w", err)
9093
}
9194
if read != len(buf) {
92-
return nil, fmt.Errorf("not enough bytes read %d expected %d", read, len(buf))
95+
return nil, fmt.Errorf("failed to read serialized node: not enough bytes read %d expected %d", read, len(buf))
9396
}
9497

95-
storableNode := &StorableNode{}
98+
var height, maxDepth uint16
99+
var lchildIndex, rchildIndex, regCount uint64
100+
var path, hashValue, encPayload []byte
96101

97-
storableNode.Height, buf, err = utils.ReadUint16(buf)
102+
height, buf, err = utils.ReadUint16(buf)
98103
if err != nil {
99-
return nil, fmt.Errorf("error reading storable node: %w", err)
104+
return nil, fmt.Errorf("failed to read serialized node: %w", err)
100105
}
101106

102-
storableNode.LIndex, buf, err = utils.ReadUint64(buf)
107+
lchildIndex, buf, err = utils.ReadUint64(buf)
103108
if err != nil {
104-
return nil, fmt.Errorf("error reading storable node: %w", err)
109+
return nil, fmt.Errorf("failed to read serialized node: %w", err)
105110
}
106111

107-
storableNode.RIndex, buf, err = utils.ReadUint64(buf)
112+
rchildIndex, buf, err = utils.ReadUint64(buf)
108113
if err != nil {
109-
return nil, fmt.Errorf("error reading storable node: %w", err)
114+
return nil, fmt.Errorf("failed to read serialized node: %w", err)
110115
}
111116

112-
storableNode.MaxDepth, buf, err = utils.ReadUint16(buf)
117+
maxDepth, buf, err = utils.ReadUint16(buf)
113118
if err != nil {
114-
return nil, fmt.Errorf("error reading storable node: %w", err)
119+
return nil, fmt.Errorf("failed to read serialized node: %w", err)
115120
}
116121

117-
storableNode.RegCount, _, err = utils.ReadUint64(buf)
122+
regCount, _, err = utils.ReadUint64(buf)
118123
if err != nil {
119-
return nil, fmt.Errorf("error reading storable node: %w", err)
124+
return nil, fmt.Errorf("failed to read serialized node: %w", err)
120125
}
121126

122-
storableNode.Path, err = utils.ReadShortDataFromReader(reader)
127+
path, err = utils.ReadShortDataFromReader(reader)
123128
if err != nil {
124129
return nil, fmt.Errorf("cannot read key data: %w", err)
125130
}
126131

127-
storableNode.EncPayload, err = utils.ReadLongDataFromReader(reader)
132+
encPayload, err = utils.ReadLongDataFromReader(reader)
128133
if err != nil {
129134
return nil, fmt.Errorf("cannot read value data: %w", err)
130135
}
131136

132-
storableNode.HashValue, err = utils.ReadShortDataFromReader(reader)
137+
hashValue, err = utils.ReadShortDataFromReader(reader)
133138
if err != nil {
134139
return nil, fmt.Errorf("cannot read hashValue data: %w", err)
135140
}
136141

137-
return storableNode, nil
142+
// Create (and copy) hash from raw data.
143+
nodeHash, err := hash.ToHash(hashValue)
144+
if err != nil {
145+
return nil, fmt.Errorf("failed to decode hash from checkpoint: %w", err)
146+
}
147+
148+
if len(path) > 0 {
149+
// Create (and copy) path from raw data.
150+
path, err := ledger.ToPath(path)
151+
if err != nil {
152+
return nil, fmt.Errorf("failed to decode path from checkpoint: %w", err)
153+
}
154+
155+
// Decode payload (payload data isn't copied).
156+
payload, err := encoding.DecodePayload(encPayload)
157+
if err != nil {
158+
return nil, fmt.Errorf("failed to decode payload from checkpoint: %w", err)
159+
}
160+
161+
// make a copy of payload
162+
var pl *ledger.Payload
163+
if payload != nil {
164+
pl = payload.DeepCopy()
165+
}
166+
167+
n := node.NewNode(int(height), nil, nil, path, pl, nodeHash, maxDepth, regCount)
168+
return n, nil
169+
}
170+
171+
// Get left child node by node index
172+
lchild, err := getNode(lchildIndex)
173+
if err != nil {
174+
return nil, fmt.Errorf("failed to find left child node: %w", err)
175+
}
176+
177+
// Get right child node by node index
178+
rchild, err := getNode(rchildIndex)
179+
if err != nil {
180+
return nil, fmt.Errorf("failed to find right child node: %w", err)
181+
}
182+
183+
n := node.NewNode(int(height), lchild, rchild, ledger.DummyPath, nil, nodeHash, maxDepth, regCount)
184+
return n, nil
138185
}
139186

140187
// EncodeTrie encodes trie root node
@@ -162,9 +209,8 @@ func EncodeTrie(rootNode *node.Node, rootIndex uint64) []byte {
162209
return buf
163210
}
164211

165-
// ReadStorableTrie reads a storable trie from io
166-
func ReadStorableTrie(reader io.Reader) (*StorableTrie, error) {
167-
storableTrie := &StorableTrie{}
212+
// ReadTrie reconstructs a trie from data read from reader.
213+
func ReadTrie(reader io.Reader, getNode func(nodeIndex uint64) (*node.Node, error)) (*trie.MTrie, error) {
168214

169215
// reading version
170216
buf := make([]byte, 2)
@@ -199,13 +245,26 @@ func ReadStorableTrie(reader io.Reader) (*StorableTrie, error) {
199245
if err != nil {
200246
return nil, fmt.Errorf("cannot read root index data: %w", err)
201247
}
202-
storableTrie.RootIndex = rootIndex
203248

204-
roothash, err := utils.ReadShortDataFromReader(reader)
249+
readRootHash, err := utils.ReadShortDataFromReader(reader)
205250
if err != nil {
206251
return nil, fmt.Errorf("cannot read roothash data: %w", err)
207252
}
208-
storableTrie.RootHash = roothash
209253

210-
return storableTrie, nil
254+
rootNode, err := getNode(rootIndex)
255+
if err != nil {
256+
return nil, fmt.Errorf("cannot find root node: %w", err)
257+
}
258+
259+
mtrie, err := trie.NewMTrie(rootNode)
260+
if err != nil {
261+
return nil, fmt.Errorf("restoring trie failed: %w", err)
262+
}
263+
264+
rootHash := mtrie.RootHash()
265+
if !bytes.Equal(readRootHash, rootHash[:]) {
266+
return nil, fmt.Errorf("restoring trie failed: roothash doesn't match")
267+
}
268+
269+
return mtrie, nil
211270
}

0 commit comments

Comments (0)