11import { URL } from "url" ;
2- import { Client } from "./apiv2" ;
3- import * as utils from "./utils" ;
2+ import { Client } from "../apiv2" ;
43
54const MAX_CHUNK_SIZE = 1024 * 1024 ;
65
76type Data = {
8- json : any ;
7+ json : { [ key : string ] : any } | string | number | boolean ;
98 pathname : string ;
109} ;
1110
@@ -14,19 +13,28 @@ type ChunkedData = {
1413 size : number ;
1514} ;
1615
/**
 * Imports JSON data to a given RTDB instance.
 *
 * The data is parsed and chunked into subtrees of ~1 MB, to be subsequently written in parallel.
 */
export default class DatabaseImporter {
  // Subtrees to write: either the chunked pieces, or the whole payload when it
  // fits under the size limit.
  chunks: Data[];
  private client: Client;

  /**
   * @param dbUrl URL of the RTDB instance; its origin addresses the API host
   *     and its pathname is the location the data is imported under.
   * @param file Raw JSON text to import. Parsed eagerly, so malformed JSON
   *     throws here rather than at write time.
   * @param chunkSize Upper bound, in bytes of serialized JSON, for a single
   *     write; defaults to MAX_CHUNK_SIZE (~1 MB) and is parameterized so
   *     tests can exercise chunking with small payloads.
   */
  constructor(private dbUrl: URL, file: string, private chunkSize = MAX_CHUNK_SIZE) {
    const data = { json: JSON.parse(file), pathname: dbUrl.pathname };
    const chunkedData = this.chunkData(data);
    // chunkData returns null chunks when the whole tree fits in one write.
    this.chunks = chunkedData.chunks || [data];
    this.client = new Client({ urlPrefix: dbUrl.origin, auth: true });
  }
2831
// NOTE(review): this span is a rendered diff — "-" lines show the old public
// upload() entry point, "+" lines the execute() method that replaces it.
29- public async upload ( overwrite : boolean ) : Promise < any > {
32+ /**
33+ * Writes the chunked data to RTDB.
34+ *
35+ * @param overwrite Whether to overwrite the existing data at the given location.
36+ */
37+ async execute ( overwrite : boolean ) : Promise < any > {
// Issues one request per pre-computed chunk and awaits them all in parallel.
3038 return Promise . all (
3139 this . chunks . map ( ( chunk : Data ) =>
3240 this . client . request ( {
// NOTE(review): the hunk header below elides the request options (method/
// path/body) and the closing of Promise.all — those lines are not visible in
// this excerpt, so their behavior is not documented here.
@@ -40,16 +48,20 @@ export class DatabaseChunkUploader {
4048 }
4149
// chunkData: recursively measures the serialized size of `json` and collects
// subtrees small enough to each be written in a single request.
4250 private chunkData ( { json, pathname } : Data ) : ChunkedData {
// The diff replaces the old isObject() test with an explicit primitive check,
// moving the leaf case to the top of the method.
43- if ( isObject ( json ) ) {
51+ if ( typeof json === "string" || typeof json === "number" || typeof json === "boolean" ) {
52+ // Leaf node, cannot be chunked
53+ return { chunks : null , size : JSON . stringify ( json ) . length } ;
54+ } else {
4455 // Children node
4556 let size = 2 ; // {}
46- let chunks = [ ] ;
57+
58+ const chunks = [ ] ;
4759 let hasChunkedChild = false ;
4860
49- for ( const key in json ) {
61+ for ( const key of Object . keys ( json ) ) {
5062 size += key . length + 3 ; // "[key]":
5163
// NOTE(review): String.replace with a string pattern collapses only the FIRST
// "//" — fine when the only double slash comes from a root pathname ending in
// "/", but verify for pathnames that already contain "//".
52- const child = { json : json [ key ] , pathname : pathname + "/" + key } ;
64+ const child = { json : json [ key ] , pathname : [ pathname , key ] . join ( "/" ) . replace ( "//" , "/" ) } ;
5365 const childChunks = this . chunkData ( child ) ;
5466 size += childChunks . size ;
5567 if ( childChunks . chunks ) {
// NOTE(review): the hunk header below elides the loop body that accumulates
// child chunks and presumably sets hasChunkedChild — not visible in this
// excerpt, so not documented here.
@@ -60,18 +72,11 @@ export class DatabaseChunkUploader {
6072 }
6173 }
6274
// A subtree is returned as chunks when any child was chunked, or when its own
// serialized size reaches the limit (now the configurable this.chunkSize).
63- if ( hasChunkedChild || size >= MAX_CHUNK_SIZE ) {
75+ if ( hasChunkedChild || size >= this . chunkSize ) {
6476 return { chunks, size } ;
6577 } else {
6678 return { chunks : null , size } ;
6779 }
68- } else {
69- // Leaf node, cannot be chunked
70- return { chunks : null , size : JSON . stringify ( json ) . length } ;
71- }
7180 }
7281 }
7382}
74-
75- function isObject ( blob : any ) : boolean {
76- return blob !== null && typeof blob === "object" ;
77- }
0 commit comments