@@ -8,7 +8,7 @@ import { FirebaseError } from "../../error";
 import { FetchError } from "node-fetch";
 
 const dbUrl = new URL("https://test-db.firebaseio.com/foo");
-const chunkSize = 1024 * 1024 * 10;
+const payloadSize = 1024 * 1024 * 10;
 const concurrencyLimit = 5;
 
 describe("DatabaseImporter", () => {
@@ -26,7 +26,7 @@ describe("DatabaseImporter", () => {
       dbUrl,
       utils.stringToStream(INVALID_JSON)!,
       /* importPath= */ "/",
-      chunkSize,
+      payloadSize,
       concurrencyLimit
     );
 
@@ -36,17 +36,19 @@ describe("DatabaseImporter", () => {
     );
   });
 
-  it("chunks data in top-level objects", async () => {
+  it("batches data from different top-level objects", async () => {
     nock("https://test-db.firebaseio.com").get("/foo.json?shallow=true").reply(200);
-    nock("https://test-db.firebaseio.com").put("/foo/a.json", "100").reply(200);
     nock("https://test-db.firebaseio.com")
-      .put("/foo/b.json", JSON.stringify([true, "bar", { f: { g: 0, h: 1 }, i: "baz" }]))
+      .patch("/.json", JSON.stringify({ "/foo/a": 100, "/foo/b/0": true, "/foo/b/1": "bar" }))
+      .reply(200);
+    nock("https://test-db.firebaseio.com")
+      .patch("/.json", JSON.stringify({ "/foo/b/2/f": { g: 0, h: 1 }, "/foo/b/2/i": "baz" }))
       .reply(200);
     const importer = new DatabaseImporter(
       dbUrl,
       DATA_STREAM,
       /* importPath= */ "/",
-      chunkSize,
+      /* payloadSize= */ 20,
       concurrencyLimit
     );
 
@@ -56,39 +58,38 @@ describe("DatabaseImporter", () => {
     expect(nock.isDone()).to.be.true;
   });
 
-  it("chunks data according to provided chunk size", async () => {
+  it("writes data as a single batch for large enough payload size", async () => {
     nock("https://test-db.firebaseio.com").get("/foo.json?shallow=true").reply(200);
-    nock("https://test-db.firebaseio.com").put("/foo/a.json", "100").reply(200);
-    nock("https://test-db.firebaseio.com").put("/foo/b/0.json", "true").reply(200);
-    nock("https://test-db.firebaseio.com").put("/foo/b/1.json", '"bar"').reply(200);
     nock("https://test-db.firebaseio.com")
-      .put("/foo/b/2/f.json", JSON.stringify({ g: 0, h: 1 }))
+      .patch(
+        "/.json",
+        JSON.stringify({ "/foo/a": 100, "/foo/b": [true, "bar", { f: { g: 0, h: 1 }, i: "baz" }] })
+      )
       .reply(200);
-    nock("https://test-db.firebaseio.com").put("/foo/b/2/i.json", '"baz"').reply(200);
     const importer = new DatabaseImporter(
       dbUrl,
       DATA_STREAM,
       /* importPath= */ "/",
-      /* chunkSize= */ 20,
+      payloadSize,
       concurrencyLimit
     );
 
     const responses = await importer.execute();
 
-    expect(responses).to.have.length(5);
+    expect(responses).to.have.length(1);
     expect(nock.isDone()).to.be.true;
   });
 
   it("imports from data path", async () => {
     nock("https://test-db.firebaseio.com").get("/foo.json?shallow=true").reply(200);
     nock("https://test-db.firebaseio.com")
-      .put("/foo/b.json", JSON.stringify([true, "bar", { f: { g: 0, h: 1 }, i: "baz" }]))
+      .patch("/.json", JSON.stringify({ "/foo/b": [true, "bar", { f: { g: 0, h: 1 }, i: "baz" }] }))
       .reply(200);
     const importer = new DatabaseImporter(
       dbUrl,
       DATA_STREAM,
       /* importPath= */ "/b",
-      chunkSize,
+      payloadSize,
       concurrencyLimit
     );
 
@@ -104,7 +105,7 @@ describe("DatabaseImporter", () => {
       dbUrl,
       DATA_STREAM,
       /* importPath= */ "/",
-      chunkSize,
+      payloadSize,
       concurrencyLimit
     );
 
@@ -120,26 +121,32 @@ describe("DatabaseImporter", () => {
 
     nock("https://test-db.firebaseio.com").get("/foo.json?shallow=true").reply(200);
     nock("https://test-db.firebaseio.com")
-      .put("/foo/a.json", "100")
+      .patch(
+        "/.json",
+        JSON.stringify({ "/foo/a": 100, "/foo/b": [true, "bar", { f: { g: 0, h: 1 }, i: "baz" }] })
+      )
       .once()
       .replyWithError(timeoutErr);
-    nock("https://test-db.firebaseio.com").put("/foo/a.json", "100").once().reply(200);
     nock("https://test-db.firebaseio.com")
-      .put("/foo/b.json", JSON.stringify([true, "bar", { f: { g: 0, h: 1 }, i: "baz" }]))
+      .patch(
+        "/.json",
+        JSON.stringify({ "/foo/a": 100, "/foo/b": [true, "bar", { f: { g: 0, h: 1 }, i: "baz" }] })
+      )
+      .once()
       .reply(200);
 
     const importer = new DatabaseImporter(
       dbUrl,
       DATA_STREAM,
       /* importPath= */ "/",
-      chunkSize,
+      payloadSize,
       concurrencyLimit
     );
     importer.nonFatalRetryTimeout = 0;
 
     const responses = await importer.execute();
 
-    expect(responses).to.have.length(2);
+    expect(responses).to.have.length(1);
     expect(nock.isDone()).to.be.true;
   });
 });
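
For context on what these tests now assert: the Realtime Database REST API accepts multi-path updates, so a PATCH to "/.json" whose body maps paths to values writes several locations in one request. The sketch below illustrates the batching idea the tests exercise, not the firebase-tools implementation: flatten the JSON tree into path/value pairs, then pack pairs into PATCH bodies whose serialized size stays under payloadSize. The names flatten and buildBatches are hypothetical, and the real importer evidently keeps subtrees intact when they fit (note the "/foo/b": [...] body above), whereas this sketch flattens fully to leaves for simplicity.

// Minimal sketch of size-capped multi-path batching (assumed names, not the
// firebase-tools code). A batch is one PATCH body for "/.json".
type JsonValue = null | boolean | number | string | JsonValue[] | { [key: string]: JsonValue };

function flatten(value: JsonValue, path = ""): Array<[string, JsonValue]> {
  // Leaves become individual multi-path-update entries.
  if (value === null || typeof value !== "object") {
    return [[path, value]];
  }
  const entries: Array<[string, JsonValue]> = Array.isArray(value)
    ? value.map((v, i): [string, JsonValue] => [String(i), v])
    : Object.entries(value);
  return entries.flatMap(([key, child]) => flatten(child, `${path}/${key}`));
}

function buildBatches(pairs: Array<[string, JsonValue]>, payloadSize: number): string[] {
  const batches: string[] = [];
  let current: { [path: string]: JsonValue } = {};
  for (const [path, value] of pairs) {
    const candidate = { ...current, [path]: value };
    if (JSON.stringify(candidate).length > payloadSize && Object.keys(current).length > 0) {
      // Adding this pair would exceed the cap: flush the batch, start a new one.
      batches.push(JSON.stringify(current));
      current = { [path]: value };
    } else {
      current = candidate;
    }
  }
  if (Object.keys(current).length > 0) {
    batches.push(JSON.stringify(current));
  }
  return batches;
}

// With a 10 MiB cap everything lands in one PATCH body, matching the
// single-batch expectation; with a 20-byte cap the same data splits up.
const pairs = flatten({ foo: { a: 100, b: [true, "bar"] } });
console.log(buildBatches(pairs, 10 * 1024 * 1024));
// => ['{"/foo/a":100,"/foo/b/0":true,"/foo/b/1":"bar"}']
console.log(buildBatches(pairs, 20));
// => ['{"/foo/a":100}', '{"/foo/b/0":true}', '{"/foo/b/1":"bar"}']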