@@ -4,133 +4,149 @@ function error(err) {
    err && logger(err.message);
}

-function writeMetadata(documentStore) {
-    return function(collection, metadata, next) {
-        return collection.indexes(function(err, indexes) {
+function writeMetadata(collection, metadata, next) {
+    return collection.indexes(function(err, indexes) {
+        if (err) return next(err);
+        fs.writeFile(metadata + collection.collectionName, JSON.stringify(indexes), next);
+    });
+}
+
+function makeDir(pathname, next) {
+    fs.stat(pathname, function(err, stats) {
+        return err && "ENOENT" === err.code ? (logger("make dir at " + pathname), fs.mkdir(pathname, function(err) {
+            next(err, pathname);
+        })) : stats && !1 === stats.isDirectory() ? (logger("unlink file at " + pathname),
+        fs.unlink(pathname, function(err) {
            if (err) return next(err);
-            documentStore.store(".metadata", collection.collectionName, JSON.stringify(indexes), next);
-        });
-    };
+            logger("make dir at " + pathname), fs.mkdir(pathname, function(err) {
+                next(err, pathname);
+            });
+        })) : void next(null, pathname);
+    });
}

-function toJsonAsync(documentStore) {
-    return function(doc, collectionPath) {
-        documentStore.store(collectionPath, doc._id + ".json", JSON.stringify(doc));
-    };
+function rmDir(pathname, next) {
+    fs.readdirSync(pathname).forEach(function(first) {
+        var database = pathname + first;
+        if (!1 === fs.statSync(database).isDirectory()) return next(Error("path is not a Directory"));
+        var metadata = "", collections = fs.readdirSync(database), metadataPath = path.join(database, ".metadata");
+        return !0 === fs.existsSync(metadataPath) && (metadata = metadataPath + path.sep,
+        delete collections[collections.indexOf(".metadata")]), collections.forEach(function(second) {
+            var collection = path.join(database, second);
+            !1 !== fs.statSync(collection).isDirectory() && (fs.readdirSync(collection).forEach(function(third) {
+                var document = path.join(collection, third);
+                return fs.unlinkSync(document), next ? next(null, document) : "";
+            }), "" !== metadata && fs.unlinkSync(metadata + second), fs.rmdirSync(collection));
+        }), "" !== metadata && fs.rmdirSync(metadata), fs.rmdirSync(database);
+    });
}

-function toBsonAsync(documentStore) {
-    return function(doc, collectionPath) {
-        documentStore.store(collectionPath, doc._id + ".bson", BSON.serialize(doc));
-    };
+function toJsonAsync(doc, collectionPath) {
+    fs.writeFile(collectionPath + doc._id + ".json", JSON.stringify(doc));
}

-function allCollections(documentStore) {
-    return function(db, name, query, metadata, parser, next) {
-        return db.collections(function(err, collections) {
-            if (err) return next(err);
-            var last = ~~collections.length, index = 0;
-            if (0 === last) return next(err);
-            collections.forEach(function(collection) {
-                if (!0 === systemRegex.test(collection.collectionName)) return last === ++index ? next(null) : null;
-                logger("select collection " + collection.collectionName), documentStore.addCollection(collection.collectionName, function(err) {
-                    if (err) return last === ++index ? next(err) : error(err);
-                    meta(collection, metadata, function() {
-                        collection.find(query).snapshot(!0).stream().once("end", function() {
-                            return last === ++index ? next(null) : null;
-                        }).on("data", function(doc) {
-                            parser(doc, collection.collectionName);
-                        });
-                    });
-                });
-            });
-        });
-    };
+function toBsonAsync(doc, collectionPath) {
+    fs.writeFile(collectionPath + doc._id + ".bson", BSON.serialize(doc));
}

-function allCollectionsScan(documentStore) {
-    return function(db, name, numCursors, metadata, parser, next) {
-        return db.collections(function(err, collections) {
-            if (err) return next(err);
-            var last = ~~collections.length, index = 0;
-            if (0 === last) return next(null);
-            collections.forEach(function(collection) {
-                if (!0 === systemRegex.test(collection.collectionName)) return last === ++index ? next(null) : null;
-                logger("select collection scan " + collection.collectionName), documentStore.addCollection(collection.collectionName, function(err) {
-                    if (err) return last === ++index ? next(err) : error(err);
-                    meta(collection, metadata, function() {
-                        collection.parallelCollectionScan({
-                            numCursors: numCursors
-                        }, function(err, cursors) {
-                            if (err) return last === ++index ? next(err) : error(err);
-                            var ii, cursorsDone;
-                            if (0 === (ii = cursorsDone = ~~cursors.length)) return last === ++index ? next(null) : null;
-                            for (var i = 0; i < ii; ++i) cursors[i].once("end", function() {
-                                if (0 == --cursorsDone) return last === ++index ? next(null) : null;
-                            }).on("data", function(doc) {
-                                parser(doc, collection.collectionName);
-                            });
-                        });
+function allCollections(db, name, query, metadata, parser, next) {
+    return db.collections(function(err, collections) {
+        if (err) return next(err);
+        var last = ~~collections.length, index = 0;
+        if (0 === last) return next(err);
+        collections.forEach(function(collection) {
+            if (!0 === systemRegex.test(collection.collectionName)) return last === ++index ? next(null) : null;
+            logger("select collection " + collection.collectionName), makeDir(name + collection.collectionName + path.sep, function(err, name) {
+                if (err) return last === ++index ? next(err) : error(err);
+                meta(collection, metadata, function() {
+                    collection.find(query).snapshot(!0).stream().once("end", function() {
+                        return last === ++index ? next(null) : null;
+                    }).on("data", function(doc) {
+                        parser(doc, name);
                    });
                });
            });
        });
-    };
+    });
}

-function someCollections(documentStore) {
-    return function(db, name, query, metadata, parser, next, collections) {
+function allCollectionsScan(db, name, numCursors, metadata, parser, next) {
+    return db.collections(function(err, collections) {
+        if (err) return next(err);
        var last = ~~collections.length, index = 0;
        if (0 === last) return next(null);
        collections.forEach(function(collection) {
-            db.collection(collection, {
-                strict: !0
-            }, function(err, collection) {
+            if (!0 === systemRegex.test(collection.collectionName)) return last === ++index ? next(null) : null;
+            logger("select collection scan " + collection.collectionName), makeDir(name + collection.collectionName + path.sep, function(err, name) {
                if (err) return last === ++index ? next(err) : error(err);
-                logger("select collection " + collection.collectionName), documentStore.addCollection(collection.collectionName, function(err) {
-                    if (err) return last === ++index ? next(err) : error(err);
-                    meta(collection, metadata, function() {
-                        collection.find(query).snapshot(!0).stream().once("end", function() {
-                            return last === ++index ? next(null) : null;
+                meta(collection, metadata, function() {
+                    collection.parallelCollectionScan({
+                        numCursors: numCursors
+                    }, function(err, cursors) {
+                        if (err) return last === ++index ? next(err) : error(err);
+                        var ii, cursorsDone;
+                        if (0 === (ii = cursorsDone = ~~cursors.length)) return last === ++index ? next(null) : null;
+                        for (var i = 0; i < ii; ++i) cursors[i].once("end", function() {
+                            if (0 == --cursorsDone) return last === ++index ? next(null) : null;
                        }).on("data", function(doc) {
-                            parser(doc, collection.collectionName);
+                            parser(doc, name);
                        });
                    });
                });
            });
        });
-    };
+    });
}

-function someCollectionsScan(documentStore) {
-    return function(db, name, numCursors, metadata, parser, next, collections) {
-        var last = ~~collections.length, index = 0;
-        if (0 === last) return next(null);
-        collections.forEach(function(collection) {
-            db.collection(collection, {
-                strict: !0
-            }, function(err, collection) {
+function someCollections(db, name, query, metadata, parser, next, collections) {
+    var last = ~~collections.length, index = 0;
+    if (0 === last) return next(null);
+    collections.forEach(function(collection) {
+        db.collection(collection, {
+            strict: !0
+        }, function(err, collection) {
+            if (err) return last === ++index ? next(err) : error(err);
+            logger("select collection " + collection.collectionName), makeDir(name + collection.collectionName + path.sep, function(err, name) {
                if (err) return last === ++index ? next(err) : error(err);
-                logger("select collection scan " + collection.collectionName), documentStore.addCollection(collection.collectionName, function(err) {
-                    if (err) return last === ++index ? next(err) : error(err);
-                    meta(collection, metadata, function() {
-                        collection.parallelCollectionScan({
-                            numCursors: numCursors
-                        }, function(err, cursors) {
-                            if (err) return last === ++index ? next(err) : error(err);
-                            var ii, cursorsDone;
-                            if (0 === (ii = cursorsDone = ~~cursors.length)) return last === ++index ? next(null) : null;
-                            for (var i = 0; i < ii; ++i) cursors[i].once("end", function() {
-                                if (0 == --cursorsDone) return last === ++index ? next(null) : null;
-                            }).on("data", function(doc) {
-                                parser(doc, collection.collectionName);
-                            });
+                meta(collection, metadata, function() {
+                    collection.find(query).snapshot(!0).stream().once("end", function() {
+                        return last === ++index ? next(null) : null;
+                    }).on("data", function(doc) {
+                        parser(doc, name);
+                    });
+                });
+            });
+        });
+    });
+}
+
+function someCollectionsScan(db, name, numCursors, metadata, parser, next, collections) {
+    var last = ~~collections.length, index = 0;
+    if (0 === last) return next(null);
+    collections.forEach(function(collection) {
+        db.collection(collection, {
+            strict: !0
+        }, function(err, collection) {
+            if (err) return last === ++index ? next(err) : error(err);
+            logger("select collection scan " + collection.collectionName), makeDir(name + collection.collectionName + path.sep, function(err, name) {
+                if (err) return last === ++index ? next(err) : error(err);
+                meta(collection, metadata, function() {
+                    collection.parallelCollectionScan({
+                        numCursors: numCursors
+                    }, function(err, cursors) {
+                        if (err) return last === ++index ? next(err) : error(err);
+                        var ii, cursorsDone;
+                        if (0 === (ii = cursorsDone = ~~cursors.length)) return last === ++index ? next(null) : null;
+                        for (var i = 0; i < ii; ++i) cursors[i].once("end", function() {
+                            if (0 == --cursorsDone) return last === ++index ? next(null) : null;
+                        }).on("data", function(doc) {
+                            parser(doc, name);
                        });
                    });
                });
            });
        });
-    };
+    });
}

function wrapper(my) {
@@ -140,19 +156,19 @@ function wrapper(my) {
    var parser;
    if ("function" == typeof my.parser) parser = my.parser; else switch (my.parser.toLowerCase()) {
        case "bson":
-            BSON = require("bson"), BSON = new BSON(), parser = toBsonAsync(my.documentStore);
+            BSON = require("bson"), BSON = new BSON(), parser = toBsonAsync;
            break;

        case "json":
-            parser = toJsonAsync(my.documentStore);
+            parser = toJsonAsync;
            break;

        default:
            throw new Error("missing parser option");
    }
-    var discriminator = allCollections(my.documentStore);
-    if (null !== my.collections ? (discriminator = someCollections(my.documentStore),
-    my.numCursors && (discriminator = someCollectionsScan(my.documentStore), my.query = my.numCursors)) : my.numCursors && (discriminator = allCollectionsScan(my.documentStore),
+    var discriminator = allCollections;
+    if (null !== my.collections ? (discriminator = someCollections, my.numCursors && (discriminator = someCollectionsScan,
+    my.query = my.numCursors)) : my.numCursors && (discriminator = allCollectionsScan,
    my.query = my.numCursors), null === my.logger) logger = function() {}; else {
        (logger = require("logger-request")({
            filename: my.logger,
@@ -170,19 +186,35 @@ function wrapper(my) {
        });
    }
    var metadata = "";
-    meta = !0 === my.metadata ? writeMetadata(my.documentStore) : function(a, b, c) {
+    meta = !0 === my.metadata ? writeMetadata : function(a, b, c) {
        return c();
    }, require("mongodb").MongoClient.connect(my.uri, my.options, function(err, db) {
        if (logger("db open"), err) return callback(err);
-        my.documentStore.addDatabase(db.databaseName, function(err, name) {
-            function go() {
-                return discriminator(db, name, my.query, metadata, parser, function(err) {
-                    if (logger("db close"), db.close(), err) return callback(err);
-                    my.documentStore.close(), callback(null);
-                }, my.collections);
-            }
+        var root = null === my.tar ? my.root : my.dir;
+        makeDir(root, function(err, name) {
            if (err) return callback(err);
-            !1 === my.metadata ? go() : my.documentStore.addCollection(".metadata", go);
+            makeDir(name + db.databaseName + path.sep, function(err, name) {
+                function go() {
+                    return discriminator(db, name, my.query, metadata, parser, function(err) {
+                        if (logger("db close"), db.close(), err) return callback(err);
+                        my.tar ? makeDir(my.root, function(e, name) {
+                            err && error(err);
+                            var dest;
+                            my.stream ? (logger("send tar file to stream"), dest = my.stream) : (logger("make tar file at " + name + my.tar),
+                            dest = fs.createWriteStream(name + my.tar));
+                            var packer = require("tar").Pack().on("error", callback).on("end", function() {
+                                rmDir(root), callback(null);
+                            });
+                            require("fstream").Reader({
+                                path: root + db.databaseName,
+                                type: "Directory"
+                            }).on("error", callback).pipe(packer).pipe(dest);
+                        }) : callback(null);
+                    }, my.collections);
+                }
+                if (err) return callback(err);
+                !1 === my.metadata ? go() : makeDir(metadata = name + ".metadata" + path.sep, go);
+            });
        });
    });
}
@@ -195,6 +227,7 @@ function backup(options) {
        if (fs.existsSync(opt.root) && !fs.statSync(opt.root).isDirectory()) throw new Error("root option is not a directory");
    }
    var my = {
+        dir: path.join(__dirname, "dump", path.sep),
        uri: String(opt.uri),
        root: path.resolve(String(opt.root || "")) + path.sep,
        stream: opt.stream || null,
@@ -208,64 +241,9 @@ function backup(options) {
        options: "object" == typeof opt.options ? opt.options : {},
        metadata: Boolean(opt.metadata)
    };
-    return my.tar && !my.stream && (my.stream = fs.createWriteStream(path.join(my.root, my.tar))),
-    my.stream ? (my.tar = !0, my.documentStore = streamingDocumentStore(my.root, my.stream)) : my.documentStore = fileSystemDocumentStore(my.root),
-    wrapper(my);
+    return my.stream && (my.tar = !0), wrapper(my);
}

-var systemRegex = /^system\./, fs = require("graceful-fs").gracefulify(require("fs-extra")), path = require("path"), BSON, logger, meta, fileSystemDocumentStore = function(root) {
-    var dbDir = root, makeDir = function(pathname, next) {
-        fs.stat(pathname, function(err, stats) {
-            return err && "ENOENT" === err.code ? (logger("make dir at " + pathname), fs.mkdirp(pathname, function(err) {
-                next(err, pathname);
-            })) : stats && !1 === stats.isDirectory() ? (logger("unlink file at " + pathname),
-            fs.unlink(pathname, function(err) {
-                if (err) return next(err);
-                logger("make dir at " + pathname), fs.mkdir(pathname, function(err) {
-                    next(err, pathname);
-                });
-            })) : void next(null, pathname);
-        });
-    };
-    return {
-        addDatabase: function(dbName, next) {
-            return dbDir = path.join(root, dbName), makeDir(dbDir, next);
-        },
-        addCollection: function(relativePath, next) {
-            var pathname = path.join(dbDir, relativePath);
-            return makeDir(pathname, next);
-        },
-        store: function(collectionName, relativePath, content, callback) {
-            fs.writeFile(path.join(dbDir, collectionName, relativePath), content, callback);
-        },
-        close: function() {}
-    };
-}, streamingDocumentStore = function(root, stream) {
-    var pack = require("tar-stream").pack();
-    pack.pipe(stream);
-    var dbDir = root;
-    return {
-        addDatabase: function(dbName, next) {
-            dbDir = path.join(root, dbName), pack.entry({
-                name: dbDir,
-                type: "directory"
-            }), next();
-        },
-        addCollection: function(filename, next) {
-            "" !== filename && pack.entry({
-                name: path.join(dbDir, filename),
-                type: "directory"
-            }), next();
-        },
-        store: function(collectionName, filename, content, callback) {
-            pack.entry({
-                name: path.join(dbDir, collectionName, filename)
-            }, content), callback && callback();
-        },
-        close: function() {
-            pack.finalize();
-        }
-    };
-};
+var systemRegex = /^system\./, fs = require("graceful-fs"), path = require("path"), BSON, logger, meta;

module.exports = backup;
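
For orientation, a minimal usage sketch of the exported backup function as it stands after this change. The option names (uri, root, collections, parser, metadata, tar) are taken from the my object built in backup() above; the require path and the callback option are assumptions for illustration and are not confirmed by these hunks.

// Hypothetical usage sketch -- option names come from the code above; the
// module path and the `callback` option are assumptions, not confirmed here.
var backup = require("./index");

backup({
    uri: "mongodb://127.0.0.1:27017/mydb", // passed straight to MongoClient.connect
    root: __dirname + "/dump/",            // target directory (see the root/dir handling above)
    collections: null,                     // null -> allCollections; an array -> someCollections
    parser: "json",                        // "json" | "bson" | a custom function(doc, name)
    metadata: true,                        // also dump index metadata via writeMetadata
    tar: "dump.tar",                       // when set, the dump is packed with tar/fstream and the temp dir removed by rmDir
    callback: function(err) {              // assumed completion callback
        if (err) return console.error(err);
        console.log("backup complete");
    }
});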