
Commit ae4d9d3

Merge pull request #4 from ipfs/importer

WIP - Importer

2 parents 1c1a1d7 + bc1f2b7

61 files changed, +25802 -17 lines


.gitignore

Lines changed: 4 additions & 0 deletions
@@ -1,3 +1,7 @@
+
+.DS_Store
+tests/repo-tests*
+
 # Logs
 logs
 *.log

package.json

Lines changed: 11 additions & 2 deletions
@@ -5,8 +5,8 @@
   "main": "src/index.js",
   "scripts": {
     "lint": "standard",
-    "coverage": "istanbul cover --print both -- _mocha tests/test-*.js",
-    "test": "mocha tests/test-*.js"
+    "coverage": "istanbul cover --print both -- _mocha tests/index.js",
+    "test": "mocha tests/index.js"
   },
   "repository": {
     "type": "git",
@@ -22,14 +22,23 @@
   },
   "homepage": "https://github.com/diasdavid/js-ipfs-data-importing#readme",
   "devDependencies": {
+    "bs58": "^3.0.0",
     "chai": "^3.4.1",
+    "fs-blob-store": "^5.2.1",
+    "ipfs-repo": "^0.5.0",
     "istanbul": "^0.4.1",
     "mocha": "^2.3.4",
+    "ncp": "^2.0.0",
     "pre-commit": "^1.1.2",
+    "rimraf": "^2.5.1",
     "standard": "^5.4.1"
   },
   "dependencies": {
+    "async": "^1.5.2",
     "debug": "^2.2.0",
+    "ipfs-blocks": "^0.1.0",
+    "ipfs-merkle-dag": "^0.2.1",
+    "ipfs-unixfs": "^0.1.0",
     "through2": "^2.0.0"
   }
 }

src/chunker-fixed-size.js

Lines changed: 3 additions & 3 deletions
@@ -20,9 +20,9 @@ function FixedSizeChunker (size) {
   }

   function slice () {
-    var chunk = new Buffer(size)
-    var newBuf = new Buffer(buf.length - size)
-    buf.copy(chunk, 0, 0, size - 1)
+    var chunk = new Buffer(size, 'binary')
+    var newBuf = new Buffer(buf.length - size, 'binary')
+    buf.copy(chunk, 0, 0, size)
     buf.copy(newBuf, 0, size - 1, buf.length - size)
     buf = newBuf
     that.push(chunk)

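A note on the buf.copy change above: Node's Buffer#copy takes (target, targetStart, sourceStart, sourceEnd) and treats sourceEnd as exclusive, so passing size copies the whole chunk whereas size - 1 left the chunk's final byte uncopied. A minimal sketch of the semantics (illustrative only; it uses the current Buffer.from/Buffer.alloc API rather than the deprecated new Buffer() seen in the diff):

    const buf = Buffer.from('abcdef')
    const size = 3

    const chunk = Buffer.alloc(size)
    buf.copy(chunk, 0, 0, size)       // sourceEnd is exclusive: copies bytes 0..2
    console.log(chunk.toString())     // 'abc'

    const short = Buffer.alloc(size)
    buf.copy(short, 0, 0, size - 1)   // the old off-by-one: copies only bytes 0..1
    console.log(short.toString())     // 'ab\u0000' (last byte never written)
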
src/index.js

Lines changed: 157 additions & 11 deletions
@@ -1,27 +1,173 @@
-var debug = require('debug')
-var log = debug('importer')
+const debug = require('debug')
+const log = debug('importer')
 log.err = debug('importer:error')
-var fs = require('fs')
+const fs = require('fs')
+const mDAG = require('ipfs-merkle-dag')
+const FixedSizeChunker = require('./chunker-fixed-size')
+const through2 = require('through2')
+const UnixFS = require('ipfs-unixfs')
+const async = require('async')

 exports = module.exports

+const CHUNK_SIZE = 262144
+
 // Use a layout + chunkers to convert a directory (or file) to the layout format
-exports.import = function (options, callback) {
-  // options.store -> where to write stuff (typically js-ipfs-repo.datastore, which impls blob-store)
-  // options.path -> where to
+exports.import = (options, callback) => {
+  // options.path -> what to import
   // options.recursive -> follow dirs
   // options.chunkers -> obj with chunkers to each type of data, { default: dumb-chunker }
+  // options.dag-service-> instance of block service
+  const dagService = options.dagService

-  var pathStats = fs.statSync(options.path)
-  if (pathStats.isFile()) {
+  const stats = fs.statSync(options.path)
+  if (stats.isFile()) {
+    fileImporter(options.path, callback)
+  } else if (stats.isDirectory() && options.recursive) {
+    dirImporter(options.path, callback)
+  } else {
+    return callback(new Error('recursive must be true to add a directory'))
+  }

+  function fileImporter (path, callback) {
+    const stats = fs.statSync(path)
+    if (stats.size > CHUNK_SIZE) {
+      const links = [] // { Hash: , Size: , Name: }

-  } else if (pathStats.isDir() && options.recursive) {
+      fs.createReadStream(path)
+        .pipe(new FixedSizeChunker(CHUNK_SIZE))
+        .pipe(through2((chunk, enc, cb) => {
+          // TODO: check if this is right (I believe it should be type 'raw'
+          // https://github.com/ipfs/go-ipfs/issues/2331
+          const raw = new UnixFS('file', chunk)

-  } else {
-    return callback(new Error('recursive must be true to add a directory'))
+          const node = new mDAG.DAGNode(raw.marshal())
+
+          dagService.add(node, function (err) {
+            if (err) {
+              return log.err(err)
+            }
+            links.push({
+              Hash: node.multihash(),
+              Size: node.size(),
+              leafSize: raw.fileSize(),
+              Name: ''
+            })
+
+            cb()
+          })
+        }, (cb) => {
+          const file = new UnixFS('file')
+          const parentNode = new mDAG.DAGNode()
+          links.forEach((l) => {
+            file.addBlockSize(l.leafSize)
+            const link = new mDAG.DAGLink(l.Name, l.Size, l.Hash)
+            parentNode.addRawLink(link)
+          })
+
+          parentNode.data = file.marshal()
+          dagService.add(parentNode, (err) => {
+            if (err) {
+              return log.err(err)
+            }
+
+            const pathSplit = path.split('/')
+            const fileName = pathSplit[pathSplit.length - 1]
+
+            callback(null, {
+              Hash: parentNode.multihash(),
+              Size: parentNode.size(),
+              Name: fileName
+            }) && cb()
+          })
+        }))
+    } else {
+      // create just one file node with the data directly
+      const fileUnixFS = new UnixFS('file', fs.readFileSync(path))
+      const fileNode = new mDAG.DAGNode(fileUnixFS.marshal())
+
+      dagService.add(fileNode, (err) => {
+        if (err) {
+          return log.err(err)
+        }
+
+        const split = path.split('/')
+        const fileName = split[split.length - 1]
+
+        callback(null, {
+          Hash: fileNode.multihash(),
+          Size: fileNode.size(),
+          Name: fileName
+        })
+      })
+    }
+  }
+
+  function dirImporter (path, callback) {
+    const files = fs.readdirSync(path)
+    const dirUnixFS = new UnixFS('directory')
+    const dirNode = new mDAG.DAGNode()
+
+    if (files.length === 0) {
+      dirNode.data = dirUnixFS.marshal()
+      dagService.add(dirNode, (err) => {
+        if (err) {
+          return callback(err)
+        }
+
+        const split = path.split('/')
+        const dirName = split[split.length - 1]
+
+        callback(null, {
+          Hash: dirNode.multihash(),
+          Size: dirNode.size(),
+          Name: dirName
+        })
+      })
+      return
+    }
+
+    async.map(
+      files,
+      (file, cb) => {
+        const filePath = path + '/' + file
+        const stats = fs.statSync(filePath)
+        if (stats.isFile()) {
+          return fileImporter(filePath, cb)
+        } if (stats.isDirectory()) {
+          return dirImporter(filePath, cb)
+        } else {
+          return callback(new Error('Found a weird file' + path + file))
+        }
+      },
+      (err, results) => {
+        if (err) {
+          return callback(err)
+        }
+        results.forEach((result) => {
+          dirNode.addRawLink(new mDAG.DAGLink(result.Name, result.Size, result.Hash))
+          dirNode.data = dirUnixFS.marshal()
+          dagService.add(dirNode, (err) => {
+            if (err) {
+              return callback(err)
+            }
+
+            const split = path.split('/')
+            const dirName = split[split.length - 1]
+
+            callback(null, {
+              Hash: dirNode.multihash(),
+              Size: dirNode.size(),
+              Name: dirName
+            })
+          })
+        })
+      })
   }
+  // function bufferImporter (path) {}
+  // function streamImporter (path) {}
 }

 exports.export = function () {
+  // export into files by hash
 }

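For orientation, a usage sketch of the new exports.import API. The call shape (path, recursive, dagService, and the { Hash, Size, Name } result) comes straight from the diff; the wiring of dagService below follows the pattern the new devDependencies suggest (ipfs-repo feeding an ipfs-blocks BlockService feeding an ipfs-merkle-dag DAGService), but those constructor calls and the getRepo() helper are assumptions, not part of this commit. Files larger than CHUNK_SIZE (262144 bytes, i.e. 256 KiB) are chunked and linked under a parent UnixFS node; smaller files become a single node.

    const BlockService = require('ipfs-blocks').BlockService   // assumed export
    const DAGService = require('ipfs-merkle-dag').DAGService   // assumed export
    const importer = require('./src')

    // getRepo() is a placeholder for obtaining an initialized ipfs-repo instance
    // (the test suite copies tests/repo-example into a temp dir and points
    // IPFS_PATH at it); its construction is omitted here.
    const repo = getRepo()
    const blockService = new BlockService(repo)
    const dagService = new DAGService(blockService)

    importer.import({
      path: '/tmp/some-dir',    // what to import
      recursive: true,          // required when path is a directory
      dagService: dagService
    }, (err, stat) => {
      if (err) throw err
      // stat -> { Hash: <multihash>, Size: <node size>, Name: 'some-dir' }
      console.log(stat)
    })
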
src/layout-merkledag.js

Lines changed: 0 additions & 1 deletion
This file was deleted.

tests/index.js

Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
+/* globals describe, before, after */
+
+'use strict'
+
+const fs = require('fs')
+const ncp = require('ncp').ncp
+const rimraf = require('rimraf')
+const expect = require('chai').expect
+
+describe('core', () => {
+  const repoExample = process.cwd() + '/tests/repo-example'
+  const repoTests = process.cwd() + '/tests/repo-tests' + Date.now()
+
+  before(done => {
+    ncp(repoExample, repoTests, err => {
+      process.env.IPFS_PATH = repoTests
+      expect(err).to.equal(null)
+      done()
+    })
+  })
+
+  after(done => {
+    rimraf(repoTests, err => {
+      expect(err).to.equal(null)
+      done()
+    })
+  })
+
+  const tests = fs.readdirSync(__dirname)
+  tests.filter(file => {
+    if (file === 'index.js' ||
+        file === 'browser.js' ||
+        file === 'test-data' ||
+        file === 'repo-example' ||
+        file.indexOf('repo-tests') > -1) {
+      return false
+    } else {
+      return true
+    }
+  }).forEach(file => {
+    require('./' + file)
+  })
+})
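
With this harness, npm test runs mocha against tests/index.js only; the before/after hooks copy tests/repo-example into a throwaway repo, point IPFS_PATH at it, and every sibling test file outside the exclusion list gets require()'d into the same mocha run. A hedged sketch of a file it would pick up (the name tests/test-import.js and its contents are illustrative, not part of this commit):

    /* globals describe, it */
    'use strict'

    const expect = require('chai').expect

    describe('import', () => {
      it('is exposed by the module', () => {
        // real assertions would exercise exports.import against the repo
        // prepared by tests/index.js; this is only a shape check
        expect(require('../src').import).to.be.a('function')
      })
    })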

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
+
+��Hello and Welcome to IPFS!
+
+██╗██████╗ ███████╗███████╗
+██║██╔══██╗██╔════╝██╔════╝
+██║██████╔╝█████╗  ███████╗
+██║██╔═══╝ ██╔══╝  ╚════██║
+██║██║     ██║     ███████║
+╚═╝╚═╝     ╚═╝     ╚══════╝
+
+If you're seeing this, you have successfully installed
+IPFS and are now interfacing with the ipfs merkledag!
+
+ -------------------------------------------------------
+| Warning:                                              |
+|   This is alpha software. Use at your own discretion! |
+|   Much is missing or lacking polish. There are bugs.  |
+|   Not yet secure. Read the security notes for more.   |
+ -------------------------------------------------------
+
+Check out some of the other files in this directory:
+
+  ./about
+  ./help
+  ./quick-start     <-- usage examples
+  ./readme          <-- this file
+  ./security-notes
+�

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
(binary repo fixture block, 4 lines of mostly non-text bytes; the only readable fragments are "5" and "js-ipfs-repo")
