diff --git a/README.md b/README.md
index e45ea1b..07282bb 100644
--- a/README.md
+++ b/README.md
@@ -27,31 +27,30 @@ npm install ipfs-multipart
 ## Usage
 
 ```javascript
 const http = require('http')
-const IPFSMultipart = require('ipfs-multipart')
+const parser = require('ipfs-multipart')
 
-http.createServer((req, res) => {
+http.createServer(async (req, res) => {
   if (req.method === 'POST' && req.headers['content-type']) {
-    const parser = IPFSMultipart.reqParser(req)
-    parser.on('file', (fileName, fileStream) => {
-      console.log(`file ${fileName} start`)
+    for await (const entry of parser(req)) {
+      if (entry.type === 'directory') {
+        console.log(`dir ${entry.name} start`)
+      }
 
-      fileStream.on('data', (data) => {
-        console.log(`file ${fileName} contents:`, data.toString())
-      })
+      if (entry.type === 'file') {
+        console.log(`file ${entry.name} start`)
 
-      fileStream.on('end', (data) => {
-        console.log(`file ${fileName} end`)
-      })
-    })
+        for await (const data of entry.content) {
+          console.log(`file ${entry.name} contents:`, data.toString())
+        }
 
-    parser.on('end', () => {
-      console.log('finished parsing')
-      res.writeHead(200)
-      res.end()
-    })
+        console.log(`file ${entry.name} end`)
+      }
+    }
 
-    return
+    console.log('finished parsing')
+    res.writeHead(200)
+    res.end()
   }
 
   res.writeHead(404)
diff --git a/example.js b/example.js
index 49c9e6c..7335341 100644
--- a/example.js
+++ b/example.js
@@ -3,31 +3,23 @@
 /* eslint-disable no-console */
 
 const http = require('http')
-const IPFSMultipart = require('.')
+const multipart = require('ipfs-multipart')
 
-http.createServer((req, res) => {
+http.createServer(async (req, res) => {
   if (req.method === 'POST' && req.headers['content-type']) {
-    const parser = IPFSMultipart.reqParser(req)
-
-    parser.on('file', (fileName, fileStream) => {
-      console.log(`file ${fileName} start`)
-
-      fileStream.on('data', (data) => {
-        console.log(`file ${fileName} contents:`, data.toString())
-      })
-
-      fileStream.on('end', (data) => {
-        console.log(`file ${fileName} end`)
-      })
-    })
-
-    parser.on('end', () => {
-      console.log('finished parsing')
-      res.writeHead(200)
-      res.end()
-    })
-
-    return
+    for await (const part of multipart(req)) {
+      console.log(`file ${part.name} start`)
+
+      if (part.type === 'file') {
+        for await (const chunk of part.content) {
+          console.log(`file ${part.name} contents:`, chunk.toString())
+        }
+      }
+    }
+
+    console.log('finished parsing')
+    res.writeHead(200)
+    res.end()
   }
 
   res.writeHead(404)
diff --git a/package.json b/package.json
index 47c604b..279dde9 100644
--- a/package.json
+++ b/package.json
@@ -27,12 +27,12 @@
   },
   "dependencies": {
     "@hapi/content": "^4.1.0",
-    "dicer": "~0.3.0"
+    "it-multipart": "~0.0.2"
   },
   "devDependencies": {
     "aegir": "^20.0.0",
     "chai": "^4.2.0",
-    "ipfs-api": "github:ipfs/js-ipfs-api#1fd9749",
+    "ipfs-http-client": "^33.1.1",
     "request": "^2.88.0"
   },
   "engines": {
diff --git a/src/index.js b/src/index.js
index f5932d4..3a2620c 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,20 +1,17 @@
 'use strict'
 
 const content = require('@hapi/content')
-const Parser = require('./parser')
+const parser = require('./parser')
 
-module.exports = {
-  Parser,
-  /**
-   * Request Parser
-   *
-   * @param {Object} req - Request
-   * @returns {Parser}
-   */
-  reqParser: (req) => {
-    const boundary = content.type(req.headers['content-type']).boundary
-    const parser = new Parser({ boundary: boundary })
-    req.pipe(parser)
-    return parser
-  }
+/**
+ * Request Parser
+ *
+ * @param {Object} req - Request
+ * @param {Object} options - Options passed to stream constructors
+ * @returns {Object} an async iterable
+ */
+module.exports = (req, options = {}) => {
+  options.boundary = content.type(req.headers['content-type']).boundary
+
+  return parser(req.payload || req, options)
 }
diff --git a/src/parser.js b/src/parser.js
index 8586eb2..86ea02b 100644
--- a/src/parser.js
+++ b/src/parser.js
@@ -1,10 +1,7 @@
 'use strict'
 
-const Dicer = require('dicer')
 const Content = require('@hapi/content')
-const stream = require('stream')
-const util = require('util')
-const Transform = stream.Transform
+const multipart = require('it-multipart')
 
 const multipartFormdataType = 'multipart/form-data'
 const applicationDirectory = 'application/x-directory'
@@ -25,79 +22,79 @@ const parseDisposition = (disposition) => {
 }
 
 const parseHeader = (header) => {
-  const type = Content.type(header['content-type'][0])
-  const disposition = parseDisposition(header['content-disposition'][0])
+  const type = Content.type(header['content-type'])
+  const disposition = parseDisposition(header['content-disposition'])
 
   const details = type
-  details.name = disposition.name
+  details.name = decodeURIComponent(disposition.name)
   details.type = disposition.type
 
   return details
 }
 
-/**
- * Parser
- *
- * @constructor
- * @param {Object} options
- * @returns {Parser}
- */
-function Parser (options) {
-  // allow use without new
-  if (!(this instanceof Parser)) {
-    return new Parser(options)
-  }
-
-  this.dicer = new Dicer({ boundary: options.boundary })
-
-  this.dicer.on('part', (part) => this.handlePart(part))
-
-  this.dicer.on('error', (err) => this.emit('err', err))
+const collect = async (stream) => {
+  const buffers = []
+  let size = 0
 
-  this.dicer.on('finish', () => {
-    this.emit('finish')
-    this.emit('end')
-  })
+  for await (const buf of stream) {
+    size += buf.length
+    buffers.push(buf)
+  }
 
-  Transform.call(this, options)
+  return Buffer.concat(buffers, size)
 }
-util.inherits(Parser, Transform)
 
-Parser.prototype._transform = function (chunk, enc, cb) {
-  this.dicer.write(chunk, enc)
-  cb()
-}
+const ignore = async (stream) => {
+  for await (const _ of stream) { // eslint-disable-line no-unused-vars
 
-Parser.prototype._flush = function (cb) {
-  this.dicer.end()
-  cb()
+  }
 }
 
-Parser.prototype.handlePart = function (part) {
-  part.on('header', (header) => {
-    const partHeader = parseHeader(header)
+async function * parser (stream, options) {
+  for await (const part of multipart(stream, options.boundary)) {
+    const partHeader = parseHeader(part.headers)
 
     if (isDirectory(partHeader.mime)) {
-      part.on('data', () => false)
-      this.emit('directory', partHeader.name)
-      return
+      yield {
+        type: 'directory',
+        name: partHeader.name
+      }
+
+      await ignore(part.body)
+
+      continue
     }
 
     if (partHeader.mime === applicationSymlink) {
-      part.on('data', (target) => this.emit('symlink', partHeader.name, target.toString()))
-      return
+      const target = await collect(part.body)
+
+      yield {
+        type: 'symlink',
+        name: partHeader.name,
+        target: target.toString('utf8')
+      }
+
+      continue
     }
 
     if (partHeader.boundary) {
       // recursively parse nested multiparts
-      const parser = new Parser({ boundary: partHeader.boundary })
-      parser.on('file', (file) => this.emit('file', file))
-      part.pipe(parser)
-      return
+      for await (const entry of parser(part, {
+        ...options,
+        boundary: partHeader.boundary
+      })) {
+        yield entry
+      }
+
+      continue
     }
 
-    this.emit('file', partHeader.name, part)
-  })
+    yield {
+      type: 'file',
+      name: partHeader.name,
+      content: part.body
+    }
+  }
 }
 
-module.exports = Parser
+module.exports = parser
diff --git a/test/node.js b/test/node.js
deleted file mode 100644
index 7f91264..0000000
--- a/test/node.js
+++ /dev/null
@@ -1,14 +0,0 @@
-'use strict'
-
-/* eslint-env mocha */
-
-const fs = require('fs')
-
-describe('multipart', () => {
-  var tests = fs.readdirSync(__dirname)
-  tests
-    .filter((file) => file !== 'fixtures' && file !== 'index.js')
-    .forEach((file) => {
-      require('./' + file)
-    })
-})
diff --git a/test/parser.js b/test/parser.js
deleted file mode 100644
index 7be711f..0000000
--- a/test/parser.js
+++ /dev/null
@@ -1,210 +0,0 @@
-'use strict'
-
-/* eslint-env mocha */
-/* eslint-disable no-unused-expressions */
-
-const expect = require('chai').expect
-const APIctl = require('ipfs-api')
-const http = require('http')
-const path = require('path')
-const fs = require('fs')
-const request = require('request')
-const IPFSMultipart = require('..')
-const os = require('os')
-
-const isWindows = os.platform() === 'win32'
-
-describe('parser', () => {
-  const PORT = 6001
-
-  let ctl
-  let handler = (req, cb) => cb()
-
-  before((done) => {
-    http.createServer((req, res) => {
-      if (req.method === 'POST' && req.headers['content-type']) {
-        return handler(req, () => {
-          res.writeHead(200)
-          res.end()
-        })
-      }
-
-      res.writeHead(404)
-      res.end()
-    }).listen(PORT, () => {
-      ctl = APIctl(`/ip4/127.0.0.1/tcp/${PORT}`)
-      done()
-    })
-  })
-
-  describe('single file', () => {
-    const filePath = path.resolve(__dirname, 'fixtures/config')
-    const fileContent = fs.readFileSync(filePath, 'utf8')
-
-    before(() => {
-      handler = (req, cb) => {
-        expect(req.headers['content-type']).to.be.a('string')
-        const parser = IPFSMultipart.reqParser(req)
-
-        const files = []
-
-        parser.on('file', (fileName, fileStream) => {
-          const file = { fileName: fileName, content: '' }
-          fileStream.on('data', (data) => {
-            file.content = data.toString()
-          })
-          fileStream.on('end', (data) => {
-            files.push(file)
-          })
-        })
-
-        parser.on('end', () => {
-          expect(files.length).to.equal(1)
-          expect(files[0].fileName).to.contain('config')
-          expect(files[0].content).to.equal(fileContent)
-          cb()
-        })
-      }
-    })
-
-    it('parses ctl.config.replace correctly', (done) => {
-      ctl.config.replace(filePath, (err, res) => {
-        expect(err).not.to.exist
-        done()
-      })
-    })
-
-    it('parses regular multipart requests correctly', (done) => {
-      const formData = {
-        file: fs.createReadStream(filePath)
-      }
-
-      request.post({ url: `http://localhost:${PORT}`, formData: formData }, (err, httpResponse, body) => {
-        expect(err).not.to.exist
-        done()
-      })
-    })
-  })
-
-  describe('directory', () => {
-    const dirPath = path.resolve(__dirname, 'fixtures')
-
-    let files = []
-    let symlinks = []
-
-    before(() => {
-      handler = (req, cb) => {
-        expect(req.headers['content-type']).to.be.a('string')
-        const parser = IPFSMultipart.reqParser(req)
-
-        parser.on('file', (fileName, fileStream) => {
-          const file = { fileName: fileName, content: '' }
-          fileStream.on('data', (data) => {
-            file.content = data.toString()
-          })
-          fileStream.on('end', (data) => {
-            files.push(file)
-          })
-        })
-
-        parser.on('symlink', (fileName, target) => {
-          symlinks.push({ fileName: fileName, target: target })
-        })
-
-        parser.on('end', cb)
-      }
-    })
-
-    beforeEach(() => {
-      files = []
-      symlinks = []
-    })
-
-    it('parses ctl.add correctly', (done) => {
-      ctl.add(dirPath, { recursive: true, followSymlinks: false }, (err, res) => {
-        expect(err).to.not.exist
-
-        if (isWindows) {
-          return done()
-        }
-
-        expect(files.length).to.equal(3)
-        expect(files[0].fileName).to.equal('fixtures/config')
-        expect(files[1].fileName).to.equal('fixtures/otherfile')
-        expect(files[2].fileName).to.equal('fixtures/subfolder/deepfile')
-
-        expect(symlinks.length).to.equal(2)
-        expect(symlinks[0].fileName).to.equal('fixtures/folderlink')
-        expect(symlinks[1].fileName).to.equal('fixtures/link')
-        expect(symlinks[0].target).to.equal('subfolder')
-        expect(symlinks[1].target).to.equal('subfolder/deepfile')
-
-        done()
-      })
-    })
-
-    it('parses ctl.add following symlinks correctly', (done) => {
-      ctl.add(dirPath, { recursive: true }, (err, res) => {
-        expect(err).to.not.exist
-
-        expect(files.length).to.equal(5)
-        expect(symlinks.length).to.equal(0)
-
-        done()
-      })
-    })
-  })
-
-  describe('empty', () => {
-    before(() => {
-      handler = (req, cb) => {
-        expect(req.headers['content-type']).to.be.a('string')
-        const parser = IPFSMultipart.reqParser(req)
-
-        parser.on('end', cb)
-      }
-    })
-
-    it('does not block', (done) => {
-      request.post({ url: `http://localhost:${PORT}` }, (err, httpResponse, body) => {
-        expect(err).not.to.exist
-        done()
-      })
-    })
-  })
-
-  describe('buffer', () => {
-    const files = []
-
-    before(() => {
-      handler = (req, cb) => {
-        expect(req.headers['content-type']).to.be.a('string')
-        const parser = IPFSMultipart.reqParser(req)
-
-        parser.on('file', (fileName, fileStream) => {
-          const file = { fileName: fileName, content: '' }
-          fileStream.on('data', (data) => {
-            file.content = data.toString()
-          })
-          fileStream.on('end', (data) => {
-            files.push(file)
-          })
-        })
-
-        parser.on('end', cb)
-      }
-    })
-
-    it('parses ctl.add buffer correctly', (done) => {
-      ctl.add(Buffer.from('hello world'), (err, res) => {
-        expect(err).to.not.exist
-
-        expect(files.length).to.equal(1)
-        expect(files[0].fileName).to.equal('')
-        expect(files[0].content).to.equal('hello world')
-
-        done()
-      })
-    })
-  })
-})
diff --git a/test/parser.spec.js b/test/parser.spec.js
new file mode 100644
index 0000000..2214306
--- /dev/null
+++ b/test/parser.spec.js
@@ -0,0 +1,209 @@
+'use strict'
+
+/* eslint-env mocha */
+/* eslint-disable no-unused-expressions */
+
+const expect = require('chai').expect
+const APIctl = require('ipfs-http-client')
+const http = require('http')
+const path = require('path')
+const fs = require('fs')
+const request = require('request')
+const parser = require('../src')
+const os = require('os')
+
+const isWindows = os.platform() === 'win32'
+
+const readDir = (path, prefix, output = []) => {
+  const entries = fs.readdirSync(path)
+
+  entries.forEach(entry => {
+    // resolves symlinks
+    const entryPath = fs.realpathSync(`${path}/${entry}`)
+    const type = fs.statSync(entryPath)
+
+    if (type.isDirectory()) {
+      readDir(entryPath, `${prefix}/${entry}`, output)
+    }
+
+    if (type.isFile()) {
+      output.push({
+        path: `${prefix}/${entry}`,
+        content: fs.createReadStream(entryPath)
+      })
+    }
+  })
+
+  output.push({
+    path: prefix
+  })
+
+  return output
+}
+
+describe('parser', () => {
+  const PORT = 6001
+
+  let ctl
+  let handler = () => {}
+
+  before((done) => {
+    http.createServer((req, res) => {
+      if (req.method === 'POST' && req.headers['content-type']) {
+        handler(req)
+          .then(() => {
+            res.writeHead(200)
+          })
+          .catch(() => {
+            res.writeHead(500)
+          })
+          .then(() => {
+            res.end()
+          })
+
+        return
+      }
+
+      res.writeHead(404)
+      res.end()
+    }).listen(PORT, () => {
+      ctl = APIctl(`/ip4/127.0.0.1/tcp/${PORT}`)
+      done()
+    })
+  })
+
+  describe('single file', () => {
+    const filePath = path.resolve(__dirname, 'fixtures/config')
+    const fileContent = fs.readFileSync(filePath, 'utf8')
+
+    before(() => {
+      handler = async (req) => {
+        expect(req.headers['content-type']).to.be.a('string')
+
+        const files = []
+
+        for await (const entry of parser(req)) {
+          if (entry.type === 'file') {
+            const file = { name: entry.name, content: '' }
+
+            for await (const data of entry.content) {
+              file.content += data.toString()
+            }
+
+            files.push(file)
+          }
+        }
+
+        expect(files.length).to.equal(1)
+        expect(files[0].name).to.equal('config')
+        expect(files[0].content).to.equal(fileContent)
+      }
+    })
+
+    it('parses ctl.config.replace correctly', async () => {
+      await ctl.config.replace(filePath)
+    })
+
+    it('parses regular multipart requests correctly', (done) => {
+      const formData = {
+        file: fs.createReadStream(filePath)
+      }
+
+      request.post({ url: `http://localhost:${PORT}`, formData: formData }, (err) => done(err))
+    })
+  })
+
+  describe('directory', () => {
+    const dirPath = path.resolve(__dirname, 'fixtures')
+
+    let files = []
+
+    before(() => {
+      handler = async (req) => {
+        expect(req.headers['content-type']).to.be.a('string')
+
+        for await (const entry of parser(req)) {
+          if (entry.type === 'file') {
+            const file = { name: entry.name, content: '' }
+
+            for await (const data of entry.content) {
+              file.content += data.toString()
+            }
+
+            files.push(file)
+          }
+        }
+      }
+    })
+
+    beforeEach(() => {
+      files = []
+    })
+
+    it('parses ctl.add correctly', async () => {
+      const contents = readDir(dirPath, 'fixtures')
+
+      await ctl.add(contents, { recursive: true, followSymlinks: false })
+
+      if (isWindows) {
+        return
+      }
+
+      expect(files.length).to.equal(5)
+      expect(files[0].name).to.equal('fixtures/config')
+      expect(files[1].name).to.equal('fixtures/folderlink/deepfile')
+      expect(files[2].name).to.equal('fixtures/link')
+      expect(files[3].name).to.equal('fixtures/otherfile')
+      expect(files[4].name).to.equal('fixtures/subfolder/deepfile')
+    })
+  })
+
+  describe('empty', () => {
+    before(() => {
+      handler = async (req) => {
+        expect(req.headers['content-type']).to.be.a('string')
+
+        for await (const _ of parser(req)) { // eslint-disable-line no-unused-vars
+
+        }
+      }
+    })
+
+    it('does not block', (done) => {
+      request.post({ url: `http://localhost:${PORT}` }, (err, httpResponse, body) => {
+        expect(err).not.to.exist
+        done()
+      })
+    })
+  })
+
+  describe('buffer', () => {
+    const files = []
+
+    before(() => {
+      handler = async (req) => {
+        expect(req.headers['content-type']).to.be.a('string')
+
+        for await (const entry of parser(req)) {
+          if (entry.type === 'file') {
+            const file = { name: entry.name, content: '' }
+
+            for await (const data of entry.content) {
+              file.content += data.toString()
+            }
+
+            files.push(file)
+          }
+        }
+      }
+    })
+
+    it('parses ctl.add buffer correctly', async () => {
+      await ctl.add(Buffer.from('hello world'))
+
+      expect(files.length).to.equal(1)
+      expect(files[0].name).to.equal('')
+      expect(files[0].content).to.equal('hello world')
+    })
+  })
+})