diff --git a/package.json b/package.json
index 89fc756..2413edd 100644
--- a/package.json
+++ b/package.json
@@ -5,12 +5,15 @@
   "leadMaintainer": "Alex Potsides ",
   "main": "src/index.js",
   "browser": {
-    "fs": false,
-    "@hapi/joi": "joi-browser"
+    "@hapi/joi": "joi-browser",
+    "fs": false
   },
   "scripts": {
     "test": "aegir test",
     "test:node": "aegir test -t node",
+    "test:cli": "aegir test -t node -f test/cli/**/*.js",
+    "test:core": "aegir test -t node -f test/core/**/*.js",
+    "test:http": "aegir test -t node -f test/http/**/*.js",
     "test:browser": "aegir test -t browser",
     "test:webworker": "aegir test -t webworker",
     "build": "aegir build",
@@ -18,7 +21,7 @@
     "release": "aegir release",
     "release-minor": "aegir release --type minor",
     "release-major": "aegir release --type major",
-    "coverage": "aegir coverage",
+    "coverage": "nyc --reporter=text --reporter=lcov npm run test:node",
     "dep-check": "aegir dep-check"
   },
   "repository": {
@@ -38,36 +41,46 @@
   },
   "homepage": "https://github.com/ipfs/js-ipfs-mfs#readme",
   "devDependencies": {
+    "@hapi/hapi": "^18.4.0",
     "aegir": "^20.0.0",
-    "async-iterator-all": "^1.0.0",
     "chai": "^4.2.0",
+    "chai-as-promised": "^7.1.1",
+    "delay": "^4.3.0",
     "detect-node": "^2.0.4",
     "detect-webworker": "^1.0.0",
     "dirty-chai": "^2.0.1",
+    "form-data": "^3.0.0",
     "ipfs-block-service": "~0.16.0",
-    "ipfs-repo": "~0.27.0",
+    "ipfs-repo": "^0.30.1",
     "ipld": "~0.25.0",
-    "memdown": "^4.0.0",
-    "temp-write": "^4.0.0"
+    "it-all": "^1.0.1",
+    "memdown": "^5.1.0",
+    "nyc": "^15.0.0",
+    "sinon": "^8.0.4",
+    "stream-to-promise": "^2.2.0",
+    "temp-write": "^4.0.0",
+    "yargs": "^15.0.2",
+    "yargs-promise": "^1.1.0"
   },
   "dependencies": {
     "@hapi/boom": "^7.4.2",
     "@hapi/joi": "^15.1.0",
-    "async-iterator-last": "^1.0.0",
-    "cids": "~0.7.1",
+    "cids": "^0.7.1",
     "debug": "^4.1.0",
     "err-code": "^2.0.0",
-    "hamt-sharding": "~0.0.2",
-    "interface-datastore": "~0.7.0",
-    "ipfs-multipart": "~0.2.0",
-    "ipfs-unixfs": "~0.1.16",
-    "ipfs-unixfs-exporter": "~0.38.0",
-    "ipfs-unixfs-importer": "~0.40.0",
-    "ipld-dag-pb": "~0.18.0",
+    "hamt-sharding": "^1.0.0",
+    "interface-datastore": "^0.8.0",
+    "ipfs-multipart": "^0.3.0",
+    "ipfs-unixfs": "^0.3.0",
+    "ipfs-unixfs-exporter": "^0.40.0",
+    "ipfs-unixfs-importer": "^0.43.0",
+    "ipfs-utils": "^0.4.2",
+    "ipld-dag-pb": "^0.18.0",
+    "it-last": "^1.0.1",
     "joi-browser": "^13.4.0",
     "mortice": "^2.0.0",
-    "multicodec": "~0.5.3",
-    "multihashes": "~0.4.14",
+    "multicodec": "^1.0.0",
+    "multihashes": "^0.4.14",
     "once": "^1.4.0",
     "pull-stream": "^3.6.9"
   },
diff --git a/src/cli/chmod.js b/src/cli/chmod.js
new file mode 100644
index 0000000..d8bd0b4
--- /dev/null
+++ b/src/cli/chmod.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const {
+  asBoolean,
+  asOctal
+} = require('./utils')
+
+module.exports = {
+  command: 'chmod [mode] [path]',
+
+  describe: 'Change file modes',
+
+  builder: {
+    path: {
+      type: 'string',
+      describe: 'The MFS path to change the mode of'
+    },
+    mode: {
+      type: 'int',
+      coerce: asOctal,
+      describe: 'The mode to use'
+    },
+    recursive: {
+      alias: 'r',
+      type: 'boolean',
+      default: false,
+      coerce: asBoolean,
+      describe: 'Whether to change modes recursively'
+    },
+    codec: {
+      alias: 'c',
+      type: 'string',
+      default: 'dag-pb',
+      describe: 'If intermediate directories are created, use this codec to create them (experimental)'
+    },
+    'hash-alg': {
+      alias: 'h',
+      type: 'string',
+      default: 'sha2-256',
+      describe: 'Hash function to use.
Will set CID version to 1 if used' + }, + flush: { + alias: 'f', + type: 'boolean', + default: true, + coerce: asBoolean, + describe: 'Flush the changes to disk immediately' + }, + 'shard-split-threshold': { + type: 'number', + default: 1000, + describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' + } + }, + + handler (argv) { + const { + path, + mode, + getIpfs, + recursive, + codec, + hashAlg, + flush, + shardSplitThreshold + } = argv + + argv.resolve((async () => { + const ipfs = await getIpfs() + + return ipfs.files.chmod(path, mode, { + recursive, + format: codec, + hashAlg, + flush, + shardSplitThreshold + }) + })()) + } +} diff --git a/src/cli/cp.js b/src/cli/cp.js index 473fb33..3be124b 100644 --- a/src/cli/cp.js +++ b/src/cli/cp.js @@ -17,11 +17,11 @@ module.exports = { coerce: asBoolean, describe: 'Create any non-existent intermediate directories' }, - format: { - alias: 'h', + codec: { + alias: 'c', type: 'string', default: 'dag-pb', - describe: 'If intermediate directories are created, use this format to create them (experimental)' + describe: 'If intermediate directories are created, use this codec to create them (experimental)' }, 'hash-alg': { alias: 'h', @@ -29,6 +29,13 @@ module.exports = { default: 'sha2-256', describe: 'Hash function to use. Will set CID version to 1 if used' }, + flush: { + alias: 'f', + type: 'boolean', + default: true, + coerce: asBoolean, + describe: 'Flush the changes to disk immediately' + }, 'shard-split-threshold': { type: 'number', default: 1000, @@ -42,7 +49,8 @@ module.exports = { dest, getIpfs, parents, - format, + codec, + flush, hashAlg, shardSplitThreshold } = argv @@ -51,7 +59,8 @@ module.exports = { const ipfs = await getIpfs() return ipfs.files.cp(source, dest, { parents, - format, + format: codec, + flush, hashAlg, shardSplitThreshold }) diff --git a/src/cli/index.js b/src/cli/index.js index 1c83aba..ae87d7a 100644 --- a/src/cli/index.js +++ b/src/cli/index.js @@ -1,9 +1,5 @@ 'use strict' -const { - print -} = require('./utils') - const command = { command: 'files ', @@ -14,7 +10,7 @@ const command = { }, handler (argv) { - print('Type `jsipfs files --help` for more instructions') + argv.print('Type `jsipfs files --help` for more instructions') } } diff --git a/src/cli/ls.js b/src/cli/ls.js index 236fd89..c66cc77 100644 --- a/src/cli/ls.js +++ b/src/cli/ls.js @@ -4,12 +4,13 @@ const pull = require('pull-stream/pull') const onEnd = require('pull-stream/sinks/on-end') const through = require('pull-stream/throughs/through') const { - print, asBoolean } = require('./utils') const { FILE_SEPARATOR } = require('../core/utils/constants') +const formatMode = require('ipfs-utils/src/files/format-mode') +const formatMtime = require('ipfs-utils/src/files/format-mtime') module.exports = { command: 'ls [path]', @@ -43,18 +44,15 @@ module.exports = { getIpfs, long, sort, - cidBase + cidBase, + print } = argv argv.resolve((async () => { const ipfs = await getIpfs() return new Promise((resolve, reject) => { if (sort) { - ipfs.files.ls(path || FILE_SEPARATOR, { - long, - sort, - cidBase - }) + ipfs.files.ls(path || FILE_SEPARATOR) .then(files => { // https://github.com/ipfs/go-ipfs/issues/5181 if (sort) { @@ -64,8 +62,8 @@ module.exports = { } if (long) { - files.forEach(link => { - print(`${link.name}\t${link.hash}\t${link.size}`) + files.forEach(file => { + print(`${formatMode(file.mode, file.type === 1)}\t${formatMtime(file.mtime)}\t${file.name}\t${file.hash}\t${file.size}`) }) } else { 
files.forEach(link => print(link.name)) @@ -85,7 +83,7 @@ module.exports = { }), through(file => { if (long) { - print(`${file.name}\t${file.hash}\t${file.size}`) + print(`${formatMode(file.mode, file.type === 1)}\t${formatMtime(file.mtime)}\t${file.name}\t${file.hash}\t${file.size}`) } else { print(file.name) } diff --git a/src/cli/mkdir.js b/src/cli/mkdir.js index 4e70fe9..fcdfe09 100644 --- a/src/cli/mkdir.js +++ b/src/cli/mkdir.js @@ -1,7 +1,9 @@ 'use strict' const { - asBoolean + asBoolean, + asOctal, + asDateFromSeconds } = require('./utils') module.exports = { @@ -23,9 +25,17 @@ module.exports = { default: 0, describe: 'Cid version to use. (experimental).' }, + codec: { + alias: 'c', + type: 'string', + default: 'dag-pb', + describe: 'If intermediate directories are created, use this codec to create them (experimental)' + }, 'hash-alg': { + alias: 'h', type: 'string', - describe: 'Hash function to use. Will set Cid version to 1 if used. (experimental).' + default: 'sha2-256', + describe: 'Hash function to use. Will set CID version to 1 if used' }, flush: { alias: 'f', @@ -38,6 +48,16 @@ module.exports = { type: 'number', default: 1000, describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' + }, + mode: { + type: 'number', + coerce: asOctal, + describe: 'Mode to apply to the new directory' + }, + mtime: { + type: 'date', + coerce: asDateFromSeconds, + describe: 'Mtime to apply to the new directory in seconds' } }, @@ -47,9 +67,12 @@ module.exports = { getIpfs, parents, cidVersion, + codec, hashAlg, flush, - shardSplitThreshold + shardSplitThreshold, + mode, + mtime } = argv argv.resolve((async () => { @@ -58,9 +81,12 @@ module.exports = { return ipfs.files.mkdir(path, { parents, cidVersion, + format: codec, hashAlg, flush, - shardSplitThreshold + shardSplitThreshold, + mode, + mtime }) })()) } diff --git a/src/cli/mv.js b/src/cli/mv.js index d1d391c..764d19a 100644 --- a/src/cli/mv.js +++ b/src/cli/mv.js @@ -24,6 +24,31 @@ module.exports = { coerce: asBoolean, describe: 'Remove directories recursively' }, + 'cid-version': { + alias: ['cid-ver'], + type: 'number', + default: 0, + describe: 'Cid version to use. (experimental).' + }, + codec: { + alias: 'c', + type: 'string', + default: 'dag-pb', + describe: 'If intermediate directories are created, use this codec to create them (experimental)' + }, + 'hash-alg': { + alias: 'h', + type: 'string', + default: 'sha2-256', + describe: 'Hash function to use. 
Will set CID version to 1 if used' + }, + flush: { + alias: 'f', + type: 'boolean', + default: true, + coerce: asBoolean, + describe: 'Flush the changes to disk immediately' + }, 'shard-split-threshold': { type: 'number', default: 1000, @@ -38,6 +63,10 @@ module.exports = { getIpfs, parents, recursive, + cidVersion, + codec, + hashAlg, + flush, shardSplitThreshold } = argv @@ -47,6 +76,10 @@ module.exports = { return ipfs.files.mv(source, dest, { parents, recursive, + cidVersion, + format: codec, + hashAlg, + flush, shardSplitThreshold }) })()) diff --git a/src/cli/read.js b/src/cli/read.js index 02ae3f6..a0cf1b3 100644 --- a/src/cli/read.js +++ b/src/cli/read.js @@ -3,9 +3,6 @@ const pull = require('pull-stream/pull') const through = require('pull-stream/throughs/through') const onEnd = require('pull-stream/sinks/on-end') -const { - print -} = require('./utils') module.exports = { command: 'read ', @@ -29,6 +26,7 @@ module.exports = { const { path, getIpfs, + print, offset, length } = argv diff --git a/src/cli/stat.js b/src/cli/stat.js index 5807f0a..c2c6bd3 100644 --- a/src/cli/stat.js +++ b/src/cli/stat.js @@ -1,8 +1,7 @@ 'use strict' const { - asBoolean, - print + asBoolean } = require('./utils') module.exports = { @@ -18,8 +17,10 @@ module.exports = { Size: CumulativeSize: ChildBlocks: -Type: `, - describe: 'Print statistics in given format. Allowed tokens: . Conflicts with other format options.' +Type: +Mode: +Mtime: `, + describe: 'Print statistics in given format. Allowed tokens: . Conflicts with other format options.' }, hash: { alias: 'h', @@ -52,6 +53,7 @@ Type: `, const { path, getIpfs, + print, format, hash, size, @@ -79,6 +81,8 @@ Type: `, .replace('', stats.cumulativeSize) .replace('', stats.blocks) .replace('', stats.type) + .replace('', stats.mode) + .replace('', stats.mtime) ) }) })()) diff --git a/src/cli/touch.js b/src/cli/touch.js new file mode 100644 index 0000000..260a04d --- /dev/null +++ b/src/cli/touch.js @@ -0,0 +1,78 @@ +'use strict' + +const { + asBoolean, + asDateFromSeconds +} = require('./utils') + +module.exports = { + command: 'touch [path]', + + describe: 'change file modification times', + + builder: { + mtime: { + alias: 'm', + type: 'date', + coerce: asDateFromSeconds, + default: Date.now(), + describe: 'Time to use as the new modification time' + }, + flush: { + alias: 'f', + type: 'boolean', + default: true, + coerce: asBoolean, + describe: 'Flush the changes to disk immediately' + }, + 'cid-version': { + alias: ['cid-ver'], + type: 'number', + default: 0, + describe: 'Cid version to use. (experimental).' + }, + codec: { + alias: 'c', + type: 'string', + default: 'dag-pb', + describe: 'If intermediate directories are created, use this codec to create them (experimental)' + }, + 'hash-alg': { + alias: 'h', + type: 'string', + default: 'sha2-256', + describe: 'Hash function to use. 
Will set CID version to 1 if used' + }, + 'shard-split-threshold': { + type: 'number', + default: 1000, + describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' + } + }, + + handler (argv) { + const { + path, + getIpfs, + flush, + cidVersion, + codec, + hashAlg, + shardSplitThreshold, + mtime + } = argv + + argv.resolve((async () => { + const ipfs = await getIpfs() + + return ipfs.files.touch(path, { + mtime, + flush, + cidVersion, + format: codec, + hashAlg, + shardSplitThreshold + }) + })()) + } +} diff --git a/src/cli/utils.js b/src/cli/utils.js index c8169ce..fc0dcf0 100644 --- a/src/cli/utils.js +++ b/src/cli/utils.js @@ -31,8 +31,18 @@ const asBoolean = (value) => { return false } +const asOctal = (value) => { + return parseInt(value, 8) +} + +const asDateFromSeconds = (value) => { + return new Date(parseInt(value, 10) * 1000) +} + module.exports = { disablePrinting, print, - asBoolean + asBoolean, + asOctal, + asDateFromSeconds } diff --git a/src/cli/write.js b/src/cli/write.js index 80fc382..754ef93 100644 --- a/src/cli/write.js +++ b/src/cli/write.js @@ -1,7 +1,9 @@ 'use strict' const { - asBoolean + asBoolean, + asOctal, + asDateFromSeconds } = require('./utils') module.exports = { @@ -13,6 +15,7 @@ module.exports = { parents: { alias: 'p', type: 'boolean', + default: false, describe: 'Create any non-existent intermediate directories' }, create: { @@ -75,7 +78,8 @@ module.exports = { type: 'string', default: 'sha2-256' }, - format: { + codec: { + alias: ['c'], type: 'string', default: 'dag-pb' }, @@ -83,6 +87,17 @@ module.exports = { type: 'number', default: 1000, describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' + }, + mode: { + type: 'int', + coerce: asOctal, + describe: 'The mode to use' + }, + mtime: { + alias: 'm', + type: 'date', + coerce: asDateFromSeconds, + describe: 'Time to use as the new modification time' } }, @@ -90,6 +105,7 @@ module.exports = { const { path, getIpfs, + getStdin, offset, length, create, @@ -98,18 +114,20 @@ module.exports = { reduceSingleLeafToSelf, cidVersion, hashAlg, - format, + codec, parents, progress, strategy, flush, - shardSplitThreshold + shardSplitThreshold, + mode, + mtime } = argv argv.resolve((async () => { const ipfs = await getIpfs() - await ipfs.files.write(path, process.stdin, { + await ipfs.files.write(path, getStdin(), { offset, length, create, @@ -118,12 +136,14 @@ module.exports = { reduceSingleLeafToSelf, cidVersion, hashAlg, - format, + format: codec, parents, progress, strategy, flush, - shardSplitThreshold + shardSplitThreshold, + mode, + mtime }) })()) } diff --git a/src/core/chmod.js b/src/core/chmod.js new file mode 100644 index 0000000..3b32c07 --- /dev/null +++ b/src/core/chmod.js @@ -0,0 +1,204 @@ +'use strict' + +const applyDefaultOptions = require('./utils/apply-default-options') +const toMfsPath = require('./utils/to-mfs-path') +const log = require('debug')('ipfs:mfs:touch') +const errCode = require('err-code') +const UnixFS = require('ipfs-unixfs') +const toTrail = require('./utils/to-trail') +const addLink = require('./utils/add-link') +const updateTree = require('./utils/update-tree') +const updateMfsRoot = require('./utils/update-mfs-root') +const { DAGNode } = require('ipld-dag-pb') +const mc = require('multicodec') +const mh = require('multihashes') + +const defaultOptions = { + flush: true, + shardSplitThreshold: 1000, + format: 'dag-pb', + hashAlg: 'sha2-256', + cidVersion: 0, + recursive: false +} + 
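+// Illustrative walk-through of the helpers below, using example values
+// (the mode string and numbers are examples, not part of the patch):
+// applying 'u+rwx,g+x' to an original mode of 0 is evaluated clause by
+// clause:
+//
+//   'u+rwx' -> calculateModification('rwx') = 4 + 2 + 1 = 7
+//              calculateUGO('u', 7)         = 7 << 6    = 0o700
+//   'g+x'   -> calculateModification('x')   = 1
+//              calculateUGO('g', 1)         = 1 << 3    = 0o010
+//
+// parseSymbolicMode then ORs each clause onto the accumulated mode,
+// giving 0o710.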
+function calculateModification (mode) {
+  let modification = 0
+
+  if (mode.includes('x')) {
+    modification += 1
+  }
+
+  if (mode.includes('w')) {
+    modification += 2
+  }
+
+  if (mode.includes('r')) {
+    modification += 4
+  }
+
+  return modification
+}
+
+function calculateUGO (references, modification) {
+  let ugo = 0
+
+  if (references.includes('u')) {
+    ugo += (modification << 6)
+  }
+
+  if (references.includes('g')) {
+    ugo += (modification << 3)
+  }
+
+  if (references.includes('o')) {
+    ugo += (modification)
+  }
+
+  return ugo
+}
+
+function calculateSpecial (references, mode, modification) {
+  if (mode.includes('t')) {
+    modification += parseInt('1000', 8)
+  }
+
+  if (mode.includes('s')) {
+    if (references.includes('u')) {
+      modification += parseInt('4000', 8)
+    }
+
+    if (references.includes('g')) {
+      modification += parseInt('2000', 8)
+    }
+  }
+
+  return modification
+}
+
+// https://en.wikipedia.org/wiki/Chmod#Symbolic_modes
+function parseSymbolicMode (input, originalMode) {
+  if (!originalMode) {
+    originalMode = 0
+  }
+
+  const match = input.match(/^(u?g?o?a?)(-?\+?=?)?(r?w?x?X?s?t?)$/)
+
+  if (!match) {
+    throw new Error(`Invalid file mode: ${input}`)
+  }
+
+  let [
+    _, // eslint-disable-line no-unused-vars
+    references,
+    operator,
+    mode
+  ] = match
+
+  if (references === 'a' || !references) {
+    references = 'ugo'
+  }
+
+  let modification = calculateModification(mode)
+  modification = calculateUGO(references, modification)
+  modification = calculateSpecial(references, mode, modification)
+
+  if (operator === '=') {
+    if (references.includes('u')) {
+      // blank u bits
+      originalMode = originalMode & parseInt('7077', 8)
+
+      // or them together
+      originalMode = originalMode | modification
+    }
+
+    if (references.includes('g')) {
+      // blank g bits
+      originalMode = originalMode & parseInt('7707', 8)
+
+      // or them together
+      originalMode = originalMode | modification
+    }
+
+    if (references.includes('o')) {
+      // blank o bits
+      originalMode = originalMode & parseInt('7770', 8)
+
+      // or them together
+      originalMode = originalMode | modification
+    }
+
+    return originalMode
+  }
+
+  if (operator === '+') {
+    return modification | originalMode
+  }
+
+  if (operator === '-') {
+    // mask off the requested bits (xor would incorrectly toggle bits
+    // that were not set in the original mode)
+    return originalMode & ~modification
+  }
+}
+
+module.exports = (context) => {
+  return async function mfsChmod (path, mode, options) {
+    options = applyDefaultOptions(options, defaultOptions)
+
+    log(`Fetching stats for ${path}`)
+
+    const {
+      cid,
+      mfsDirectory,
+      name
+    } = await toMfsPath(context, path)
+
+    if (cid.codec !== 'dag-pb') {
+      throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS')
+    }
+
+    let node = await context.ipld.get(cid)
+    const metadata = UnixFS.unmarshal(node.Data)
+
+    if (typeof mode === 'string' || mode instanceof String) {
+      if (mode.match(/^\d+$/g)) {
+        mode = parseInt(mode, 8)
+      } else {
+        mode = mode.split(',').reduce((curr, acc) => {
+          return parseSymbolicMode(acc, curr)
+        }, metadata.mode)
+      }
+    }
+
+    metadata.mode = mode
+    node = new DAGNode(metadata.marshal(), node.Links)
+
+    const updatedCid = await context.ipld.put(node, mc.DAG_PB, {
+      cidVersion: cid.version,
+      hashAlg: mh.names['sha2-256'],
+      onlyHash: !options.flush
+    })
+
+    const trail = await toTrail(context, mfsDirectory, options)
+    const parent = trail[trail.length - 1]
+    const parentNode = await context.ipld.get(parent.cid)
+
+    const result = await addLink(context, {
+      parent: parentNode,
+      name: name,
+      cid: updatedCid,
+      size: node.serialize().length,
+      flush: options.flush,
+      format: 'dag-pb',
hashAlg: 'sha2-256', + cidVersion: cid.version + }) + + parent.cid = result.cid + + // update the tree with the new child + const newRootCid = await updateTree(context, trail, options) + + // Update the MFS record with the new CID for the root of the tree + await updateMfsRoot(context, newRootCid) + } +} diff --git a/src/core/cp.js b/src/core/cp.js index 3160e17..8e70d36 100644 --- a/src/core/cp.js +++ b/src/core/cp.js @@ -152,7 +152,8 @@ const addSourceToParent = async (context, source, childName, parent, options) => name: childName, format: options.format, hashAlg: options.hashAlg, - cidVersion: options.cidVersion + cidVersion: options.cidVersion, + flush: options.flush }) parent.node = node diff --git a/src/core/index.js b/src/core/index.js index 59068b8..a2335f7 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -10,11 +10,13 @@ const readOperations = { // These operations are locked at the function level and will execute in series const writeOperations = { + chmod: require('./chmod'), cp: require('./cp'), flush: require('./flush'), mkdir: require('./mkdir'), mv: require('./mv'), - rm: require('./rm') + rm: require('./rm'), + touch: require('./touch') } // These operations are asynchronous and manage their own locking diff --git a/src/core/ls.js b/src/core/ls.js index 0fe8648..f8380ce 100644 --- a/src/core/ls.js +++ b/src/core/ls.js @@ -15,18 +15,29 @@ const defaultOptions = { const toOutput = (fsEntry) => { let type = 0 let size = fsEntry.node.size || fsEntry.node.length + let mode + let mtime if (fsEntry.unixfs) { size = fsEntry.unixfs.fileSize() type = FILE_TYPES[fsEntry.unixfs.type] + mode = fsEntry.unixfs.mode + mtime = fsEntry.unixfs.mtime } - return { + const output = { cid: fsEntry.cid, name: fsEntry.name, type, - size + size, + mode } + + if (mtime !== undefined) { + output.mtime = mtime + } + + return output } module.exports = (context) => { diff --git a/src/core/mkdir.js b/src/core/mkdir.js index c868fc2..9b62aca 100644 --- a/src/core/mkdir.js +++ b/src/core/mkdir.js @@ -20,7 +20,9 @@ const defaultOptions = { cidVersion: 0, shardSplitThreshold: 1000, format: 'dag-pb', - flush: true + flush: true, + mode: null, + mtime: null } module.exports = (context) => { @@ -116,7 +118,10 @@ const addEmptyDir = async (context, childName, emptyDir, parent, trail, options) name: childName, format: options.format, hashAlg: options.hashAlg, - cidVersion: options.cidVersion + cidVersion: options.cidVersion, + mode: options.mode, + mtime: options.mtime, + flush: options.flush }) trail[trail.length - 1].cid = result.cid diff --git a/src/core/mv.js b/src/core/mv.js index 00ae295..d432fb6 100644 --- a/src/core/mv.js +++ b/src/core/mv.js @@ -9,6 +9,7 @@ const defaultOptions = { parents: false, recursive: false, flush: true, + cidVersion: 0, format: 'dag-pb', hashAlg: 'sha2-256', shardSplitThreshold: 1000 diff --git a/src/core/stat.js b/src/core/stat.js index 0ec7498..7330463 100644 --- a/src/core/stat.js +++ b/src/core/stat.js @@ -57,35 +57,43 @@ const statters = { } }, 'dag-pb': (file) => { - let blocks = file.node.Links.length - let size = file.node.size - let cumulativeSize = file.node.size - let nodeType = null + const blocks = file.node.Links.length + const size = file.node.size + const cumulativeSize = file.node.size - if (file.unixfs) { - size = file.unixfs.fileSize() - nodeType = file.unixfs.type - - if (nodeType.includes('directory')) { - size = 0 - cumulativeSize = file.node.size - } - - if (nodeType === 'file') { - blocks = file.unixfs.blockSizes.length - } - } - - return { + 
const output = { cid: file.cid, size: size, cumulativeSize: cumulativeSize, blocks: blocks, - type: nodeType, local: undefined, sizeLocal: undefined, withLocality: false } + + if (file.unixfs) { + output.size = file.unixfs.fileSize() + output.type = file.unixfs.type + + if (file.unixfs.isDirectory()) { + output.size = 0 + output.cumulativeSize = file.node.size + } + + if (output.type === 'file') { + output.blocks = file.unixfs.blockSizes.length + } + + if (file.unixfs.mtime) { + output.mtime = file.unixfs.mtime + } + + if (file.unixfs.mode !== undefined && file.unixfs.mode !== null) { + output.mode = file.unixfs.mode + } + } + + return output }, 'dag-cbor': (file) => { return { diff --git a/src/core/touch.js b/src/core/touch.js new file mode 100644 index 0000000..1517773 --- /dev/null +++ b/src/core/touch.js @@ -0,0 +1,101 @@ +'use strict' + +const applyDefaultOptions = require('./utils/apply-default-options') +const toMfsPath = require('./utils/to-mfs-path') +const log = require('debug')('ipfs:mfs:touch') +const errCode = require('err-code') +const UnixFS = require('ipfs-unixfs') +const toTrail = require('./utils/to-trail') +const addLink = require('./utils/add-link') +const updateTree = require('./utils/update-tree') +const updateMfsRoot = require('./utils/update-mfs-root') +const { DAGNode } = require('ipld-dag-pb') +const mc = require('multicodec') +const mh = require('multihashes') + +const defaultOptions = { + mtime: undefined, + flush: true, + shardSplitThreshold: 1000, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256' +} + +module.exports = (context) => { + return async function mfsTouch (path, options) { + options = options || {} + options = applyDefaultOptions(options, defaultOptions) + options.mtime = options.mtime || new Date() + + log(`Touching ${path} mtime: ${options.mtime}`) + + const { + cid, + mfsDirectory, + name, + exists + } = await toMfsPath(context, path) + + let node + let updatedCid + + let cidVersion = options.cidVersion + + if (!exists) { + const metadata = new UnixFS({ + type: 'file', + mtime: options.mtime + }) + node = new DAGNode(metadata.marshal()) + updatedCid = await context.ipld.put(node, mc.DAG_PB, { + cidVersion: options.cidVersion, + hashAlg: mh.names['sha2-256'], + onlyHash: !options.flush + }) + } else { + if (cid.codec !== 'dag-pb') { + throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') + } + + cidVersion = cid.version + + node = await context.ipld.get(cid) + + const metadata = UnixFS.unmarshal(node.Data) + metadata.mtime = options.mtime + + node = new DAGNode(metadata.marshal(), node.Links) + + updatedCid = await context.ipld.put(node, mc.DAG_PB, { + cidVersion: cid.version, + hashAlg: mh.names['sha2-256'], + onlyHash: !options.flush + }) + } + + const trail = await toTrail(context, mfsDirectory, options) + const parent = trail[trail.length - 1] + const parentNode = await context.ipld.get(parent.cid) + + const result = await addLink(context, { + parent: parentNode, + name: name, + cid: updatedCid, + size: node.serialize().length, + flush: options.flush, + shardSplitThreshold: options.shardSplitThreshold, + format: 'dag-pb', + hashAlg: 'sha2-256', + cidVersion + }) + + parent.cid = result.cid + + // update the tree with the new child + const newRootCid = await updateTree(context, trail, options) + + // Update the MFS record with the new CID for the root of the tree + await updateMfsRoot(context, newRootCid) + } +} diff --git a/src/core/utils/add-link.js b/src/core/utils/add-link.js index 7116556..1e2193a 100644 --- 
a/src/core/utils/add-link.js +++ b/src/core/utils/add-link.js @@ -1,7 +1,8 @@ 'use strict' const { - DAGLink + DAGLink, + DAGNode } = require('ipld-dag-pb') const CID = require('cids') const log = require('debug')('ipfs:mfs:core:utils:add-link') @@ -17,7 +18,7 @@ const { const errCode = require('err-code') const mc = require('multicodec') const mh = require('multihashes') -const last = require('async-iterator-last') +const last = require('it-last') const addLink = async (context, options) => { if (!options.parentCid && !options.parent) { @@ -61,7 +62,11 @@ const addLink = async (context, options) => { if (options.parent.Links.length >= options.shardSplitThreshold) { log('Converting directory to sharded directory') - return convertToShardedDirectory(context, options) + return convertToShardedDirectory(context, { + ...options, + mtime: meta.mtime, + mode: meta.mode + }) } log(`Adding ${options.name} (${options.cid}) to regular directory`) @@ -89,6 +94,11 @@ const addToDirectory = async (context, options) => { options.parent.rmLink(options.name) options.parent.addLink(new DAGLink(options.name, options.size, options.cid)) + // Update mtime + const node = UnixFS.unmarshal(options.parent.Data) + node.mtime = new Date() + options.parent = new DAGNode(node.marshal(), options.parent.Links) + const format = mc[options.format.toUpperCase().replace(/-/g, '_')] const hashAlg = mh.names[options.hashAlg] @@ -96,7 +106,7 @@ const addToDirectory = async (context, options) => { const cid = await context.ipld.put(options.parent, format, { cidVersion: options.cidVersion, hashAlg, - hashOnly: !options.flush + onlyHash: !options.flush }) return { @@ -137,6 +147,7 @@ const addFileToShardedDirectory = async (context, options) => { // start at the root bucket and descend, loading nodes as we go const rootBucket = await recreateHamtLevel(options.parent.Links) + const node = UnixFS.unmarshal(options.parent.Data) const shard = new DirSharded({ root: true, @@ -145,9 +156,11 @@ const addFileToShardedDirectory = async (context, options) => { parentKey: null, path: '', dirty: true, - flat: false + flat: false, + mode: node.mode }, options) shard._bucket = rootBucket + shard.mtime = new Date() // load subshards until the bucket & position no longer changes const position = await rootBucket._findNewBucketAndPos(file.name) diff --git a/src/core/utils/create-node.js b/src/core/utils/create-node.js index f1cfb28..c0982ae 100644 --- a/src/core/utils/create-node.js +++ b/src/core/utils/create-node.js @@ -10,11 +10,17 @@ const mh = require('multihashes') const createNode = async (context, type, options) => { const format = mc[options.format.toUpperCase().replace(/-/g, '_')] const hashAlg = mh.names[options.hashAlg] + const metadata = new UnixFS({ + type, + mode: options.mode, + mtime: options.mtime + }) - const node = new DAGNode(new UnixFS(type).marshal()) + const node = new DAGNode(metadata.marshal()) const cid = await context.ipld.put(node, format, { cidVersion: options.cidVersion, - hashAlg + hashAlg, + onlyHash: !options.flush }) return { diff --git a/src/core/utils/hamt-utils.js b/src/core/utils/hamt-utils.js index 9289640..d630920 100644 --- a/src/core/utils/hamt-utils.js +++ b/src/core/utils/hamt-utils.js @@ -9,14 +9,20 @@ const log = require('debug')('ipfs:mfs:core:utils:hamt-utils') const UnixFS = require('ipfs-unixfs') const mc = require('multicodec') const mh = require('multihashes') -const last = require('async-iterator-last') +const last = require('it-last') const updateHamtDirectory = async (context, links, 
bucket, options) => {
   // update parent with new bit field
   const data = Buffer.from(bucket._children.bitField().reverse())
-  const dir = new UnixFS('hamt-sharded-directory', data)
-  dir.fanout = bucket.tableSize()
-  dir.hashType = DirSharded.hashFn.code
+  const node = UnixFS.unmarshal(options.parent.Data)
+  const dir = new UnixFS({
+    type: 'hamt-sharded-directory',
+    data,
+    fanout: bucket.tableSize(),
+    hashType: DirSharded.hashFn.code,
+    mode: node.mode,
+    mtime: node.mtime
+  })

   const format = mc[options.format.toUpperCase().replace(/-/g, '_')]
   const hashAlg = mh.names[options.hashAlg]
@@ -25,7 +31,7 @@ const updateHamtDirectory = async (context, links, bucket, options) => {
   const cid = await context.ipld.put(parent, format, {
     cidVersion: options.cidVersion,
     hashAlg,
-    hashOnly: !options.flush
+    onlyHash: !options.flush
   })

   return {
@@ -175,7 +181,9 @@ const createShard = async (context, contents, options) => {
     parentKey: null,
     path: '',
     dirty: true,
-    flat: false
+    flat: false,
+    mtime: options.mtime,
+    mode: options.mode
   }, options)

   for (let i = 0; i < contents.length; i++) {
diff --git a/src/core/utils/with-mfs-root.js b/src/core/utils/with-mfs-root.js
index 38da0af..5cf6740 100644
--- a/src/core/utils/with-mfs-root.js
+++ b/src/core/utils/with-mfs-root.js
@@ -30,7 +30,7 @@ const loadMfsRoot = async (context) => {
   }

   log('Creating new MFS root')
-  const node = new DAGNode(new UnixFs('directory').marshal())
+  const node = new DAGNode(new UnixFs({ type: 'directory' }).marshal())
   cid = await context.ipld.put(node, mc.DAG_PB, {
     cidVersion: 0,
     hashAlg: mh.names['sha2-256'] // why can't ipld look this up?
diff --git a/src/core/write.js b/src/core/write.js
index 18c9a2d..2b13c4d 100644
--- a/src/core/write.js
+++ b/src/core/write.js
@@ -17,7 +17,7 @@ const errCode = require('err-code')
 const {
   MAX_CHUNK_SIZE
 } = require('./utils/constants')
-const last = require('async-iterator-last')
+const last = require('it-last')

 const defaultOptions = {
   offset: 0, // the offset in the file to begin writing
@@ -34,12 +34,13 @@ const defaultOptions = {
   strategy: 'trickle',
   flush: true,
   leafType: 'raw',
-  shardSplitThreshold: 1000
+  shardSplitThreshold: 1000,
+  mode: undefined,
+  mtime: undefined
 }

 module.exports = (context) => {
   return async function mfsWrite (path, content, options) {
-    log('Hello world, writing', path, content, options)
     options = applyDefaultOptions(options, defaultOptions)

     let source, destination, parent
@@ -174,8 +175,28 @@ const write = async (context, source, destination, options) => {
     }
   })

+  let mode
+
+  if (options.mode !== undefined && options.mode !== null) {
+    mode = options.mode
+  } else if (destination && destination.unixfs) {
+    mode = destination.unixfs.mode
+  }
+
+  let mtime
+
+  if (options.mtime !== undefined && options.mtime !== null) {
+    mtime = options.mtime
+  } else if (destination && destination.unixfs) {
+    mtime = destination.unixfs.mtime
+  }
+
   const result = await last(importer([{
-    content: content
+    content: content,
+
+    // persist mode & mtime if set previously
+    mode,
+    mtime
   }], context.ipld, {
     progress: options.progress,
     hashAlg: options.hashAlg,
diff --git a/src/http/chmod.js b/src/http/chmod.js
new file mode 100644
index 0000000..8b6a967
--- /dev/null
+++ b/src/http/chmod.js
@@ -0,0 +1,51 @@
+'use strict'
+
+const Joi = require('@hapi/joi')
+
+const mfsChmod = {
+  method: 'POST',
+  path: '/api/v0/files/chmod',
+  async handler (request, h) {
+    const {
+      ipfs
+    } = request.server.app
+    const {
+      arg,
+      mode,
+      recursive,
+      codec,
+      hashAlg,
+      flush,
shardSplitThreshold + } = request.query + + await ipfs.files.chmod(arg, mode, { + recursive, + format: codec, + hashAlg, + flush, + shardSplitThreshold + }) + + return h.response() + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true + }, + query: Joi.object().keys({ + arg: Joi.string(), + mode: Joi.string(), + recursive: Joi.boolean().default(false), + flush: Joi.boolean().default(true), + codec: Joi.string().default('dag-pb'), + hashAlg: Joi.string().default('sha2-256'), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) + }) + } + } +} + +module.exports = mfsChmod diff --git a/src/http/cp.js b/src/http/cp.js index f047af9..751fda0 100644 --- a/src/http/cp.js +++ b/src/http/cp.js @@ -12,6 +12,7 @@ const mfsCp = { const { arg, parents, + flush, format, hashAlg, shardSplitThreshold @@ -19,6 +20,7 @@ const mfsCp = { const args = arg.concat({ parents, + flush, format, hashAlg, shardSplitThreshold @@ -37,12 +39,15 @@ const mfsCp = { query: Joi.object().keys({ arg: Joi.array().items(Joi.string()).min(2), parents: Joi.boolean().default(false), + flush: Joi.boolean().default(true), format: Joi.string().valid([ 'dag-pb', 'dag-cbor' ]).default('dag-pb'), - hashAlg: Joi.string().default('sha2-256') + hashAlg: Joi.string().default('sha2-256'), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) + .rename('codec', 'format') } } } diff --git a/src/http/flush.js b/src/http/flush.js index f362ace..47db6fe 100644 --- a/src/http/flush.js +++ b/src/http/flush.js @@ -2,6 +2,10 @@ const Joi = require('@hapi/joi') +const { + FILE_SEPARATOR +} = require('../core/utils/constants') + const mfsFlush = { method: 'POST', path: '/api/v0/files/flush', @@ -13,7 +17,7 @@ const mfsFlush = { arg } = request.query - await ipfs.files.flush.call(null, arg) + await ipfs.files.flush(arg || FILE_SEPARATOR, {}) return h.response() }, @@ -24,7 +28,7 @@ const mfsFlush = { stripUnknown: true }, query: Joi.object().keys({ - arg: Joi.string().required() + arg: Joi.string() }) } } diff --git a/src/http/index.js b/src/http/index.js index 2ccaa9e..96cb1ad 100644 --- a/src/http/index.js +++ b/src/http/index.js @@ -1,5 +1,6 @@ 'use strict' +const chmod = require('./chmod') const cp = require('./cp') const flush = require('./flush') const ls = require('./ls') @@ -8,9 +9,11 @@ const mv = require('./mv') const read = require('./read') const rm = require('./rm') const stat = require('./stat') +const touch = require('./touch') const write = require('./write') module.exports = [ + chmod, cp, flush, ls, @@ -19,5 +22,6 @@ module.exports = [ read, rm, stat, + touch, write ] diff --git a/src/http/ls.js b/src/http/ls.js index 375a8d3..5b00562 100644 --- a/src/http/ls.js +++ b/src/http/ls.js @@ -6,12 +6,23 @@ const { } = require('stream') const mapEntry = (entry) => { - return { + const output = { Name: entry.name, Type: entry.type, Size: entry.size, - Hash: entry.hash + Hash: entry.hash, + Mode: entry.mode.toString(8).padStart(4, '0') } + + if (entry.mtime) { + output.Mtime = entry.mtime.secs + + if (entry.mtime.nsecs != null) { + output.MtimeNsecs = entry.mtime.nsecs + } + } + + return output } const mfsLs = { @@ -47,7 +58,10 @@ const mfsLs = { passThrough.end(entry ? 
JSON.stringify(mapEntry(entry)) + '\n' : undefined) }) - readableStream.once('error', reject) + readableStream.once('error', (err) => { + passThrough.end() + reject(err) + }) }) return h.response(responseStream).header('X-Stream-Output', '1') diff --git a/src/http/mkdir.js b/src/http/mkdir.js index 6796e1a..b951f96 100644 --- a/src/http/mkdir.js +++ b/src/http/mkdir.js @@ -1,6 +1,7 @@ 'use strict' const Joi = require('@hapi/joi') +const parseMtime = require('./utils/parse-mtime') const mfsMkdir = { method: 'POST', @@ -11,6 +12,9 @@ const mfsMkdir = { } = request.server.app const { arg, + mode, + mtime, + mtimeNsecs, parents, format, hashAlg, @@ -20,6 +24,8 @@ const mfsMkdir = { } = request.query await ipfs.files.mkdir(arg, { + mode, + mtime: parseMtime(mtime, mtimeNsecs), parents, format, hashAlg, @@ -38,6 +44,9 @@ const mfsMkdir = { }, query: Joi.object().keys({ arg: Joi.string().required(), + mode: Joi.string(), + mtime: Joi.number().integer(), + mtimeNsecs: Joi.number().integer().min(0), parents: Joi.boolean().default(false), format: Joi.string().valid([ 'dag-pb', @@ -48,7 +57,8 @@ const mfsMkdir = { 0, 1 ]).default(0), - flush: Joi.boolean().default(true) + flush: Joi.boolean().default(true), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) .rename('p', 'parents', { override: true, diff --git a/src/http/mv.js b/src/http/mv.js index aeee443..2ba60df 100644 --- a/src/http/mv.js +++ b/src/http/mv.js @@ -11,14 +11,20 @@ const mfsMv = { } = request.server.app const { arg, + recursive, parents, format, hashAlg, + cidVersion, + flush, shardSplitThreshold } = request.query const args = arg.concat({ + recursive, parents, + cidVersion, + flush, format, hashAlg, shardSplitThreshold @@ -36,12 +42,19 @@ const mfsMv = { }, query: Joi.object().keys({ arg: Joi.array().items(Joi.string()).min(2), + recursive: Joi.boolean().default(false), parents: Joi.boolean().default(false), format: Joi.string().valid([ 'dag-pb', 'dag-cbor' ]).default('dag-pb'), - hashAlg: Joi.string().default('sha2-256') + hashAlg: Joi.string().default('sha2-256'), + cidVersion: Joi.number().integer().valid([ + 0, + 1 + ]).default(0), + flush: Joi.boolean().default(true), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) } } diff --git a/src/http/read.js b/src/http/read.js index 6382a93..6be1aee 100644 --- a/src/http/read.js +++ b/src/http/read.js @@ -15,15 +15,13 @@ const mfsRead = { const { arg, offset, - length, - count + length } = request.query const responseStream = await new Promise((resolve, reject) => { const stream = ipfs.files.readReadableStream(arg, { offset, - length, - count + length }) stream.once('data', (chunk) => { @@ -61,6 +59,10 @@ const mfsRead = { override: true, ignoreUndefined: true }) + .rename('count', 'length', { + override: true, + ignoreUndefined: true + }) } } } diff --git a/src/http/stat.js b/src/http/stat.js index 63724fc..d8b5563 100644 --- a/src/http/stat.js +++ b/src/http/stat.js @@ -32,7 +32,10 @@ const mfsStat = { CumulativeSize: stats.cumulativeSize, WithLocality: stats.withLocality, Local: stats.local, - SizeLocal: stats.sizeLocal + SizeLocal: stats.sizeLocal, + Mtime: stats.mtime ? stats.mtime.secs : undefined, + MtimeNsecs: stats.mtime ? 
stats.mtime.nsecs : undefined, + Mode: stats.mode.toString(8).padStart(4, '0') }) }, options: { diff --git a/src/http/touch.js b/src/http/touch.js new file mode 100644 index 0000000..5c93fe5 --- /dev/null +++ b/src/http/touch.js @@ -0,0 +1,61 @@ +'use strict' + +const Joi = require('@hapi/joi') +const parseMtime = require('./utils/parse-mtime') + +const mfsTouch = { + method: 'POST', + path: '/api/v0/files/touch', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + flush, + shardSplitThreshold, + cidVersion, + format, + hashAlg, + mtime, + mtimeNsecs + } = request.query + + await ipfs.files.touch(arg, { + mtime: parseMtime(mtime, mtimeNsecs), + flush, + shardSplitThreshold, + cidVersion, + format, + hashAlg + }) + + return h.response() + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true + }, + query: Joi.object().keys({ + arg: Joi.string().required(), + mtime: Joi.number().integer(), + mtimeNsecs: Joi.number().integer().min(0), + format: Joi.string().valid([ + 'dag-pb', + 'dag-cbor' + ]).default('dag-pb'), + hashAlg: Joi.string().default('sha2-256'), + cidVersion: Joi.number().integer().valid([ + 0, + 1 + ]).default(0), + flush: Joi.boolean().default(true), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) + }) + } + } +} + +module.exports = mfsTouch diff --git a/src/http/utils/parse-mtime.js b/src/http/utils/parse-mtime.js new file mode 100644 index 0000000..863c53b --- /dev/null +++ b/src/http/utils/parse-mtime.js @@ -0,0 +1,20 @@ +'use strict' + +module.exports = (secs, nsecs) => { + if ((secs === undefined || secs === null) && (nsecs === undefined || nsecs === null)) { + return + } + + const mtime = {} + + if (nsecs || nsecs === 0) { + mtime.secs = 0 + mtime.nsecs = nsecs + } + + if (secs || secs === 0) { + mtime.secs = secs + } + + return mtime +} diff --git a/src/http/write.js b/src/http/write.js index f647c05..64d3a7e 100644 --- a/src/http/write.js +++ b/src/http/write.js @@ -18,6 +18,7 @@ const mfsWrite = { create, truncate, rawLeaves, + reduceSingleLeafToSelf, cidVersion, hashAlg, format, @@ -44,6 +45,7 @@ const mfsWrite = { create, truncate, rawLeaves, + reduceSingleLeafToSelf, cidVersion, hashAlg, format, @@ -51,7 +53,9 @@ const mfsWrite = { progress, strategy, flush, - shardSplitThreshold + shardSplitThreshold, + mode: entry.mode, + mtime: entry.mtime }) } } @@ -79,9 +83,7 @@ const mfsWrite = { 0, 1 ]).default(0), - hashAlg: Joi.string().valid([ - 'sha2-256' - ]).default('sha2-256'), + hashAlg: Joi.string().default('sha2-256'), format: Joi.string().valid([ 'dag-pb', 'dag-cbor' @@ -93,7 +95,9 @@ const mfsWrite = { 'balanced', 'trickle' ]).default('trickle'), - flush: Joi.boolean().default(true) + flush: Joi.boolean().default(true), + reduceSingleLeafToSelf: Joi.boolean().default(false), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) .rename('o', 'offset', { override: true, diff --git a/test/browser.js b/test/browser.js new file mode 100644 index 0000000..ed5d991 --- /dev/null +++ b/test/browser.js @@ -0,0 +1,3 @@ +'use strict' + +require('./core') diff --git a/test/cli/chmod.js b/test/cli/chmod.js new file mode 100644 index 0000000..c7bd8d5 --- /dev/null +++ b/test/cli/chmod.js @@ -0,0 +1,164 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + recursive: false, + format: 'dag-pb', + hashAlg: 
'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('chmod', () => { + const path = '/foo' + const mode = '0777' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + chmod: sinon.stub() + } + } + }) + + it('should update the mode for a file', async () => { + await cli(`files chmod ${mode} ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions() + ]) + }) + + it('should update the mode recursively', async () => { + await cli(`files chmod ${mode} --recursive ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + recursive: true + }) + ]) + }) + + it('should update the mode recursively (short option)', async () => { + await cli(`files chmod ${mode} -r ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + recursive: true + }) + ]) + }) + + it('should update the mode without flushing', async () => { + await cli(`files chmod ${mode} --flush false ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + flush: false + }) + ]) + }) + + it('should update the mode without flushing (short option)', async () => { + await cli(`files chmod ${mode} -f false ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + flush: false + }) + ]) + }) + + it('should update the mode with a different codec', async () => { + await cli(`files chmod ${mode} --codec dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should update the mode with a different codec (short option)', async () => { + await cli(`files chmod ${mode} -c dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should update the mode a with different hash algorithm', async () => { + await cli(`files chmod ${mode} --hash-alg sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mode a with different hash algorithm (short option)', async () => { + await cli(`files chmod ${mode} -h sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mode with a shard split threshold', async () => { + await cli('files chmod 0777 --shard-split-threshold 10 /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 
8), + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) +}) diff --git a/test/cli/cp.js b/test/cli/cp.js new file mode 100644 index 0000000..953fc16 --- /dev/null +++ b/test/cli/cp.js @@ -0,0 +1,138 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + parents: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('cp', () => { + const source = 'source' + const dest = 'dest' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + cp: sinon.stub() + } + } + }) + + it('should copy files', async () => { + await cli(`files cp ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions() + ]) + }) + + it('should copy files and create intermediate directories', async () => { + await cli(`files cp --parents ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should copy files and create intermediate directories (short option)', async () => { + await cli(`files cp --parents ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should copy files with a different codec', async () => { + await cli(`files cp --codec dag-foo ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should copy files with a different codec (short option)', async () => { + await cli(`files cp -c dag-foo ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should copy files with a different hash algorithm', async () => { + await cli(`files cp --hash-alg sha3-256 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should copy files with a different hash algorithm (short option)', async () => { + await cli(`files cp -h sha3-256 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should copy files with a different shard split threshold', async () => { + await cli(`files cp --shard-split-threshold 10 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) +}) diff --git a/test/cli/flush.js b/test/cli/flush.js new file mode 100644 index 0000000..f40a0d7 --- /dev/null +++ b/test/cli/flush.js @@ -0,0 +1,39 @@ +/* eslint-env mocha */ +'use strict' + +const expect = 
require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +describe('flush', () => { + const path = '/foo' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + flush: sinon.stub() + } + } + }) + + it('should flush a path', async () => { + await cli(`files flush ${path}`, { ipfs }) + + expect(ipfs.files.flush.callCount).to.equal(1) + expect(ipfs.files.flush.getCall(0).args).to.deep.equal([ + path, + {} + ]) + }) + + it('should flush without a path', async () => { + await cli('files flush', { ipfs }) + + expect(ipfs.files.flush.callCount).to.equal(1) + expect(ipfs.files.flush.getCall(0).args).to.deep.equal([ + '/', + {} + ]) + }) +}) diff --git a/test/cli/index.js b/test/cli/index.js new file mode 100644 index 0000000..0f1fff2 --- /dev/null +++ b/test/cli/index.js @@ -0,0 +1,16 @@ +/* eslint-env mocha */ +'use strict' + +describe('cli', () => { + require('./chmod') + require('./cp') + require('./flush') + require('./ls') + require('./mkdir') + require('./mv') + require('./read') + require('./rm') + require('./stat') + require('./touch') + require('./write') +}) diff --git a/test/cli/ls.js b/test/cli/ls.js new file mode 100644 index 0000000..478f7fe --- /dev/null +++ b/test/cli/ls.js @@ -0,0 +1,198 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') +const values = require('pull-stream/sources/values') +const isNode = require('detect-node') + +describe('ls', () => { + if (!isNode) { + return + } + + let ipfs + let print + let output + + beforeEach(() => { + output = '' + ipfs = { + files: { + ls: sinon.stub().resolves([]) + } + } + print = (msg = '', newline = true) => { + output += newline ? msg + '\n' : msg + } + }) + + it('should list a path', async () => { + const path = '/foo' + + await cli(`files ls ${path}`, { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + path + ]) + }) + + it('should list without a path', async () => { + await cli('files ls', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + '/' + ]) + }) + + it('should list a path with details', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.ls = sinon.stub().resolves(files) + + await cli('files ls --long /foo', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path with details (short option)', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.ls = sinon.stub().resolves(files) + + await cli('files ls -l /foo', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path with details', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.ls = sinon.stub().resolves(files) + + await cli('files ls --long /foo', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + 
expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path with details (short option)', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.ls = sinon.stub().resolves(files) + + await cli('files ls -l /foo', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path without sorting', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + + await cli('files ls --sort false /foo', { ipfs, print }) + + expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(output).to.include(files[0].name) + }) + + it('should list a path without sorting (short option)', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + + await cli('files ls -s false /foo', { ipfs, print }) + + expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(output).to.include(files[0].name) + }) + + it('should list a path with details without sorting', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + + await cli('files ls --long --sort false /foo', { ipfs, print }) + + expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path with details without sorting (short option)', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + + await cli('files ls -l -s false /foo', { ipfs, print }) + + expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) +}) diff --git a/test/cli/mkdir.js b/test/cli/mkdir.js new file mode 100644 index 0000000..ea3a0f2 --- /dev/null +++ b/test/cli/mkdir.js @@ -0,0 +1,209 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') +const isNode = require('detect-node') + +function defaultOptions (modification = {}) { + const options = { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('mkdir', () => { + if (!isNode) { + return + } + + const path = '/foo' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + mkdir: sinon.stub() + } + } + }) + + it('should make a directory', async () => { + await cli(`files mkdir ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + 
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions()
+    ])
+  })
+
+  it('should make a directory with parents', async () => {
+    await cli(`files mkdir --parents ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        parents: true
+      })
+    ])
+  })
+
+  it('should make a directory with parents (short option)', async () => {
+    await cli(`files mkdir -p ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        parents: true
+      })
+    ])
+  })
+
+  it('should make a directory with a different cid version', async () => {
+    await cli(`files mkdir --cid-version 5 ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        cidVersion: 5
+      })
+    ])
+  })
+
+  it('should make a directory with a different cid version (shortish option)', async () => {
+    await cli(`files mkdir --cid-ver 5 ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        cidVersion: 5
+      })
+    ])
+  })
+
+  it('should make a directory with a different codec', async () => {
+    await cli(`files mkdir --codec dag-foo ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        format: 'dag-foo'
+      })
+    ])
+  })
+
+  it('should make a directory with a different codec (short option)', async () => {
+    await cli(`files mkdir -c dag-foo ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        format: 'dag-foo'
+      })
+    ])
+  })
+
+  it('should make a directory with a different hash algorithm', async () => {
+    await cli(`files mkdir --hash-alg sha3-256 ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        hashAlg: 'sha3-256'
+      })
+    ])
+  })
+
+  it('should make a directory with a different hash algorithm (short option)', async () => {
+    await cli(`files mkdir -h sha3-256 ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        hashAlg: 'sha3-256'
+      })
+    ])
+  })
+
+  it('should make a directory without flushing', async () => {
+    await cli(`files mkdir --flush false ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        flush: false
+      })
+    ])
+  })
+
+  it('should make a directory without flushing (short option)', async () => {
+    await cli(`files mkdir -f false ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        flush: false
+      })
+    ])
+  })
+
+  it('should make a directory with a different shard split threshold', async () => {
+    await cli(`files mkdir --shard-split-threshold 10 ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        shardSplitThreshold: 10
+      })
+    ])
+  })
+
+  it('should make a directory with a different mode', async () => {
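+    // the mode argument is coerced from an octal string by the CLI layer,
+    // which is why the assertion below expects parseInt('0111', 8) rather
+    // than the raw string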
+    await cli(`files mkdir --mode 0111 ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        mode: parseInt('0111', 8)
+      })
+    ])
+  })
+
+  it('should make a directory with a different mtime', async () => {
+    await cli(`files mkdir --mtime 5 ${path}`, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        mtime: new Date(5000)
+      })
+    ])
+  })
+})
diff --git a/test/cli/mv.js b/test/cli/mv.js
new file mode 100644
index 0000000..ca47005
--- /dev/null
+++ b/test/cli/mv.js
@@ -0,0 +1,223 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('../helpers/chai')
+const cli = require('../helpers/cli')
+const sinon = require('sinon')
+const isNode = require('detect-node')
+
+function defaultOptions (modification = {}) {
+  const options = {
+    parents: false,
+    recursive: false,
+    cidVersion: 0,
+    format: 'dag-pb',
+    hashAlg: 'sha2-256',
+    flush: true,
+    shardSplitThreshold: 1000
+  }
+
+  Object.keys(modification).forEach(key => {
+    options[key] = modification[key]
+  })
+
+  return options
+}
+
+describe('mv', () => {
+  if (!isNode) {
+    return
+  }
+
+  const source = '/src'
+  const dest = '/dest'
+  let ipfs
+
+  beforeEach(() => {
+    ipfs = {
+      files: {
+        mv: sinon.stub()
+      }
+    }
+  })
+
+  it('should move an entry', async () => {
+    await cli(`files mv ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions()
+    ])
+  })
+
+  it('should move an entry and create parents', async () => {
+    await cli(`files mv --parents ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        parents: true
+      })
+    ])
+  })
+
+  it('should move an entry and create parents (short option)', async () => {
+    await cli(`files mv -p ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        parents: true
+      })
+    ])
+  })
+
+  it('should move an entry recursively', async () => {
+    await cli(`files mv --recursive ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        recursive: true
+      })
+    ])
+  })
+
+  it('should move an entry recursively (short option)', async () => {
+    await cli(`files mv -r ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        recursive: true
+      })
+    ])
+  })
+
+  it('should move an entry with a different cid version', async () => {
+    await cli(`files mv --cid-version 5 ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        cidVersion: 5
+      })
+    ])
+  })
+
+  it('should move an entry with a different cid version (shortish option)', async () => {
+    await cli(`files mv --cid-ver 5 ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        cidVersion: 5
+      })
+    ])
+  })
+
+  it('should move an entry with a different codec', async () => {
+    await cli(`files mv --codec dag-foo ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        format: 'dag-foo'
+      })
+    ])
+  })
+
+  it('should move an entry with a different codec (short option)', async () => {
+    await cli(`files mv -c dag-foo ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        format: 'dag-foo'
+      })
+    ])
+  })
+
+  it('should move an entry with a different hash algorithm', async () => {
+    await cli(`files mv --hash-alg sha3-256 ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        hashAlg: 'sha3-256'
+      })
+    ])
+  })
+
+  it('should move an entry with a different hash algorithm (short option)', async () => {
+    await cli(`files mv -h sha3-256 ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        hashAlg: 'sha3-256'
+      })
+    ])
+  })
+
+  it('should move an entry without flushing', async () => {
+    await cli(`files mv --flush false ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        flush: false
+      })
+    ])
+  })
+
+  it('should move an entry without flushing (short option)', async () => {
+    await cli(`files mv -f false ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        flush: false
+      })
+    ])
+  })
+
+  it('should move an entry with a different shard split threshold', async () => {
+    await cli(`files mv --shard-split-threshold 10 ${source} ${dest}`, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        shardSplitThreshold: 10
+      })
+    ])
+  })
+})
diff --git a/test/cli/read.js b/test/cli/read.js
new file mode 100644
index 0000000..3c5620f
--- /dev/null
+++ b/test/cli/read.js
@@ -0,0 +1,115 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('../helpers/chai')
+const cli = require('../helpers/cli')
+const sinon = require('sinon')
+const values = require('pull-stream/sources/values')
+const isNode = require('detect-node')
+
+function defaultOptions (modification = {}) {
+  const options = {
+    offset: undefined,
+    length: undefined
+  }
+
+  Object.keys(modification).forEach(key => {
+    options[key] = modification[key]
+  })
+
+  return options
+}
+
+describe('read', () => {
+  if (!isNode) {
+    return
+  }
+
+  const path = '/foo'
+  let ipfs
+  let print
+  let output
+
+  beforeEach(() => {
+    output = ''
+    ipfs = {
+      files: {
+        readPullStream: sinon.stub().returns(values(['hello world']))
+      }
+    }
+    print = (msg = '', newline = true) => {
+      output += newline ? 
msg + '\n' : msg + } + }) + + it('should read a path', async () => { + await cli(`files read ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + expect(output).to.equal('hello world') + }) + + it('should read a path with an offset', async () => { + const offset = 5 + + await cli(`files read --offset ${offset} ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + offset + }) + ]) + expect(output).to.equal('hello world') + }) + + it('should read a path with an offset (short option)', async () => { + const offset = 5 + + await cli(`files read -o ${offset} ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, { + offset, + length: undefined + } + ]) + expect(output).to.equal('hello world') + }) + + it('should read a path with a length', async () => { + const length = 5 + + await cli(`files read --length ${length} ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + length + }) + ]) + expect(output).to.equal('hello world') + }) + + it('should read a path with a length (short option)', async () => { + const length = 5 + + await cli(`files read -l ${length} ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + length + }) + ]) + expect(output).to.equal('hello world') + }) +}) diff --git a/test/cli/rm.js b/test/cli/rm.js new file mode 100644 index 0000000..1370e4b --- /dev/null +++ b/test/cli/rm.js @@ -0,0 +1,70 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') +const isNode = require('detect-node') + +function defaultOptions (modification = {}) { + const options = { + recursive: false + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('rm', () => { + if (!isNode) { + return + } + + const path = '/foo' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + rm: sinon.stub().resolves() + } + } + }) + + it('should remove a path', async () => { + await cli(`files rm ${path}`, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + }) + + it('should remove a path recursively', async () => { + await cli(`files rm --recursive ${path}`, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + recursive: true + }) + ]) + }) + + it('should remove a path recursively (short option)', async () => { + await cli(`files rm -r ${path}`, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + recursive: true + }) + ]) + }) +}) diff --git a/test/cli/stat.js b/test/cli/stat.js new file mode 100644 index 0000000..2ff7736 --- /dev/null +++ b/test/cli/stat.js @@ -0,0 +1,142 @@ +/* eslint-env mocha */ +'use strict' + +const expect = 
require('../helpers/chai')
+const cli = require('../helpers/cli')
+const sinon = require('sinon')
+const isNode = require('detect-node')
+
+function defaultOptions (modification = {}) {
+  const options = {
+    withLocal: false
+  }
+
+  Object.keys(modification).forEach(key => {
+    options[key] = modification[key]
+  })
+
+  return options
+}
+
+describe('stat', () => {
+  if (!isNode) {
+    return
+  }
+
+  const path = '/foo'
+  let ipfs
+  let print
+  let output
+
+  beforeEach(() => {
+    output = ''
+    ipfs = {
+      files: {
+        stat: sinon.stub().resolves({
+          hash: 'stats-hash',
+          size: 'stats-size',
+          cumulativeSize: 'stats-cumulativeSize',
+          blocks: 'stats-blocks',
+          type: 'stats-type',
+          mode: 'stats-mode',
+          mtime: 'stats-mtime'
+        })
+      }
+    }
+    print = (msg = '', newline = true) => {
+      output += newline ? msg + '\n' : msg
+    }
+  })
+
+  it('should stat a path', async () => {
+    await cli(`files stat ${path}`, { ipfs, print })
+
+    expect(ipfs.files.stat.callCount).to.equal(1)
+    expect(ipfs.files.stat.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions()
+    ])
+    expect(output).to.include('CumulativeSize')
+  })
+
+  it('should stat a path with local', async () => {
+    await cli(`files stat --with-local ${path}`, { ipfs, print })
+
+    expect(ipfs.files.stat.callCount).to.equal(1)
+    expect(ipfs.files.stat.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        withLocal: true
+      })
+    ])
+    expect(output).to.include('CumulativeSize')
+  })
+
+  it('should stat a path with local (short option)', async () => {
+    await cli(`files stat -l ${path}`, { ipfs, print })
+
+    expect(ipfs.files.stat.callCount).to.equal(1)
+    expect(ipfs.files.stat.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        withLocal: true
+      })
+    ])
+    expect(output).to.include('CumulativeSize')
+  })
+
+  it('should stat a path and only show hashes', async () => {
+    await cli(`files stat --hash ${path}`, { ipfs, print })
+
+    expect(ipfs.files.stat.callCount).to.equal(1)
+    expect(ipfs.files.stat.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions()
+    ])
+    expect(output).to.equal('stats-hash\n')
+  })
+
+  it('should stat a path and only show hashes (short option)', async () => {
+    await cli(`files stat -h ${path}`, { ipfs, print })
+
+    expect(ipfs.files.stat.callCount).to.equal(1)
+    expect(ipfs.files.stat.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions()
+    ])
+    expect(output).to.equal('stats-hash\n')
+  })
+
+  it('should stat a path and only show sizes', async () => {
+    await cli(`files stat --size ${path}`, { ipfs, print })
+
+    expect(ipfs.files.stat.callCount).to.equal(1)
+    expect(ipfs.files.stat.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions()
+    ])
+    expect(output).to.equal('stats-size\n')
+  })
+
+  it('should stat a path and only show sizes (short option)', async () => {
+    await cli(`files stat -s ${path}`, { ipfs, print })
+
+    expect(ipfs.files.stat.callCount).to.equal(1)
+    expect(ipfs.files.stat.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions()
+    ])
+    expect(output).to.equal('stats-size\n')
+  })
+
+  it('should stat a path with format option', async () => {
+    await cli(`files stat --format '<mode> <type>' ${path}`, { ipfs, print })
+
+    expect(ipfs.files.stat.callCount).to.equal(1)
+    expect(ipfs.files.stat.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions()
+    ])
+    expect(output).to.equal('stats-mode stats-type\n')
+  })
+})
diff --git a/test/cli/touch.js b/test/cli/touch.js
new file mode 100644
index 0000000..56febab
--- /dev/null
+++ b/test/cli/touch.js
@@ -0,0 +1,145 @@
+/* eslint-env mocha */
+'use 
strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') +const isNode = require('detect-node') + +function defaultOptions (modification = {}) { + const options = { + mtime: null, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('touch', () => { + if (!isNode) { + return + } + + const path = '/foo' + const mtime = new Date(100000) + let ipfs + + beforeEach(() => { + ipfs = { + files: { + touch: sinon.stub() + } + } + }) + + it('should update the mtime for a file', async () => { + await cli(`files touch -m ${mtime.getTime() / 1000} ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime + }) + ]) + }) + + it('should update the mtime without flushing', async () => { + await cli(`files touch -m ${mtime.getTime() / 1000} --flush false ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime, + flush: false + }) + ]) + }) + + it('should update the mtime without flushing (short option)', async () => { + await cli(`files touch -m ${mtime.getTime() / 1000} -f false ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime, + flush: false + }) + ]) + }) + + it('should update the mtime with a different codec', async () => { + await cli(`files touch -m ${mtime.getTime() / 1000} --codec dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime, + format: 'dag-foo' + }) + ]) + }) + + it('should update the mtime with a different codec (short option)', async () => { + await cli(`files touch -m ${mtime.getTime() / 1000} -c dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime, + format: 'dag-foo' + }) + ]) + }) + + it('should update the mtime with a different hash algorithm', async () => { + await cli(`files touch -m ${mtime.getTime() / 1000} --hash-alg sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime, + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mtime with a different hash algorithm (short option)', async () => { + await cli(`files touch -m ${mtime.getTime() / 1000} -h sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime, + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mtime with a shard split threshold', async () => { + await cli(`files touch -m ${mtime.getTime() / 1000} --shard-split-threshold 10 ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime, + shardSplitThreshold: 10 + }) + ]) + }) +}) diff --git a/test/cli/write.js b/test/cli/write.js new file mode 100644 index 0000000..ce65a9a --- /dev/null +++ b/test/cli/write.js @@ 
-0,0 +1,455 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') +const isNode = require('detect-node') + +function defaultOptions (modification = {}) { + const options = { + offset: undefined, + length: undefined, + create: false, + truncate: false, + rawLeaves: false, + reduceSingleLeafToSelf: false, + cidVersion: 0, + hashAlg: 'sha2-256', + format: 'dag-pb', + parents: false, + progress: undefined, + strategy: 'balanced', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('write', () => { + if (!isNode) { + return + } + + const stdin = 'stdin' + const getStdin = () => stdin + let ipfs + + beforeEach(() => { + ipfs = { + files: { + write: sinon.stub() + } + } + }) + + it('should write to a file', async () => { + const path = '/foo' + + await cli(`files write ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions() + ]) + }) + + it('should write to a file and create parents', async () => { + const path = '/foo' + + await cli(`files write --parents ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should write to a file and create parents (short option)', async () => { + const path = '/foo' + + await cli(`files write -p ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should write to a file and create it', async () => { + const path = '/foo' + + await cli(`files write --create ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + create: true + }) + ]) + }) + + it('should write to a file and create it (short option)', async () => { + const path = '/foo' + + await cli(`files write -e ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + create: true + }) + ]) + }) + + it('should write to a file with an offset', async () => { + const path = '/foo' + + await cli(`files write --offset 10 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + offset: 10 + }) + ]) + }) + + it('should write to a file with an offset (short option)', async () => { + const path = '/foo' + + await cli(`files write -o 10 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + offset: 10 + }) + ]) + }) + + it('should write to a file with a length', async () => { + const path = '/foo' + + await cli(`files write --length 10 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + length: 10 + }) + ]) + }) + + it('should 
write to a file with a length (short option)', async () => { + const path = '/foo' + + await cli(`files write -l 10 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + length: 10 + }) + ]) + }) + + it('should write to a file and truncate it', async () => { + const path = '/foo' + + await cli(`files write --truncate ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + truncate: true + }) + ]) + }) + + it('should write to a file and truncate it (short option)', async () => { + const path = '/foo' + + await cli(`files write -t ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + truncate: true + }) + ]) + }) + + it('should write to a file with raw leaves', async () => { + const path = '/foo' + + await cli(`files write --raw-leaves ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + rawLeaves: true + }) + ]) + }) + + it('should write to a file with raw leaves (short option)', async () => { + const path = '/foo' + + await cli(`files write -r ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + rawLeaves: true + }) + ]) + }) + + it('should write to a file and reduce a single leaf to one node', async () => { + const path = '/foo' + + await cli(`files write --reduce-single-leaf-to-self ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + reduceSingleLeafToSelf: true + }) + ]) + }) + + it('should write to a file without flushing', async () => { + const path = '/foo' + + await cli(`files write --flush false ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should write to a file without flushing (short option)', async () => { + const path = '/foo' + + await cli(`files write -f false ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should write to a file with a specified strategy', async () => { + const path = '/foo' + + await cli(`files write --strategy trickle ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + strategy: 'trickle' + }) + ]) + }) + + it('should write to a file with a specified strategy (short option)', async () => { + const path = '/foo' + + await cli(`files write -s trickle ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + strategy: 'trickle' + }) + ]) + }) + + it('should write to a file with a specified cid version', async () => { + const path = '/foo' + + await 
cli(`files write --cid-version 5 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + cidVersion: 5 + }) + ]) + }) + + it('should write to a file with a specified cid version (shortish option)', async () => { + const path = '/foo' + + await cli(`files write --cid-ver 5 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + cidVersion: 5 + }) + ]) + }) + + it('should write to a file with a specified codec', async () => { + const path = '/foo' + + await cli(`files write --codec dag-foo ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should write to a file with a specified codec (short option)', async () => { + const path = '/foo' + + await cli(`files write -c dag-foo ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should write to a file with a specified hash algorithm', async () => { + const path = '/foo' + + await cli(`files write --hash-alg sha3-256 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should write to a file with a specified hash algorithm (short option)', async () => { + const path = '/foo' + + await cli(`files write -h sha3-256 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should write to a file with a specified shard split threshold', async () => { + const path = '/foo' + + await cli(`files write --shard-split-threshold 10 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) + + it('should write to a file with a specified mode', async () => { + const path = '/foo' + + await cli(`files write --mode 0557 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + mode: parseInt('0557', 8) + }) + ]) + }) + + it('should write to a file with a specified mtime', async () => { + const path = '/foo' + + await cli(`files write --mtime 11 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + mtime: new Date(11000) + }) + ]) + }) +}) diff --git a/test/core/chmod.js b/test/core/chmod.js new file mode 100644 index 0000000..182f893 --- /dev/null +++ b/test/core/chmod.js @@ -0,0 +1,152 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const createMfs = require('../helpers/create-mfs') + +describe('chmod', () => { + let mfs + + before(async () => { + mfs = await createMfs() + }) + + async function testChmod (initialMode, modification, 
expectedFinalMode) { + const path = `/foo-${Date.now()}` + + await mfs.write(path, Buffer.from('Hello world'), { + create: true, + mtime: new Date(), + mode: initialMode + }) + await mfs.chmod(path, modification, { + flush: true + }) + + const updatedMode = (await mfs.stat(path)).mode + expect(updatedMode).to.equal(parseInt(expectedFinalMode, 8)) + } + + it('should update the mode for a file', async () => { + const path = `/foo-${Date.now()}` + + await mfs.write(path, Buffer.from('Hello world'), { + create: true, + mtime: new Date() + }) + const originalMode = (await mfs.stat(path)).mode + await mfs.chmod(path, '0777', { + flush: true + }) + + const updatedMode = (await mfs.stat(path)).mode + expect(updatedMode).to.not.equal(originalMode) + expect(updatedMode).to.equal(parseInt('0777', 8)) + }) + + it('should update the mode for a directory', async () => { + const path = `/foo-${Date.now()}` + + await mfs.mkdir(path) + const originalMode = (await mfs.stat(path)).mode + await mfs.chmod(path, '0777', { + flush: true + }) + + const updatedMode = (await mfs.stat(path)).mode + expect(updatedMode).to.not.equal(originalMode) + expect(updatedMode).to.equal(parseInt('0777', 8)) + }) + + it('should update the mode for a hamt-sharded-directory', async () => { + const path = `/foo-${Date.now()}` + + await mfs.mkdir(path) + await mfs.write(`${path}/foo.txt`, Buffer.from('Hello world'), { + create: true, + shardSplitThreshold: 0 + }) + const originalMode = (await mfs.stat(path)).mode + await mfs.chmod(path, '0777', { + flush: true + }) + + const updatedMode = (await mfs.stat(path)).mode + expect(updatedMode).to.not.equal(originalMode) + expect(updatedMode).to.equal(parseInt('0777', 8)) + }) + + it('should update modes with basic symbolic notation that adds bits', async () => { + await testChmod('0000', '+x', '0111') + await testChmod('0000', '+w', '0222') + await testChmod('0000', '+r', '0444') + await testChmod('0000', 'u+x', '0100') + await testChmod('0000', 'u+w', '0200') + await testChmod('0000', 'u+r', '0400') + await testChmod('0000', 'g+x', '0010') + await testChmod('0000', 'g+w', '0020') + await testChmod('0000', 'g+r', '0040') + await testChmod('0000', 'o+x', '0001') + await testChmod('0000', 'o+w', '0002') + await testChmod('0000', 'o+r', '0004') + await testChmod('0000', 'ug+x', '0110') + await testChmod('0000', 'ug+w', '0220') + await testChmod('0000', 'ug+r', '0440') + await testChmod('0000', 'ugo+x', '0111') + await testChmod('0000', 'ugo+w', '0222') + await testChmod('0000', 'ugo+r', '0444') + }) + + it('should update modes with basic symbolic notation that removes bits', async () => { + await testChmod('0111', '-x', '0000') + await testChmod('0222', '-w', '0000') + await testChmod('0444', '-r', '0000') + await testChmod('0100', 'u-x', '0000') + await testChmod('0200', 'u-w', '0000') + await testChmod('0400', 'u-r', '0000') + await testChmod('0010', 'g-x', '0000') + await testChmod('0020', 'g-w', '0000') + await testChmod('0040', 'g-r', '0000') + await testChmod('0001', 'o-x', '0000') + await testChmod('0002', 'o-w', '0000') + await testChmod('0004', 'o-r', '0000') + await testChmod('0110', 'ug-x', '0000') + await testChmod('0220', 'ug-w', '0000') + await testChmod('0440', 'ug-r', '0000') + await testChmod('0111', 'ugo-x', '0000') + await testChmod('0222', 'ugo-w', '0000') + await testChmod('0444', 'ugo-r', '0000') + }) + + it('should update modes with basic symbolic notation that overrides bits', async () => { + await testChmod('0777', '=x', '0111') + await testChmod('0777', '=w', 
'0222') + await testChmod('0777', '=r', '0444') + await testChmod('0777', 'u=x', '0177') + await testChmod('0777', 'u=w', '0277') + await testChmod('0777', 'u=r', '0477') + await testChmod('0777', 'g=x', '0717') + await testChmod('0777', 'g=w', '0727') + await testChmod('0777', 'g=r', '0747') + await testChmod('0777', 'o=x', '0771') + await testChmod('0777', 'o=w', '0772') + await testChmod('0777', 'o=r', '0774') + await testChmod('0777', 'ug=x', '0117') + await testChmod('0777', 'ug=w', '0227') + await testChmod('0777', 'ug=r', '0447') + await testChmod('0777', 'ugo=x', '0111') + await testChmod('0777', 'ugo=w', '0222') + await testChmod('0777', 'ugo=r', '0444') + }) + + it('should update modes with multiple symbolic notation', async () => { + await testChmod('0000', 'g+x,u+w', '0210') + }) + + it('should update modes with special symbolic notation', async () => { + await testChmod('0000', 'g+s', '2000') + await testChmod('0000', 'u+s', '4000') + await testChmod('0000', '+t', '1000') + await testChmod('0000', '+s', '6000') + }) +}) diff --git a/test/cp.spec.js b/test/core/cp.js similarity index 97% rename from test/cp.spec.js rename to test/core/cp.js index 2b74642..e22e4e8 100644 --- a/test/cp.spec.js +++ b/test/core/cp.js @@ -4,10 +4,10 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const streamToBuffer = require('./helpers/stream-to-buffer') -const streamToArray = require('./helpers/stream-to-array') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const streamToBuffer = require('../helpers/stream-to-buffer') +const streamToArray = require('../helpers/stream-to-array') const crypto = require('crypto') describe('cp', () => { diff --git a/test/flush.spec.js b/test/core/flush.js similarity index 91% rename from test/flush.spec.js rename to test/core/flush.js index fced064..494711d 100644 --- a/test/flush.spec.js +++ b/test/core/flush.js @@ -4,7 +4,7 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const createMfs = require('./helpers/create-mfs') +const createMfs = require('../helpers/create-mfs') describe('flush', () => { let mfs diff --git a/test/core/index.js b/test/core/index.js new file mode 100644 index 0000000..f0baeb2 --- /dev/null +++ b/test/core/index.js @@ -0,0 +1,16 @@ +/* eslint-env mocha */ +'use strict' + +describe('core', () => { + require('./chmod') + require('./cp') + require('./flush') + require('./ls') + require('./mkdir') + require('./mv') + require('./read') + require('./rm') + require('./stat') + require('./touch') + require('./write') +}) diff --git a/test/ls.spec.js b/test/core/ls.js similarity index 96% rename from test/ls.spec.js rename to test/core/ls.js index eadd1c9..49d7eec 100644 --- a/test/ls.spec.js +++ b/test/core/ls.js @@ -7,10 +7,10 @@ const expect = chai.expect const CID = require('cids') const { FILE_TYPES -} = require('../src') -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const streamToArray = require('./helpers/stream-to-array') +} = require('../../src') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const streamToArray = require('../helpers/stream-to-array') const crypto = 
require('crypto') describe('ls', () => { diff --git a/test/mkdir.spec.js b/test/core/mkdir.js similarity index 95% rename from test/mkdir.spec.js rename to test/core/mkdir.js index 628f2dd..d8a3685 100644 --- a/test/mkdir.spec.js +++ b/test/core/mkdir.js @@ -5,10 +5,10 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect const multihash = require('multihashes') -const createMfs = require('./helpers/create-mfs') -const cidAtPath = require('./helpers/cid-at-path') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const all = require('async-iterator-all') +const createMfs = require('../helpers/create-mfs') +const cidAtPath = require('../helpers/cid-at-path') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const all = require('it-all') describe('mkdir', () => { let mfs diff --git a/test/mv.spec.js b/test/core/mv.js similarity index 97% rename from test/mv.spec.js rename to test/core/mv.js index 655b3b9..261bd20 100644 --- a/test/mv.spec.js +++ b/test/core/mv.js @@ -4,9 +4,9 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const streamToBuffer = require('./helpers/stream-to-buffer') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const streamToBuffer = require('../helpers/stream-to-buffer') const crypto = require('crypto') describe('mv', () => { diff --git a/test/core/read.js b/test/core/read.js new file mode 100644 index 0000000..e94d68b --- /dev/null +++ b/test/core/read.js @@ -0,0 +1,149 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const crypto = require('crypto') +const streamToBuffer = require('../helpers/stream-to-buffer') + +describe('read', () => { + let mfs + const smallFile = crypto.randomBytes(13) + + before(async () => { + mfs = await createMfs() + }) + + it('reads a small file', async () => { + const filePath = '/small-file.txt' + + await mfs.write(filePath, smallFile, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(filePath)) + + expect(buffer).to.deep.equal(smallFile) + }) + + it('reads a file with an offset', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const offset = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + offset + })) + + expect(buffer).to.deep.equal(data.slice(offset)) + }) + + it('reads a file with a length', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const length = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + length + })) + + expect(buffer).to.deep.equal(data.slice(0, length)) + }) + + it('reads a file with a legacy count argument', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const length = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + count: length + })) + + 
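+    // `count` is accepted as a legacy alias for `length`, so the same
+    // data.slice(0, length) is expected back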
expect(buffer).to.deep.equal(data.slice(0, length)) + }) + + it('reads a file with an offset and a length', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const offset = 10 + const length = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + offset, + length + })) + + expect(buffer).to.deep.equal(data.slice(offset, offset + length)) + }) + + it('reads a file with an offset and a legacy count argument', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const offset = 10 + const length = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + offset, + count: length + })) + + expect(buffer).to.deep.equal(data.slice(offset, offset + length)) + }) + + it('refuses to read a directory', async () => { + const path = '/' + + try { + await streamToBuffer(mfs.read(path)) + throw new Error('Should have errored on trying to read a directory') + } catch (err) { + expect(err.code).to.equal('ERR_NOT_FILE') + } + }) + + it('refuses to read a non-existent file', async () => { + try { + await streamToBuffer(mfs.read(`/file-${Math.random()}.txt`)) + throw new Error('Should have errored on non-existent file') + } catch (err) { + expect(err.code).to.equal('ERR_NOT_FOUND') + } + }) + + it('reads file from inside a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(mfs) + const filePath = `${shardedDirPath}/file-${Math.random()}.txt` + const content = Buffer.from([0, 1, 2, 3, 4]) + + await mfs.write(filePath, content, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(filePath)) + + expect(buffer).to.deep.equal(content) + }) +}) diff --git a/test/rm.spec.js b/test/core/rm.js similarity index 97% rename from test/rm.spec.js rename to test/core/rm.js index 0e662f0..b34aee6 100644 --- a/test/rm.spec.js +++ b/test/core/rm.js @@ -4,13 +4,13 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const createTwoShards = require('./helpers/create-two-shards') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const createTwoShards = require('../helpers/create-two-shards') const crypto = require('crypto') const { FILE_SEPARATOR -} = require('../src/core/utils/constants') +} = require('../../src/core/utils/constants') describe('rm', () => { let mfs diff --git a/test/stat.spec.js b/test/core/stat.js similarity index 97% rename from test/stat.spec.js rename to test/core/stat.js index d074d75..8b4af83 100644 --- a/test/stat.spec.js +++ b/test/core/stat.js @@ -5,8 +5,8 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect const crypto = require('crypto') -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') const mc = require('multicodec') describe('stat', () => { diff --git a/test/core/touch.js b/test/core/touch.js new file mode 100644 index 0000000..99bfe4d --- /dev/null +++ b/test/core/touch.js @@ -0,0 +1,80 @@ +/* eslint-env mocha */ +'use strict' + 
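+// unlike the CLI suites above these tests exercise a real in-memory MFS
+// instance (via the create-mfs helper) rather than a sinon stub; delay() is
+// used below so a touch always produces a strictly greater mtime than the
+// one recorded at write time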
+const expect = require('../helpers/chai') +const createMfs = require('../helpers/create-mfs') +const streamToBuffer = require('../helpers/stream-to-buffer') +const delay = require('delay') + +describe('touch', () => { + let mfs + + before(async () => { + mfs = await createMfs() + }) + + it('should update the mtime for a file', async () => { + const path = `/foo-${Date.now()}` + + await mfs.write(path, Buffer.from('Hello world'), { + create: true, + mtime: new Date() + }) + const originalMtime = (await mfs.stat(path)).mtime + await delay(1000) + await mfs.touch(path, { + flush: true + }) + + const updatedMtime = (await mfs.stat(path)).mtime + expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) + }) + + it('should update the mtime for a directory', async () => { + const path = `/foo-${Date.now()}` + + await mfs.mkdir(path, { + mtime: new Date() + }) + const originalMtime = (await mfs.stat(path)).mtime + await delay(1000) + await mfs.touch(path, { + flush: true + }) + + const updatedMtime = (await mfs.stat(path)).mtime + expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) + }) + + it('should update the mtime for a hamt-sharded-directory', async () => { + const path = `/foo-${Date.now()}` + + await mfs.mkdir(path, { + mtime: new Date() + }) + await mfs.write(`${path}/foo.txt`, Buffer.from('Hello world'), { + create: true, + shardSplitThreshold: 0 + }) + const originalMtime = (await mfs.stat(path)).mtime + await delay(1000) + await mfs.touch(path, { + flush: true + }) + + const updatedMtime = (await mfs.stat(path)).mtime + expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) + }) + + it('should create an empty file', async () => { + const path = `/foo-${Date.now()}` + + await mfs.touch(path, { + flush: true + }) + + const buffer = await streamToBuffer(mfs.read(path)) + + expect(buffer).to.deep.equal(Buffer.from([])) + }) +}) diff --git a/test/write.spec.js b/test/core/write.js similarity index 98% rename from test/write.spec.js rename to test/core/write.js index 9571e7b..b0df007 100644 --- a/test/write.spec.js +++ b/test/core/write.js @@ -7,14 +7,14 @@ const expect = chai.expect const isNode = require('detect-node') const multihash = require('multihashes') const util = require('util') -const createMfs = require('./helpers/create-mfs') -const cidAtPath = require('./helpers/cid-at-path') -const traverseLeafNodes = require('./helpers/traverse-leaf-nodes') -const createShard = require('./helpers/create-shard') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const createTwoShards = require('./helpers/create-two-shards') +const createMfs = require('../helpers/create-mfs') +const cidAtPath = require('../helpers/cid-at-path') +const traverseLeafNodes = require('../helpers/traverse-leaf-nodes') +const createShard = require('../helpers/create-shard') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const createTwoShards = require('../helpers/create-two-shards') const crypto = require('crypto') -const all = require('async-iterator-all') +const all = require('it-all') let fs, tempWrite diff --git a/test/helpers/chai.js b/test/helpers/chai.js new file mode 100644 index 0000000..c00c40d --- /dev/null +++ b/test/helpers/chai.js @@ -0,0 +1,7 @@ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +chai.use(require('chai-as-promised')) + +module.exports = chai.expect diff --git a/test/helpers/cli.js b/test/helpers/cli.js new file mode 100644 index 0000000..f196be6 --- /dev/null +++ 
b/test/helpers/cli.js
@@ -0,0 +1,16 @@
+'use strict'
+
+const yargs = require('yargs')
+const YargsPromise = require('yargs-promise')
+const mfs = require('../../src/cli')
+
+module.exports = (command, { ipfs, print = () => {}, getStdin }) => {
+  const parser = new YargsPromise(mfs(yargs), {
+    getIpfs: () => ipfs,
+    print,
+    getStdin
+  })
+
+  return parser
+    .parse(command)
+}
diff --git a/test/helpers/create-shard.js b/test/helpers/create-shard.js
index f56f827..91114de 100644
--- a/test/helpers/create-shard.js
+++ b/test/helpers/create-shard.js
@@ -1,7 +1,7 @@
 'use strict'
 
 const importer = require('ipfs-unixfs-importer')
-const last = require('async-iterator-last')
+const last = require('it-last')
 
 const createShard = async (ipld, files, shardSplitThreshold = 10) => {
   const result = await last(importer(files, ipld, {
diff --git a/test/helpers/http.js b/test/helpers/http.js
new file mode 100644
index 0000000..e8f58fd
--- /dev/null
+++ b/test/helpers/http.js
@@ -0,0 +1,15 @@
+'use strict'
+
+const Hapi = require('@hapi/hapi')
+const routes = require('../../src/http')
+
+module.exports = (request, { ipfs }) => {
+  const server = Hapi.server()
+  server.app.ipfs = ipfs
+
+  for (const key in routes) {
+    server.route(routes[key])
+  }
+
+  return server.inject(request)
+}
diff --git a/test/http/chmod.js b/test/http/chmod.js
new file mode 100644
index 0000000..19ef1cf
--- /dev/null
+++ b/test/http/chmod.js
@@ -0,0 +1,130 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('../helpers/chai')
+const http = require('../helpers/http')
+const sinon = require('sinon')
+
+function defaultOptions (modification = {}) {
+  const options = {
+    recursive: false,
+    format: 'dag-pb',
+    hashAlg: 'sha2-256',
+    flush: true,
+    shardSplitThreshold: 1000
+  }
+
+  Object.keys(modification).forEach(key => {
+    options[key] = modification[key]
+  })
+
+  return options
+}
+
+describe('chmod', () => {
+  const path = '/foo'
+  const mode = '0654'
+  let ipfs
+
+  beforeEach(() => {
+    ipfs = {
+      files: {
+        chmod: sinon.stub()
+      }
+    }
+  })
+
+  it('should update the mode for a file', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/chmod?arg=${path}&mode=${mode}`
+    }, { ipfs })
+
+    expect(ipfs.files.chmod.callCount).to.equal(1)
+    expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([
+      path,
+      mode,
+      defaultOptions()
+    ])
+  })
+
+  it('should update the mode recursively', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&recursive=true`
+    }, { ipfs })
+
+    expect(ipfs.files.chmod.callCount).to.equal(1)
+    expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([
+      path,
+      mode,
+      defaultOptions({
+        recursive: true
+      })
+    ])
+  })
+
+  it('should update the mode without flushing', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&flush=false`
+    }, { ipfs })
+
+    expect(ipfs.files.chmod.callCount).to.equal(1)
+    expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([
+      path,
+      mode,
+      defaultOptions({
+        flush: false
+      })
+    ])
+  })
+
+  it('should update the mode with a different codec', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&codec=dag-foo`
+    }, { ipfs })
+
+    expect(ipfs.files.chmod.callCount).to.equal(1)
+    expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([
+      path,
+      mode,
+      defaultOptions({
+        format: 'dag-foo'
+      })
+    ])
+  })
+
+  it('should update the mode with a different hash algorithm', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&hashAlg=sha3-256`
+    }, { ipfs })
+
+    expect(ipfs.files.chmod.callCount).to.equal(1)
+    expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([
+      path,
+      mode,
+      defaultOptions({
+        hashAlg: 'sha3-256'
+      })
+    ])
+  })
+
+  it('should update the mode with a shard split threshold', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&shardSplitThreshold=10`
+    }, { ipfs })
+
+    expect(ipfs.files.chmod.callCount).to.equal(1)
+    expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([
+      path,
+      mode,
+      defaultOptions({
+        shardSplitThreshold: 10
+      })
+    ])
+  })
+})
diff --git a/test/http/cp.js b/test/http/cp.js
new file mode 100644
index 0000000..4b2a69c
--- /dev/null
+++ b/test/http/cp.js
@@ -0,0 +1,114 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('../helpers/chai')
+const http = require('../helpers/http')
+const sinon = require('sinon')
+
+function defaultOptions (modification = {}) {
+  const options = {
+    parents: false,
+    format: 'dag-pb',
+    hashAlg: 'sha2-256',
+    flush: true,
+    shardSplitThreshold: 1000
+  }
+
+  Object.keys(modification).forEach(key => {
+    options[key] = modification[key]
+  })
+
+  return options
+}
+
+describe('cp', () => {
+  const source = 'source'
+  const dest = 'dest'
+  let ipfs
+
+  beforeEach(() => {
+    ipfs = {
+      files: {
+        cp: sinon.stub()
+      }
+    }
+  })
+
+  it('should copy files', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/cp?arg=${source}&arg=${dest}`
+    }, { ipfs })
+
+    expect(ipfs.files.cp.callCount).to.equal(1)
+    expect(ipfs.files.cp.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions()
+    ])
+  })
+
+  it('should copy files and create intermediate directories', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/cp?arg=${source}&arg=${dest}&parents=true`
+    }, { ipfs })
+
+    expect(ipfs.files.cp.callCount).to.equal(1)
+    expect(ipfs.files.cp.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        parents: true
+      })
+    ])
+  })
+
+  it('should copy files with a different codec', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/cp?arg=${source}&arg=${dest}&codec=dag-cbor`
+    }, { ipfs })
+
+    expect(ipfs.files.cp.callCount).to.equal(1)
+    expect(ipfs.files.cp.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        format: 'dag-cbor'
+      })
+    ])
+  })
+
+  it('should copy files with a different hash algorithm', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/cp?arg=${source}&arg=${dest}&hashAlg=sha3-256`
+    }, { ipfs })
+
+    expect(ipfs.files.cp.callCount).to.equal(1)
+    expect(ipfs.files.cp.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        hashAlg: 'sha3-256'
+      })
+    ])
+  })
+
+  it('should copy files with a different shard split threshold', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/cp?arg=${source}&arg=${dest}&shardSplitThreshold=10`
+    }, { ipfs })
+
+    expect(ipfs.files.cp.callCount).to.equal(1)
+    expect(ipfs.files.cp.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        shardSplitThreshold: 10
+      })
+    ])
+  })
+})
diff --git a/test/http/flush.js b/test/http/flush.js
new file mode 100644
index 0000000..cfd28f4
--- /dev/null
+++ b/test/http/flush.js
@@ -0,0 +1,45 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('../helpers/chai')
+const http = require('../helpers/http')
+const sinon = require('sinon')
+
+describe('flush', () => {
+  const path = '/foo'
+  let 
ipfs + + beforeEach(() => { + ipfs = { + files: { + flush: sinon.stub() + } + } + }) + + it('should flush a path', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/flush?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.flush.callCount).to.equal(1) + expect(ipfs.files.flush.getCall(0).args).to.deep.equal([ + path, + {} + ]) + }) + + it('should flush without a path', async () => { + await http({ + method: 'POST', + url: '/api/v0/files/flush' + }, { ipfs }) + + expect(ipfs.files.flush.callCount).to.equal(1) + expect(ipfs.files.flush.getCall(0).args).to.deep.equal([ + '/', + {} + ]) + }) +}) diff --git a/test/http/index.js b/test/http/index.js new file mode 100644 index 0000000..d9bb97a --- /dev/null +++ b/test/http/index.js @@ -0,0 +1,16 @@ +/* eslint-env mocha */ +'use strict' + +describe('http', () => { + require('./chmod') + require('./cp') + require('./flush') + require('./ls') + require('./mkdir') + require('./mv') + require('./read') + require('./rm') + require('./stat') + require('./touch') + require('./write') +}) diff --git a/test/http/ls.js b/test/http/ls.js new file mode 100644 index 0000000..cb7c602 --- /dev/null +++ b/test/http/ls.js @@ -0,0 +1,169 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') +const PassThrough = require('stream').PassThrough + +function defaultOptions (modification = {}) { + const options = { + cidBase: 'base58btc', + long: false + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('ls', () => { + const path = '/foo' + const file = { + name: 'file-name', + type: 'file-type', + size: 'file-size', + hash: 'file-hash', + mode: 'file-mode', + mtime: { + secs: 'file-mtime-secs', + nsecs: 'file-mtime-nsecs' + } + } + let ipfs + + beforeEach(() => { + ipfs = { + files: { + ls: sinon.stub().resolves([]) + } + } + }) + + it('should list a path', async () => { + ipfs.files.ls = sinon.stub().resolves([file]) + + const response = await http({ + method: 'POST', + url: `/api/v0/files/ls?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + expect(response).to.have.nested.property('result.Entries.length', 1) + expect(response).to.have.nested.property('result.Entries[0].Name', file.name) + expect(response).to.have.nested.property('result.Entries[0].Type', file.type) + expect(response).to.have.nested.property('result.Entries[0].Size', file.size) + expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash) + }) + + it('should list without a path', async () => { + await http({ + method: 'POST', + url: '/api/v0/files/ls' + }, { ipfs }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + '/', + defaultOptions() + ]) + }) + + it('should list a path with details', async () => { + const file = { + name: 'file-name', + type: 'file-type', + size: 'file-size', + hash: 'file-hash', + mode: 'file-mode', + mtime: { + secs: 'file-mtime-secs', + nsecs: 'file-mtime-nsecs' + } + } + ipfs.files.ls = sinon.stub().resolves([file]) + + const response = await http({ + method: 'POST', + url: `/api/v0/files/ls?arg=${path}&long=true` + }, { ipfs }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + long: true + }) + ]) + 
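+    // with long=true each entry also carries Mode, Mtime and MtimeNsecs; field
+    // names are upper-cased in the JSON response to match the go-ipfs HTTP API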
expect(response).to.have.nested.property('result.Entries.length', 1)
+    expect(response).to.have.nested.property('result.Entries[0].Name', file.name)
+    expect(response).to.have.nested.property('result.Entries[0].Type', file.type)
+    expect(response).to.have.nested.property('result.Entries[0].Size', file.size)
+    expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash)
+    expect(response).to.have.nested.property('result.Entries[0].Mode', file.mode)
+    expect(response).to.have.nested.property('result.Entries[0].Mtime', file.mtime.secs)
+    expect(response).to.have.nested.property('result.Entries[0].MtimeNsecs', file.mtime.nsecs)
+  })
+
+  it('should stream a path', async () => {
+    const stream = new PassThrough({
+      objectMode: true
+    })
+    stream.write(file)
+    stream.end()
+    ipfs.files.lsReadableStream = sinon.stub().returns(stream)
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/ls?arg=${path}&stream=true`
+    }, { ipfs })
+
+    expect(ipfs.files.lsReadableStream.callCount).to.equal(1)
+    expect(ipfs.files.lsReadableStream.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions()
+    ])
+  })
+})
diff --git a/test/http/mkdir.js b/test/http/mkdir.js
new file mode 100644
index 0000000..0e97a7b
--- /dev/null
+++ b/test/http/mkdir.js
@@ -0,0 +1,174 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('../helpers/chai')
+const http = require('../helpers/http')
+const sinon = require('sinon')
+
+function defaultOptions (modification = {}) {
+  const options = {
+    parents: false,
+    cidVersion: 0,
+    format: 'dag-pb',
+    hashAlg: 'sha2-256',
+    flush: true,
+    shardSplitThreshold: 1000,
+    mode: undefined,
+    mtime: undefined
+  }
+
+  Object.keys(modification).forEach(key => {
+    options[key] = modification[key]
+  })
+
+  return options
+}
+
+describe('mkdir', () => {
+  const path = '/foo'
+  let ipfs
+
+  beforeEach(() => {
+    ipfs = {
+      files: {
+        mkdir: sinon.stub()
+      }
+    }
+  })
+
+  it('should make a directory', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mkdir?arg=${path}`
+    }, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions()
+    ])
+  })
+
+  it('should make a directory with parents', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mkdir?arg=${path}&parents=true`
+    }, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        parents: true
+      })
+    ])
+  })
+
+  it('should make a directory with a different cid version', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mkdir?arg=${path}&cidVersion=1`
+    }, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        cidVersion: 1
+      })
+    ])
+  })
+
+  it('should make a directory with a different codec', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mkdir?arg=${path}&format=dag-cbor`
+    }, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        format: 'dag-cbor'
+      })
+    ])
+  })
+
+  it('should make a directory with a different hash algorithm', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mkdir?arg=${path}&hashAlg=sha3-256`
+    }, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        hashAlg: 'sha3-256'
+      })
+    ])
+  })
+
+  it('should make a directory without flushing', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mkdir?arg=${path}&flush=false`
+    }, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        flush: false
+      })
+    ])
+  })
+
+  it('should make a directory with a different shard split threshold', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mkdir?arg=${path}&shardSplitThreshold=10`
+    }, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        shardSplitThreshold: 10
+      })
+    ])
+  })
+
+  it('should make a directory with a different mode', async () => {
+    const mode = '0513'
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mkdir?arg=${path}&mode=${mode}`
+    }, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        mode: mode
+      })
+    ])
+  })
+
+  it('should make a directory with a different mtime', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mkdir?arg=${path}&mtime=5`
+    }, { ipfs })
+
+    expect(ipfs.files.mkdir.callCount).to.equal(1)
+    expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        mtime: {
+          secs: 5
+        }
+      })
+    ])
+  })
+})
diff --git a/test/http/mv.js b/test/http/mv.js
new file mode 100644
index 0000000..ae1e8b3
--- /dev/null
+++ b/test/http/mv.js
@@ -0,0 +1,164 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('../helpers/chai')
+const http = require('../helpers/http')
+const sinon = require('sinon')
+
+function defaultOptions (modification = {}) {
+  const options = {
+    parents: false,
+    recursive: false,
+    cidVersion: 0,
+    format: 'dag-pb',
+    hashAlg: 'sha2-256',
+    flush: true,
+    shardSplitThreshold: 1000
+  }
+
+  Object.keys(modification).forEach(key => {
+    options[key] = modification[key]
+  })
+
+  return options
+}
+
+describe('mv', () => {
+  const source = '/src'
+  const dest = '/dest'
+  let ipfs
+
+  beforeEach(() => {
+    ipfs = {
+      files: {
+        mv: sinon.stub()
+      }
+    }
+  })
+
+  it('should move an entry', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mv?arg=${source}&arg=${dest}`
+    }, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions()
+    ])
+  })
+
+  it('should move an entry and create parents', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mv?arg=${source}&arg=${dest}&parents=true`
+    }, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        parents: true
+      })
+    ])
+  })
+
+  it('should move an entry recursively', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mv?arg=${source}&arg=${dest}&recursive=true`
+    }, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        recursive: true
+      })
+    ])
+  })
+
+  it('should move an entry with a different cid version', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mv?arg=${source}&arg=${dest}&cidVersion=1`
+    }, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        cidVersion: 1
+      })
+    ])
+  })
+
+  it('should move an entry with a different codec', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mv?arg=${source}&arg=${dest}&format=dag-cbor`
+    }, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        format: 'dag-cbor'
+      })
+    ])
+  })
+
+  it('should move an entry with a different hash algorithm', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mv?arg=${source}&arg=${dest}&hashAlg=sha3-256`
+    }, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        hashAlg: 'sha3-256'
+      })
+    ])
+  })
+
+  it('should move an entry without flushing', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mv?arg=${source}&arg=${dest}&flush=false`
+    }, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        flush: false
+      })
+    ])
+  })
+
+  it('should move an entry with a different shard split threshold', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/mv?arg=${source}&arg=${dest}&shardSplitThreshold=10`
+    }, { ipfs })
+
+    expect(ipfs.files.mv.callCount).to.equal(1)
+    expect(ipfs.files.mv.getCall(0).args).to.deep.equal([
+      source,
+      dest,
+      defaultOptions({
+        shardSplitThreshold: 10
+      })
+    ])
+  })
+})
diff --git a/test/http/read.js b/test/http/read.js
new file mode 100644
index 0000000..ad747da
--- /dev/null
+++ b/test/http/read.js
@@ -0,0 +1,107 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('../helpers/chai')
+const http = require('../helpers/http')
+const sinon = require('sinon')
+const PassThrough = require('stream').PassThrough
+
+function defaultOptions (modification = {}) {
+  const options = {
+    offset: undefined,
+    length: undefined
+  }
+
+  Object.keys(modification).forEach(key => {
+    options[key] = modification[key]
+  })
+
+  return options
+}
+
+describe('read', () => {
+  const path = '/foo'
+  let ipfs
+
+  beforeEach(() => {
+    ipfs = {
+      files: {
+        readReadableStream: sinon.stub().callsFake(() => {
+          const stream = new PassThrough()
+
+          setImmediate(() => {
stream.emit('data', Buffer.from('hello world')) + stream.end() + }) + + return stream + }) + } + } + }) + + it('should read a path', async () => { + const response = await http({ + method: 'POST', + url: `/api/v0/files/read?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.readReadableStream.callCount).to.equal(1) + expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + expect(response).to.have.property('result', 'hello world') + }) + + it('should read a path with an offset', async () => { + const offset = 5 + const response = await http({ + method: 'POST', + url: `/api/v0/files/read?arg=${path}&offset=${offset}` + }, { ipfs }) + + expect(ipfs.files.readReadableStream.callCount).to.equal(1) + expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + offset + }) + ]) + expect(response).to.have.property('result', 'hello world') + }) + + it('should read a path with a length', async () => { + const length = 5 + const response = await http({ + method: 'POST', + url: `/api/v0/files/read?arg=${path}&length=${length}` + }, { ipfs }) + + expect(ipfs.files.readReadableStream.callCount).to.equal(1) + expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + length + }) + ]) + expect(response).to.have.property('result', 'hello world') + }) + + it('should read a path with count treated as length', async () => { + const length = 5 + const response = await http({ + method: 'POST', + url: `/api/v0/files/read?arg=${path}&count=${length}` + }, { ipfs }) + + expect(ipfs.files.readReadableStream.callCount).to.equal(1) + expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + length + }) + ]) + expect(response).to.have.property('result', 'hello world') + }) +}) diff --git a/test/http/rm.js b/test/http/rm.js new file mode 100644 index 0000000..6bc877e --- /dev/null +++ b/test/http/rm.js @@ -0,0 +1,59 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + recursive: false + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('rm', () => { + const path = '/foo' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + rm: sinon.stub().resolves() + } + } + }) + + it('should remove a path', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/rm?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + }) + + it('should remove a path recursively', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/rm?arg=${path}&recursive=true` + }, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + recursive: true + }) + ]) + }) +}) diff --git a/test/http/stat.js b/test/http/stat.js new file mode 100644 index 0000000..c8536a0 --- /dev/null +++ b/test/http/stat.js @@ -0,0 +1,120 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + withLocal: false, + hash: false, + size: false, + cidBase: 'base58btc' + } + + 
Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('stat', () => { + const path = '/foo' + const stats = { + hash: 'stats-hash', + size: 'stats-size', + cumulativeSize: 'stats-cumulativeSize', + blocks: 'stats-blocks', + type: 'stats-type', + mode: 'stats-mode', + mtime: 'stats-mtime' + } + let ipfs + + beforeEach(() => { + ipfs = { + files: { + stat: sinon.stub().resolves(stats) + } + } + }) + + it('should stat a path', async () => { + const response = await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + expect(response).to.have.nested.property('result.CumulativeSize', stats.cumulativeSize) + }) + + it('should stat a path with local', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}&withLocal=true` + }, { ipfs }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + withLocal: true + }) + ]) + }) + + it('should stat a path and only show hashes', async () => { + const response = await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}&hash=true` + }, { ipfs }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + hash: true + }) + ]) + expect(response).to.have.nested.property('result.Hash', stats.hash) + }) + + it('should stat a path and only show sizes', async () => { + const response = await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}&size=true` + }, { ipfs }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + size: true + }) + ]) + expect(response).to.have.nested.property('result.Size', stats.size) + }) + + it('should stat a path and show hashes with a different base', async () => { + const response = await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}&cidBase=base64` + }, { ipfs }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + cidBase: 'base64' + }) + ]) + expect(response).to.have.nested.property('result.Hash', stats.hash) + }) +}) diff --git a/test/http/touch.js b/test/http/touch.js new file mode 100644 index 0000000..9acecef --- /dev/null +++ b/test/http/touch.js @@ -0,0 +1,145 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + mtime: null, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('touch', () => { + const path = '/foo' + const mtime = new Date(1000000) + let ipfs + + beforeEach(() => { + ipfs = { + files: { + touch: sinon.stub() + } + } + }) + + it('should update the mtime for a file', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}` + }, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime: { + 
secs: 1000
+        }
+      })
+    ])
+  })
+
+  it('should update the mtime without flushing', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&flush=false`
+    }, { ipfs })
+
+    expect(ipfs.files.touch.callCount).to.equal(1)
+    expect(ipfs.files.touch.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        mtime: {
+          secs: 1000
+        },
+        flush: false
+      })
+    ])
+  })
+
+  it('should update the mtime with a different codec', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&format=dag-cbor`
+    }, { ipfs })
+
+    expect(ipfs.files.touch.callCount).to.equal(1)
+    expect(ipfs.files.touch.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        mtime: {
+          secs: 1000
+        },
+        format: 'dag-cbor'
+      })
+    ])
+  })
+
+  it('should update the mtime with a different hash algorithm', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&hashAlg=sha3-256`
+    }, { ipfs })
+
+    expect(ipfs.files.touch.callCount).to.equal(1)
+    expect(ipfs.files.touch.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        mtime: {
+          secs: 1000
+        },
+        hashAlg: 'sha3-256'
+      })
+    ])
+  })
+
+  it('should update the mtime with a shard split threshold', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&shardSplitThreshold=10`
+    }, { ipfs })
+
+    expect(ipfs.files.touch.callCount).to.equal(1)
+    expect(ipfs.files.touch.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        mtime: {
+          secs: 1000
+        },
+        shardSplitThreshold: 10
+      })
+    ])
+  })
+
+  it('should update the mtime with nanoseconds with a shard split threshold', async () => {
+    await http({
+      method: 'POST',
+      url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&mtimeNsecs=100&shardSplitThreshold=10`
+    }, { ipfs })
+
+    expect(ipfs.files.touch.callCount).to.equal(1)
+    expect(ipfs.files.touch.getCall(0).args).to.deep.equal([
+      path,
+      defaultOptions({
+        mtime: {
+          secs: 1000,
+          nsecs: 100
+        },
+        shardSplitThreshold: 10
+      })
+    ])
+  })
+})
diff --git a/test/http/write.js b/test/http/write.js
new file mode 100644
index 0000000..19f5ac8
--- /dev/null
+++ b/test/http/write.js
@@ -0,0 +1,317 @@
+/* eslint-env mocha */
+'use strict'
+
+const expect = require('../helpers/chai')
+const http = require('../helpers/http')
+const sinon = require('sinon')
+const FormData = require('form-data')
+const streamToPromise = require('stream-to-promise')
+
+function defaultOptions (modification = {}) {
+  const options = {
+    offset: undefined,
+    length: undefined,
+    create: false,
+    truncate: false,
+    rawLeaves: false,
+    reduceSingleLeafToSelf: false,
+    cidVersion: 0,
+    hashAlg: 'sha2-256',
+    format: 'dag-pb',
+    parents: false,
+    progress: undefined,
+    strategy: 'trickle',
+    flush: true,
+    shardSplitThreshold: 1000,
+    mode: undefined,
+    mtime: undefined
+  }
+
+  Object.keys(modification).forEach(key => {
+    options[key] = modification[key]
+  })
+
+  return options
+}
+
+async function send (text, headers = {}) {
+  const form = new FormData()
+  form.append('file-0', Buffer.from(text), {
+    header: headers
+  })
+
+  return {
+    headers: form.getHeaders(),
+    payload: await streamToPromise(form)
+  }
+}
+
+describe('write', () => {
+  const path = '/foo'
+  let ipfs
+  let content
+
+  beforeEach(() => {
+    content = Buffer.alloc(0)
+
+    ipfs = {
+      files: {
+        write: sinon.stub().callsFake(async (path, input) => {
+          for await (const buf of input) {
+ content = Buffer.concat([content, buf]) + } + + content = content.toString('utf8') + }) + } + } + }) + + it('should write to a file', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions()) + expect(content).to.equal('hello world') + }) + + it('should write to a file and create parents', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&parents=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + parents: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file and create it', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&create=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + create: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with an offset', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&offset=10`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + offset: 10 + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a length', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&length=10`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + length: 10 + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file and truncate it', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&truncate=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + truncate: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with raw leaves', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&rawLeaves=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + rawLeaves: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file and reduce a single leaf to one node', 
async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&reduceSingleLeafToSelf=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + reduceSingleLeafToSelf: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file without flushing', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&flush=false`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + flush: false + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a specified strategy', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&strategy=flat`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + strategy: 'flat' + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a specified cid version', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&cidVersion=1`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + cidVersion: 1 + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a specified codec', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&format=dag-cbor`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + format: 'dag-cbor' + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a specified hash algorithm', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&hashAlg=sha3-256`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + hashAlg: 'sha3-256' + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a specified shard split threshold', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&shardSplitThreshold=10`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + shardSplitThreshold: 10 + })) + expect(content).to.equal('hello world') + }) + + it('should write to a 
file with a specified mode', async () => { + const mode = '0577' + + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}`, + ...await send('hello world', { + mode + }) + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + mode: parseInt(mode, 8) + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a specified mtime', async () => { + const mtime = 11 + + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}`, + ...await send('hello world', { + mtime + }) + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + mtime: { + secs: 11 + } + })) + expect(content).to.equal('hello world') + }) +}) diff --git a/test/node.js b/test/node.js new file mode 100644 index 0000000..3d35201 --- /dev/null +++ b/test/node.js @@ -0,0 +1,5 @@ +'use strict' + +require('./cli') +require('./core') +require('./http') diff --git a/test/read.spec.js b/test/read.spec.js deleted file mode 100644 index 5e00d27..0000000 --- a/test/read.spec.js +++ /dev/null @@ -1,151 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const crypto = require('crypto') -const streamToBuffer = require('./helpers/stream-to-buffer') - -describe('read', () => { - let mfs - const smallFile = crypto.randomBytes(13) - - before(async () => { - mfs = await createMfs() - }) - - describe('read', () => { - it('reads a small file', async () => { - const filePath = '/small-file.txt' - - await mfs.write(filePath, smallFile, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(filePath)) - - expect(buffer).to.deep.equal(smallFile) - }) - - it('reads a file with an offset', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const offset = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - offset - })) - - expect(buffer).to.deep.equal(data.slice(offset)) - }) - - it('reads a file with a length', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - length - })) - - expect(buffer).to.deep.equal(data.slice(0, length)) - }) - - it('reads a file with a legacy count argument', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - count: length - })) - - expect(buffer).to.deep.equal(data.slice(0, length)) - }) - - it('reads a file with an offset and a length', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const offset = 10 - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - offset, - length 
- })) - - expect(buffer).to.deep.equal(data.slice(offset, offset + length)) - }) - - it('reads a file with an offset and a legacy count argument', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const offset = 10 - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - offset, - count: length - })) - - expect(buffer).to.deep.equal(data.slice(offset, offset + length)) - }) - - it('refuses to read a directory', async () => { - const path = '/' - - try { - await streamToBuffer(mfs.read(path)) - throw new Error('Should have errored on trying to read a directory') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FILE') - } - }) - - it('refuses to read a non-existent file', async () => { - try { - await streamToBuffer(mfs.read(`/file-${Math.random()}.txt`)) - throw new Error('Should have errored on non-existent file') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('reads file from inside a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const filePath = `${shardedDirPath}/file-${Math.random()}.txt` - const content = Buffer.from([0, 1, 2, 3, 4]) - - await mfs.write(filePath, content, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(filePath)) - - expect(buffer).to.deep.equal(content) - }) - }) -}) diff --git a/test/webworker.js b/test/webworker.js new file mode 100644 index 0000000..ed5d991 --- /dev/null +++ b/test/webworker.js @@ -0,0 +1,3 @@ +'use strict' + +require('./core')
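Note: every suite under test/http above drives the routes through a shared `test/helpers/http` helper that is required but not shown in this hunk. For orientation, a minimal sketch of what such a helper could look like follows. It is illustrative only: it assumes the MFS HTTP routes are exported from `src/http` as an array of Hapi route definitions and that the stubbed `ipfs` instance is handed to handlers via `server.app`; the helper actually added by this changeset may wire these pieces differently.

'use strict'

const Hapi = require('@hapi/hapi')
// Assumed export shape: an array of Hapi route definitions for the MFS endpoints
const routes = require('../../src/http')

module.exports = async (request, { ipfs } = {}) => {
  const server = Hapi.server()

  // Make the (usually sinon-stubbed) ipfs instance reachable from route handlers
  server.app.ipfs = ipfs

  routes.forEach(route => server.route(route))

  // server.inject dispatches the request in-process; no TCP socket is opened
  return server.inject(request)
}

Because the helper only needs `server.inject`, each test can stub just the `ipfs.files.*` method under test with sinon and assert on the arguments the route handler passed through, which keeps the HTTP suites independent of the core MFS implementation.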