This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Commit

chore: move shard tests to sharding block
achingbrain committed Sep 23, 2020
1 parent ac5f0d6 commit 8ed00f9
Showing 1 changed file with 117 additions and 117 deletions.
234 changes: 117 additions & 117 deletions packages/interface-ipfs-core/src/files/write.js
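For context, the relocated tests exercise MFS directory sharding driven by the shardSplitThreshold option. The sketch below illustrates the behaviour they verify; it is an illustration only, not part of this commit, and it assumes a standalone js-ipfs node created with ipfs-core's create() (the option names are taken from the test code).

const { create } = require('ipfs-core')

async function main () {
  const ipfs = await create()

  const shardSplitThreshold = 10
  const dirPath = `/sharded-dir-${Math.random()}`

  // Create the directory with a low shard split threshold so sharding
  // kicks in after only a handful of entries
  await ipfs.files.mkdir(dirPath, { shardSplitThreshold })

  // Writing more entries than the threshold converts the plain directory
  // into a HAMT-sharded directory
  for (let i = 0; i <= shardSplitThreshold; i++) {
    await ipfs.files.write(`${dirPath}/file-${i}`, Uint8Array.from([0, 1, 2, 3]), {
      create: true,
      shardSplitThreshold
    })
  }

  // A sharded directory still reports type 'directory' from files.stat
  const stats = await ipfs.files.stat(dirPath)
  console.log(stats.type, stats.cid.toString())

  await ipfs.stop()
}

main().catch(console.error)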
@@ -465,41 +465,6 @@ module.exports = (common, options) => {
expect(actualBytes).to.deep.equal(newDataStream)
})

it('shards a large directory when writing too many links to it', async () => {
const shardSplitThreshold = 10
const dirPath = `/sharded-dir-${Math.random()}`
const newFile = `file-${Math.random()}`
const newFilePath = `/${dirPath}/${newFile}`

await ipfs.files.mkdir(dirPath, {
shardSplitThreshold
})

for (let i = 0; i < shardSplitThreshold; i++) {
await ipfs.files.write(`/${dirPath}/file-${Math.random()}`, Uint8Array.from([0, 1, 2, 3]), {
create: true,
shardSplitThreshold
})
}

await expect(ipfs.files.stat(dirPath)).to.eventually.have.property('type', 'directory')

await ipfs.files.write(newFilePath, Uint8Array.from([0, 1, 2, 3]), {
create: true,
shardSplitThreshold
})

await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true()
await expect(ipfs.files.stat(dirPath)).to.eventually.have.property('type', 'directory')

const files = await all(ipfs.files.ls(dirPath, {
long: true
}))

// new file should be in directory
expect(files.filter(file => file.name === newFile).pop()).to.be.ok()
})

it('writes a file with a different CID version to the parent', async () => {
const directory = `cid-versions-${Math.random()}`
const directoryPath = `/${directory}`
@@ -616,88 +581,6 @@ module.exports = (common, options) => {
expect(actualBytes).to.deep.equal(expectedBytes)
})

it('results in the same hash as a sharded directory created by the importer when adding a new file', async function () {
const {
nextFile,
dirWithSomeFiles,
dirPath
} = await createTwoShards(ipfs, 75)

await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath)

await ipfs.files.write(nextFile.path, nextFile.content, {
create: true
})

const stats = await ipfs.files.stat(dirPath)
const updatedDirCid = stats.cid

await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true()
expect(stats.type).to.equal('directory')
expect(updatedDirCid.toString()).to.equal('QmbLw9uCrQaFgweMskqMrsVKTwwakSg94GuMT3zht1P7CQ')
})

it('results in the same hash as a sharded directory created by the importer when creating a new subshard', async function () {
const {
nextFile,
dirWithSomeFiles,
dirPath
} = await createTwoShards(ipfs, 100)

await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath)

await ipfs.files.write(nextFile.path, nextFile.content, {
create: true
})

const stats = await ipfs.files.stat(dirPath)
const updatedDirCid = stats.cid

expect(updatedDirCid.toString()).to.equal('QmcGTKoaZeMxVenyxnkP2riibE8vSEPobkN1oxvcEZpBW5')
})

it('results in the same hash as a sharded directory created by the importer when adding a file to a subshard', async function () {
const {
nextFile,
dirWithSomeFiles,
dirPath
} = await createTwoShards(ipfs, 82)

await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath)

await ipfs.files.write(nextFile.path, nextFile.content, {
create: true
})

const stats = await ipfs.files.stat(dirPath)
const updatedDirCid = stats.cid

await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true()
expect(stats.type).to.equal('directory')
expect(updatedDirCid.toString()).to.deep.equal('QmXeJ4ercHcxdiX7Vxm1Hit9AwsTNXcwCw5Ad32yW2HdHR')
})

it('results in the same hash as a sharded directory created by the importer when adding a file to a subshard of a subshard', async function () {
const {
nextFile,
dirWithSomeFiles,
dirPath
} = await createTwoShards(ipfs, 2187)

await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath)

await ipfs.files.write(nextFile.path, nextFile.content, {
create: true
})

const stats = await ipfs.files.stat(dirPath)
const updatedDirCid = stats.cid

await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true()
expect(stats.type).to.equal('directory')
expect(updatedDirCid.toString()).to.deep.equal('QmY4o7GNvr5eZPnT6k6ALp5zkQ4eiUkJQ6eeUNsdSiqS4f')
})

it('should write file and specify mode as a string', async function () {
const mode = '0321'
await testMode(mode, parseInt(mode, 8))
@@ -772,6 +655,123 @@ module.exports = (common, options) => {
ipfs = ipfsd.api
})

it('shards a large directory when writing too many links to it', async () => {
const shardSplitThreshold = 10
const dirPath = `/sharded-dir-${Math.random()}`
const newFile = `file-${Math.random()}`
const newFilePath = `/${dirPath}/${newFile}`

await ipfs.files.mkdir(dirPath, {
shardSplitThreshold
})

for (let i = 0; i < shardSplitThreshold; i++) {
await ipfs.files.write(`/${dirPath}/file-${Math.random()}`, Uint8Array.from([0, 1, 2, 3]), {
create: true,
shardSplitThreshold
})
}

await expect(ipfs.files.stat(dirPath)).to.eventually.have.property('type', 'directory')

await ipfs.files.write(newFilePath, Uint8Array.from([0, 1, 2, 3]), {
create: true,
shardSplitThreshold
})

await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true()
await expect(ipfs.files.stat(dirPath)).to.eventually.have.property('type', 'directory')

const files = await all(ipfs.files.ls(dirPath, {
long: true
}))

// new file should be in directory
expect(files.filter(file => file.name === newFile).pop()).to.be.ok()
})

it('results in the same hash as a sharded directory created by the importer when adding a new file', async function () {
const {
nextFile,
dirWithSomeFiles,
dirPath
} = await createTwoShards(ipfs, 75)

await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath)

await ipfs.files.write(nextFile.path, nextFile.content, {
create: true
})

const stats = await ipfs.files.stat(dirPath)
const updatedDirCid = stats.cid

await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true()
expect(stats.type).to.equal('directory')
expect(updatedDirCid.toString()).to.equal('QmbLw9uCrQaFgweMskqMrsVKTwwakSg94GuMT3zht1P7CQ')
})

it('results in the same hash as a sharded directory created by the importer when creating a new subshard', async function () {
const {
nextFile,
dirWithSomeFiles,
dirPath
} = await createTwoShards(ipfs, 100)

await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath)

await ipfs.files.write(nextFile.path, nextFile.content, {
create: true
})

const stats = await ipfs.files.stat(dirPath)
const updatedDirCid = stats.cid

expect(updatedDirCid.toString()).to.equal('QmcGTKoaZeMxVenyxnkP2riibE8vSEPobkN1oxvcEZpBW5')
})

it('results in the same hash as a sharded directory created by the importer when adding a file to a subshard', async function () {
const {
nextFile,
dirWithSomeFiles,
dirPath
} = await createTwoShards(ipfs, 82)

await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath)

await ipfs.files.write(nextFile.path, nextFile.content, {
create: true
})

const stats = await ipfs.files.stat(dirPath)
const updatedDirCid = stats.cid

await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true()
expect(stats.type).to.equal('directory')
expect(updatedDirCid.toString()).to.deep.equal('QmXeJ4ercHcxdiX7Vxm1Hit9AwsTNXcwCw5Ad32yW2HdHR')
})

it('results in the same hash as a sharded directory created by the importer when adding a file to a subshard of a subshard', async function () {
const {
nextFile,
dirWithSomeFiles,
dirPath
} = await createTwoShards(ipfs, 2187)

await ipfs.files.cp(`/ipfs/${dirWithSomeFiles}`, dirPath)

await ipfs.files.write(nextFile.path, nextFile.content, {
create: true
})

const stats = await ipfs.files.stat(dirPath)
const updatedDirCid = stats.cid

await expect(isShardAtPath(dirPath, ipfs)).to.eventually.be.true()
expect(stats.type).to.equal('directory')
expect(updatedDirCid.toString()).to.deep.equal('QmY4o7GNvr5eZPnT6k6ALp5zkQ4eiUkJQ6eeUNsdSiqS4f')
})

it('writes a file to an already sharded directory', async () => {
const shardedDirPath = await createShardedDirectory(ipfs)

