diff --git a/README.md b/README.md
index 78af7621..cf959ab1 100644
--- a/README.md
+++ b/README.md
@@ -165,9 +165,9 @@ filesStream.on('data', (file) => file.content.pipe(process.stdout))
 const Exporter = require('ipfs-unixfs-engine').Exporter
 ```
 
-### new Exporter(<cid>, <ipldResolver>)
+### new Exporter(<cid>, <ipldResolver>, <options>)
 
-Uses the given [dag API] or an [ipld-resolver instance][] to fetch an IPFS [UnixFS][] object(s) by their multiaddress.
+Uses the given [dag API][] or an [ipld-resolver instance][] to fetch an IPFS [UnixFS][] object(s) by their multiaddress.
 
 Creates a new readable stream in object mode that outputs objects of the form
 
@@ -178,57 +178,6 @@ Creates a new readable stream in object mode that outputs objects of the form
 }
 ```
 
-Errors are received as with a normal stream, by listening on the `'error'` event to be emitted.
-
-
-[dag API]: https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md
-[ipld-resolver instance]: https://github.com/ipld/js-ipld-resolver
-[UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs
-
-## Reader
-
-The `reader` allows you to receive part or all of a file as a [pull-stream].
-
-#### Reader example
-
-```js
-const readable = require('ipfs-unixfs-engine').readable
-const pull = require('pull-stream')
-const drain = require('pull-stream/sinks/collect')
-
-pull(
-  readable(cid, ipldResolver)
-  collect((error, chunks) => {
-    // do something with the file chunks and/or handle errors
-  })
-)
-```
-
-#### Reader API
-
-```js
-const reader = require('ipfs-unixfs-engine').reader
-```
-
-### reader(<cid>, <ipldResolver>, <begin>, <end>)
-
-Uses the given [dag API][] or an [ipld-resolver instance][] to fetch an IPFS [UnixFS][] object by their multiaddress.
-
-Creates a new [pull-stream][] that sends the requested chunks of data as a series of [Buffer][] objects.
-
-```js
-const readable = require('ipfs-unixfs-engine').readable
-const pull = require('pull-stream')
-const drain = require('pull-stream/sinks/drain')
-
-pull(
-  readable(cid, ipldResolver),
-  drain((chunk) => {
-    // do something with the file chunk
-  })
-)
-```
-
 #### `begin` and `end`
 
 `begin` and `end` arguments can optionally be passed to the reader function. These follow the same semantics as the JavaScript [`Array.slice(begin, end)`][] method.
@@ -240,14 +189,17 @@ A negative `begin` starts the slice from the end of the stream and a negative `e
 See [the tests](test/reader.js) for examples of using these arguments.
 
 ```js
-const readable = require('ipfs-unixfs-engine').readable
+const exporter = require('ipfs-unixfs-engine').exporter
 const pull = require('pull-stream')
 const drain = require('pull-stream/sinks/drain')
 
 pull(
-  readable(cid, ipldResolver, 0, 10)
-  drain((chunk) => {
-    // chunk is a Buffer containing only the first 10 bytes of the stream
+  exporter(cid, ipldResolver, {
+    begin: 0,
+    end: 10
+  }),
+  drain((file) => {
+    // file.content is a pull stream containing only the first 10 bytes of the file
   })
 )
 ```
@@ -257,23 +209,22 @@ pull(
 
 ### Errors
 
 Errors are received by [pull-stream][] sinks.
```js -const readable = require('ipfs-unixfs-engine').readable +const exporter = require('ipfs-unixfs-engine').exporter const pull = require('pull-stream') const drain = require('pull-stream/sinks/collect') pull( - readable(cid, ipldResolver, 0, 10) + exporter(cid, ipldResolver) collect((error, chunks) => { // handle the error }) ) ``` -[pull-stream]: https://www.npmjs.com/package/pull-stream -[Buffer]: https://www.npmjs.com/package/buffer [dag API]: https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md [ipld-resolver instance]: https://github.com/ipld/js-ipld-resolver [UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs +[pull-stream]: https://www.npmjs.com/package/pull-stream [`Array.slice(begin, end)`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/slice ## Contribute diff --git a/src/exporter/file.js b/src/exporter/file.js index 38259eb4..02b592a1 100644 --- a/src/exporter/file.js +++ b/src/exporter/file.js @@ -1,13 +1,14 @@ 'use strict' const traverse = require('pull-traverse') +const traverseSlice = require('./traverse-slice') const UnixFS = require('ipfs-unixfs') const CID = require('cids') const pull = require('pull-stream') const paramap = require('pull-paramap') // Logic to export a single (possibly chunked) unixfs file. -module.exports = (node, name, path, pathRest, resolve, size, dag, parent, depth) => { +module.exports = (node, name, path, pathRest, resolve, size, dag, parent, depth, begin, end) => { function getData (node) { try { const file = UnixFS.unmarshal(node.data) @@ -31,19 +32,27 @@ module.exports = (node, name, path, pathRest, resolve, size, dag, parent, depth) return pull.empty() } - let content = pull( - traverse.depthFirst(node, visitor), - pull.map(getData) - ) - const file = UnixFS.unmarshal(node.data) + const fileSize = size || file.fileSize() + + let content + + if (!isNaN(begin)) { + content = traverseSlice(node, dag, begin, end) + } else { + content = pull( + traverse.depthFirst(node, visitor), + pull.map(getData) + ) + } + return pull.values([{ depth: depth, content: content, name: name, path: path, hash: node.multihash, - size: size || file.fileSize(), + size: fileSize, type: 'file' }]) } diff --git a/src/exporter/index.js b/src/exporter/index.js index 14a174dd..365257b8 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -36,11 +36,13 @@ function pathBaseAndRest (path) { } const defaultOptions = { - maxDepth: Infinity + maxDepth: Infinity, + begin: undefined, + end: undefined } -module.exports = (path, dag, _options) => { - const options = Object.assign({}, defaultOptions, _options) +module.exports = (path, dag, options) => { + options = Object.assign({}, defaultOptions, options) let dPath try { diff --git a/src/exporter/resolve.js b/src/exporter/resolve.js index 1aa8976f..369d063c 100644 --- a/src/exporter/resolve.js +++ b/src/exporter/resolve.js @@ -32,14 +32,14 @@ function createResolver (dag, options, depth, parent) { return pull.error(new Error('no depth')) } if (item.object) { - return cb(null, resolveItem(item.object, item)) + return cb(null, resolveItem(item.object, item, options.begin, options.end)) } dag.get(new CID(item.multihash), (err, node) => { if (err) { return cb(err) } // const name = item.fromPathRest ? 
item.name : item.path
-        cb(null, resolveItem(node.value, item))
+        cb(null, resolveItem(node.value, item, options.begin, options.end))
       })
     }),
     pull.flatten(),
@@ -47,18 +47,18 @@
     pull.filter((node) => node.depth <= options.maxDepth)
   )
 
-  function resolveItem (node, item) {
-    return resolve(node, item.name, item.path, item.pathRest, item.size, dag, item.parent || parent, item.depth)
+  function resolveItem (node, item, begin, end) {
+    return resolve(node, item.name, item.path, item.pathRest, item.size, dag, item.parent || parent, item.depth, begin, end)
   }
 
-  function resolve (node, name, path, pathRest, size, dag, parentNode, depth) {
+  function resolve (node, name, path, pathRest, size, dag, parentNode, depth, begin, end) {
     const type = typeOf(node)
     const nodeResolver = resolvers[type]
     if (!nodeResolver) {
       return pull.error(new Error('Unkown node type ' + type))
     }
     const resolveDeep = createResolver(dag, options, depth, node)
-    return nodeResolver(node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth)
+    return nodeResolver(node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth, begin, end)
   }
 }
diff --git a/src/exporter/traverse-slice.js b/src/exporter/traverse-slice.js
new file mode 100644
index 00000000..16826417
--- /dev/null
+++ b/src/exporter/traverse-slice.js
@@ -0,0 +1,104 @@
+'use strict'
+
+const CID = require('cids')
+const pull = require('pull-stream')
+const asyncValues = require('pull-async-values')
+const asyncMap = require('pull-stream/throughs/async-map')
+const map = require('pull-stream/throughs/map')
+const UnixFS = require('ipfs-unixfs')
+const waterfall = require('async/waterfall')
+
+module.exports = (fileNode, dag, begin = 0, end) => {
+  let streamPosition = 0
+
+  return pull(
+    asyncValues((cb) => {
+      const meta = UnixFS.unmarshal(fileNode.data)
+
+      if (meta.type !== 'file') {
+        return cb(new Error(`Node ${fileNode} was not a file (was ${meta.type}), can only read files`))
+      }
+
+      const fileSize = meta.fileSize()
+
+      if (!end || end > fileSize) {
+        end = fileSize
+      }
+
+      if (begin < 0) {
+        begin = fileSize + begin
+      }
+
+      if (end < 0) {
+        end = fileSize + end
+      }
+
+      const links = fileNode.links
+
+      if (!links || !links.length) {
+        if (meta.data && meta.data.length) {
+          // file was small enough to fit in one DAGNode so has no links
+          return cb(null, [(done) => done(null, meta.data)])
+        }
+
+        return cb(new Error(`Path ${fileNode} had no links or data`))
+      }
+
+      const linkedDataSize = links.reduce((acc, curr) => acc + curr.size, 0)
+      const overhead = (linkedDataSize - meta.fileSize()) / links.length
+
+      // create an array of functions to fetch link data
+      cb(null, links.map((link) => (done) => {
+        // DAGNode Links report unixfs object data sizes $overhead bytes (typically 14)
+        // larger than they actually are due to the protobuf wrapper
+        const bytesInLinkedObjectData = link.size - overhead
+
+        if (begin > (streamPosition + bytesInLinkedObjectData)) {
+          // Start byte is after this block so skip it
+          streamPosition += bytesInLinkedObjectData
+
+          return done()
+        }
+
+        if (end < streamPosition) {
+          // End byte was before this block so skip it
+          streamPosition += bytesInLinkedObjectData
+
+          return done()
+        }
+
+        // transform the multihash to a cid, the cid to a node and the node to some data
+        waterfall([
+          (next) => dag.get(new CID(link.multihash), next),
+          (node, next) => next(null, node.value.data),
+          (data, next) => 
next(null, UnixFS.unmarshal(data).data) + ], done) + })) + }), + asyncMap((loadLinkData, cb) => loadLinkData(cb)), + pull.filter(Boolean), + map((data) => { + const block = extractDataFromBlock(data, streamPosition, begin, end) + + streamPosition += data.length + + return block + }) + ) +} + +function extractDataFromBlock (block, streamPosition, begin, end) { + const blockLength = block.length + + if (end - streamPosition < blockLength) { + // If the end byte is in the current block, truncate the block to the end byte + block = block.slice(0, end - streamPosition) + } + + if (begin > streamPosition && begin < (streamPosition + blockLength)) { + // If the start byte is in the current block, skip to the start byte + block = block.slice(begin - streamPosition) + } + + return block +} diff --git a/src/index.js b/src/index.js index 740aae8b..9ca42824 100644 --- a/src/index.js +++ b/src/index.js @@ -2,4 +2,3 @@ exports.importer = exports.Importer = require('./importer') exports.exporter = exports.Exporter = require('./exporter') -exports.reader = exports.Reader = require('./reader') diff --git a/src/reader/index.js b/src/reader/index.js deleted file mode 100644 index 880a6d6c..00000000 --- a/src/reader/index.js +++ /dev/null @@ -1,137 +0,0 @@ -'use strict' - -const CID = require('cids') -const pull = require('pull-stream') -const asyncValues = require('pull-async-values') -const asyncMap = require('pull-stream/throughs/async-map') -const map = require('pull-stream/throughs/map') -const UnixFS = require('ipfs-unixfs') -const toB58String = require('multihashes').toB58String -const waterfall = require('async/waterfall') - -module.exports = (path, ipldResolver, begin = 0, end) => { - let streamPosition = 0 - - return pull( - asyncValues((cb) => { - waterfall([ - (next) => toCid(path, next), - (cid, next) => ipldResolver.get(cid, next), - (node, next) => { - const meta = UnixFS.unmarshal(node.value.data) - - if (meta.type !== 'file') { - return next(new Error(`Path ${path} was not a file (was ${meta.type}), can only read files`)) - } - - const fileSize = meta.fileSize() - - if (!end || end > fileSize) { - end = fileSize - } - - if (begin < 0) { - begin = fileSize + begin - } - - if (end < 0) { - end = fileSize + end - } - - const links = node.value.links - - if (!links || !links.length) { - if (meta.data && meta.data.length) { - // file was small enough to fit in one DAGNode so has no links - return next(null, [(done) => done(null, meta.data)]) - } - - return next(new Error(`Path ${path} had no links or data`)) - } - - const linkedDataSize = links.reduce((acc, curr) => acc + curr.size, 0) - const overhead = (linkedDataSize - meta.fileSize()) / links.length - - // create an array of functions to fetch link data - next(null, links.map((link) => (done) => { - // DAGNode Links report unixfs object data sizes $overhead bytes (typically 14) - // larger than they actually are due to the protobuf wrapper - const bytesInLinkedObjectData = link.size - overhead - - if (begin > (streamPosition + bytesInLinkedObjectData)) { - // Start byte is after this block so skip it - streamPosition += bytesInLinkedObjectData - - return done() - } - - if (end < streamPosition) { - // End byte was before this block so skip it - streamPosition += bytesInLinkedObjectData - - return done() - } - - // transform the multihash to a cid, the cid to a node and the node to some data - waterfall([ - (next) => toCid(link.multihash, next), - (cid, next) => ipldResolver.get(cid, next), - (node, next) => next(null, node.value.data), - 
(data, next) => next(null, UnixFS.unmarshal(data).data) - ], done) - })) - } - ], cb) - }), - asyncMap((loadLinkData, cb) => loadLinkData(cb)), - pull.filter(Boolean), - map((data) => { - const block = extractDataFromBlock(data, streamPosition, begin, end) - - streamPosition += data.length - - return block - }) - ) -} - -function toCid (input, callback) { - let path = input - let cid - - try { - if (Buffer.isBuffer(path)) { - path = toB58String(path) - } - - if (path.indexOf('/ipfs/') === 0) { - path = path.substring('/ipfs/'.length) - } - - if (path.charAt(path.length - 1) === '/') { - path = path.substring(0, path.length - 1) - } - - cid = new CID(path) - } catch (error) { - return callback(new Error(`Path '${input}' was invalid: ${error.message}`)) - } - - callback(null, cid) -} - -function extractDataFromBlock (block, streamPosition, begin, end) { - const blockLength = block.length - - if (end - streamPosition < blockLength) { - // If the end byte is in the current block, truncate the block to the end byte - block = block.slice(0, end - streamPosition) - } - - if (begin > streamPosition && begin < (streamPosition + blockLength)) { - // If the start byte is in the current block, skip to the start byte - block = block.slice(begin - streamPosition) - } - - return block -} diff --git a/test/browser.js b/test/browser.js index 760d2a24..6cf3280f 100644 --- a/test/browser.js +++ b/test/browser.js @@ -60,9 +60,6 @@ describe('IPFS data importing tests on the Browser', function () { // require('./exporter')(repo) // require('./exporter-subtree')(repo) - // Reader - require('./reader')(repo) - // Other require('./import-export')(repo) require('./import-export-nested-dir')(repo) diff --git a/test/exporter.js b/test/exporter.js index b5e4a3c1..83fe9f52 100644 --- a/test/exporter.js +++ b/test/exporter.js @@ -15,6 +15,7 @@ const loadFixture = require('aegir/fixtures') const unixFSEngine = require('./../src') const exporter = unixFSEngine.exporter +const importer = unixFSEngine.importer const bigFile = loadFixture('test/fixtures/1.2MiB.txt') @@ -22,6 +23,59 @@ module.exports = (repo) => { describe('exporter', () => { let ipld + function addAndReadTestFile ({file, begin, end, strategy = 'balanced', path = '/foo'}, cb) { + pull( + pull.values([{ + path, + content: file + }]), + importer(ipld, { + strategy + }), + pull.collect((error, nodes) => { + expect(error).to.not.exist() + expect(nodes.length).to.be.eql(1) + + pull( + exporter(nodes[0].multihash, ipld, { + begin, end + }), + pull.collect((error, files) => { + if (error) { + return cb(error) + } + + readFile(files[0], cb) + }) + ) + }) + ) + } + + function checkBytesThatSpanBlocks (strategy, cb) { + const bytesInABlock = 262144 + const bytes = Buffer.alloc(bytesInABlock + 100, 0) + + bytes[bytesInABlock - 1] = 1 + bytes[bytesInABlock] = 2 + bytes[bytesInABlock + 1] = 3 + + addAndReadTestFile({ + file: bytes, + begin: bytesInABlock - 1, + end: bytesInABlock + 2, + strategy + }, (error, data) => { + if (error) { + return cb(error) + } + + expect(data).to.deep.equal(Buffer.from([1, 2, 3])) + + cb() + }) + } + before(() => { const bs = new BlockService(repo) ipld = new Ipld(bs) @@ -73,6 +127,33 @@ module.exports = (repo) => { ) }) + it('export a chunk of a file with no links', (done) => { + const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8' + const begin = 0 + const end = 5 + + pull( + zip( + pull( + ipld.getStream(new CID(hash)), + pull.map((res) => UnixFS.unmarshal(res.value.data)) + ), + exporter(hash, ipld, { + begin, + end + }) + ), + 
pull.collect((err, values) => { + expect(err).to.not.exist() + + const unmarsh = values[0][0] + const file = values[0][1] + + fileEql(file, unmarsh.data.slice(begin, end), done) + }) + ) + }) + it('export a small file with links', function (done) { this.timeout(30 * 1000) const hash = 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q' @@ -86,6 +167,25 @@ module.exports = (repo) => { ) }) + it('exports a chunk of a small file with links', function (done) { + this.timeout(30 * 1000) + const hash = 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q' + const begin = 0 + const end = 5 + + pull( + exporter(hash, ipld, { + begin, + end + }), + pull.collect((err, files) => { + expect(err).to.not.exist() + + fileEql(files[0], bigFile.slice(begin, end), done) + }) + ) + }) + it('export a small file with links using CID instead of multihash', function (done) { this.timeout(30 * 1000) const cid = new CID('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') @@ -100,6 +200,25 @@ module.exports = (repo) => { ) }) + it('exports a chunk of a small file with links using CID instead of multihash', function (done) { + this.timeout(30 * 1000) + const cid = new CID('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') + const begin = 0 + const end = 5 + + pull( + exporter(cid, ipld, { + begin, + end + }), + pull.collect((err, files) => { + expect(err).to.not.exist() + + fileEql(files[0], bigFile.slice(begin, end), done) + }) + ) + }) + it('export a large file > 5mb', function (done) { this.timeout(30 * 1000) const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE' @@ -114,6 +233,47 @@ module.exports = (repo) => { ) }) + it('exports a chunk of a large file > 5mb', function (done) { + this.timeout(30 * 1000) + const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE' + const begin = 0 + const end = 5 + + pull( + exporter(hash, ipld, { + begin, + end + }), + pull.collect((err, files) => { + expect(err).to.not.exist() + + expect(files[0]).to.have.property('path', 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE') + fileEql(files[0], null, done) + }) + ) + }) + + it('exports a chunk of a large file > 5mb made from multiple blocks', function (done) { + this.timeout(30 * 1000) + const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE' + const bytesInABlock = 262144 + const begin = bytesInABlock - 1 + const end = bytesInABlock + 1 + + pull( + exporter(hash, ipld, { + begin, + end + }), + pull.collect((err, files) => { + expect(err).to.not.exist() + + expect(files[0]).to.have.property('path', 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE') + fileEql(files[0], null, done) + }) + ) + }) + it('export a directory', function (done) { this.timeout(30 * 1000) const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN' @@ -205,6 +365,144 @@ module.exports = (repo) => { ) }) + it('reads bytes with a begin', (done) => { + addAndReadTestFile({ + file: Buffer.from([0, 1, 2, 3]), + begin: 1 + }, (error, data) => { + if (error) { + return done(error) + } + + expect(data).to.deep.equal(Buffer.from([1, 2, 3])) + + done() + }) + }) + + it('reads bytes with a negative begin', (done) => { + addAndReadTestFile({ + file: Buffer.from([0, 1, 2, 3]), + begin: -1 + }, (error, data) => { + if (error) { + return done(error) + } + + expect(data).to.deep.equal(Buffer.from([3])) + + done() + }) + }) + + it('reads bytes with an end', (done) => { + addAndReadTestFile({ + file: Buffer.from([0, 1, 2, 3]), + begin: 0, + end: 1 + }, (error, data) => { + if (error) { + return done(error) + } + + expect(data).to.deep.equal(Buffer.from([0])) + + done() 
+ }) + }) + + it('reads bytes with a negative end', (done) => { + addAndReadTestFile({ + file: Buffer.from([0, 1, 2, 3, 4]), + begin: 2, + end: -1 + }, (error, data) => { + if (error) { + return done(error) + } + + expect(data).to.deep.equal(Buffer.from([2, 3])) + + done() + }) + }) + + it('reads bytes with an begin and an end', (done) => { + addAndReadTestFile({ + file: Buffer.from([0, 1, 2, 3, 4]), + begin: 1, + end: 4 + }, (error, data) => { + if (error) { + return done(error) + } + + expect(data).to.deep.equal(Buffer.from([1, 2, 3])) + + done() + }) + }) + + it('reads bytes with a negative begin and a negative end that point to the same byte', (done) => { + addAndReadTestFile({ + file: Buffer.from([0, 1, 2, 3, 4]), + begin: -1, + end: -1 + }, (error, data) => { + if (error) { + return done(error) + } + + expect(data).to.deep.equal(Buffer.from([])) + + done() + }) + }) + + it('reads bytes with a negative begin and a negative end', (done) => { + addAndReadTestFile({ + file: Buffer.from([0, 1, 2, 3, 4]), + begin: -2, + end: -1 + }, (error, data) => { + if (error) { + return done(error) + } + + expect(data).to.deep.equal(Buffer.from([3])) + + done() + }) + }) + + it('reads bytes to the end of the file when end is larger than the file', (done) => { + addAndReadTestFile({ + file: Buffer.from([0, 1, 2, 3]), + begin: 0, + end: 100 + }, (error, data) => { + if (error) { + return done(error) + } + + expect(data).to.deep.equal(Buffer.from([0, 1, 2, 3])) + + done() + }) + }) + + it('reads bytes with an offset and a length that span blocks using balanced layout', (done) => { + checkBytesThatSpanBlocks('balanced', done) + }) + + it('reads bytes with an offset and a length that span blocks using flat layout', (done) => { + checkBytesThatSpanBlocks('flat', done) + }) + + it('reads bytes with an offset and a length that span blocks using trickle layout', (done) => { + checkBytesThatSpanBlocks('trickle', done) + }) + // TODO: This needs for the stores to have timeouts, // otherwise it is impossible to predict if a file doesn't // really exist @@ -223,24 +521,34 @@ module.exports = (repo) => { }) } -function fileEql (f1, f2, done) { - pull( - f1.content, - pull.collect((err, data) => { - if (err) { - return done(err) +function fileEql (actual, expected, done) { + readFile(actual, (error, data) => { + if (error) { + return done(error) + } + + try { + if (expected) { + expect(data).to.eql(expected) + } else { + expect(data).to.exist() } + } catch (err) { + return done(err) + } + done() + }) +} - try { - if (f2) { - expect(Buffer.concat(data)).to.eql(f2) - } else { - expect(data).to.exist() - } - } catch (err) { - return done(err) +function readFile (file, done) { + pull( + file.content, + pull.collect((error, data) => { + if (error) { + return done(error) } - done() + + done(null, Buffer.concat(data)) }) ) } diff --git a/test/node.js b/test/node.js index 315235c5..8bc82510 100644 --- a/test/node.js +++ b/test/node.js @@ -61,9 +61,6 @@ describe('IPFS UnixFS Engine', () => { require('./exporter')(repo) require('./exporter-subtree')(repo) - // Reader - require('./reader')(repo) - // Other require('./import-export')(repo) require('./import-export-nested-dir')(repo) diff --git a/test/reader.js b/test/reader.js deleted file mode 100644 index 54a7866e..00000000 --- a/test/reader.js +++ /dev/null @@ -1,241 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const reader = require('../src').reader - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const BlockService = 
require('ipfs-block-service') -const Ipld = require('ipld') -const pull = require('pull-stream') -const values = require('pull-stream/sources/values') -const collect = require('pull-stream/sinks/collect') -const importer = require('./../src').importer - -module.exports = (repo) => { - describe('reader', function () { - let ipld - - function addAndReadTestFile ({file, begin, end, strategy = 'balanced'}, cb) { - pull( - values([{ - path: '/foo', - content: file - }]), - importer(ipld, { - strategy - }), - collect((error, nodes) => { - expect(error).to.not.exist() - expect(nodes.length).to.be.eql(1) - - pull( - reader(nodes[0].multihash, ipld, begin, end), - collect((error, results) => { - cb(error, Buffer.concat(results)) - }) - ) - }) - ) - } - - function checkBytesThatSpanBlocks (strategy, cb) { - const bytesInABlock = 262144 - const bytes = Buffer.alloc(bytesInABlock + 100, 0) - - bytes[bytesInABlock - 1] = 1 - bytes[bytesInABlock] = 2 - bytes[bytesInABlock + 1] = 3 - - addAndReadTestFile({ - file: bytes, - begin: bytesInABlock - 1, - end: bytesInABlock + 2, - strategy - }, (error, data) => { - if (error) { - return cb(error) - } - - expect(data).to.deep.equal(Buffer.from([1, 2, 3])) - - cb() - }) - } - - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld(bs) - }) - - it('fails on invalid path', (done) => { - pull( - reader('?!?', ipld), - collect((error) => { - expect(error.message).to.contain("Path '?!?' was invalid: Non-base58 character") - - done() - }) - ) - }) - - it('fails on non-file', (done) => { - addAndReadTestFile({ - file: undefined - }, (error) => { - expect(error.message).to.contain('was not a file') - - done() - }) - }) - - it('fails on file with no links', (done) => { - addAndReadTestFile({ - file: Buffer.from([]) - }, (error) => { - expect(error.message).to.contain('had no links') - - done() - }) - }) - - it('reads bytes with a begin', (done) => { - addAndReadTestFile({ - file: Buffer.from([0, 1, 2, 3]), - begin: 1 - }, (error, data) => { - if (error) { - return done(error) - } - - expect(data).to.deep.equal(Buffer.from([1, 2, 3])) - - done() - }) - }) - - it('reads bytes with a negative begin', (done) => { - addAndReadTestFile({ - file: Buffer.from([0, 1, 2, 3]), - begin: -1 - }, (error, data) => { - if (error) { - return done(error) - } - - expect(data).to.deep.equal(Buffer.from([3])) - - done() - }) - }) - - it('reads bytes with an end', (done) => { - addAndReadTestFile({ - file: Buffer.from([0, 1, 2, 3]), - being: 0, - end: 1 - }, (error, data) => { - if (error) { - return done(error) - } - - expect(data).to.deep.equal(Buffer.from([0])) - - done() - }) - }) - - it('reads bytes with a negative end', (done) => { - addAndReadTestFile({ - file: Buffer.from([0, 1, 2, 3, 4]), - begin: 2, - end: -1 - }, (error, data) => { - if (error) { - return done(error) - } - - expect(data).to.deep.equal(Buffer.from([2, 3])) - - done() - }) - }) - - it('reads bytes with an begin and an end', (done) => { - addAndReadTestFile({ - file: Buffer.from([0, 1, 2, 3, 4]), - begin: 1, - end: 4 - }, (error, data) => { - if (error) { - return done(error) - } - - expect(data).to.deep.equal(Buffer.from([1, 2, 3])) - - done() - }) - }) - - it('reads bytes with a negative begin and a negative end that point to the same byte', (done) => { - addAndReadTestFile({ - file: Buffer.from([0, 1, 2, 3, 4]), - begin: -1, - end: -1 - }, (error, data) => { - if (error) { - return done(error) - } - - expect(data).to.deep.equal(Buffer.from([])) - - done() - }) - }) - - it('reads bytes with a 
negative begin and a negative end', (done) => { - addAndReadTestFile({ - file: Buffer.from([0, 1, 2, 3, 4]), - begin: -2, - end: -1 - }, (error, data) => { - if (error) { - return done(error) - } - - expect(data).to.deep.equal(Buffer.from([3])) - - done() - }) - }) - - it('reads bytes to the end of the file when end is larger than the file', (done) => { - addAndReadTestFile({ - file: Buffer.from([0, 1, 2, 3]), - begin: 0, - end: 100 - }, (error, data) => { - if (error) { - return done(error) - } - - expect(data).to.deep.equal(Buffer.from([0, 1, 2, 3])) - - done() - }) - }) - - it('reads bytes with an offset and a length that span blocks using balanced layout', (done) => { - checkBytesThatSpanBlocks('balanced', done) - }) - - it('reads bytes with an offset and a length that span blocks using flat layout', (done) => { - checkBytesThatSpanBlocks('flat', done) - }) - - it('reads bytes with an offset and a length that span blocks using trickle layout', (done) => { - checkBytesThatSpanBlocks('trickle', done) - }) - }) -}
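
As a usage reference for the API change above, here is a minimal sketch of reading a byte range through the exporter's new `begin`/`end` options. It assumes an `ipld` resolver instance and a `cid` pointing at an existing UnixFS file are already in scope; both names are placeholders, not part of the patch.

```js
const exporter = require('ipfs-unixfs-engine').exporter
const pull = require('pull-stream')

pull(
  // ask the exporter for bytes 0-9 of the file behind `cid`
  exporter(cid, ipld, {
    begin: 0,
    end: 10
  }),
  pull.collect((error, files) => {
    if (error) throw error

    // each exported entry exposes its data as a pull stream at `content`
    pull(
      files[0].content,
      pull.collect((error, chunks) => {
        if (error) throw error

        console.log(Buffer.concat(chunks)) // the first 10 bytes of the file
      })
    )
  })
)
```

As with `Array.slice`, negative `begin`/`end` values count back from the end of the file.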