diff --git a/src/importer/dir-flat.js b/src/importer/dir-flat.js
index 70215ab8..a8d625b7 100644
--- a/src/importer/dir-flat.js
+++ b/src/importer/dir-flat.js
@@ -9,7 +9,6 @@ const DAGLink = dagPB.DAGLink
 const DAGNode = dagPB.DAGNode
 
 class DirFlat {
-
   constructor (props) {
     this._children = {}
     Object.assign(this, props)
@@ -81,7 +80,6 @@ class DirFlat {
     ], callback)
   }
-
 }
 
 module.exports = createDirFlat
diff --git a/src/importer/tree-builder.js b/src/importer/tree-builder.js
index 3d9b8054..039dcb83 100644
--- a/src/importer/tree-builder.js
+++ b/src/importer/tree-builder.js
@@ -215,4 +215,3 @@ function createTreeBuilder (ipldResolver, _options) {
 function notEmpty (str) {
   return Boolean(str)
 }
-
diff --git a/test/browser.js b/test/browser.js
index 4cc7f65b..9f319aae 100644
--- a/test/browser.js
+++ b/test/browser.js
@@ -45,7 +45,6 @@ describe('IPFS data importing tests on the Browser', function () {
   require('./test-consumable-buffer')
   require('./test-consumable-hash')
   require('./test-hamt')
-  require('./test-exporter')(repo)
   require('./test-importer')(repo)
   require('./test-importer-flush')(repo)
   require('./test-import-export')(repo)
diff --git a/test/test-consumable-hash.js b/test/test-consumable-hash.js
index 0807fed0..8eed3cb8 100644
--- a/test/test-consumable-hash.js
+++ b/test/test-consumable-hash.js
@@ -1,7 +1,9 @@
 /* eslint-env mocha */
 'use strict'
 
-const expect = require('chai').expect
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const expect = chai.expect
 const crypto = require('crypto')
 const whilst = require('async/whilst')
@@ -18,7 +20,7 @@ describe('consumable hash', () => {
   it('can take a 0 length value', (callback) => {
     hash('some value').take(0, (err, result) => {
-      expect(err).to.not.exist
+      expect(err).to.not.exist()
       expect(result).to.be.eql(0)
       callback()
     })
   })
@@ -26,7 +28,7 @@
   it('can take a 10 bit value', (callback) => {
     hash('some value').take(10, (err, result) => {
-      expect(err).to.not.exist
+      expect(err).to.not.exist()
       expect(result).to.be.eql(110)
       callback()
     })
   })
@@ -39,7 +41,7 @@
       () => iter > 0,
       (callback) => {
         h.take(10, (err, result) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           values.push(result)
           expect(result).to.be.below(1024)
           expect(result).to.be.above(0)
@@ -61,7 +63,7 @@
       () => iter > 0,
       (callback) => {
         h.take(10, (err, result) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           values.push(result)
           expect(result).to.be.eql(values.shift())
           iter--
diff --git a/test/test-dirbuilder-sharding.js b/test/test-dirbuilder-sharding.js
index 18300724..c7ea7741 100644
--- a/test/test-dirbuilder-sharding.js
+++ b/test/test-dirbuilder-sharding.js
@@ -4,14 +4,16 @@
 
 const importer = require('./../src').importer
 const exporter = require('./../src').exporter
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const expect = chai.expect
 const mh = require('multihashes')
-const expect = require('chai').expect
 const BlockService = require('ipfs-block-service')
 const IPLDResolver = require('ipld-resolver')
 const pull = require('pull-stream')
 const pushable = require('pull-pushable')
 const whilst = require('async/whilst')
-const timers = require('timers')
+const setImmediate = require('async/setImmediate')
 const leftPad = require('left-pad')
@@ -40,12 +42,12 @@ module.exports = (repo) => {
         ]),
         importer(ipldResolver, options),
         pull.collect((err, nodes) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(2)
           expect(nodes[0].path).to.be.eql('a/b')
           expect(nodes[1].path).to.be.eql('a')
           nonShardedHash = nodes[1].multihash
-          expect(nonShardedHash).to.exist
+          expect(nonShardedHash).to.exist()
           done()
         })
       )
@@ -65,7 +67,7 @@
         ]),
         importer(ipldResolver, options),
         pull.collect((err, nodes) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(2)
           expect(nodes[0].path).to.be.eql('a/b')
           expect(nodes[1].path).to.be.eql('a')
@@ -81,7 +83,7 @@
       pull(
         exporter(nonShardedHash, ipldResolver),
         pull.collect((err, nodes) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(2)
           const expectedHash = mh.toB58String(nonShardedHash)
           expect(nodes[0].path).to.be.eql(expectedHash)
@@ -96,7 +98,7 @@
       )
 
       function collected (err, content) {
-        expect(err).to.not.exist
+        expect(err).to.not.exist()
         expect(content.length).to.be.eql(1)
         expect(content[0].toString()).to.be.eql('i have the best bytes')
         done()
@@ -107,7 +109,7 @@
       pull(
         exporter(shardedHash, ipldResolver),
         pull.collect((err, nodes) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(2)
           const expectedHash = mh.toB58String(shardedHash)
           expect(nodes[0].path).to.be.eql(expectedHash)
@@ -122,7 +124,7 @@
       )
 
       function collected (err, content) {
-        expect(err).to.not.exist
+        expect(err).to.not.exist()
         expect(content.length).to.be.eql(1)
         expect(content[0].toString()).to.be.eql('i have the best bytes')
         done()
@@ -140,7 +142,7 @@
         push,
         importer(ipldResolver),
         pull.collect((err, nodes) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(maxDirs + 1)
           const last = nodes[nodes.length - 1]
           expect(last.path).to.be.eql('big')
@@ -162,10 +164,10 @@
             content: pull.values([new Buffer(i.toString())])
           }
           push.push(pushable)
-          timers.setTimeout(callback, 1)
+          setImmediate(callback)
         },
         (err) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           push.end()
         }
       )
@@ -188,13 +190,13 @@
           }
 
           function collected (err, content) {
-            expect(err).to.not.exist
+            expect(err).to.not.exist()
             entries[node.path] = { content: content.toString() }
             callback(null, node)
           }
         }),
         pull.collect((err, nodes) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           const paths = Object.keys(entries).sort()
           expect(paths.length).to.be.eql(2001)
           paths.forEach(eachPath)
@@ -207,8 +209,8 @@
           // first dir
           expect(path).to.be.eql(mh.toB58String(rootHash))
           const entry = entries[path]
-          expect(entry).to.exist
-          expect(entry.content).to.not.exist
+          expect(entry).to.exist()
+          expect(entry.content).to.not.exist()
           return
         }
         // dir entries
@@ -232,7 +234,7 @@
         push,
         importer(ipldResolver),
         pull.collect((err, nodes) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           const last = nodes[nodes.length - 1]
           expect(last.path).to.be.eql('big')
           rootHash = last.multihash
@@ -265,10 +267,10 @@
             i = 0
             depth++
           }
-          timers.setTimeout(callback, 1)
+          setImmediate(callback)
         },
         (err) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           push.end()
         }
       )
@@ -290,7 +292,7 @@ module.exports = (repo) => {
           }
 
           function collected (err, content) {
-            expect(err).to.not.exist
+            expect(err).to.not.exist()
             entries[node.path] = { content: content.toString() }
             callback(null, node)
           }
@@ -299,7 +301,7 @@
         })
       )
       function collected (err, nodes) {
-        expect(err).to.not.exist
+        expect(err).to.not.exist()
         const paths = Object.keys(entries).sort()
         expect(paths.length).to.be.eql(maxDepth * maxDirs + maxDepth)
         let index = 0
@@ -314,8 +316,8 @@
           expect(path).to.be.eql(mh.toB58String(rootHash))
         }
         const entry = entries[path]
-        expect(entry).to.exist
-        expect(entry.content).to.not.exist
+        expect(entry).to.exist()
+        expect(entry.content).to.not.exist()
       } else {
         // dir entries
         const pathElements = path.split('/')
diff --git a/test/test-hamt.js b/test/test-hamt.js
index 899f46a1..4ae49ed2 100644
--- a/test/test-hamt.js
+++ b/test/test-hamt.js
@@ -1,8 +1,10 @@
 /* eslint-env mocha */
 'use strict'
 
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const expect = chai.expect
 const crypto = require('crypto')
-const expect = require('chai').expect
 const each = require('async/each')
 const eachSeries = require('async/eachSeries')
@@ -28,8 +30,8 @@ describe('HAMT', () => {
 
   it('get unknown key returns undefined', (callback) => {
     bucket.get('unknown', (err, result) => {
-      expect(err).to.not.exist
-      expect(result).to.be.undefined
+      expect(err).to.not.exist()
+      expect(result).to.be.undefined()
       callback()
     })
   })
@@ -40,7 +42,7 @@
   it('can get that value', (callback) => {
     bucket.get('key', (err, result) => {
-      expect(err).to.not.exist
+      expect(err).to.not.exist()
       expect(result).to.be.eql('value')
       callback()
     })
   })
@@ -52,7 +54,7 @@
   it('can get that value', (callback) => {
     bucket.get('key', (err, result) => {
-      expect(err).to.not.exist
+      expect(err).to.not.exist()
       expect(result).to.be.eql('a different value')
       callback()
     })
   })
@@ -68,8 +70,8 @@
 
   it('get deleted key returns undefined', (callback) => {
     bucket.get('key', (err, result) => {
-      expect(err).to.not.exist
-      expect(result).to.be.undefined
+      expect(err).to.not.exist()
+      expect(result).to.be.undefined()
       callback()
     })
   })
@@ -104,19 +106,19 @@
       }
 
       bucket.get(head, (err, value) => {
-        expect(err).to.not.exist
+        expect(err).to.not.exist()
         expect(value).to.be.eql(head)
         bucket.del(head, afterDel)
       })
 
       function afterDel (err) {
-        expect(err).to.not.exist
+        expect(err).to.not.exist()
         bucket.get(head, afterGet)
       }
 
      function afterGet (err, value) {
-        expect(err).to.not.exist
-        expect(value).to.be.undefined
+        expect(err).to.not.exist()
+        expect(value).to.be.undefined()
 
        each(
          keys,
@@ -128,14 +130,14 @@
 
       function onEachKey (key, callback) {
         bucket.get(key, (err, value) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           expect(value).to.be.eql(key)
           callback()
         })
       }
 
       function reiterate (err) {
-        expect(err).to.not.exist
+        expect(err).to.not.exist()
         // break from stack on next iteration
         process.nextTick(iterate)
       }
@@ -147,7 +149,7 @@
   it('can still find sole head', (callback) => {
     bucket.get(masterHead, (err, value) => {
-      expect(err).to.not.exist
+      expect(err).to.not.exist()
       expect(value).to.be.eql(masterHead)
       callback()
     })
   })
@@ -172,7 +174,7 @@
     }
 
     eachSeries(keys, (key, callback) => bucket.put(key, key, callback), (err) => {
-      expect(err).to.not.exist
+      expect(err).to.not.exist()
       callback()
     })
   })
diff --git a/test/test-importer-flush.js b/test/test-importer-flush.js
index 5c58188a..87b6fc72 100644
--- a/test/test-importer-flush.js
+++ b/test/test-importer-flush.js
@@ -3,7 +3,9 @@
 
 const createImporter = require('./../src').importer
-const expect = require('chai').expect
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const expect = chai.expect
 const BlockService = require('ipfs-block-service')
 const IPLDResolver = require('ipld-resolver')
 const pull = require('pull-stream')
@@ -29,7 +31,7 @@ module.exports = (repo) => {
           return node
         }),
         pull.collect((err, files) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           expect(files.length).to.be.eql(1)
           done()
         })
@@ -41,8 +43,8 @@
       })
 
       importer.flush((err, hash) => {
-        expect(err).to.not.exist
-        expect(Buffer.isBuffer(hash)).to.be.true
+        expect(err).to.not.exist()
+        expect(Buffer.isBuffer(hash)).to.be.true()
         source.end()
       })
     })
@@ -64,7 +66,7 @@
           return node
         }),
         pull.collect((err, files) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           expect(count).to.be.eql(2)
           done()
         })
@@ -76,8 +78,8 @@
       })
 
       importer.flush((err, hash) => {
-        expect(err).to.not.exist
-        expect(Buffer.isBuffer(hash)).to.be.true
+        expect(err).to.not.exist()
+        expect(Buffer.isBuffer(hash)).to.be.true()
         source.end()
       })
     })
@@ -102,7 +104,7 @@
           return node
         }),
         pull.collect((err, files) => {
-          expect(err).to.not.exist
+          expect(err).to.not.exist()
           expect(count).to.be.eql(2)
           done()
         })
@@ -134,8 +136,8 @@
         currentDir = newDir
       }
       importer.flush((err, hash) => {
-        expect(err).to.not.exist
-        expect(Buffer.isBuffer(hash)).to.be.true
+        expect(err).to.not.exist()
+        expect(Buffer.isBuffer(hash)).to.be.true()
         testAllYielded(tree)
         if (currentDir.depth < maxDepth) {
           pushAndFlush()
@@ -149,7 +151,7 @@
     function markDirAsYielded (node) {
       const dir = findDir(tree, node.path)
       if (node.path === dir.path) {
-        expect(dir.yielded).to.be.false
+        expect(dir.yielded).to.be.false()
         dir.yielded = true
       }
     }
@@ -169,7 +171,7 @@
 
     function testAllYielded (tree) {
      if (tree.depth) {
-        expect(tree.yielded).to.be.true
+        expect(tree.yielded).to.be.true()
      }
      const childrenNames = Object.keys(tree.children)
      childrenNames.forEach((childName) => {
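For reviewers: the two mechanical substitutions repeated throughout this patch are (1) switching chai's property assertions (`expect(err).to.not.exist`) to dirty-chai's callable form (`expect(err).to.not.exist()`), because with plain chai a misspelled property assertion is just an undefined property access that passes silently, while the callable form throws on a typo; and (2) replacing `timers.setTimeout(callback, 1)` with `async/setImmediate`, which defers to the next turn of the event loop without scheduling a timer. A minimal sketch of both patterns, assuming mocha as the runner (the test itself is illustrative, not taken from this repo):

```js
/* eslint-env mocha */
'use strict'

// dirty-chai converts chai's property assertions into callable assertions,
// so a misspelled assertion throws instead of silently passing.
const chai = require('chai')
chai.use(require('dirty-chai'))
const expect = chai.expect

// async exposes setImmediate as an individually requirable module,
// exactly as the importer tests above use it.
const setImmediate = require('async/setImmediate')

it('asserts with callable assertions after yielding', (done) => {
  setImmediate(() => {          // replaces timers.setTimeout(fn, 1): no timer needed
    const err = null
    expect(err).to.not.exist()  // callable form: a typo here throws instead of no-op
    done()
  })
})
```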