From daf1914a35c4d479a6e14a31476f9873ffa11640 Mon Sep 17 00:00:00 2001 From: David Dias Date: Sun, 22 Oct 2017 21:20:40 +0100 Subject: [PATCH 01/27] add function signatures --- SPEC/FILES.md | 220 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 218 insertions(+), 2 deletions(-) diff --git a/SPEC/FILES.md b/SPEC/FILES.md index 353782c2..e72f59bc 100644 --- a/SPEC/FILES.md +++ b/SPEC/FILES.md @@ -1,6 +1,8 @@ files API ========= +> The files API enables users to use the File System abstraction of IPFS. + #### `add` > Add files and data to IPFS. @@ -58,13 +60,64 @@ ipfs.files.add(files, function (err, files) { A great source of [examples][] can be found in the tests for this API. -#### `createAddStream` +#### `AddReadableStream` > Add files and data to IPFS using a transform stream. ##### `Go` **WIP** -##### `JavaScript` - ipfs.files.createAddStream([options], [callback]) +##### `JavaScript` - ipfs.files.AddReadableStream([options], [callback]) + +Provides a Transform stream, where objects can be written of the forms + +```js +{ + path: '/tmp/myfile.txt', + content: (Buffer or Readable stream) +} +``` + +`options` is an optional object argument containing the [DAG importer options](https://github.com/ipfs/js-ipfs-unixfs-engine#importer-api). + +`callback` must follow `function (err, stream) {}` signature, where `err` is an +error if the operation was not successful. `stream` will be a Transform stream, +to which tuples like the above two object formats can be written and [DAGNode][] +objects will be outputted. + +If no `callback` is passed, a promise is returned. 
+ +**Example:** + +```JavaScript +ipfs.files.createAddStream(function (err, stream) { + stream.on('data', function (file) { + // 'file' will be of the form + // { + // path: '/tmp/myfile.txt', + // hash: 'QmHash' // base58 encoded multihash + // size: 123 + // } + }) + + stream.write({ + path: , + content: + }) + // write as many as you want + + stream.end() +}) +``` + +A great source of [examples][] can be found in the tests for this API. + +#### `AddPullStream` + +> Add files and data to IPFS using a pull-stream transform stream. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.files.AddPullStream([options], [callback]) Provides a Transform stream, where objects can be written of the forms @@ -139,6 +192,66 @@ ipfs.files.cat(ipfsPath, function (err, file) { A great source of [examples][] can be found in the tests for this API. +#### `catReadableStream` + +> Streams the file at the given IPFS multihash. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.files.catReadableStream(ipfsPath, [callback]) + +ipfsPath can be of type: + +- `multihash` is a [multihash][] which can be passed as + - Buffer, the raw Buffer of the multihash + - String, the base58 encoded version of the multihash +- String, including the ipfs handler, a multihash and a path to traverse to, ie: + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + +`callback` must follow `function (err, stream) {}` signature, where `err` is an error if the operation was not successful and `stream` is a readable stream of the file. + +If no `callback` is passed, a promise is returned. + +```JavaScript +ipfs.files.cat(ipfsPath, function (err, file) { + // file will be a stream containing the data of the file requested +}) +``` + +A great source of [examples][] can be found in the tests for this API. + +#### `catPullStream` + +> Streams the file at the given IPFS multihash. 
+ +##### `Go` **WIP** + +##### `JavaScript` - ipfs.files.catPullStream(ipfsPath, [callback]) + +ipfsPath can be of type: + +- `multihash` is a [multihash][] which can be passed as + - Buffer, the raw Buffer of the multihash + - String, the base58 encoded version of the multihash +- String, including the ipfs handler, a multihash and a path to traverse to, ie: + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + +`callback` must follow `function (err, stream) {}` signature, where `err` is an error if the operation was not successful and `stream` is a readable stream of the file. + +If no `callback` is passed, a promise is returned. + +```JavaScript +ipfs.files.cat(ipfsPath, function (err, file) { + // file will be a stream containing the data of the file requested +}) +``` + +A great source of [examples][] can be found in the tests for this API. + #### `get` > Get [UnixFS][] files from IPFS. @@ -190,4 +303,107 @@ ipfs.files.get(multihashStr, function (err, stream) { A great source of [examples][] can be found in the tests for this API. +#### `getReadableStream` + +> Get [UnixFS][] files from IPFS. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.files.get(ipfsPath, [callback]) + +ipfsPath can be of type: + +- `multihash` is a [multihash][] which can be passed as + - Buffer, the raw Buffer of the multihash + - String, the base58 encoded version of the multihash +- String, including the ipfs handler, a multihash and a path to traverse to, ie: + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + +`callback` must follow `function (err, stream) {}` signature, where `err` is an +error if the operation was not successful. 
`stream` will be a Readable stream in +[*object mode*](https://nodejs.org/api/stream.html#stream_object_mode), +outputting objects of the form + +```js +{ + path: '/tmp/myfile.txt', + content: +} +``` + +Here, each `path` corresponds to the name of a file, and `content` is a regular +Readable stream with the raw contents of that file. + +If no `callback` is passed, a promise is returned with the Readable stream. + +**Example:** + +```JavaScript +const multihashStr = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' + +ipfs.files.get(multihashStr, function (err, stream) { + stream.on('data', (file) => { + // write the file's path and contents to standard out + console.log(file.path) + file.content.pipe(process.stdout) + }) +}) +``` + +A great source of [examples][] can be found in the tests for this API. + +#### `getPullStream` + +> Get [UnixFS][] files from IPFS. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.files.getPullStream(ipfsPath, [callback]) + +ipfsPath can be of type: + +- `multihash` is a [multihash][] which can be passed as + - Buffer, the raw Buffer of the multihash + - String, the base58 encoded version of the multihash +- String, including the ipfs handler, a multihash and a path to traverse to, ie: + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + +`callback` must follow `function (err, stream) {}` signature, where `err` is an +error if the operation was not successful. `stream` will be a Readable stream in +[*object mode*](https://nodejs.org/api/stream.html#stream_object_mode), +outputting objects of the form + +```js +{ + path: '/tmp/myfile.txt', + content: +} +``` + +Here, each `path` corresponds to the name of a file, and `content` is a regular +Readable stream with the raw contents of that file. + +If no `callback` is passed, a promise is returned with the Readable stream. 
+ +**Example:** + +```JavaScript +const multihashStr = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' + +ipfs.files.get(multihashStr, function (err, stream) { + stream.on('data', (file) => { + // write the file's path and contents to standard out + console.log(file.path) + file.content.pipe(process.stdout) + }) +}) +``` + +A great source of [examples][] can be found in the tests for this API. + + [examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/files.js From 8bd8c771e672ff22e69f915307a3d213c57dc348 Mon Sep 17 00:00:00 2001 From: David Dias Date: Mon, 23 Oct 2017 14:45:54 +0100 Subject: [PATCH 02/27] fix typo --- SPEC/FILES.md | 241 ++++++++++++++++++++++++-------------------------- 1 file changed, 115 insertions(+), 126 deletions(-) diff --git a/SPEC/FILES.md b/SPEC/FILES.md index e72f59bc..3f42104f 100644 --- a/SPEC/FILES.md +++ b/SPEC/FILES.md @@ -11,24 +11,27 @@ files API ##### `JavaScript` - ipfs.files.add(data, [options], [callback]) -Where `data` may be +Where `data` may be: +- a [`Buffer instance`][b] +- a [`Readable Stream`][rs] +- a [`Pull Stream`][ps] +- a Path (caveat: will only work in Node.js) +- a URL - an array of objects, each of the form ```JavaScript { - path: '/tmp/myfile.txt', - content: (Buffer or Readable stream) + path: '/tmp/myfile.txt', // The file path + content: // A Buffer, Readable Stream or Pull Stream with the contents of the file } ``` -- a `Buffer` instance -- a `Readable` stream - If no `content` is passed, then the path is treated as an empty directory `options` is an optional object argument that might include the following keys: - cid-version (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including it's version) - progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs. +- recursive (boolean): for when a Path is passed, this option can be enabled to add recursively all the files. 
- hashAlg || hash (string): multihash hashing algorithm to use `callback` must follow `function (err, res) {}` signature, where `err` is an error if the operation was not successful. `res` will be an array of: @@ -54,7 +57,7 @@ const files = [ ] ipfs.files.add(files, function (err, files) { - // 'files' will be an array of objects + // 'files' will be an array of objects containing paths and the multihashes of the files added }) ``` @@ -62,50 +65,49 @@ A great source of [examples][] can be found in the tests for this API. #### `AddReadableStream` -> Add files and data to IPFS using a transform stream. +> Add files and data to IPFS using a [Readable Stream][rs] of class Duplex. ##### `Go` **WIP** -##### `JavaScript` - ipfs.files.AddReadableStream([options], [callback]) +##### `JavaScript` - ipfs.files.AddReadableStream([options]) -> [Readable Stream][rs] -Provides a Transform stream, where objects can be written of the forms +Returns a Readable Stream of class Duplex, where objects can be written of the forms ```js { - path: '/tmp/myfile.txt', - content: (Buffer or Readable stream) + path: '/tmp/myfile.txt', // The file path + content: // A Buffer, Readable Stream or Pull Stream with the contents of the file } ``` -`options` is an optional object argument containing the [DAG importer options](https://github.com/ipfs/js-ipfs-unixfs-engine#importer-api). +`options` is an optional object argument that might include the following keys: -`callback` must follow `function (err, stream) {}` signature, where `err` is an -error if the operation was not successful. `stream` will be a Transform stream, -to which tuples like the above two object formats can be written and [DAGNode][] -objects will be outputted. +- cid-version (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including it's version) +- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs. 
+- hashAlg || hash (string): multihash hashing algorithm to use If no `callback` is passed, a promise is returned. **Example:** ```JavaScript -ipfs.files.createAddStream(function (err, stream) { - stream.on('data', function (file) { - // 'file' will be of the form - // { - // path: '/tmp/myfile.txt', - // hash: 'QmHash' // base58 encoded multihash - // size: 123 - // } - }) +const stream = ipfs.files.AddReadableStream() +stream.on('data', function (file) { + // 'file' will be of the form + // { + // path: '/tmp/myfile.txt', + // hash: 'QmHash' // base58 encoded multihash + // size: 123 + // } +}) - stream.write({ - path: , - content: - }) - // write as many as you want +stream.write({ + path: + content: +}) +// write as many files as you want - stream.end() +stream.end() }) ``` @@ -113,58 +115,53 @@ A great source of [examples][] can be found in the tests for this API. #### `AddPullStream` -> Add files and data to IPFS using a pull-stream transform stream. +> Add files and data to IPFS using a [Pull Stream][ps]. ##### `Go` **WIP** -##### `JavaScript` - ipfs.files.AddPullStream([options], [callback]) +##### `JavaScript` - ipfs.files.AddPullStream([options]) -> [Pull Stream][ps] -Provides a Transform stream, where objects can be written of the forms +Returns a Pull Stream, where objects can be written of the forms ```js { - path: '/tmp/myfile.txt', - content: (Buffer or Readable stream) + path: '/tmp/myfile.txt', // The file path + content: // A Buffer, Readable Stream or Pull Stream with the contents of the file } ``` -`options` is an optional object argument containing the [DAG importer options](https://github.com/ipfs/js-ipfs-unixfs-engine#importer-api). +`options` is an optional object argument that might include the following keys: -`callback` must follow `function (err, stream) {}` signature, where `err` is an -error if the operation was not successful. 
`stream` will be a Transform stream, -to which tuples like the above two object formats can be written and [DAGNode][] -objects will be outputted. +- cid-version (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including it's version) +- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs. +- hashAlg || hash (string): multihash hashing algorithm to use If no `callback` is passed, a promise is returned. **Example:** ```JavaScript -ipfs.files.createAddStream(function (err, stream) { - stream.on('data', function (file) { - // 'file' will be of the form +const stream = ipfs.files.AddPullStream() + +pull( + pull.values([ + { path: , content: } + ]), + stream, + pull.collect((err, values) => { + // values will be an array of objects, which one of the form // { // path: '/tmp/myfile.txt', // hash: 'QmHash' // base58 encoded multihash // size: 123 // } }) - - stream.write({ - path: , - content: - }) - // write as many as you want - - stream.end() -}) +) ``` -A great source of [examples][] can be found in the tests for this API. - #### `cat` -> Streams the file at the given IPFS multihash. +> Returns a file addressed by a valid IPFS Path. ##### `Go` **WIP** @@ -172,21 +169,27 @@ A great source of [examples][] can be found in the tests for this API. 
ipfsPath can be of type: -- `multihash` is a [multihash][] which can be passed as - - Buffer, the raw Buffer of the multihash - - String, the base58 encoded version of the multihash -- String, including the ipfs handler, a multihash and a path to traverse to, ie: +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' -`callback` must follow `function (err, stream) {}` signature, where `err` is an error if the operation was not successful and `stream` is a readable stream of the file. +`callback` must follow `function (err, file) {}` signature, where `err` is an error if the operation was not successful and `file` is a [Buffer][b] If no `callback` is passed, a promise is returned. +**Example:** + ```JavaScript ipfs.files.cat(ipfsPath, function (err, file) { - // file will be a stream containing the data of the file requested + if (err) { + throw err + } + + console.log(file.toString()) }) ``` @@ -194,59 +197,55 @@ A great source of [examples][] can be found in the tests for this API. #### `catReadableStream` -> Streams the file at the given IPFS multihash. +> Returns a [Readable Stream][rs] containing the contents of a file addressed by a valid IPFS Path. 
##### `Go` **WIP** -##### `JavaScript` - ipfs.files.catReadableStream(ipfsPath, [callback]) +##### `JavaScript` - ipfs.files.catReadableStream(ipfsPath) -> [Readable Stream][rs] ipfsPath can be of type: -- `multihash` is a [multihash][] which can be passed as - - Buffer, the raw Buffer of the multihash - - String, the base58 encoded version of the multihash -- String, including the ipfs handler, a multihash and a path to traverse to, ie: +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' -`callback` must follow `function (err, stream) {}` signature, where `err` is an error if the operation was not successful and `stream` is a readable stream of the file. +Returns a [Readable Stream][rs] with the contents of the file. -If no `callback` is passed, a promise is returned. ```JavaScript -ipfs.files.cat(ipfsPath, function (err, file) { - // file will be a stream containing the data of the file requested -}) +const stream = ipfs.files.catReadableStream(ipfsPath) +// stream will be a stream containing the data of the file requested ``` A great source of [examples][] can be found in the tests for this API. #### `catPullStream` -> Streams the file at the given IPFS multihash. +> Returns a [Pull Stream][ps] containing the contents of a file addressed by a valid IPFS Path. 
##### `Go` **WIP** -##### `JavaScript` - ipfs.files.catPullStream(ipfsPath, [callback]) +##### `JavaScript` - ipfs.files.catPullStream(ipfsPath) -> [Pull Stream][rs] ipfsPath can be of type: -- `multihash` is a [multihash][] which can be passed as - - Buffer, the raw Buffer of the multihash - - String, the base58 encoded version of the multihash -- String, including the ipfs handler, a multihash and a path to traverse to, ie: +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' -`callback` must follow `function (err, stream) {}` signature, where `err` is an error if the operation was not successful and `stream` is a readable stream of the file. - -If no `callback` is passed, a promise is returned. +Returns a [Pull Stream][ps] with the contents of the file. ```JavaScript -ipfs.files.cat(ipfsPath, function (err, file) { - // file will be a stream containing the data of the file requested +const stream = ipfs.files.catPullStream(ipfsPath) +// stream will be a stream containing the data of the file requested }) ``` @@ -254,7 +253,7 @@ A great source of [examples][] can be found in the tests for this API. #### `get` -> Get [UnixFS][] files from IPFS. +> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. ##### `Go` **WIP** @@ -262,41 +261,36 @@ A great source of [examples][] can be found in the tests for this API. 
ipfsPath can be of type: -- `multihash` is a [multihash][] which can be passed as - - Buffer, the raw Buffer of the multihash - - String, the base58 encoded version of the multihash -- String, including the ipfs handler, a multihash and a path to traverse to, ie: +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' -`callback` must follow `function (err, stream) {}` signature, where `err` is an -error if the operation was not successful. `stream` will be a Readable stream in -[*object mode*](https://nodejs.org/api/stream.html#stream_object_mode), -outputting objects of the form +`callback` must follow `function (err, files) {}` signature, where `err` is an error if the operation was not successful. `files` is an array containing objects of the following form: ```js { path: '/tmp/myfile.txt', - content: + content: } ``` -Here, each `path` corresponds to the name of a file, and `content` is a regular -Readable stream with the raw contents of that file. +Here, each `path` corresponds to the name of a file, and `content` is a regular Readable stream with the raw contents of that file. -If no `callback` is passed, a promise is returned with the Readable stream. +If no `callback` is passed, a promise is returned. 
**Example:** ```JavaScript -const multihashStr = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' +const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' -ipfs.files.get(multihashStr, function (err, stream) { - stream.on('data', (file) => { - // write the file's path and contents to standard out +ipfs.files.get(validCID, function (err, files) { + files.forEach((file) => { console.log(file.path) - file.content.pipe(process.stdout) + console.log(file.path.toString()) }) }) ``` @@ -305,26 +299,23 @@ A great source of [examples][] can be found in the tests for this API. #### `getReadableStream` -> Get [UnixFS][] files from IPFS. +> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded as Readable Streams. ##### `Go` **WIP** -##### `JavaScript` - ipfs.files.get(ipfsPath, [callback]) +##### `JavaScript` - ipfs.files.getReadableStream(ipfsPath) -> [Readable Stream][rs] ipfsPath can be of type: -- `multihash` is a [multihash][] which can be passed as - - Buffer, the raw Buffer of the multihash - - String, the base58 encoded version of the multihash -- String, including the ipfs handler, a multihash and a path to traverse to, ie: +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' -`callback` must follow `function (err, stream) {}` signature, where `err` is an -error if the operation was not successful. 
`stream` will be a Readable stream in -[*object mode*](https://nodejs.org/api/stream.html#stream_object_mode), -outputting objects of the form +It returns a [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects of the form: ```js { @@ -333,22 +324,17 @@ outputting objects of the form } ``` -Here, each `path` corresponds to the name of a file, and `content` is a regular -Readable stream with the raw contents of that file. - -If no `callback` is passed, a promise is returned with the Readable stream. - **Example:** ```JavaScript -const multihashStr = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' +const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' -ipfs.files.get(multihashStr, function (err, stream) { - stream.on('data', (file) => { - // write the file's path and contents to standard out - console.log(file.path) - file.content.pipe(process.stdout) - }) +const stream = ipfs.files.getReadableStream(validCID) + +stream.on('data', (file) => { + // write the file's path and contents to standard out + console.log(file.path) + console.log(file.path.toString()) }) ``` @@ -405,5 +391,8 @@ ipfs.files.get(multihashStr, function (err, stream) { A great source of [examples][] can be found in the tests for this API. 
- [examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/files.js +[b]: https://www.npmjs.com/package/buffer +[rs]: https://www.npmjs.com/package/readable-stream +[ps]: https://www.npmjs.com/package/pull-stream +[cid]: https://www.npmjs.com/package/cids From d43f80eb8ac7de947136ed1b3f4ed0311165332c Mon Sep 17 00:00:00 2001 From: David Dias Date: Mon, 23 Oct 2017 15:14:16 +0100 Subject: [PATCH 03/27] complete update the spec --- SPEC/FILES.md | 58 +++++++++++++++++++++++++-------------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/SPEC/FILES.md b/SPEC/FILES.md index 3f42104f..890d07f2 100644 --- a/SPEC/FILES.md +++ b/SPEC/FILES.md @@ -63,13 +63,13 @@ ipfs.files.add(files, function (err, files) { A great source of [examples][] can be found in the tests for this API. -#### `AddReadableStream` +#### `addReadableStream` > Add files and data to IPFS using a [Readable Stream][rs] of class Duplex. ##### `Go` **WIP** -##### `JavaScript` - ipfs.files.AddReadableStream([options]) -> [Readable Stream][rs] +##### `JavaScript` - ipfs.files.addReadableStream([options]) -> [Readable Stream][rs] Returns a Readable Stream of class Duplex, where objects can be written of the forms @@ -91,7 +91,7 @@ If no `callback` is passed, a promise is returned. **Example:** ```JavaScript -const stream = ipfs.files.AddReadableStream() +const stream = ipfs.files.addReadableStream() stream.on('data', function (file) { // 'file' will be of the form // { @@ -113,13 +113,13 @@ stream.end() A great source of [examples][] can be found in the tests for this API. -#### `AddPullStream` +#### `addPullStream` > Add files and data to IPFS using a [Pull Stream][ps]. 
##### `Go` **WIP** -##### `JavaScript` - ipfs.files.AddPullStream([options]) -> [Pull Stream][ps] +##### `JavaScript` - ipfs.files.addPullStream([options]) -> [Pull Stream][ps] Returns a Pull Stream, where objects can be written of the forms @@ -141,7 +141,7 @@ If no `callback` is passed, a promise is returned. **Example:** ```JavaScript -const stream = ipfs.files.AddPullStream() +const stream = ipfs.files.addPullStream() pull( pull.values([ @@ -342,51 +342,51 @@ A great source of [examples][] can be found in the tests for this API. #### `getPullStream` -> Get [UnixFS][] files from IPFS. +> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded as Readable Streams. ##### `Go` **WIP** -##### `JavaScript` - ipfs.files.getPullStream(ipfsPath, [callback]) +##### `JavaScript` - ipfs.files.getPullStream(ipfsPath) -> [Pull Stream][ps] ipfsPath can be of type: -- `multihash` is a [multihash][] which can be passed as - - Buffer, the raw Buffer of the multihash - - String, the base58 encoded version of the multihash -- String, including the ipfs handler, a multihash and a path to traverse to, ie: +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' -`callback` must follow `function (err, stream) {}` signature, where `err` is an -error if the operation was not successful. 
`stream` will be a Readable stream in -[*object mode*](https://nodejs.org/api/stream.html#stream_object_mode), -outputting objects of the form +It returns a [Pull Stream][os] that will yield objects of the form: ```js { path: '/tmp/myfile.txt', - content: + content: } ``` -Here, each `path` corresponds to the name of a file, and `content` is a regular -Readable stream with the raw contents of that file. - -If no `callback` is passed, a promise is returned with the Readable stream. - **Example:** ```JavaScript -const multihashStr = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' +const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' -ipfs.files.get(multihashStr, function (err, stream) { - stream.on('data', (file) => { - // write the file's path and contents to standard out - console.log(file.path) - file.content.pipe(process.stdout) +const stream = ipfs.files.getReadableStream(validCID) + +pull( + stream, + pull.collect((err, files) => { + if (err) { + throw err + } + + files.forEach((file) => { + console.log(file.path) + console.log(file.path.toString()) + }) }) -}) +) ``` A great source of [examples][] can be found in the tests for this API. 
From 946d6123972cd2a4d7e34e58b4c17914bd272a0c Mon Sep 17 00:00:00 2001 From: David Dias Date: Mon, 30 Oct 2017 09:08:01 +0000 Subject: [PATCH 04/27] chore: update deps --- package.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 6eebd0cf..acc8f1f1 100644 --- a/package.json +++ b/package.json @@ -35,6 +35,7 @@ "bl": "^1.2.1", "bs58": "^4.0.1", "chai": "^4.1.2", + "dirty-chai": "^2.0.1", "cids": "~0.5.2", "concat-stream": "^1.6.0", "detect-node": "^2.0.3", @@ -45,9 +46,9 @@ "multihashes": "~0.4.12", "multihashing-async": "~0.4.7", "peer-id": "~0.10.2", - "pull-stream": "^3.6.1", - "dirty-chai": "^2.0.1" + "pull-stream": "^3.6.1" }, + "devDependencies": {}, "contributors": [ "David Dias ", "Dmitriy Ryajov ", From ef5f02f7beca7e7206784de255fcb08e000eb4c5 Mon Sep 17 00:00:00 2001 From: David Dias Date: Mon, 30 Oct 2017 10:34:20 +0000 Subject: [PATCH 05/27] structure tests, add placeholders --- SPEC/FILES.md | 2 +- src/files.js | 990 +++++++++++++++++++++++++------------------------- 2 files changed, 501 insertions(+), 491 deletions(-) diff --git a/SPEC/FILES.md b/SPEC/FILES.md index 890d07f2..56ae64d0 100644 --- a/SPEC/FILES.md +++ b/SPEC/FILES.md @@ -18,7 +18,7 @@ Where `data` may be: - a [`Pull Stream`][ps] - a Path (caveat: will only work in Node.js) - a URL -- an array of objects, each of the form +- an array of objects, each of the form: ```JavaScript { path: '/tmp/myfile.txt', // The file path diff --git a/src/files.js b/src/files.js index cfc086d6..5b27cdc8 100644 --- a/src/files.js +++ b/src/files.js @@ -7,37 +7,36 @@ const chai = require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const bs58 = require('bs58') -const Readable = require('stream').Readable const loadFixture = require('aegir/fixtures') -const bl = require('bl') +const bs58 = require('bs58') const isNode = require('detect-node') +const Readable = require('readable-stream').Readable const 
concat = require('concat-stream') const through = require('through2') -const Buffer = require('safe-buffer').Buffer +const bl = require('bl') module.exports = (common) => { - describe('.files', function () { + describe.only('.files', function () { this.timeout(80 * 1000) - let smallFile - let bigFile - let directoryContent let ipfs - before((done) => { - smallFile = loadFixture(__dirname, '../test/fixtures/testfile.txt', 'interface-ipfs-core') - bigFile = loadFixture(__dirname, '../test/fixtures/15mb.random', 'interface-ipfs-core') - - directoryContent = { - 'pp.txt': loadFixture(__dirname, '../test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), - 'holmes.txt': loadFixture(__dirname, '../test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), - 'jungle.txt': loadFixture(__dirname, '../test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), - 'alice.txt': loadFixture(__dirname, '../test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), - 'files/hello.txt': loadFixture(__dirname, '../test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), - 'files/ipfs.txt': loadFixture(__dirname, '../test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') - } + function fixture (path) { + return loadFixture(__dirname, path, 'interface-ipfs-core') + } + + const smallFile = fixture('../test/fixtures/testfile.txt') + const bigFile = fixture('../test/fixtures/15mb.random') + const directoryContent = { + 'pp.txt': fixture('../test/fixtures/test-folder/pp.txt'), + 'holmes.txt': fixture('../test/fixtures/test-folder/holmes.txt'), + 'jungle.txt': fixture('../test/fixtures/test-folder/jungle.txt'), + 'alice.txt': fixture('../test/fixtures/test-folder/alice.txt'), + 'files/hello.txt': fixture('../test/fixtures/test-folder/files/hello.txt'), + 'files/ipfs.txt': fixture('../test/fixtures/test-folder/files/ipfs.txt') + } + before((done) => { common.setup((err, factory) => { expect(err).to.not.exist() factory.spawnNode((err, node) => { @@ -50,351 
+49,466 @@ module.exports = (common) => { after((done) => common.teardown(done)) - describe('callback API', () => { - describe('.add', () => { - it('stream', (done) => { - const buffered = Buffer.from('some data') - const expectedMultihash = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' - - const rs = new Readable() - rs.push(buffered) - rs.push(null) + describe('.add', () => { + it('a Buffer', (done) => { + const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - const arr = [] - const filePair = { - path: 'data.txt', - content: rs - } + ipfs.files.add(smallFile, (err, res) => { + expect(err).to.not.exist() + expect(res).to.have.length(1) + const file = res[0] + expect(file.hash).to.equal(expectedMultihash) + expect(file.path).to.equal(file.hash) + done() + }) + }) - arr.push(filePair) + it('a BIG buffer', (done) => { + const expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' - ipfs.files.add(arr, (err, res) => { - expect(err).to.not.exist() - expect(res).to.be.length(1) - const file = res[0] - expect(file).to.exist() - expect(file.path).to.equal('data.txt') - expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedMultihash) - done() - }) + ipfs.files.add(bigFile, (err, res) => { + expect(err).to.not.exist() + expect(res).to.have.length(1) + const file = res[0] + expect(file.hash).to.equal(expectedMultihash) + expect(file.path).to.equal(file.hash) + done() }) + }) - it('buffer as tuple', (done) => { - const file = { - path: 'testfile.txt', - content: smallFile - } - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + it('a BIG buffer with progress enabled', (done) => { + const expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' - ipfs.files.add([file], (err, res) => { - expect(err).to.not.exist() + let progCount = 0 + let accumProgress = 0 + const handler = (p) => { + progCount += 1 + accumProgress = p + } - const file = res[0] - 
expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal('testfile.txt') - done() - }) + ipfs.files.add(bigFile, { progress: handler }, (err, res) => { + expect(err).to.not.exist() + expect(res).to.have.length(1) + const file = res[0] + expect(file.hash).to.equal(expectedMultihash) + expect(file.path).to.equal(file.hash) + expect(progCount).to.equal(58) + expect(accumProgress).to.equal(bigFile.byteLength) + done() }) + }) - it('buffer', (done) => { - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + it('a Buffer as tuple', (done) => { + const file = { + path: 'testfile.txt', + content: smallFile + } + const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - ipfs.files.add(smallFile, (err, res) => { - expect(err).to.not.exist() - expect(res).to.have.length(1) - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) - done() - }) + ipfs.files.add([file], (err, res) => { + expect(err).to.not.exist() + + const file = res[0] + expect(file.hash).to.equal(expectedMultihash) + expect(file.path).to.equal('testfile.txt') + done() }) + }) - it('BIG buffer', (done) => { - const expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' + it('a Readable Stream', (done) => { + const buffered = Buffer.from('some data') + const expectedMultihash = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' - ipfs.files.add(bigFile, (err, res) => { - expect(err).to.not.exist() - expect(res).to.have.length(1) - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) - done() - }) - }) + const rs = new Readable() + rs.push(buffered) + rs.push(null) - it('BIG buffer with progress', (done) => { - const expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' + const arr = [] + const filePair = { + path: 'data.txt', + content: rs + } - let progCount = 0 - let accumProgress = 0 - const handler = (p) => { - 
progCount += 1 - accumProgress = p - } + arr.push(filePair) - ipfs.files.add(bigFile, { progress: handler }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.have.length(1) - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) - expect(progCount).to.equal(58) - expect(accumProgress).to.equal(bigFile.byteLength) - done() - }) + ipfs.files.add(arr, (err, res) => { + expect(err).to.not.exist() + expect(res).to.be.length(1) + const file = res[0] + expect(file).to.exist() + expect(file.path).to.equal('data.txt') + expect(file.size).to.equal(17) + expect(file.hash).to.equal(expectedMultihash) + done() }) + }) - it('add a nested dir as array', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed - // for js-ipfs-api + go-ipfs - if (!isNode) { return done() } - const content = (name) => ({ - path: `test-folder/${name}`, - content: directoryContent[name] - }) + it('add a nested directory as array of tupples', (done) => { + // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed + // for js-ipfs-api + go-ipfs + if (!isNode) { return done() } - const emptyDir = (name) => ({ - path: `test-folder/${name}` - }) + const content = (name) => ({ + path: `test-folder/${name}`, + content: directoryContent[name] + }) - const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const emptyDir = (name) => ({ + path: `test-folder/${name}` + }) - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] + const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.files.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + 
content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(expectedRootMultihash) - done() - }) + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(expectedRootMultihash) + done() }) + }) - it('add a nested dir as array with progress', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed - // for js-ipfs-api + go-ipfs - if (!isNode) { return done() } + it('add a nested directory as array of tuppled with progress', (done) => { + // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed + // for js-ipfs-api + go-ipfs + if (!isNode) { return done() } - const content = (name) => ({ - path: `test-folder/${name}`, - content: directoryContent[name] - }) + const content = (name) => ({ + path: `test-folder/${name}`, + content: directoryContent[name] + }) - const emptyDir = (name) => ({ - path: `test-folder/${name}` - }) + const emptyDir = (name) => ({ + path: `test-folder/${name}` + }) - const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const total = dirs.reduce((i, entry) => { - return i + (entry.content ? 
entry.content.length : 0) - }, 0) - - let progCount = 0 - let accumProgress = 0 - const handler = (p) => { - progCount += 1 - accumProgress += p - } + const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const total = dirs.reduce((i, entry) => { + return i + (entry.content ? entry.content.length : 0) + }, 0) + + let progCount = 0 + let accumProgress = 0 + const handler = (p) => { + progCount += 1 + accumProgress += p + } + + ipfs.files.add(dirs, { progress: handler }, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] - ipfs.files.add(dirs, { progress: handler }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(expectedRootMultihash) + expect(progCount).to.equal(8) + expect(accumProgress).to.be.at.least(total) + done() + }) + }) - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(expectedRootMultihash) - expect(progCount).to.equal(8) - expect(accumProgress).to.be.at.least(total) - done() - }) + it('fails in invalid input', (done) => { + const nonValid = 'sfdasfasfs' + + ipfs.files.add(nonValid, (err, result) => { + expect(err).to.exist() + done() }) + }) - describe('.createAddStream', () => { - it('stream of valid files and dirs', (done) => { - const content = (name) => ({ - path: `test-folder/${name}`, - content: directoryContent[name] - }) + it('Promise test', () => { + const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - const emptyDir = (name) => ({ - path: `test-folder/${name}` - }) + return ipfs.files.add(smallFile) + .then((res) => { + const file = res[0] + expect(file.hash).to.equal(expectedMultihash) + 
expect(file.path).to.equal(file.hash) + }) + }) + }) - const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + describe('.addReadableStream', () => { + it('stream of valid files and dirs', (done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directoryContent[name] + }) - const files = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] + const emptyDir = (name) => ({ + path: `test-folder/${name}` + }) - ipfs.files.createAddStream((err, stream) => { - expect(err).to.not.exist() + const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - stream.on('data', (file) => { - if (file.path === 'test-folder') { - expect(file.hash).to.equal(expectedRootMultihash) - done() - } - }) + const files = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] - files.forEach((file) => stream.write(file)) + ipfs.files.createAddStream((err, stream) => { + expect(err).to.not.exist() - stream.end() - }) + stream.on('data', (file) => { + if (file.path === 'test-folder') { + expect(file.hash).to.equal(expectedRootMultihash) + done() + } }) + + files.forEach((file) => stream.write(file)) + + stream.end() }) + }) + }) - it('fails in invalid input', (done) => { - const nonValid = 'sfdasfasfs' + describe('.addPullStream', () => { + it.skip('stream of valid files and dirs', (done) => {}) + }) - ipfs.files.add(nonValid, (err, result) => { - expect(err).to.exist() + describe('.cat', () => { + it('with a base58 string encoded multihash', (done) => { + const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + + ipfs.files.cat(hash, (err, stream) => { + expect(err).to.not.exist() + 
stream.pipe(bl((err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') done() - }) + })) }) }) - describe('.cat', () => { - it('with a base58 string encoded multihash', (done) => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + it('with a multihash', (done) => { + const mhBuf = Buffer.from(bs58.decode('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')) + ipfs.files.cat(mhBuf, (err, stream) => { + expect(err).to.not.exist() + stream.pipe(bl((err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + })) + }) + }) + + it('streams a large file', (done) => { + const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' - ipfs.files.cat(hash, (err, stream) => { + ipfs.files.cat(hash, (err, stream) => { + expect(err).to.not.exist() + stream.pipe(bl((err, data) => { expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) - }) + expect(data).to.deep.equal(bigFile) + done() + })) }) + }) + + it('with ipfs path', (done) => { + const ipfsPath = '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - it('with a multihash', (done) => { - const mhBuf = Buffer.from(bs58.decode('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')) - ipfs.files.cat(mhBuf, (err, stream) => { + ipfs.files.cat(ipfsPath, (err, stream) => { + expect(err).to.not.exist() + stream.pipe(bl((err, data) => { expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) + expect(data.toString()).to.contain('Plz add me!') + done() + })) + }) + }) + + it('with ipfs path, nested value', (done) => { + const file = { + path: 'a/testfile.txt', + content: smallFile + } + + ipfs.files.createAddStream((err, stream) => { + expect(err).to.not.exist() + + stream.on('data', (file) => { + if (file.path === 'a') { + 
ipfs.files.cat(`/ipfs/${file.hash}/testfile.txt`, (err, stream) => { + expect(err).to.not.exist() + stream.pipe(bl((err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + })) + }) + } }) + + stream.write(file) + stream.end() }) + }) - it('streams a large file', (done) => { - const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' + it('Promise test', () => { + const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - ipfs.files.cat(hash, (err, stream) => { - expect(err).to.not.exist() + return ipfs.files.cat(hash) + .then((stream) => { stream.pipe(bl((err, data) => { expect(err).to.not.exist() - expect(data).to.deep.equal(bigFile) - done() + expect(data.toString()).to.contain('Plz add me!') })) }) - }) + }) - it('with ipfs path', (done) => { - const ipfsPath = '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + it('errors on invalid key', () => { + const hash = 'somethingNotMultihash' - ipfs.files.cat(ipfsPath, (err, stream) => { - expect(err).to.not.exist() + return ipfs.files.cat(hash) + .catch((err) => { + expect(err).to.exist() + const errString = err.toString() + if (errString === 'Error: invalid ipfs ref path') { + expect(err.toString()).to.contain('Error: invalid ipfs ref path') + } + if (errString === 'Error: Invalid Key') { + expect(err.toString()).to.contain('Error: Invalid Key') + } + }) + }) + + it('with a multihash', () => { + const hash = Buffer.from(bs58.decode('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')) + return ipfs.files.cat(hash) + .then((stream) => { stream.pipe(bl((err, data) => { expect(err).to.not.exist() expect(data.toString()).to.contain('Plz add me!') - done() })) }) - }) + }) + }) - it('with ipfs path, nested value', (done) => { - const file = { - path: 'a/testfile.txt', - content: smallFile - } + describe('.catReadableStream', () => { + it.skip('returns a Readable Stream for a multihash', (done) => {}) + }) - ipfs.files.createAddStream((err, stream) => { - 
expect(err).to.not.exist() + describe('.catPullStream', () => { + it.skip('returns a Pull Stream for a multihash', (done) => {}) + }) - stream.on('data', (file) => { - if (file.path === 'a') { - ipfs.files.cat(`/ipfs/${file.hash}/testfile.txt`, (err, stream) => { - expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) - }) - } - }) + describe('.get', () => { + it('with a base58 encoded multihash', (done) => { + const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + ipfs.files.get(hash, (err, stream) => { + expect(err).to.not.exist() - stream.write(file) - stream.end() - }) + let files = [] + stream.pipe(through.obj((file, enc, next) => { + file.content.pipe(concat((content) => { + files.push({ + path: file.path, + content: content + }) + next() + })) + }, () => { + expect(files).to.be.length(1) + expect(files[0].path).to.be.eql(hash) + expect(files[0].content.toString()).to.contain('Plz add me!') + done() + })) }) }) - describe('.get', () => { - it('with a base58 encoded multihash', (done) => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - ipfs.files.get(hash, (err, stream) => { - expect(err).to.not.exist() + it('with a multihash', (done) => { + const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + const mhBuf = Buffer.from(bs58.decode(hash)) + ipfs.files.get(mhBuf, (err, stream) => { + expect(err).to.not.exist() - let files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.be.eql(hash) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() + let files = [] + stream.pipe(through.obj((file, enc, next) => { + file.content.pipe(concat((content) => { + files.push({ + path: file.path, + content: content + }) + next() })) - }) 
+ }, () => { + expect(files).to.be.length(1) + expect(files[0].path).to.be.eql(hash) + expect(files[0].content.toString()).to.contain('Plz add me!') + done() + })) }) + }) - it('with a multihash', (done) => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - const mhBuf = Buffer.from(bs58.decode(hash)) - ipfs.files.get(mhBuf, (err, stream) => { - expect(err).to.not.exist() + it('large file', (done) => { + const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' + ipfs.files.get(hash, (err, stream) => { + expect(err).to.not.exist() - let files = [] - stream.pipe(through.obj((file, enc, next) => { + // accumulate the files and their content + var files = [] + stream.pipe(through.obj((file, enc, next) => { + file.content.pipe(concat((content) => { + files.push({ + path: file.path, + content: content + }) + next() + })) + }, () => { + expect(files.length).to.equal(1) + expect(files[0].path).to.equal(hash) + expect(files[0].content).to.deep.equal(bigFile) + done() + })) + }) + }) + + it('directory', (done) => { + // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed + // for js-ipfs-api + go-ipfs + if (!isNode) { return done() } + + const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + ipfs.files.get(hash, (err, stream) => { + expect(err).to.not.exist() + + // accumulate the files and their content + var files = [] + stream.pipe(through.obj((file, enc, next) => { + if (file.content) { file.content.pipe(concat((content) => { files.push({ path: file.path, @@ -402,22 +516,91 @@ module.exports = (common) => { }) next() })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.be.eql(hash) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() - })) - }) + } else { + files.push(file) + next() + } + }, () => { + files = files.sort((a, b) => { + if (a.path > b.path) return 1 + if (a.path < b.path) return -1 + return 0 + }) + // Check paths + var paths = files.map((file) => { + return file.path 
+ }) + expect(paths).to.include.members([ + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' + ]) + + // Check contents + const contents = files.map((file) => { + return file.content ? file.content.toString() : null + }) + expect(contents).to.include.members([ + directoryContent['alice.txt'].toString(), + directoryContent['files/hello.txt'].toString(), + directoryContent['files/ipfs.txt'].toString(), + directoryContent['holmes.txt'].toString(), + directoryContent['jungle.txt'].toString(), + directoryContent['pp.txt'].toString() + ]) + done() + })) }) + }) - it('large file', (done) => { - const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' - ipfs.files.get(hash, (err, stream) => { - expect(err).to.not.exist() + it('with ipfs path, nested value', (done) => { + const file = { + path: 'a/testfile.txt', + content: smallFile + } - // accumulate the files and their content - var files = [] + ipfs.files.createAddStream((err, stream) => { + expect(err).to.not.exist() + + stream.on('data', (file) => { + if (file.path === 'a') { + ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, stream) => { + expect(err).to.not.exist() + let files = [] + stream.pipe(through.obj((file, enc, next) => { + file.content.pipe(concat((content) => { + files.push({ + path: file.path, + content: content + }) + next() + })) + }, () => { + expect(files).to.be.length(1) + expect(files[0].content.toString()).to.contain('Plz add 
me!') + done() + })) + }) + } + }) + + stream.write(file) + stream.end() + }) + }) + + it('Promise test', () => { + const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + return ipfs.files.get(hash).then((stream) => { + let files = [] + return new Promise((resolve, reject) => { stream.pipe(through.obj((file, enc, next) => { file.content.pipe(concat((content) => { files.push({ @@ -427,112 +610,26 @@ module.exports = (common) => { next() })) }, () => { - expect(files.length).to.equal(1) + expect(files).to.be.length(1) expect(files[0].path).to.equal(hash) - expect(files[0].content).to.deep.equal(bigFile) - done() - })) - }) - }) - - it('directory', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed - // for js-ipfs-api + go-ipfs - if (!isNode) { return done() } - - const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.files.get(hash, (err, stream) => { - expect(err).to.not.exist() - - // accumulate the files and their content - var files = [] - stream.pipe(through.obj((file, enc, next) => { - if (file.content) { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - } else { - files.push(file) - next() - } - }, () => { - files = files.sort((a, b) => { - if (a.path > b.path) return 1 - if (a.path < b.path) return -1 - return 0 - }) - // Check paths - var paths = files.map((file) => { - return file.path - }) - expect(paths).to.include.members([ - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - 
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' - ]) - - // Check contents - const contents = files.map((file) => { - return file.content ? file.content.toString() : null - }) - expect(contents).to.include.members([ - directoryContent['alice.txt'].toString(), - directoryContent['files/hello.txt'].toString(), - directoryContent['files/ipfs.txt'].toString(), - directoryContent['holmes.txt'].toString(), - directoryContent['jungle.txt'].toString(), - directoryContent['pp.txt'].toString() - ]) - done() + expect(files[0].content.toString()).to.contain('Plz add me!') + resolve() })) }) }) + }) - it('with ipfs path, nested value', (done) => { - const file = { - path: 'a/testfile.txt', - content: smallFile + it('errors on invalid key', () => { + const hash = 'somethingNotMultihash' + return ipfs.files.get(hash).catch((err) => { + expect(err).to.exist() + const errString = err.toString() + if (errString === 'Error: invalid ipfs ref path') { + expect(err.toString()).to.contain('Error: invalid ipfs ref path') + } + if (errString === 'Error: Invalid Key') { + expect(err.toString()).to.contain('Error: Invalid Key') } - - ipfs.files.createAddStream((err, stream) => { - expect(err).to.not.exist() - - stream.on('data', (file) => { - if (file.path === 'a') { - ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, stream) => { - expect(err).to.not.exist() - let files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() - })) - }) - } - }) - - stream.write(file) - stream.end() - }) }) }) @@ -585,99 +682,12 @@ module.exports = (common) => { }) }) - describe('promise API', () => { - describe('.add', () => { - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - 
- it('buffer', () => { - return ipfs.files.add(smallFile) - .then((res) => { - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) - }) - }) - }) - - describe('.cat', () => { - it('with a base58 multihash encoded string', () => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - - return ipfs.files.cat(hash) - .then((stream) => { - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - })) - }) - }) - - it('errors on invalid key', () => { - const hash = 'somethingNotMultihash' - - return ipfs.files.cat(hash) - .catch((err) => { - expect(err).to.exist() - const errString = err.toString() - if (errString === 'Error: invalid ipfs ref path') { - expect(err.toString()).to.contain('Error: invalid ipfs ref path') - } - if (errString === 'Error: Invalid Key') { - expect(err.toString()).to.contain('Error: Invalid Key') - } - }) - }) - - it('with a multihash', () => { - const hash = Buffer.from(bs58.decode('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')) - return ipfs.files.cat(hash) - .then((stream) => { - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - })) - }) - }) - }) - - describe('.get', () => { - it('with a base58 encoded string', () => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - return ipfs.files.get(hash).then((stream) => { - let files = [] - return new Promise((resolve, reject) => { - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.equal(hash) - expect(files[0].content.toString()).to.contain('Plz add me!') - resolve() - })) - }) - }) - }) + describe('.getReadableStream', () => { + it.skip('returns a Readable Stream of Readable Streams', () => {}) + }) - 
it('errors on invalid key', () => { - const hash = 'somethingNotMultihash' - return ipfs.files.get(hash).catch((err) => { - expect(err).to.exist() - const errString = err.toString() - if (errString === 'Error: invalid ipfs ref path') { - expect(err.toString()).to.contain('Error: invalid ipfs ref path') - } - if (errString === 'Error: Invalid Key') { - expect(err.toString()).to.contain('Error: Invalid Key') - } - }) - }) - }) + describe('.getPullStream', () => { + it.skip('returns a Pull Stream of Pull Streams', () => {}) }) }) } From c622832ac3975df1975b58e064517da89e6aac44 Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 31 Oct 2017 09:37:30 +0000 Subject: [PATCH 06/27] apply cr --- SPEC/FILES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/SPEC/FILES.md b/SPEC/FILES.md index 56ae64d0..959475ef 100644 --- a/SPEC/FILES.md +++ b/SPEC/FILES.md @@ -189,7 +189,7 @@ ipfs.files.cat(ipfsPath, function (err, file) { throw err } - console.log(file.toString()) + console.log(file.toString('utf8')) }) ``` @@ -290,7 +290,7 @@ const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' ipfs.files.get(validCID, function (err, files) { files.forEach((file) => { console.log(file.path) - console.log(file.path.toString()) + console.log(file.content.toString('utf8')) }) }) ``` From efb5f4ad471efbc3cc63e7e0302e211a4f9385e8 Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 31 Oct 2017 10:29:18 +0000 Subject: [PATCH 07/27] update .add tests --- src/files.js | 211 ++++++++++++++++++++++++--------------------------- 1 file changed, 99 insertions(+), 112 deletions(-) diff --git a/src/files.js b/src/files.js index 5b27cdc8..eecd1c1a 100644 --- a/src/files.js +++ b/src/files.js @@ -25,15 +25,25 @@ module.exports = (common) => { return loadFixture(__dirname, path, 'interface-ipfs-core') } - const smallFile = fixture('../test/fixtures/testfile.txt') - const bigFile = fixture('../test/fixtures/15mb.random') - const directoryContent = { - 'pp.txt': 
fixture('../test/fixtures/test-folder/pp.txt'), - 'holmes.txt': fixture('../test/fixtures/test-folder/holmes.txt'), - 'jungle.txt': fixture('../test/fixtures/test-folder/jungle.txt'), - 'alice.txt': fixture('../test/fixtures/test-folder/alice.txt'), - 'files/hello.txt': fixture('../test/fixtures/test-folder/files/hello.txt'), - 'files/ipfs.txt': fixture('../test/fixtures/test-folder/files/ipfs.txt') + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: fixture('../test/fixtures/testfile.txt') + } + const bigFile = { + cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', + data: fixture('../test/fixtures/15mb.random') + } + + const directory = { + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: { + 'pp.txt': fixture('../test/fixtures/test-folder/pp.txt'), + 'holmes.txt': fixture('../test/fixtures/test-folder/holmes.txt'), + 'jungle.txt': fixture('../test/fixtures/test-folder/jungle.txt'), + 'alice.txt': fixture('../test/fixtures/test-folder/alice.txt'), + 'files/hello.txt': fixture('../test/fixtures/test-folder/files/hello.txt'), + 'files/ipfs.txt': fixture('../test/fixtures/test-folder/files/ipfs.txt') + } } before((done) => { @@ -49,116 +59,106 @@ module.exports = (common) => { after((done) => common.teardown(done)) - describe('.add', () => { + describe.only('.add', () => { it('a Buffer', (done) => { - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - - ipfs.files.add(smallFile, (err, res) => { + ipfs.files.add(smallFile.data, (err, filesAdded) => { expect(err).to.not.exist() - expect(res).to.have.length(1) - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) + + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(smallFile.cid) + expect(file.path).to.equal(smallFile.cid) + expect(file.size).to.equal(smallFile.data.length) done() }) }) it('a BIG buffer', (done) => { - const 
expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' - - ipfs.files.add(bigFile, (err, res) => { + ipfs.files.add(bigFile.data, (err, filesAdded) => { expect(err).to.not.exist() - expect(res).to.have.length(1) - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) + + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(bigFile.cid) + expect(file.path).to.equal(bigFile.cid) + expect(file.size).to.equal(bigFile.data.length) done() }) }) it('a BIG buffer with progress enabled', (done) => { - const expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' - let progCount = 0 let accumProgress = 0 - const handler = (p) => { + function handler (p) { progCount += 1 accumProgress = p } - ipfs.files.add(bigFile, { progress: handler }, (err, res) => { + ipfs.files.add(bigFile.data, { progress: handler }, (err, filesAdded) => { expect(err).to.not.exist() - expect(res).to.have.length(1) - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) + + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(bigFile.cid) + expect(file.path).to.equal(bigFile.cid) + expect(file.size).to.equal(bigFile.data.length) + expect(progCount).to.equal(58) - expect(accumProgress).to.equal(bigFile.byteLength) + expect(accumProgress).to.equal(bigFile.data.length) done() }) }) it('a Buffer as tuple', (done) => { - const file = { - path: 'testfile.txt', - content: smallFile - } - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + const tuple = { path: 'testfile.txt', content: smallFile.data } - ipfs.files.add([file], (err, res) => { + ipfs.files.add([ + tuple + ], (err, filesAdded) => { expect(err).to.not.exist() - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + 
expect(file.hash).to.equal(bigFile.cid) expect(file.path).to.equal('testfile.txt') + expect(file.size).to.equal(bigFile.data.length) + done() }) }) it('a Readable Stream', (done) => { - const buffered = Buffer.from('some data') - const expectedMultihash = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' + const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' const rs = new Readable() - rs.push(buffered) + rs.push(Buffer.from('some data')) rs.push(null) - const arr = [] - const filePair = { - path: 'data.txt', - content: rs - } - - arr.push(filePair) + const tuple = { path: 'data.txt', content: rs } - ipfs.files.add(arr, (err, res) => { + ipfs.files.add([tuple], (err, filesAdded) => { expect(err).to.not.exist() - expect(res).to.be.length(1) - const file = res[0] - expect(file).to.exist() + + expect(filesAdded).to.be.length(1) + const file = filesAdded[0] expect(file.path).to.equal('data.txt') expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedMultihash) + expect(file.hash).to.equal(expectedCid) done() }) }) - it('add a nested directory as array of tupples', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed - // for js-ipfs-api + go-ipfs + // Needs https://github.com/ipfs/js-ipfs-api/issues/339 + // to be fixed for js-ipfs-api + go-ipfs if (!isNode) { return done() } const content = (name) => ({ path: `test-folder/${name}`, - content: directoryContent[name] + content: directory.files[name] }) - const emptyDir = (name) => ({ - path: `test-folder/${name}` - }) - - const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const emptyDir = (name) => ({ path: `test-folder/${name}` }) const dirs = [ content('pp.txt'), @@ -176,7 +176,7 @@ module.exports = (common) => { const root = res[res.length - 1] expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(expectedRootMultihash) + expect(root.hash).to.equal(directory.cid) done() }) }) @@ -188,14 +188,10 @@ 
module.exports = (common) => { const content = (name) => ({ path: `test-folder/${name}`, - content: directoryContent[name] + content: directory.files[name] }) - const emptyDir = (name) => ({ - path: `test-folder/${name}` - }) - - const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const emptyDir = (name) => ({ path: `test-folder/${name}` }) const dirs = [ content('pp.txt'), @@ -224,7 +220,7 @@ module.exports = (common) => { const root = res[res.length - 1] expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(expectedRootMultihash) + expect(root.hash).to.equal(directory.cid) expect(progCount).to.equal(8) expect(accumProgress).to.be.at.least(total) done() @@ -241,13 +237,11 @@ module.exports = (common) => { }) it('Promise test', () => { - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - - return ipfs.files.add(smallFile) - .then((res) => { - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) + return ipfs.files.add(smallFile.data) + .then((filesAdded) => { + const file = filesAdded[0] + expect(file.hash).to.equal(smallFile.cid) + expect(file.path).to.equal(smallFile.cid) }) }) }) @@ -256,14 +250,10 @@ module.exports = (common) => { it('stream of valid files and dirs', (done) => { const content = (name) => ({ path: `test-folder/${name}`, - content: directoryContent[name] - }) - - const emptyDir = (name) => ({ - path: `test-folder/${name}` + content: directory.files[name] }) - const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const emptyDir = (name) => ({ path: `test-folder/${name}` }) const files = [ content('pp.txt'), @@ -281,7 +271,7 @@ module.exports = (common) => { stream.on('data', (file) => { if (file.path === 'test-folder') { - expect(file.hash).to.equal(expectedRootMultihash) + expect(file.hash).to.equal(directory.cid) done() } }) @@ -352,7 +342,7 @@ module.exports = (common) => { it('with ipfs path, 
nested value', (done) => { const file = { path: 'a/testfile.txt', - content: smallFile + content: smallFile.data } ipfs.files.createAddStream((err, stream) => { @@ -427,6 +417,7 @@ module.exports = (common) => { describe('.get', () => { it('with a base58 encoded multihash', (done) => { const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + ipfs.files.get(hash, (err, stream) => { expect(err).to.not.exist() @@ -548,12 +539,12 @@ module.exports = (common) => { return file.content ? file.content.toString() : null }) expect(contents).to.include.members([ - directoryContent['alice.txt'].toString(), - directoryContent['files/hello.txt'].toString(), - directoryContent['files/ipfs.txt'].toString(), - directoryContent['holmes.txt'].toString(), - directoryContent['jungle.txt'].toString(), - directoryContent['pp.txt'].toString() + directory.files['alice.txt'].toString(), + directory.files['files/hello.txt'].toString(), + directory.files['files/ipfs.txt'].toString(), + directory.files['holmes.txt'].toString(), + directory.files['jungle.txt'].toString(), + directory.files['pp.txt'].toString() ]) done() })) @@ -576,10 +567,7 @@ module.exports = (common) => { let files = [] stream.pipe(through.obj((file, enc, next) => { file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) + files.push({ path: file.path, content: content }) next() })) }, () => { @@ -603,10 +591,7 @@ module.exports = (common) => { return new Promise((resolve, reject) => { stream.pipe(through.obj((file, enc, next) => { file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) + files.push({ path: file.path, content: content }) next() })) }, () => { @@ -621,16 +606,18 @@ module.exports = (common) => { it('errors on invalid key', () => { const hash = 'somethingNotMultihash' - return ipfs.files.get(hash).catch((err) => { - expect(err).to.exist() - const errString = err.toString() - if (errString === 'Error: invalid ipfs ref 
path') { - expect(err.toString()).to.contain('Error: invalid ipfs ref path') - } - if (errString === 'Error: Invalid Key') { - expect(err.toString()).to.contain('Error: Invalid Key') - } - }) + + return ipfs.files.get(hash) + .catch((err) => { + expect(err).to.exist() + const errString = err.toString() + if (errString === 'Error: invalid ipfs ref path') { + expect(err.toString()).to.contain('Error: invalid ipfs ref path') + } + if (errString === 'Error: Invalid Key') { + expect(err.toString()).to.contain('Error: Invalid Key') + } + }) }) describe('.ls', () => { From b4354d370ab2e7de68fc48d794f7f9e34e5e4dee Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 31 Oct 2017 10:49:40 +0000 Subject: [PATCH 08/27] files.add done --- src/files.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/files.js b/src/files.js index eecd1c1a..bc930d98 100644 --- a/src/files.js +++ b/src/files.js @@ -17,7 +17,7 @@ const bl = require('bl') module.exports = (common) => { describe.only('.files', function () { - this.timeout(80 * 1000) + this.timeout(5 * 1000) let ipfs @@ -68,7 +68,8 @@ module.exports = (common) => { const file = filesAdded[0] expect(file.hash).to.equal(smallFile.cid) expect(file.path).to.equal(smallFile.cid) - expect(file.size).to.equal(smallFile.data.length) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(smallFile.data.length) done() }) }) @@ -81,7 +82,8 @@ module.exports = (common) => { const file = filesAdded[0] expect(file.hash).to.equal(bigFile.cid) expect(file.path).to.equal(bigFile.cid) - expect(file.size).to.equal(bigFile.data.length) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(bigFile.data.length) done() }) }) @@ -101,7 +103,6 @@ module.exports = (common) => { const file = filesAdded[0] expect(file.hash).to.equal(bigFile.cid) expect(file.path).to.equal(bigFile.cid) - expect(file.size).to.equal(bigFile.data.length) 
expect(progCount).to.equal(58) expect(accumProgress).to.equal(bigFile.data.length) @@ -119,9 +120,8 @@ module.exports = (common) => { expect(filesAdded).to.have.length(1) const file = filesAdded[0] - expect(file.hash).to.equal(bigFile.cid) + expect(file.hash).to.equal(smallFile.cid) expect(file.path).to.equal('testfile.txt') - expect(file.size).to.equal(bigFile.data.length) done() }) From f04cd2eeec52adfa0d28d042672b1a9a47bfcc1e Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 31 Oct 2017 14:28:57 +0000 Subject: [PATCH 09/27] .addReadableStream --- src/files.js | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/src/files.js b/src/files.js index bc930d98..dd0af102 100644 --- a/src/files.js +++ b/src/files.js @@ -59,7 +59,7 @@ module.exports = (common) => { after((done) => common.teardown(done)) - describe.only('.add', () => { + describe('.add', () => { it('a Buffer', (done) => { ipfs.files.add(smallFile.data, (err, filesAdded) => { expect(err).to.not.exist() @@ -246,7 +246,7 @@ module.exports = (common) => { }) }) - describe('.addReadableStream', () => { + describe.only('.addReadableStream', () => { it('stream of valid files and dirs', (done) => { const content = (name) => ({ path: `test-folder/${name}`, @@ -266,20 +266,21 @@ module.exports = (common) => { emptyDir('files/empty') ] - ipfs.files.createAddStream((err, stream) => { - expect(err).to.not.exist() - - stream.on('data', (file) => { - if (file.path === 'test-folder') { - expect(file.hash).to.equal(directory.cid) - done() - } - }) + const stream = ipfs.files.addReadableStream() - files.forEach((file) => stream.write(file)) + stream.on('error', (err) => { + expect(err).to.not.exist() + }) - stream.end() + stream.on('data', (file) => { + if (file.path === 'test-folder') { + expect(file.hash).to.equal(directory.cid) + done() + } }) + + files.forEach((file) => stream.write(file)) + stream.end() }) }) From 53df9dc64e4d66819186787b847a395a822e33d4 Mon Sep 17 00:00:00 
2001 From: David Dias Date: Tue, 31 Oct 2017 15:04:13 +0000 Subject: [PATCH 10/27] .addPullStream --- src/files.js | 42 +++++++++++++++++++++++++++++++++++++++--- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/src/files.js b/src/files.js index dd0af102..13ee8d99 100644 --- a/src/files.js +++ b/src/files.js @@ -11,6 +11,7 @@ const loadFixture = require('aegir/fixtures') const bs58 = require('bs58') const isNode = require('detect-node') const Readable = require('readable-stream').Readable +const pull = require('pull-stream') const concat = require('concat-stream') const through = require('through2') const bl = require('bl') @@ -246,7 +247,7 @@ module.exports = (common) => { }) }) - describe.only('.addReadableStream', () => { + describe('.addReadableStream', () => { it('stream of valid files and dirs', (done) => { const content = (name) => ({ path: `test-folder/${name}`, @@ -284,8 +285,43 @@ module.exports = (common) => { }) }) - describe('.addPullStream', () => { - it.skip('stream of valid files and dirs', (done) => {}) + describe.only('.addPullStream', () => { + it('stream of valid files and dirs', (done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const files = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const stream = ipfs.files.addPullStream() + + pull( + pull.values(files), + stream, + pull.collect((err, filesAdded) => { + expect(err).to.not.exist() + + filesAdded.forEach((file) => { + if (file.path === 'test-folder') { + expect(file.hash).to.equal(directory.cid) + done() + } + }) + }) + ) + }) }) describe('.cat', () => { From fcd7f740d56ddb619c8feb533136f228ff15f5fe Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 31 Oct 2017 16:33:07 +0000 
Subject: [PATCH 11/27] files.cat --- src/files.js | 106 ++++++++++++++++++--------------------------------- 1 file changed, 38 insertions(+), 68 deletions(-) diff --git a/src/files.js b/src/files.js index 13ee8d99..0a4210ba 100644 --- a/src/files.js +++ b/src/files.js @@ -9,6 +9,7 @@ const expect = chai.expect chai.use(dirtyChai) const loadFixture = require('aegir/fixtures') const bs58 = require('bs58') +const parallel = require('async/parallel') const isNode = require('detect-node') const Readable = require('readable-stream').Readable const pull = require('pull-stream') @@ -285,7 +286,7 @@ module.exports = (common) => { }) }) - describe.only('.addPullStream', () => { + describe('.addPullStream', () => { it('stream of valid files and dirs', (done) => { const content = (name) => ({ path: `test-folder/${name}`, @@ -324,123 +325,92 @@ module.exports = (common) => { }) }) - describe('.cat', () => { - it('with a base58 string encoded multihash', (done) => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + describe.only('.cat', () => { + before((done) => { + parallel([ + (cb) => ipfs.files.add(smallFile.data, cb), + (cb) => ipfs.files.add(bigFile.data, cb) + ], done) + }) - ipfs.files.cat(hash, (err, stream) => { + it('with a base58 string encoded multihash', (done) => { + ipfs.files.cat(smallFile.cid, (err, data) => { expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) + expect(data.toString()).to.contain('Plz add me!') + done() }) }) it('with a multihash', (done) => { - const mhBuf = Buffer.from(bs58.decode('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')) - ipfs.files.cat(mhBuf, (err, stream) => { + const cid = Buffer.from(bs58.decode(smallFile.cid)) + + ipfs.files.cat(cid, (err, data) => { expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) + 
expect(data.toString()).to.contain('Plz add me!') + done() }) }) it('streams a large file', (done) => { - const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' - - ipfs.files.cat(hash, (err, stream) => { + ipfs.files.cat(bigFile.cid, (err, data) => { expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data).to.deep.equal(bigFile) - done() - })) + expect(data).to.eql(bigFile.data) + done() }) }) it('with ipfs path', (done) => { - const ipfsPath = '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + const ipfsPath = '/ipfs/' + smallFile.cid - ipfs.files.cat(ipfsPath, (err, stream) => { + ipfs.files.cat(ipfsPath, (err, data) => { expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) + expect(data.toString()).to.contain('Plz add me!') + done() }) }) it('with ipfs path, nested value', (done) => { - const file = { - path: 'a/testfile.txt', - content: smallFile.data - } + const file = { path: 'a/testfile.txt', content: smallFile.data } - ipfs.files.createAddStream((err, stream) => { + ipfs.files.add([file], (err, filesAdded) => { expect(err).to.not.exist() - stream.on('data', (file) => { + filesAdded.forEach((file) => { if (file.path === 'a') { - ipfs.files.cat(`/ipfs/${file.hash}/testfile.txt`, (err, stream) => { + ipfs.files.cat(`/ipfs/${file.hash}/testfile.txt`, (err, data) => { expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) + expect(data.toString()).to.contain('Plz add me!') + done() }) } }) - - stream.write(file) - stream.end() }) }) it('Promise test', () => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - - return ipfs.files.cat(hash) - .then((stream) => { - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - })) + 
return ipfs.files.cat(smallFile.cid) + .then((data) => { + expect(data.toString()).to.contain('Plz add me!') }) }) it('errors on invalid key', () => { - const hash = 'somethingNotMultihash' + const invalidCid = 'somethingNotMultihash' - return ipfs.files.cat(hash) + return ipfs.files.cat(invalidCid) .catch((err) => { expect(err).to.exist() + const errString = err.toString() if (errString === 'Error: invalid ipfs ref path') { expect(err.toString()).to.contain('Error: invalid ipfs ref path') } + if (errString === 'Error: Invalid Key') { expect(err.toString()).to.contain('Error: Invalid Key') } }) }) - - it('with a multihash', () => { - const hash = Buffer.from(bs58.decode('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')) - return ipfs.files.cat(hash) - .then((stream) => { - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - })) - }) - }) }) describe('.catReadableStream', () => { From 135c30db337b7ca466695df2ab9278ca358024d8 Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 31 Oct 2017 17:07:11 +0000 Subject: [PATCH 12/27] .catReadableStream --- src/files.js | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/src/files.js b/src/files.js index 0a4210ba..b9c9076d 100644 --- a/src/files.js +++ b/src/files.js @@ -325,7 +325,7 @@ module.exports = (common) => { }) }) - describe.only('.cat', () => { + describe('.cat', () => { before((done) => { parallel([ (cb) => ipfs.files.add(smallFile.data, cb), @@ -413,12 +413,24 @@ module.exports = (common) => { }) }) - describe('.catReadableStream', () => { - it.skip('returns a Readable Stream for a multihash', (done) => {}) + describe.only('.catReadableStream', () => { + before((done) => ipfs.files.add(bigFile.data, done)) + + it('returns a Readable Stream for a cid', (done) => { + const stream = ipfs.files.catReadableStream(bigFile.cid) + + stream.pipe(bl((err, data) => { + expect(err).to.not.exist() + 
expect(data).to.eql(bigFile.data) + done() + })) + }) }) describe('.catPullStream', () => { - it.skip('returns a Pull Stream for a multihash', (done) => {}) + before((done) => ipfs.files.add(bigFile.data, done)) + + it.skip('returns a Pull Stream for a cid', (done) => {}) }) describe('.get', () => { From 4192ecfa8a76bfcf54e63a1e7da1cec855702676 Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 31 Oct 2017 18:02:13 +0000 Subject: [PATCH 13/27] catPulLStream --- src/files.js | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/src/files.js b/src/files.js index b9c9076d..4bbd450d 100644 --- a/src/files.js +++ b/src/files.js @@ -413,7 +413,7 @@ module.exports = (common) => { }) }) - describe.only('.catReadableStream', () => { + describe('.catReadableStream', () => { before((done) => ipfs.files.add(bigFile.data, done)) it('returns a Readable Stream for a cid', (done) => { @@ -427,10 +427,22 @@ module.exports = (common) => { }) }) - describe('.catPullStream', () => { - before((done) => ipfs.files.add(bigFile.data, done)) + describe.only('.catPullStream', () => { + before((done) => ipfs.files.add(smallFile.data, done)) + + it('returns a Pull Stream for a cid', (done) => { + const stream = ipfs.files.catPullStream(smallFile.cid) - it.skip('returns a Pull Stream for a cid', (done) => {}) + pull( + stream, + pull.concat((err, data) => { + expect(err).to.not.exist() + expect(data.length).to.equal(smallFile.data.length) + expect(data).to.eql(smallFile.data.toString()) + done() + }) + ) + }) }) describe('.get', () => { From 79373d019fe6404ddebce87facf7ecbb71c48d08 Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 3 Nov 2017 11:13:49 +0000 Subject: [PATCH 14/27] .get --- src/files.js | 258 ++++++++++++++++++++++----------------------------- 1 file changed, 110 insertions(+), 148 deletions(-) diff --git a/src/files.js b/src/files.js index 4bbd450d..2e95cd75 100644 --- a/src/files.js +++ b/src/files.js @@ -10,11 +10,10 @@ 
chai.use(dirtyChai) const loadFixture = require('aegir/fixtures') const bs58 = require('bs58') const parallel = require('async/parallel') +const series = require('async/series') const isNode = require('detect-node') const Readable = require('readable-stream').Readable const pull = require('pull-stream') -const concat = require('concat-stream') -const through = require('through2') const bl = require('bl') module.exports = (common) => { @@ -427,7 +426,7 @@ module.exports = (common) => { }) }) - describe.only('.catPullStream', () => { + describe('.catPullStream', () => { before((done) => ipfs.files.add(smallFile.data, done)) it('returns a Pull Stream for a cid', (done) => { @@ -445,76 +444,45 @@ module.exports = (common) => { }) }) - describe('.get', () => { - it('with a base58 encoded multihash', (done) => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + describe.only('.get', () => { + before((done) => { + parallel([ + (cb) => ipfs.files.add(smallFile.data, cb), + (cb) => ipfs.files.add(bigFile.data, cb) + ], done) + }) - ipfs.files.get(hash, (err, stream) => { + it('with a base58 encoded multihash', (done) => { + ipfs.files.get(smallFile.cid, (err, files) => { expect(err).to.not.exist() - let files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.be.eql(hash) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() - })) + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() }) }) it('with a multihash', (done) => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - const mhBuf = Buffer.from(bs58.decode(hash)) - ipfs.files.get(mhBuf, (err, stream) => { + const cidBuf = Buffer.from(bs58.decode(smallFile.cid)) + 
ipfs.files.get(cidBuf, (err, files) => { expect(err).to.not.exist() - let files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.be.eql(hash) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() - })) + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() }) }) it('large file', (done) => { - const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' - ipfs.files.get(hash, (err, stream) => { + ipfs.files.get(bigFile.cid, (err, files) => { expect(err).to.not.exist() - // accumulate the files and their content - var files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files.length).to.equal(1) - expect(files[0].path).to.equal(hash) - expect(files[0].content).to.deep.equal(bigFile) - done() - })) + expect(files.length).to.equal(1) + expect(files[0].path).to.equal(bigFile.cid) + expect(files[0].content).to.eql(bigFile.data) + done() }) }) @@ -523,122 +491,116 @@ module.exports = (common) => { // for js-ipfs-api + go-ipfs if (!isNode) { return done() } - const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.files.get(hash, (err, stream) => { - expect(err).to.not.exist() - - // accumulate the files and their content - var files = [] - stream.pipe(through.obj((file, enc, next) => { - if (file.content) { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - } else { - files.push(file) - next() - } - }, () => { - files = files.sort((a, b) => { - if (a.path > b.path) return 1 - if (a.path < b.path) return -1 - return 0 + series([ + (cb) => { + 
const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] }) - // Check paths - var paths = files.map((file) => { - return file.path + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + cb() }) - expect(paths).to.include.members([ - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' - ]) - - // Check contents - const contents = files.map((file) => { - return file.content ? 
file.content.toString() : null + }, + (cb) => { + ipfs.files.get(directory.cid, (err, files) => { + expect(err).to.not.exist() + + files = files.sort((a, b) => { + if (a.path > b.path) return 1 + if (a.path < b.path) return -1 + return 0 + }) + + // Check paths + const paths = files.map((file) => { return file.path }) + expect(paths).to.include.members([ + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' + ]) + + // Check contents + const contents = files.map((file) => { + return file.content + ? 
file.content.toString() + : null + }) + + expect(contents).to.include.members([ + directory.files['alice.txt'].toString(), + directory.files['files/hello.txt'].toString(), + directory.files['files/ipfs.txt'].toString(), + directory.files['holmes.txt'].toString(), + directory.files['jungle.txt'].toString(), + directory.files['pp.txt'].toString() + ]) + cb() }) - expect(contents).to.include.members([ - directory.files['alice.txt'].toString(), - directory.files['files/hello.txt'].toString(), - directory.files['files/ipfs.txt'].toString(), - directory.files['holmes.txt'].toString(), - directory.files['jungle.txt'].toString(), - directory.files['pp.txt'].toString() - ]) - done() - })) - }) + } + ], done) }) it('with ipfs path, nested value', (done) => { const file = { path: 'a/testfile.txt', - content: smallFile + content: smallFile.data } - ipfs.files.createAddStream((err, stream) => { + ipfs.files.add([file], (err, filesAdded) => { expect(err).to.not.exist() - stream.on('data', (file) => { + filesAdded.forEach((file) => { if (file.path === 'a') { - ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, stream) => { + ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => { expect(err).to.not.exist() - let files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ path: file.path, content: content }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() - })) + expect(files).to.be.length(1) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() }) } }) - - stream.write(file) - stream.end() }) }) it('Promise test', () => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - return ipfs.files.get(hash).then((stream) => { - let files = [] - return new Promise((resolve, reject) => { - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ 
path: file.path, content: content }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.equal(hash) - expect(files[0].content.toString()).to.contain('Plz add me!') - resolve() - })) + return ipfs.files.get(smallFile.cid) + .then((files) => { + expect(files).to.be.length(1) + expect(files[0].path).to.equal(smallFile.cid) + expect(files[0].content.toString()).to.contain('Plz add me!') }) - }) }) it('errors on invalid key', () => { - const hash = 'somethingNotMultihash' + const invalidCid = 'somethingNotMultihash' - return ipfs.files.get(hash) + return ipfs.files.get(invalidCid) .catch((err) => { expect(err).to.exist() const errString = err.toString() From d231a4917a3a8476bc40144e333274596c03d6d3 Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 3 Nov 2017 11:55:03 +0000 Subject: [PATCH 15/27] getReadableStream --- src/files.js | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/src/files.js b/src/files.js index 2e95cd75..2c91f7eb 100644 --- a/src/files.js +++ b/src/files.js @@ -14,6 +14,8 @@ const series = require('async/series') const isNode = require('detect-node') const Readable = require('readable-stream').Readable const pull = require('pull-stream') +const concat = require('concat-stream') +const through = require('through2') const bl = require('bl') module.exports = (common) => { @@ -444,7 +446,7 @@ module.exports = (common) => { }) }) - describe.only('.get', () => { + describe('.get', () => { before((done) => { parallel([ (cb) => ipfs.files.add(smallFile.data, cb), @@ -662,12 +664,31 @@ module.exports = (common) => { }) }) - describe('.getReadableStream', () => { - it.skip('returns a Readable Stream of Readable Streams', () => {}) + describe.only('.getReadableStream', () => { + before((done) => ipfs.files.add(smallFile.data, done)) + + it('returns a Readable Stream of Readable Streams', (done) => { + const stream = ipfs.files.getReadableStream(smallFile.cid) + + let files = [] 
+ stream.pipe(through.obj((file, enc, next) => { + file.content.pipe(concat((content) => { + files.push({ path: file.path, content: content }) + next() + })) + }, () => { + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].content.toString()).to.contain('Plz add me!') + done() + })) + }) }) describe('.getPullStream', () => { - it.skip('returns a Pull Stream of Pull Streams', () => {}) + before((done) => ipfs.files.add(smallFile.data, done)) + + it.skip('returns a Pull Stream of Pull Streams', (done) => {}) }) }) } From 394ff27f817043048222976a4f39a67484d1dc0e Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 3 Nov 2017 12:33:41 +0000 Subject: [PATCH 16/27] chore: fix travis --- .travis.yml | 10 +++++++++- src/files.js | 25 ++++++++++++++++++++++--- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index f87ae1e2..883a1b34 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,11 +3,19 @@ language: node_js matrix: include: - - node_js: stable + - node_js: 8 env: CXX=g++-4.8 script: - npm run lint + - npm run test + +before_script: + - export DISPLAY=:99.0 + - sh -e /etc/init.d/xvfb start + +after_success: + - npm run coverage-publish before_script: - export DISPLAY=:99.0 diff --git a/src/files.js b/src/files.js index 2c91f7eb..ffd764ed 100644 --- a/src/files.js +++ b/src/files.js @@ -19,7 +19,7 @@ const through = require('through2') const bl = require('bl') module.exports = (common) => { - describe.only('.files', function () { + describe('.files', function () { this.timeout(5 * 1000) let ipfs @@ -664,7 +664,7 @@ module.exports = (common) => { }) }) - describe.only('.getReadableStream', () => { + describe('.getReadableStream', () => { before((done) => ipfs.files.add(smallFile.data, done)) it('returns a Readable Stream of Readable Streams', (done) => { @@ -688,7 +688,26 @@ module.exports = (common) => { describe('.getPullStream', () => { before((done) => ipfs.files.add(smallFile.data, 
done)) - it.skip('returns a Pull Stream of Pull Streams', (done) => {}) + it('returns a Pull Stream of Pull Streams', (done) => { + const stream = ipfs.files.getPullStream(smallFile.cid) + + pull( + stream, + pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + pull( + files[0].content, + pull.concat((err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + }) + ) + }) + ) + }) }) }) } From 36d7f31c4ec9106aab118631e98ea3a4708841eb Mon Sep 17 00:00:00 2001 From: David Dias Date: Mon, 13 Nov 2017 10:01:12 +0000 Subject: [PATCH 17/27] chore: add ipfs.ls spec --- SPEC/FILES.md | 152 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 152 insertions(+) diff --git a/SPEC/FILES.md b/SPEC/FILES.md index 959475ef..66ad91b5 100644 --- a/SPEC/FILES.md +++ b/SPEC/FILES.md @@ -391,6 +391,158 @@ pull( A great source of [examples][] can be found in the tests for this API. +#### `ls` + +> Lists a directory from IPFS that is addressed by a valid IPFS Path. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.ls(ipfsPath, [callback]) + +> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionality. + +ipfsPath can be of type: + +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + +`callback` must follow `function (err, files) {}` signature, where `err` is an error if the operation was not successful. 
`files` is an array containing objects of the following form: + +```js +{ + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' +} +``` + +If no `callback` is passed, a promise is returned. + +**Example:** + +```JavaScript +const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' + +ipfs.files.ls(validCID, function (err, files) { + files.forEach((file) => { + console.log(file.path) + }) +}) +``` + +A great source of [examples][] can be found in the tests for this API. + +#### `lsReadableStream` + +> Lists a directory from IPFS that is addressed by a valid IPFS Path. The list will be yielded as Readable Streams. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.lsReadableStream(ipfsPath) -> [Readable Stream][rs] + +> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionality. + +ipfsPath can be of type: + +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + +It returns a [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects of the form: + +```js +{ + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' +} +``` + +**Example:** + +```JavaScript +const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' + +const stream = ipfs.files.getReadableStream(validCID) + +stream.on('data', (file) => { + // write the file's path and contents to standard out + 
console.log(file.path)
+})
+```
+
+A great source of [examples][] can be found in the tests for this API.
+
+#### `lsPullStream`
+
+> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded as Readable Streams.
+
+##### `Go` **WIP**
+
+##### `JavaScript` - ipfs.lsPullStream(ipfsPath) -> [Pull Stream][ps]
+
+> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionality.
+
+
+ipfsPath can be of type:
+
+- [`cid`][cid] of type:
+  - [Buffer][b], the raw Buffer of the cid
+  - String, the base58 encoded version of the cid
+- String, including the ipfs handler, a cid and a path to traverse to, ie:
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+  - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+
+It returns a [Pull Stream][ps] that will yield objects of the form:
+
+```js
+{
+  depth: 1,
+  name: 'alice.txt',
+  path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
+  size: 11696,
+  hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi',
+  type: 'file'
+}
+```
+
+**Example:**
+
+```JavaScript
+const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
+
+const stream = ipfs.lsPullStream(validCID)
+
+pull(
+  stream,
+  pull.collect((err, files) => {
+    if (err) {
+      throw err
+    }
+
+    files.forEach((file) => console.log(file.path))
+  })
+)
+```
+
+A great source of [examples][] can be found in the tests for this API. 
+ [examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/files.js [b]: https://www.npmjs.com/package/buffer [rs]: https://www.npmjs.com/package/readable-stream From 90cc281c2cb085a89f5693f8c3c21239b47d97f3 Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 14 Nov 2017 09:45:10 +0000 Subject: [PATCH 18/27] chore: update CI --- .travis.yml | 15 +-------------- circle.yml | 9 --------- 2 files changed, 1 insertion(+), 23 deletions(-) diff --git a/.travis.yml b/.travis.yml index 883a1b34..b305ff68 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,26 +3,13 @@ language: node_js matrix: include: - - node_js: 8 + - node_js: stable env: CXX=g++-4.8 script: - npm run lint - - npm run test - -before_script: - - export DISPLAY=:99.0 - - sh -e /etc/init.d/xvfb start - -after_success: - - npm run coverage-publish - -before_script: - - export DISPLAY=:99.0 - - sh -e /etc/init.d/xvfb start addons: - firefox: 'latest' apt: sources: - ubuntu-toolchain-r-test diff --git a/circle.yml b/circle.yml index 6af3aa6d..355d2261 100644 --- a/circle.yml +++ b/circle.yml @@ -2,15 +2,6 @@ machine: node: version: stable -dependencies: - pre: - - google-chrome --version - - wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add - - - sudo sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' - - sudo apt-get update - - sudo apt-get --only-upgrade install google-chrome-stable - - google-chrome --version - test: override: - npm run lint From abb7c01830239607c20bcc85596a5f69a2a2773f Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 14 Nov 2017 09:46:16 +0000 Subject: [PATCH 19/27] chore: fix CI and linting --- package.json | 6 +++--- src/files.js | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index acc8f1f1..1f99c919 100644 --- a/package.json +++ b/package.json @@ -6,9 +6,9 @@ 
"scripts": { "test": "exit 0", "lint": "aegir lint", - "release": "aegir release node --no-docs", - "release-minor": "aegir release node --type minor --no-docs", - "release-major": "aegir release node --type major --no-docs", + "release": "aegir release -t node --no-docs", + "release-minor": "aegir release -t node --type minor --no-docs", + "release-major": "aegir release -t node --type major --no-docs", "coverage": "exit 0", "coverage-publish": "exit 0" }, diff --git a/src/files.js b/src/files.js index ffd764ed..a3a5a7a1 100644 --- a/src/files.js +++ b/src/files.js @@ -709,5 +709,53 @@ module.exports = (common) => { ) }) }) + + describe('.ls', () => { + it('with a base58 encoded string', (done) => { + const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + ipfs.ls(hash, (err, files) => { + expect(err).to.not.exist() + files.forEach((file) => delete file.content) + expect(files).to.deep.equal([ + { depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' }, + { depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 4, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' }, + { depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 183, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' }, + { depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 582072, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' }, + { depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2305, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' }, + { depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4551, + hash: 
'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' } ]) + done() + }) + }) + }) }) } From 9b2fb0f8fe2319afc3d803436dfe68814a2d1fce Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 14 Nov 2017 10:04:52 +0000 Subject: [PATCH 20/27] fix directory test --- src/files.js | 44 +++++++++++++++++++++++++++++++++++++------- 1 file changed, 37 insertions(+), 7 deletions(-) diff --git a/src/files.js b/src/files.js index a3a5a7a1..12d4fbbe 100644 --- a/src/files.js +++ b/src/files.js @@ -19,7 +19,7 @@ const through = require('through2') const bl = require('bl') module.exports = (common) => { - describe('.files', function () { + describe.only('.files', function () { this.timeout(5 * 1000) let ipfs @@ -711,12 +711,41 @@ module.exports = (common) => { }) describe('.ls', () => { - it('with a base58 encoded string', (done) => { - const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.ls(hash, (err, files) => { + before((done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { expect(err).to.not.exist() - files.forEach((file) => delete file.content) - expect(files).to.deep.equal([ + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + done() + }) + }) + + it('with a base58 encoded CID', (done) => { + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + ipfs.ls(cid, (err, files) => { + expect(err).to.not.exist() + + expect(files).to.eql([ { depth: 1, name: 'alice.txt', path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', @@ -752,7 +781,8 @@ module.exports = (common) => { path: 
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', size: 4551, hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' } ]) + type: 'file' } + ]) done() }) }) From 0b459fa0019541c4eee092e44a4000a5f8b311f3 Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 14 Nov 2017 10:36:32 +0000 Subject: [PATCH 21/27] add remaining tests for .ls .lsReadableStream and .lsPullStream --- src/files.js | 161 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 161 insertions(+) diff --git a/src/files.js b/src/files.js index 12d4fbbe..9e5b4eb6 100644 --- a/src/files.js +++ b/src/files.js @@ -787,5 +787,166 @@ module.exports = (common) => { }) }) }) + + describe('.lsReadableStream', () => { + before((done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + done() + }) + }) + + it('with a base58 encoded CID', (done) => { + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsReadableStream(cid) + + stream.pipe(concat((files) => { + expect(files).to.eql([ + { depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' }, + { depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 4, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' }, + { depth: 1, + name: 'files', + path: 
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 183, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' }, + { depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 582072, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' }, + { depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2305, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' }, + { depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4551, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' } + ]) + done() + })) + }) + }) + + describe('.lsPullStream', () => { + before((done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + done() + }) + }) + + it('with a base58 encoded CID', (done) => { + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsPullStream(cid) + + pull( + stream, + pull.collect((err, files) => { + expect(err).to.not.exist() + + expect(files).to.eql([ + { depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' }, + { depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 4, + 
hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' }, + { depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 183, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' }, + { depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 582072, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' }, + { depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2305, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' }, + { depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4551, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' } + ]) + done() + }) + ) + }) + }) }) } From 5a8caeaf47318561ab31273e791f7e5148590d0a Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 14 Nov 2017 10:42:15 +0000 Subject: [PATCH 22/27] apply spec CR --- SPEC/FILES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/SPEC/FILES.md b/SPEC/FILES.md index 66ad91b5..e595a6f2 100644 --- a/SPEC/FILES.md +++ b/SPEC/FILES.md @@ -478,7 +478,7 @@ It returns a [Readable Stream][rs] in [Object mode](https://nodejs.org/api/strea ```JavaScript const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' -const stream = ipfs.files.getReadableStream(validCID) +const stream = ipfs.files.lsReadableStream(validCID) stream.on('data', (file) => { // write the file's path and contents to standard out @@ -490,7 +490,7 @@ A great source of [examples][] can be found in the tests for this API. #### `lsPullStream` -> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded as Readable Streams. +> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded through a Pull Stream. 
##### `Go` **WIP** From f6aba4ac4b5c2df202f9e23ed945234b8881eadf Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 17 Nov 2017 11:02:23 +0000 Subject: [PATCH 23/27] enable all tests to fix https://github.com/ipfs/js-ipfs-api/issues/339 --- src/files.js | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/src/files.js b/src/files.js index 9e5b4eb6..7506d3ca 100644 --- a/src/files.js +++ b/src/files.js @@ -11,7 +11,6 @@ const loadFixture = require('aegir/fixtures') const bs58 = require('bs58') const parallel = require('async/parallel') const series = require('async/series') -const isNode = require('detect-node') const Readable = require('readable-stream').Readable const pull = require('pull-stream') const concat = require('concat-stream') @@ -152,10 +151,6 @@ module.exports = (common) => { }) it('add a nested directory as array of tupples', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 - // to be fixed for js-ipfs-api + go-ipfs - if (!isNode) { return done() } - const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] @@ -185,10 +180,6 @@ module.exports = (common) => { }) it('add a nested directory as array of tuppled with progress', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed - // for js-ipfs-api + go-ipfs - if (!isNode) { return done() } - const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] @@ -288,7 +279,9 @@ module.exports = (common) => { }) describe('.addPullStream', () => { - it('stream of valid files and dirs', (done) => { + it('stream of valid files and dirs', function (done) { + this.timeout(20 * 1000) + const content = (name) => ({ path: `test-folder/${name}`, content: directory.files[name] @@ -489,10 +482,6 @@ module.exports = (common) => { }) it('directory', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed - // for js-ipfs-api + go-ipfs - if 
(!isNode) { return done() } - series([ (cb) => { const content = (name) => ({ From f24d83d8614a0bdce37bd9311d5ece27b1ff710a Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 17 Nov 2017 11:34:46 +0000 Subject: [PATCH 24/27] remove dup ls test --- src/files.js | 66 +++++++--------------------------------------------- 1 file changed, 9 insertions(+), 57 deletions(-) diff --git a/src/files.js b/src/files.js index 7506d3ca..8101a6c1 100644 --- a/src/files.js +++ b/src/files.js @@ -319,7 +319,7 @@ module.exports = (common) => { }) }) - describe('.cat', () => { + describe.skip('.cat', () => { before((done) => { parallel([ (cb) => ipfs.files.add(smallFile.data, cb), @@ -407,7 +407,7 @@ module.exports = (common) => { }) }) - describe('.catReadableStream', () => { + describe.skip('.catReadableStream', () => { before((done) => ipfs.files.add(bigFile.data, done)) it('returns a Readable Stream for a cid', (done) => { @@ -421,7 +421,7 @@ module.exports = (common) => { }) }) - describe('.catPullStream', () => { + describe.skip('.catPullStream', () => { before((done) => ipfs.files.add(smallFile.data, done)) it('returns a Pull Stream for a cid', (done) => { @@ -439,7 +439,7 @@ module.exports = (common) => { }) }) - describe('.get', () => { + describe.skip('.get', () => { before((done) => { parallel([ (cb) => ipfs.files.add(smallFile.data, cb), @@ -603,57 +603,9 @@ module.exports = (common) => { } }) }) - - describe('.ls', () => { - it('with a base58 encoded string', (done) => { - const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.ls(hash, (err, files) => { - expect(err).to.not.exist() - files.forEach((file) => delete file.content) - expect(files).to.deep.equal([ - { depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11696, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' }, - { depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', 
- size: 4, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' }, - { depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 183, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' }, - { depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 582072, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' }, - { depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2305, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' }, - { depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4551, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' } ]) - done() - }) - }) - }) }) - describe('.getReadableStream', () => { + describe.skip('.getReadableStream', () => { before((done) => ipfs.files.add(smallFile.data, done)) it('returns a Readable Stream of Readable Streams', (done) => { @@ -674,7 +626,7 @@ module.exports = (common) => { }) }) - describe('.getPullStream', () => { + describe.skip('.getPullStream', () => { before((done) => ipfs.files.add(smallFile.data, done)) it('returns a Pull Stream of Pull Streams', (done) => { @@ -699,7 +651,7 @@ module.exports = (common) => { }) }) - describe('.ls', () => { + describe.skip('.ls', () => { before((done) => { const content = (name) => ({ path: `test-folder/${name}`, @@ -777,7 +729,7 @@ module.exports = (common) => { }) }) - describe('.lsReadableStream', () => { + describe.skip('.lsReadableStream', () => { before((done) => { const content = (name) => ({ path: `test-folder/${name}`, @@ -855,7 +807,7 @@ module.exports = (common) => { }) }) - describe('.lsPullStream', () => { + describe.skip('.lsPullStream', () => { before((done) => { const content = (name) => ({ path: `test-folder/${name}`, From 04cf417de495429dd811411f40821aca298e48c0 
Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 17 Nov 2017 12:08:59 +0000 Subject: [PATCH 25/27] .add tested in js-ipfs --- src/files.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/files.js b/src/files.js index 8101a6c1..8becb23c 100644 --- a/src/files.js +++ b/src/files.js @@ -319,7 +319,7 @@ module.exports = (common) => { }) }) - describe.skip('.cat', () => { + describe('.cat', () => { before((done) => { parallel([ (cb) => ipfs.files.add(smallFile.data, cb), @@ -348,6 +348,7 @@ module.exports = (common) => { it('streams a large file', (done) => { ipfs.files.cat(bigFile.cid, (err, data) => { expect(err).to.not.exist() + expect(data.length).to.equal(bigFile.data.length) expect(data).to.eql(bigFile.data) done() }) @@ -407,7 +408,7 @@ module.exports = (common) => { }) }) - describe.skip('.catReadableStream', () => { + describe('.catReadableStream', () => { before((done) => ipfs.files.add(bigFile.data, done)) it('returns a Readable Stream for a cid', (done) => { @@ -421,7 +422,7 @@ module.exports = (common) => { }) }) - describe.skip('.catPullStream', () => { + describe('.catPullStream', () => { before((done) => ipfs.files.add(smallFile.data, done)) it('returns a Pull Stream for a cid', (done) => { From 9d4eef8cf6814b3612ddd098683a8c6ec501862b Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 17 Nov 2017 12:22:16 +0000 Subject: [PATCH 26/27] get tests also passing on js-ipfs --- src/files.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/files.js b/src/files.js index 8becb23c..c4c54e7e 100644 --- a/src/files.js +++ b/src/files.js @@ -440,7 +440,7 @@ module.exports = (common) => { }) }) - describe.skip('.get', () => { + describe('.get', () => { before((done) => { parallel([ (cb) => ipfs.files.add(smallFile.data, cb), @@ -477,6 +477,7 @@ module.exports = (common) => { expect(files.length).to.equal(1) expect(files[0].path).to.equal(bigFile.cid) + 
expect(files[0].content.length).to.eql(bigFile.data.length) expect(files[0].content).to.eql(bigFile.data) done() }) @@ -606,7 +607,7 @@ module.exports = (common) => { }) }) - describe.skip('.getReadableStream', () => { + describe('.getReadableStream', () => { before((done) => ipfs.files.add(smallFile.data, done)) it('returns a Readable Stream of Readable Streams', (done) => { @@ -627,7 +628,7 @@ module.exports = (common) => { }) }) - describe.skip('.getPullStream', () => { + describe('.getPullStream', () => { before((done) => ipfs.files.add(smallFile.data, done)) it('returns a Pull Stream of Pull Streams', (done) => { From 8c2ab7bb8e59c23320d21ea836a65ab05f9150bf Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 17 Nov 2017 12:31:48 +0000 Subject: [PATCH 27/27] ls is implemented in js-ipfs too --- src/files.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/files.js b/src/files.js index c4c54e7e..3dde439a 100644 --- a/src/files.js +++ b/src/files.js @@ -18,7 +18,7 @@ const through = require('through2') const bl = require('bl') module.exports = (common) => { - describe.only('.files', function () { + describe('.files', function () { this.timeout(5 * 1000) let ipfs @@ -653,7 +653,7 @@ module.exports = (common) => { }) }) - describe.skip('.ls', () => { + describe('.ls', () => { before((done) => { const content = (name) => ({ path: `test-folder/${name}`, @@ -731,7 +731,7 @@ module.exports = (common) => { }) }) - describe.skip('.lsReadableStream', () => { + describe('.lsReadableStream', () => { before((done) => { const content = (name) => ({ path: `test-folder/${name}`, @@ -809,7 +809,7 @@ module.exports = (common) => { }) }) - describe.skip('.lsPullStream', () => { + describe('.lsPullStream', () => { before((done) => { const content = (name) => ({ path: `test-folder/${name}`,