This repository has been archived by the owner on Mar 23, 2023. It is now read-only.

fix: convert input to buffers before passing to aws-sdk #30

Merged · 3 commits · Sep 22, 2020
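In short: the datastore now accepts plain Uint8Arrays from callers, converts them to Buffers before handing them to aws-sdk (which does not reliably accept a bare Uint8Array as an upload Body), and normalises whatever the SDK returns back into a Uint8Array. A minimal sketch of that conversion pattern, for orientation only; it is not lifted verbatim from the diff below.

```js
// Illustrative only: the general shape of the conversion this PR applies,
// not the exact lines from src/index.js below.
function asBuffer (val) {
  if (Buffer.isBuffer(val)) {
    return val
  }

  // Wrap the same backing memory instead of copying it
  return Buffer.from(val.buffer, val.byteOffset, val.byteLength)
}

function asUint8Array (body) {
  // In Node the SDK usually hands back a Buffer, which is already a Uint8Array
  if (body instanceof Uint8Array) {
    return body
  }

  return new Uint8Array(body.buffer, body.byteOffset, body.byteLength)
}

console.log(asBuffer(new TextEncoder().encode('hi'))) // <Buffer 68 69>
console.log(asUint8Array(Buffer.from('hi')))          // <Buffer 68 69> (a Uint8Array)
```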
13 changes: 13 additions & 0 deletions .aegir.js
@@ -0,0 +1,13 @@
'use strict'

module.exports = {
webpack: {
node: {
// needed by core-util-is
Buffer: true,

// needed by nofilter
stream: true
}
}
}
5 changes: 2 additions & 3 deletions examples/full-s3-repo/index.js
@@ -3,7 +3,6 @@
const IPFS = require('ipfs')
const { createRepo } = require('datastore-s3')
const toBuffer = require('it-to-buffer')
const last = require('it-last')

;(async () => {
// Create the repo
@@ -29,10 +28,10 @@ const last = require('it-last')
console.log('Version:', version.version)

// Once we have the version, let's add a file to IPFS
const { path, cid } = await last(node.add({
const { path, cid } = await node.add({
path: 'data.txt',
content: Buffer.from(require('crypto').randomBytes(1024 * 25))
}))
})

console.log('\nAdded file:', path, cid)

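The it-last wrapper is gone because, in the js-ipfs release the example now targets (0.50.x), ipfs.add resolves to a single result object rather than an async iterable (multi-file adds go through ipfs.addAll instead). A small sketch of the assumed API shape:

```js
// Sketch only: assumes the js-ipfs 0.50.x single-result `add` API.
async function addOne (node) {
  const { cid, path } = await node.add({
    path: 'data.txt',
    content: Buffer.from('hello world')
  })

  console.log('Added file:', path, cid.toString())
  return cid
}
```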
5 changes: 2 additions & 3 deletions examples/full-s3-repo/package.json
@@ -12,9 +12,8 @@
"dependencies": {
"aws-sdk": "^2.579.0",
"datastore-s3": "../../",
"ipfs": "^0.46.0",
"ipfs-repo": "^3.0.2",
"it-last": "^1.0.2",
"ipfs": "^0.50.2",
"ipfs-repo": "^6.0.3",
"it-to-buffer": "^1.0.2"
}
}
10 changes: 5 additions & 5 deletions package.json
@@ -36,20 +36,20 @@
"homepage": "https://github.com/ipfs/js-datastore-s3#readme",
"dependencies": {
"buffer": "^5.6.0",
"datastore-core": "^1.1.0",
"interface-datastore": "^1.0.2"
"datastore-core": "^2.0.0",
"interface-datastore": "^2.0.0"
},
"devDependencies": {
"aegir": "^24.0.0",
"aegir": "^26.0.0",
"aws-sdk": "^2.579.0",
"chai": "^4.2.0",
"dirty-chai": "^2.0.1",
"ipfs-repo": "^3.0.2",
"ipfs-repo": "^6.0.3",
"stand-in": "^4.2.0"
},
"peerDependencies": {
"aws-sdk": "2.x",
"ipfs-repo": "^3.0.2"
"ipfs-repo": "^6.0.3"
},
"contributors": [
"Jacob Heun <[email protected]>",
40 changes: 28 additions & 12 deletions src/index.js
@@ -59,14 +59,14 @@ class S3Datastore extends Adapter {
* Store the given value under the key.
*
* @param {Key} key
* @param {Buffer} val
* @param {Uint8Array} val
* @returns {Promise}
*/
async put (key, val) {
try {
await this.opts.s3.upload({
Key: this._getFullKey(key),
Body: val
Body: Buffer.from(val, val.byteOffset, val.byteLength)
}).promise()
} catch (err) {
if (err.code === 'NoSuchBucket' && this.createIfMissing) {
@@ -81,16 +81,24 @@
* Read from s3.
*
* @param {Key} key
* @returns {Promise<Buffer>}
* @returns {Promise<Uint8Array>}
*/
async get (key) {
try {
const data = await this.opts.s3.getObject({
Key: this._getFullKey(key)
}).promise()

// If a body was returned, ensure it's a Buffer
return data.Body ? Buffer.from(data.Body) : null
// If a body was returned, ensure it's a Uint8Array
if (ArrayBuffer.isView(data.Body)) {
if (data.Body instanceof Uint8Array) {
return data.Body
}

return new Uint8Array(data.Body.buffer, data.Body.byteOffset, data.Body.byteLength)
}

return data.Body || null
} catch (err) {
if (err.statusCode === 404) {
throw Errors.notFoundError(err)
@@ -205,13 +213,21 @@
}

for await (const key of it) {
const res = { key }
if (values) {
// Fetch the object Buffer from s3
res.value = await this.get(key)
}
try {
const res = { key }

if (values) {
// Fetch the object Buffer from s3
res.value = await this.get(key)
}

yield res
yield res
} catch (err) {
// key was deleted while we are iterating over the results
if (err.statusCode !== 404) {
throw err
}
}
}
}

@@ -227,7 +243,7 @@
}).promise()
} catch (err) {
if (err.statusCode === 404) {
return this.put(new Key('/', false), Buffer.from(''))
return this.put(new Key('/', false), Uint8Array.from(''))
}

throw Errors.dbOpenFailedError(err)
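Net effect of the src/index.js changes: put accepts any Uint8Array and get always yields a Uint8Array, whatever shape the aws-sdk response takes. A usage sketch against the updated interface; the constructor and option names mirror test/index.spec.js below and should be treated as assumptions rather than documented API:

```js
// Usage sketch (assumed constructor/option names, mirroring the tests).
const S3 = require('aws-sdk/clients/s3')
const { Key } = require('interface-datastore')
const S3Store = require('datastore-s3')

async function main () {
  const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
  const store = new S3Store('.ipfs/datastore', { s3 })

  // Uint8Array in...
  await store.put(new Key('/hello'), new TextEncoder().encode('hello world'))

  // ...and a Uint8Array back out, even though aws-sdk returns a Buffer internally
  const value = await store.get(new Key('/hello'))
  console.log(new TextDecoder().decode(value))
}

main().catch(console.error)
```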
9 changes: 6 additions & 3 deletions src/s3-repo.js
@@ -43,7 +43,8 @@ const createRepo = (S3Store, options, s3Options) => {
accessKeyId,
secretAccessKey
}),
createIfMissing
createIfMissing,
sharding: true
}

// If no lock is given, create a mock lock
@@ -54,13 +55,15 @@
root: S3Store,
blocks: S3Store,
keys: S3Store,
datastore: S3Store
datastore: S3Store,
pins: S3Store
},
storageBackendOptions: {
root: storeConfig,
blocks: storeConfig,
keys: storeConfig,
datastore: storeConfig
datastore: storeConfig,
pins: storeConfig
},
lock: lock
})
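Beyond enabling sharding, the repo wiring now registers the S3 store and config for a pins backend, which newer ipfs-repo releases expect alongside root, blocks, keys and datastore. For context, the full-s3-repo example drives all of this through the exported createRepo helper; a hedged sketch of that call, with option names taken from that example rather than from formal API docs:

```js
// Sketch of wiring the S3-backed repo into an IPFS node, mirroring
// examples/full-s3-repo. Option names here are assumptions based on that example.
const IPFS = require('ipfs')
const { createRepo } = require('datastore-s3')

async function main () {
  const repo = createRepo({
    path: '/tmp/.ipfs'
  }, {
    bucket: 'my-bucket',
    accessKeyId: 'myaccesskey',
    secretAccessKey: 'mysecretkey'
  })

  const node = await IPFS.create({ repo })
  console.log('Version:', (await node.version()).version)
}

main().catch(console.error)
```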
13 changes: 13 additions & 0 deletions test/index.spec.js
@@ -49,6 +49,19 @@ describe('S3Datastore', () => {
return store.put(new Key('/z/key'), Buffer.from('test data'))
})

it('should turn Uint8Arrays into Buffers', () => {
const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
const store = new S3Store('.ipfs/datastore', { s3 })

standin.replace(s3, 'upload', function (stand, params) {
expect(Buffer.isBuffer(params.Body)).to.be.true()
stand.restore()
return s3Resolve(null)
})

return store.put(new Key('/z/key'), new TextEncoder().encode('test data'))
})

it('should create the bucket when missing if createIfMissing is true', () => {
const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
const store = new S3Store('.ipfs/datastore', { s3, createIfMissing: true })