diff --git a/README.md b/README.md
index 4e4f7fe..b893980 100644
--- a/README.md
+++ b/README.md
@@ -125,7 +125,7 @@ pull(
  // name: 'QmFoo...',
  // path: 'QmFoo...',
  // size: ...
- // hash: Buffer
+ // cid: CID
  // content: undefined
  // type: 'dir'
  // }, {
@@ -133,7 +133,7 @@ pull(
  // name: 'bar',
  // path: 'QmFoo.../bar',
  // size: ...
- // hash: Buffer
+ // cid: CID
  // content: undefined
  // type: 'dir'
  // }, {
@@ -141,7 +141,7 @@ pull(
  // name: 'baz.txt',
  // path: 'QmFoo.../bar/baz.txt',
  // size: ...
- // hash: Buffer
+ // cid: CID
  // content:
  // type: 'file'
  // }]
@@ -172,7 +172,7 @@ pull(
  // name: 'QmFoo...',
  // path: 'QmFoo...',
  // size: ...
- // hash: Buffer
+ // cid: CID
  // content: undefined
  // type: 'dir'
  // }, {
@@ -180,7 +180,7 @@ pull(
  // name: 'bar',
  // path: 'QmFoo.../bar',
  // size: ...
- // hash: Buffer
+ // cid: CID
  // content: undefined
  // type: 'dir'
  // }]
diff --git a/src/dir-flat.js b/src/dir-flat.js
index c9903cb..a035bd3 100644
--- a/src/dir-flat.js
+++ b/src/dir-flat.js
@@ -16,7 +16,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
  name: name,
  depth: depth,
  path: path,
- multihash: cid.buffer,
+ cid,
  size: node.size,
  type: 'dir'
  }
@@ -35,7 +35,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
  size: link.size,
  name: link.name,
  path: path + '/' + link.name,
- multihash: link.cid.buffer,
+ cid: link.cid,
  linkName: link.name,
  pathRest: pathRest.slice(1),
  type: 'dir'
diff --git a/src/dir-hamt-sharded.js b/src/dir-hamt-sharded.js
index 86f689f..841c129 100644
--- a/src/dir-hamt-sharded.js
+++ b/src/dir-hamt-sharded.js
@@ -21,7 +21,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
  name: name,
  depth: depth,
  path: path,
- multihash: cid.buffer,
+ cid,
  size: node.size,
  type: 'dir'
  }
@@ -47,7 +47,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
  depth: entryName ? depth + 1 : depth,
  name: entryName,
  path: entryPath,
- multihash: link.cid.buffer,
+ cid: link.cid,
  pathRest: entryName ? pathRest.slice(1) : pathRest,
  parent: dir || parent
  }
@@ -123,7 +123,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
  depth: entryName ? depth + 1 : depth,
  name: entryName,
  path: entryPath,
- multihash: link.cid.buffer,
+ cid: link.cid,
  pathRest: entryName ? pathRest.slice(1) : pathRest,
  parent: dir || parent
  }
diff --git a/src/file.js b/src/file.js
index 9380e7c..02f9022 100644
--- a/src/file.js
+++ b/src/file.js
@@ -52,7 +52,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
  content: once(Buffer.alloc(0)),
  name: name,
  path: path,
- multihash: cid.buffer,
+ cid,
  size: fileSize,
  type: 'file'
  })
@@ -73,7 +73,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
  content: content,
  name: name,
  path: path,
- multihash: cid.buffer,
+ cid,
  size: fileSize,
  type: 'file'
  }])
diff --git a/src/index.js b/src/index.js
index bbd003f..361b989 100644
--- a/src/index.js
+++ b/src/index.js
@@ -63,7 +63,7 @@ module.exports = (path, dag, options) => {

  return pull(
  values([{
- multihash: cid.buffer,
+ cid,
  name: dPath.base,
  path: dPath.base,
  pathRest: dPath.rest,
@@ -77,7 +77,7 @@ module.exports = (path, dag, options) => {
  name: node.name,
  path: options.fullPath ? node.path : finalPathFor(node),
  size: node.size,
- hash: node.multihash,
+ cid: node.cid,
  content: node.content,
  type: node.type
  }
diff --git a/src/object.js b/src/object.js
index 52cc2c3..3719701 100644
--- a/src/object.js
+++ b/src/object.js
@@ -23,7 +23,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
  name: pathElem,
  path: newName,
  pathRest: pathRest.slice(1),
- multihash: isCID && newNode,
+ cid: isCID && newNode,
  object: !isCID && newNode,
  parent: parent
  }]),
diff --git a/src/raw.js b/src/raw.js
index b0856fe..bfc8606 100644
--- a/src/raw.js
+++ b/src/raw.js
@@ -34,7 +34,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
  return once({
  depth,
  content: once(Buffer.alloc(0)),
- hash: cid,
+ cid,
  name,
  path,
  size,
@@ -53,7 +53,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
  return once({
  depth,
  content: once(extractDataFromBlock(node, 0, offset, offset + length)),
- hash: cid,
+ cid,
  name,
  path,
  size,
diff --git a/src/resolve.js b/src/resolve.js
index 4786729..e3a0a70 100644
--- a/src/resolve.js
+++ b/src/resolve.js
@@ -7,7 +7,6 @@ const filter = require('pull-stream/throughs/filter')
  const flatten = require('pull-stream/throughs/flatten')
  const map = require('pull-stream/throughs/map')
  const paramap = require('pull-paramap')
- const CID = require('cids')
  const waterfall = require('async/waterfall')

  const resolvers = {
@@ -42,11 +41,9 @@ function createResolver (dag, options, depth, parent) {
  return cb(null, resolveItem(null, item.object, item, options))
  }

- const cid = new CID(item.multihash)
-
  waterfall([
- (done) => dag.get(cid, done),
- (node, done) => done(null, resolveItem(cid, node.value, item, options))
+ (done) => dag.get(item.cid, done),
+ (node, done) => done(null, resolveItem(item.cid, node.value, item, options))
  ], cb)
  }),
  flatten(),
diff --git a/test/exporter-sharded.spec.js b/test/exporter-sharded.spec.js
index b78dd54..57bb18b 100644
--- a/test/exporter-sharded.spec.js
+++ b/test/exporter-sharded.spec.js
@@ -111,7 +111,7 @@ describe('exporter sharded', function () {
  (exported, cb) => {
  const dir = exported.shift()

- expect(dir.hash).to.deep.equal(directory.buffer)
+ expect(dir.cid.equals(directory)).to.be.true()
  expect(exported.length).to.equal(Object.keys(files).length)

  parallel(
@@ -124,7 +124,7 @@ describe('exporter sharded', function () {
  }

  // validate the CID
- expect(files[exported.name].cid.buffer).to.deep.equal(exported.hash)
+ expect(files[exported.name].cid.equals(exported.cid)).to.be.true()

  // validate the exported file content
  expect(files[exported.name].content).to.deep.equal(bufs[0])
@@ -176,7 +176,7 @@ describe('exporter sharded', function () {
  (exported, cb) => {
  const dir = exported.shift()

- expect(dir.hash).to.deep.equal(dirCid.buffer)
+ expect(dir.cid.equals(dirCid)).to.be.true()
  expect(exported.length).to.equal(Object.keys(files).length)

  cb()
diff --git a/test/exporter-subtree.spec.js b/test/exporter-subtree.spec.js
index 741e181..a4577ef 100644
--- a/test/exporter-subtree.spec.js
+++ b/test/exporter-subtree.spec.js
@@ -51,7 +51,7 @@ describe('exporter subtree', () => {
  pull.collect((err, files) => cb(err, { cid, files }))
  ),
  ({ cid, files }, cb) => {
- files.forEach(file => expect(file).to.have.property('hash'))
+ files.forEach(file => expect(file).to.have.property('cid'))

  expect(files.length).to.equal(1)
  expect(files[0].path).to.equal('200Bytes.txt')
diff --git a/test/exporter.spec.js b/test/exporter.spec.js
index 08c868a..e7f4fc2 100644
--- a/test/exporter.spec.js
+++ b/test/exporter.spec.js
@@ -199,7 +199,7 @@ describe('exporter', () => {
  function onFiles (err, files) {
  expect(err).to.equal(null)
  expect(files).to.have.length(1)
- expect(files[0]).to.have.property('hash')
+ expect(files[0]).to.have.property('cid')
  expect(files[0]).to.have.property('path', result.cid.toBaseEncodedString())
  fileEql(files[0], unmarsh.data, done)
  }
@@ -487,7 +487,7 @@ describe('exporter', () => {
  pull.collect((err, files) => cb(err, { cid, files }))
  ),
  ({ cid, files }, cb) => {
- files.forEach(file => expect(file).to.have.property('hash'))
+ files.forEach(file => expect(file).to.have.property('cid'))

  expect(
  files.map((file) => file.path)
@@ -541,7 +541,7 @@ describe('exporter', () => {
  pull.collect((err, files) => cb(err, { cid, files }))
  ),
  ({ cid, files }, cb) => {
- files.forEach(file => expect(file).to.have.property('hash'))
+ files.forEach(file => expect(file).to.have.property('cid'))

  expect(
  files.map((file) => file.path)
@@ -1066,6 +1066,34 @@ describe('exporter', () => {
  )
  }
  })
+
+ it('exports a raw leaf', (done) => {
+ pull(
+ pull.values([{
+ path: '200Bytes.txt',
+ content: pull.values([smallFile])
+ }]),
+ importer(ipld, {
+ rawLeaves: true
+ }),
+ pull.collect(collected)
+ )
+
+ function collected (err, files) {
+ expect(err).to.not.exist()
+ expect(files.length).to.equal(1)
+
+ pull(
+ exporter(files[0].multihash, ipld),
+ pull.collect((err, files) => {
+ expect(err).to.not.exist()
+ expect(files.length).to.equal(1)
+ expect(CID.isCID(files[0].cid)).to.be.true()
+ fileEql(files[0], smallFile, done)
+ })
+ )
+ }
+ })
  })

  function fileEql (actual, expected, done) {
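
Taken together, these hunks change the exporter's public output: every emitted entry now carries a `cid` property holding a `CID` instance instead of a `hash`/`multihash` `Buffer`, and `src/resolve.js` passes that CID straight to `dag.get()` rather than rebuilding one from a multihash. Below is a minimal consumer sketch of the new entry shape; the require path for the exporter, the `ipld` instance and the `rootCid` value are assumptions for illustration (they mirror the wiring used in the tests above), not part of this diff.

```js
const pull = require('pull-stream')
const CID = require('cids')
const exporter = require('ipfs-unixfs-exporter') // assumed require path; adjust to this package's actual name

// List every entry under `rootCid` and print the new CID-based field.
// `rootCid` may be a CID instance or a base-encoded CID string; `ipld` is
// the IPLD resolver the exporter is wired to in the tests above.
function listEntries (rootCid, ipld) {
  pull(
    exporter(rootCid, ipld),
    pull.collect((err, entries) => {
      if (err) throw err

      entries.forEach((entry) => {
        // `entry.cid` is now a CID object rather than a `hash`/`multihash` Buffer,
        // so compare CIDs with `.equals()` and stringify them explicitly.
        console.log(entry.path, CID.isCID(entry.cid), entry.cid.toBaseEncodedString())
      })
    })
  )
}
```

Callers that previously read `file.hash`/`file.multihash` can still recover the raw bytes that used to be emitted via `entry.cid.buffer`, but the CID comparison style shown in the updated tests (`cid.equals(other)`) is the intended usage.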