diff --git a/package.json b/package.json
index e0d80e0c..8f88b9d4 100644
--- a/package.json
+++ b/package.json
@@ -52,15 +52,15 @@
     "rimraf": "^2.5.4"
   },
   "dependencies": {
-    "async": "^2.1.2",
+    "async": "^2.1.4",
     "cids": "^0.2.0",
     "ipfs-unixfs": "^0.1.5",
-    "ipld-dag-pb": "^0.8.0",
-    "ipld-resolver": "^0.2.0",
+    "ipld-dag-pb": "^0.9.1",
+    "ipld-resolver": "^0.3.0",
     "is-ipfs": "^0.2.1",
     "multihashes": "^0.2.2",
     "pull-block": "^1.0.2",
-    "pull-paramap": "^1.2.0",
+    "pull-paramap": "^1.2.1",
     "pull-pushable": "^2.0.1",
     "pull-stream": "^3.5.0",
     "pull-traverse": "^1.0.3",
diff --git a/src/exporter/dir.js b/src/exporter/dir.js
index dbb4d361..5a53c19a 100644
--- a/src/exporter/dir.js
+++ b/src/exporter/dir.js
@@ -24,7 +24,7 @@ module.exports = (node, name, ipldResolver) => {
     pull.values(node.links),
     pull.map((link) => ({
       path: path.join(name, link.name),
-      hash: link.hash
+      hash: link.multihash
     })),
     paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
       if (err) {
diff --git a/src/exporter/file.js b/src/exporter/file.js
index 0595efa7..9ec72cbb 100644
--- a/src/exporter/file.js
+++ b/src/exporter/file.js
@@ -20,7 +20,7 @@ module.exports = (node, name, ipldResolver) => {
   function visitor (node) {
     return pull(
       pull.values(node.links),
-      paramap((link, cb) => ipldResolver.get(new CID(link.hash), cb))
+      paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb))
     )
   }
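Review note (not part of the patch): ipld-dag-pb 0.9 renames the DAGLink accessor from `link.hash` to `link.multihash`, which is all the two exporter hunks above track. A minimal sketch of walking a node's children under the new accessor; `fetchChildren` is a hypothetical helper, and the `node` and `ipldResolver` arguments are assumed to come from the caller:

```js
const CID = require('cids')

// Hypothetical helper: resolve every child of a fetched dag-pb node.
// `link.multihash` replaces the old `link.hash` accessor on DAGLink.
function fetchChildren (node, ipldResolver, onChild) {
  node.links.forEach((link) => {
    ipldResolver.get(new CID(link.multihash), (err, child) => {
      if (err) { return onChild(err) }
      onChild(null, child) // `child` is the decoded child DAGNode
    })
  })
}
```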
diff --git a/src/importer/flush-tree.js b/src/importer/flush-tree.js
index e71395e7..4d3e89a7 100644
--- a/src/importer/flush-tree.js
+++ b/src/importer/flush-tree.js
@@ -5,8 +5,7 @@
 const UnixFS = require('ipfs-unixfs')
 const CID = require('cids')
 const dagPB = require('ipld-dag-pb')
 const mapValues = require('async/mapValues')
-const parallel = require('async/parallel')
-
+const waterfall = require('async/waterfall')
 const DAGLink = dagPB.DAGLink
 const DAGNode = dagPB.DAGNode
@@ -120,44 +119,37 @@ function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
     // return this new node multihash

     const keys = Object.keys(tree)
-
-    const ufsDir = new UnixFS('directory')
-    const node = new DAGNode(ufsDir.marshal())
-
-    keys.forEach((key) => {
+    const dir = new UnixFS('directory')
+    const links = keys.map((key) => {
       const b58mh = mh.toB58String(tree[key])
-      const link = new DAGLink(key, sizeIndex[b58mh], tree[key])
-      node.addRawLink(link)
+      return new DAGLink(key, sizeIndex[b58mh], tree[key])
     })

-    parallel([
-      (cb) => node.multihash(cb),
-      (cb) => node.size(cb)
-    ], (err, res) => {
+    waterfall([
+      (cb) => DAGNode.create(dir.marshal(), links, cb),
+      (node, cb) => {
+        sizeIndex[mh.toB58String(node.multihash)] = node.size
+
+        ipldResolver.put({
+          node: node,
+          cid: new CID(node.multihash)
+        }, (err) => cb(err, node))
+      }
+    ], (err, node) => {
       if (err) {
+        source.push(new Error('failed to store dirNode'))
         return done(err)
       }
-      const multihash = res[0]
-      const size = res[1]
-
-      sizeIndex[mh.toB58String(multihash)] = size
-      ipldResolver.put({
-        node: node,
-        cid: new CID(multihash)
-      }, (err) => {
-        if (err) {
-          source.push(new Error('failed to store dirNode'))
-        } else if (path) {
-          source.push({
-            path: path,
-            multihash: multihash,
-            size: size
-          })
-        }
-
-        done(null, multihash)
-      })
+      if (path) {
+        source.push({
+          path: path,
+          multihash: node.multihash,
+          size: node.size
+        })
+      }
+
+      done(null, node.multihash)
     })
   })
 }
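Review note (not part of the patch): this hunk shows the core of the migration. In ipld-dag-pb 0.9 a node is built in one shot with `DAGNode.create` and is effectively immutable afterwards: `multihash` and `size` become plain properties, replacing the mutate-then-ask pattern (`addRawLink`, `node.multihash(cb)`, `node.size(cb)`) that previously needed `async/parallel`. A before/after sketch, using only calls visible in this diff:

```js
const dagPB = require('ipld-dag-pb')
const UnixFS = require('ipfs-unixfs')

const dir = new UnixFS('directory')

// ipld-dag-pb 0.8 (removed): mutable node, async accessors
//   const node = new DAGNode(dir.marshal())
//   node.addRawLink(link)
//   node.multihash((err, multihash) => { /* ... */ })
//   node.size((err, size) => { /* ... */ })

// ipld-dag-pb 0.9 (this patch): create once, read properties directly
dagPB.DAGNode.create(dir.marshal(), [] /* DAGLinks */, (err, node) => {
  if (err) { throw err }
  console.log(node.multihash, node.size) // plain values, no callbacks
})
```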
diff --git a/src/importer/index.js b/src/importer/index.js
index a26ea662..7a3c5631 100644
--- a/src/importer/index.js
+++ b/src/importer/index.js
@@ -8,6 +8,7 @@
 const pullWrite = require('pull-write')
 const parallel = require('async/parallel')
 const dagPB = require('ipld-dag-pb')
 const CID = require('cids')
+const waterfall = require('async/waterfall')
 const fsc = require('./../chunker/fixed-size')
 const createAndStoreTree = require('./flush-tree')
@@ -71,49 +72,38 @@ function makeWriter (source, files, ipldResolver) {
   }
 }

-function createAndStoreDir (item, ipldResolver, cb) {
+function createAndStoreDir (item, ipldResolver, callback) {
   // 1. create the empty dir dag node
   // 2. write it to the dag store

   const d = new UnixFS('directory')
-  const n = new DAGNode()
-  n.data = d.marshal()
-
-  n.multihash((err, multihash) => {
+  waterfall([
+    (cb) => DAGNode.create(d.marshal(), cb),
+    (node, cb) => {
+      ipldResolver.put({
+        node: node,
+        cid: new CID(node.multihash)
+      }, (err) => cb(err, node))
+    }
+  ], (err, node) => {
     if (err) {
-      return cb(err)
+      return callback(err)
     }
-
-    ipldResolver.put({
-      node: n,
-      cid: new CID(multihash)
-    }, (err) => {
-      if (err) {
-        return cb(err)
-      }
-
-      n.size((err, size) => {
-        if (err) {
-          return cb(err)
-        }
-
-        cb(null, {
-          path: item.path,
-          multihash: multihash,
-          size: size
-        })
-      })
+    callback(null, {
+      path: item.path,
+      multihash: node.multihash,
+      size: node.size
    })
  })
 }

-function createAndStoreFile (file, ipldResolver, cb) {
+function createAndStoreFile (file, ipldResolver, callback) {
   if (Buffer.isBuffer(file.content)) {
     file.content = pull.values([file.content])
   }

   if (typeof file.content !== 'function') {
-    return cb(new Error('invalid content'))
+    return callback(new Error('invalid content'))
   }

   // 1. create the unixfs merkledag node
@@ -128,44 +118,37 @@
     file.content,
     fsc(CHUNK_SIZE),
     pull.asyncMap((chunk, cb) => {
-      const l = new UnixFS('file', Buffer(chunk))
-      const n = new DAGNode(l.marshal())
+      const l = new UnixFS('file', new Buffer(chunk))

-      n.multihash((err, multihash) => {
+      DAGNode.create(l.marshal(), (err, node) => {
         if (err) {
           return cb(err)
         }

         ipldResolver.put({
-          node: n,
-          cid: new CID(multihash)
+          node: node,
+          cid: new CID(node.multihash)
         }, (err) => {
           if (err) {
-            return cb(new Error('Failed to store chunk'))
+            return cb(err)
           }

-          n.size((err, size) => {
-            if (err) {
-              return cb(err)
-            }
-
-            cb(null, {
-              Hash: multihash,
-              Size: size,
-              leafSize: l.fileSize(),
-              Name: ''
-            })
+          cb(null, {
+            Hash: node.multihash,
+            Size: node.size,
+            leafSize: l.fileSize(),
+            Name: ''
          })
        })
      })
    }),
     pull.collect((err, leaves) => {
       if (err) {
-        return cb(err)
+        return callback(err)
       }

       if (leaves.length === 1) {
-        return cb(null, {
+        return callback(null, {
           path: file.path,
           multihash: leaves[0].Hash,
           size: leaves[0].Size
@@ -173,43 +156,31 @@
       }

       // create a parent node and add all the leafs
-
       const f = new UnixFS('file')
-      const n = new DAGNode()

-      for (let leaf of leaves) {
+      const links = leaves.map((leaf) => {
         f.addBlockSize(leaf.leafSize)
-        n.addRawLink(
-          new DAGLink(leaf.Name, leaf.Size, leaf.Hash)
-        )
-      }
-      n.data = f.marshal()
+        return new DAGLink(leaf.Name, leaf.Size, leaf.Hash)
+      })

-      n.multihash((err, multihash) => {
+      waterfall([
+        (cb) => DAGNode.create(f.marshal(), links, cb),
+        (node, cb) => {
+          ipldResolver.put({
+            node: node,
+            cid: new CID(node.multihash)
+          }, (err) => cb(err, node))
+        }
+      ], (err, node) => {
         if (err) {
-          return cb(err)
+          return callback(err)
         }

-        ipldResolver.put({
-          node: n,
-          cid: new CID(multihash)
-        }, (err) => {
-          if (err) {
-            return cb(err)
-          }
-
-          n.size((err, size) => {
-            if (err) {
-              return cb(err)
-            }
-
-            cb(null, {
-              path: file.path,
-              multihash: multihash,
-              size: size
-            })
-          })
+        callback(null, {
+          path: file.path,
+          multihash: node.multihash,
+          size: node.size
        })
      })
    })
diff --git a/test/test-importer.js b/test/test-importer.js
index 98a1f6cf..34f5fe1d 100644
--- a/test/test-importer.js
+++ b/test/test-importer.js
@@ -19,8 +19,8 @@ function stringifyMh (files) {
 const bigFile = loadFixture(__dirname, 'fixtures/1.2MiB.txt')
 const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt')

-module.exports = function (repo) {
-  describe('importer', function () {
+module.exports = (repo) => {
+  describe('importer', () => {
     let ipldResolver

     before(() => {
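Closing review note: after this change, the create-then-put sequence (`DAGNode.create`, then `ipldResolver.put` keyed by the fresh node's own multihash) appears three times across `flush-tree.js` and `index.js`. A follow-up could factor it out; the sketch below is hypothetical (`createAndStore` exists neither in this patch nor in the libraries), built only from calls the diff already uses:

```js
const waterfall = require('async/waterfall')
const CID = require('cids')
const dagPB = require('ipld-dag-pb')

// Hypothetical helper: build an immutable dag-pb node and persist it
// under its own multihash, yielding the created node to the callback.
function createAndStore (data, links, ipldResolver, callback) {
  waterfall([
    (cb) => dagPB.DAGNode.create(data, links, cb),
    (node, cb) => ipldResolver.put({
      node: node,
      cid: new CID(node.multihash)
    }, (err) => cb(err, node))
  ], callback)
}
```

Each of the three `waterfall([...])` blocks above would then reduce to a single `createAndStore(...)` call.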