This repository was archived by the owner on Aug 12, 2020. It is now read-only.

# Clean up #49

**Merged** — 3 commits, Jun 28, 2016.
### README.md (69 changes: 33 additions & 36 deletions)
```diff
@@ -1,4 +1,5 @@
-# IPFS unixFS Engine
+IPFS unixFS Engine
+==================

 [![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io)
 [![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/)
```

**Review thread on the heading change:**

> **Contributor:** What's wrong with `#`? It's the same, in rendering.
>
> **Contributor (author):** I've been (since forever) doing `====` for the title, and now it is just autopilot.
>
> **Contributor:** Ok. Let's not care.
````diff
@@ -48,19 +49,19 @@ And write the importing logic:
 ```js
 // Dependencies to create a DAG Service (where the dir will be imported into)
 const memStore = require('abstract-blob-store')
-const ipfsRepo = require('ipfs-repo')
-const ipfsBlock = require('ipfs-block')
-const ipfsBlockService = require('ipfs-block-service')
-const ipfsMerkleDag = require('ipfs-merkle-dag')
+const Repo = require('ipfs-repo')
+const Block = require('ipfs-block')
+const BlockService = require('ipfs-block-service')
+const MerkleDag = require('ipfs-merkle-dag')
 const fs = require('fs')

-const repo = new ipfsRepo('', { stores: memStore })
-const blocks = new ipfsBlockService(repo)
-const dag = new ipfsMerkleDag.DAGService(blocks)
+const repo = new Repo('', { stores: memStore })
+const blockService = new BlockService(repo)
+const dagService = new MerkleDag.DAGService(blockService)


-const Importer = require('ipfs-unixfs-engine').importer
-const add = new Importer(dag)
+const Importer = require('ipfs-unixfs-engine').Importer
+const filesAddStream = new Importer(dagService)

 // An array to hold the return of nested file/dir info from the importer
 // A root DAG Node is received upon completion
@@ -76,26 +77,24 @@ const input2 = {path: /tmp/foo/quxx, content: rs2}

 // Listen for the data event from the importer stream

-add.on('data', (info) => {
+filesAddStream.on('data', (info) => {
   res.push(info)
 })

 // The end event of the stream signals that the importer is done

-add.on('end', () => {
+filesAddStream.on('end', () => {
   console.log('Finished adding files!')
-  return
 })

 // Calling write on the importer to add the file/object tuples

-add.write(input)
-add.write(input2)
-add.end()
+filesAddStream.write(input)
+filesAddStream.write(input2)
+filesAddStream.end()
 ```

 When run, the stat of each DAG node is output on the `data` event for every file, ending with the root:

 ```
 { multihash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
   size: 39243,
````
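The README example above listens only for `data` and `end`. A minimal sketch of the error handling it omits — a hypothetical addition, reusing `filesAddStream` from the example:

```js
// Hypothetical addition to the README example: surface importer failures
// instead of letting them go unhandled.
filesAddStream.on('error', (err) => {
  console.error('import failed:', err)
})
```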
````diff
@@ -143,38 +142,37 @@ Nodes.
 ### Example Exporter

 ```
-const ipfsRepo = require('ipfs-repo')
-const ipfsBlock = require('ipfs-block')
-const ipfsBlockService = require('ipfs-block-service')
-const ipfsMerkleDag = require('ipfs-merkle-dag')
+const Repo = require('ipfs-repo')
+const Block = require('ipfs-block')
+const BlockService = require('ipfs-block-service')
+const MerkleDAG = require('ipfs-merkle-dag')

-const repo = new ipfsRepo('', { stores: memStore })
-const blocks = new ipfsBlockService(repo)
-const dag = new ipfsMerkleDag.DAGService(blocks)
+const repo = new Repo('', { stores: memStore })
+const blockService = new BlockService(repo)
+const dagService = new MerkleDAG.DAGService(blockService)

 // Create an export readable object stream with the hash you want to export and a dag service

-const exportEvent = Exporter(hash, dag)
+const filesStream = Exporter(<multihash>, dagService)

 // Pipe the return stream to console

-exportEvent.on('data', (result) => {
-  result.stream.pipe(process.stdout)
+filesStream.on('data', (file) => {
+  file.content.pipe(process.stdout)
 })
 ```

 ### Exporter: API

 ```js
-const Exporter = require('ipfs-unixfs-engine').exporter
+const Exporter = require('ipfs-unixfs-engine').Exporter
 ```

 ### new Exporter(hash, dagService)

-Uses the given [DAG Service][] to fetch an IPFS [UnixFS][] object(s) by their
-multiaddress.
+Uses the given [DAG Service][] to fetch IPFS [UnixFS][] objects by their multihash.

-Creates a new readable stream in object mode that outputs objects of the
-form
+Creates a new readable stream in object mode that outputs objects of the form

 ```js
 {
@@ -183,8 +181,7 @@ form
 }
 ```

-Errors are received as with a normal stream, by listening on the `'error'` event
-to be emitted.
+Errors are received as with a normal stream, by listening on the `'error'` event.


 [DAG Service]: https://github.com/vijayee/js-ipfs-merkle-dag/
````
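Directories come through the exporter with a null `content` (see `dirExporter` in `src/exporter.js` below, which pushes `{content: null, path: name}`). A sketch of a consumer that handles both kinds of entry — it assumes the `dagService` setup from the README and borrows a root hash from the tests:

```js
// A sketch of consuming the exporter stream; directories carry no content.
const Exporter = require('ipfs-unixfs-engine').Exporter

const multihash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN' // example root hash from the tests
const filesStream = Exporter(multihash, dagService) // dagService assumed from the README setup

filesStream.on('data', (file) => {
  if (!file.content) {
    console.log('directory:', file.path) // dirs are pushed with content: null
  } else {
    file.content.pipe(process.stdout)    // file chunks stream here
  }
})

filesStream.on('error', (err) => {
  console.error('export failed:', err)
})
```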
### src/exporter.js (102 changes: 43 additions & 59 deletions)
```diff
@@ -1,10 +1,9 @@
 'use strict'

 const debug = require('debug')
-const log = debug('exporter')
-log.err = debug('exporter:error')
+const log = debug('unixfs')
+log.err = debug('unixfs:error')
 const isIPFS = require('is-ipfs')
-const bs58 = require('bs58')
 const UnixFS = require('ipfs-unixfs')
 const series = require('run-series')
 const Readable = require('readable-stream').Readable
@@ -21,13 +20,10 @@ function Exporter (hash, dagService, options) {
     return new Exporter(hash, dagService, options)
   }

-  // Sanitize hash.
+  // Sanitize hash
   if (!isIPFS.multihash(hash)) {
     throw new Error('not valid multihash')
   }
-  if (Buffer.isBuffer(hash)) {
-    hash = bs58.encode(hash)
-  }

   Readable.call(this, { objectMode: true })
```
**Review thread on the removed `Buffer` handling:**

> **Contributor:** Why was this removed? It breaks functionality. :( Fixed in #52
>
> **Contributor (author):** Thank you for catching it; the tests didn't detect it.
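For context, a minimal sketch of what the removed guard did: callers could pass the multihash as a raw `Buffer`, which was base58-encoded before use (behaviour restored in #52). The helper name here is illustrative, not the module's API:

```js
// Illustrative helper: the removed lines did this inline in the constructor.
const bs58 = require('bs58')

function sanitizeHash (hash) {
  if (Buffer.isBuffer(hash)) {
    hash = bs58.encode(hash) // Buffer -> base58 string
  }
  return hash
}
```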

```diff
@@ -36,61 +32,52 @@ function Exporter (hash, dagService, options) {
   this._read = (n) => {}

   let fileExporter = (node, name, done) => {
-    let init = false
-    if (!done) {
-      throw new Error('done must be set')
-    }
+    if (!done) throw new Error('done must be set')
+    const contentRS = new Readable()
+    contentRS._read = () => {}

     // Logic to export a single (possibly chunked) unixfs file.
-    var rs = new Readable()
     if (node.links.length === 0) {
       const unmarshaledData = UnixFS.unmarshal(node.data)
-      rs._read = () => {
-        if (init) {
-          return
-        }
-        init = true
-        rs.push(unmarshaledData.data)
-        rs.push(null)
-      }
-      this.push({ content: rs, path: name })
+      contentRS.push(unmarshaledData.data)
+      contentRS.push(null)
+      this.push({ content: contentRS, path: name })
       done()
     } else {
-      rs._read = () => {
-        if (init) {
-          return
-        }
-        init = true
-
-        const array = node.links.map((link) => {
-          return (cb) => {
-            dagService.get(link.hash, (err, res) => {
-              if (err) {
-                return cb(err)
-              }
-              var unmarshaledData = UnixFS.unmarshal(res.data)
-              rs.push(unmarshaledData.data)
-              cb()
-            })
-          }
-        })
-        series(array, (err, res) => {
-          if (err) {
-            rs.emit('error', err)
-            return
-          }
-          rs.push(null)
-          return
-        })
-      }
-      this.push({ content: rs, path: name })
+      const array = node.links.map((link) => {
+        return (cb) => {
+          dagService.get(link.hash, (err, res) => {
+            if (err) {
+              return cb(err)
+            }
+            var unmarshaledData = UnixFS.unmarshal(res.data)
+            contentRS.push(unmarshaledData.data)
+            cb()
+          })
+        }
+      })
+      series(array, (err) => {
+        if (err) {
+          return contentRS.emit('error', err)
+        }
+        contentRS.push(null)
+      })
+      this.push({ content: contentRS, path: name })
       done()
     }
   }

   // Logic to export a unixfs directory.
   let dirExporter = (node, name, add, done) => {
-    if (!add) throw new Error('add must be set')
-    if (!done) throw new Error('done must be set')
+    if (!add) {
+      throw new Error('add must be set')
+    }
+    if (!done) {
+      throw new Error('done must be set')
+    }

     this.push({content: null, path: name})
```
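The core of the cleanup in `fileExporter`: instead of deferring work into a stateful `_read` guarded by an `init` flag, the stream is created with a no-op `_read` and data is pushed eagerly, with `run-series` preserving chunk order. A standalone sketch of that pattern — names are illustrative, and `setImmediate` stands in for `dagService.get`:

```js
const { Readable } = require('readable-stream')
const series = require('run-series')

const contentRS = new Readable()
contentRS._read = () => {} // no-op: data is pushed from the tasks below

// One task per chunk; run-series runs them in order, so chunks
// are pushed in the order their links appear on the node.
const tasks = ['chunk-1', 'chunk-2'].map((chunk) => (cb) => {
  setImmediate(() => {      // stand-in for the async dagService.get fetch
    contentRS.push(chunk)
    cb()
  })
})

series(tasks, (err) => {
  if (err) {
    return contentRS.emit('error', err)
  }
  contentRS.push(null)      // signal end of the file content
})

contentRS.on('data', (d) => console.log(d.toString()))
```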

```diff
@@ -104,32 +91,29 @@ function Exporter (hash, dagService, options) {
   }

   // Traverse the DAG asynchronously
-  var self = this
-  fieldtrip([{ path: hash, hash: hash }], visit, (err) => {
+  fieldtrip([{path: hash, hash: hash}], visit.bind(this), (err) => {
     if (err) {
-      self.emit('error', err)
-      return
+      return this.emit('error', err)
     }
-    self.push(null)
+    this.push(null)
   })

   // Visit function: called once per node in the exported graph
   function visit (item, add, done) {
-    dagService.get(item.hash, (err, fetchedNode) => {
+    dagService.get(item.hash, (err, node) => {
       if (err) {
-        self.emit('error', err)
-        return
+        return this.emit('error', err)
       }

-      const data = UnixFS.unmarshal(fetchedNode.data)
+      const data = UnixFS.unmarshal(node.data)
       const type = data.type

       if (type === 'directory') {
-        dirExporter(fetchedNode, item.path, add, done)
+        dirExporter(node, item.path, add, done)
       }

       if (type === 'file') {
-        fileExporter(fetchedNode, item.path, done)
+        fileExporter(node, item.path, done)
       }
     })
   }
```
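One subtlety in this hunk: `visit` is a plain function declaration, so it does not capture the surrounding `this`. `visit.bind(this)` — replacing the old `var self = this` idiom — is what lets `this.emit`/`this.push` inside it reach the Exporter stream. A minimal illustration:

```js
// Plain functions get their own `this`; bind fixes the receiver.
function visit () {
  return this.label
}

const exporter = { label: 'the Exporter instance' }
const bound = visit.bind(exporter)

console.log(bound()) // -> 'the Exporter instance'
```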
### src/importer.js (4 changes: 2 additions & 2 deletions)

```diff
@@ -1,8 +1,8 @@
 'use strict'

 const debug = require('debug')
-const log = debug('importer')
-log.err = debug('importer:error')
+const log = debug('unixfs')
+log.err = debug('unixfs:error')
 const fsc = require('./chunker-fixed-size')
 const through2 = require('through2')
 const merkleDAG = require('ipfs-merkle-dag')
```
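With both the importer and the exporter now logging under the `unixfs` namespace, a single `DEBUG` pattern covers the whole engine. A usage sketch — the env-var convention comes from the `debug` package:

```js
// Run with: DEBUG=unixfs,unixfs:error node app.js
// (or DEBUG=unixfs* to match both namespaces at once)
const debug = require('debug')

const log = debug('unixfs')
log.err = debug('unixfs:error')

log('chunking %s', '/tmp/foo/bar') // printed when DEBUG matches 'unixfs'
log.err('fetch failed')            // printed when DEBUG matches 'unixfs:error'
```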
### test/test-exporter.js (40 changes: 20 additions & 20 deletions)

```diff
@@ -88,52 +88,52 @@ module.exports = function (repo) {
     })
     testExport.pipe(concat((files) => {
       expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN')
       expect(files[0].content).to.not.exist

       expect(files[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/200Bytes.txt')
       expect(files[1].content).to.exist

       expect(files[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/dir-another')
       expect(files[2].content).to.not.exist

       expect(files[3].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1')
       expect(files[3].content).to.not.exist

       expect(files[4].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt')
       expect(files[4].content).to.exist

       expect(files[5].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/level-2')
       expect(files[5].content).to.not.exist

       done()
     }))
   })

   it('returns a null stream for dir', (done) => {
-    const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' // This hash doesn't exist in the repo
+    const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
     const bs = new BlockService(repo)
     const ds = new DAGService(bs)
     const testExport = exporter(hash, ds)

     testExport.on('error', (err) => {
       expect(err).to.not.exist
     })

-    testExport.on('data', (dir) => {
-      expect(dir.content).to.equal(null)
+    testExport.on('data', (file) => {
+      expect(file.content).to.not.exist
       done()
     })
   })

   it('fails on non existent hash', (done) => {
-    const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3' // This hash doesn't exist in the repo
+    // This hash doesn't exist in the repo
+    const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3'
     const bs = new BlockService(repo)
     const ds = new DAGService(bs)
     const testExport = exporter(hash, ds)
     testExport.on('error', (err) => {
-      const error = err.toString()
       expect(err).to.exist
-      const browser = error.includes('Error: key not found:')
-      const node = error.includes('no such file or directory')
-      // the browser and node js return different errors
-      if (browser) {
-        expect(error).to.contain('Error: key not found:')
-        done()
-      }
-      if (node) {
-        expect(error).to.contain('no such file or directory')
-        done()
-      }
-      if (!node && !browser) {
-        expect(node).to.equal(true)
-        done()
-      }
+      done()
     })
   })
 })
```
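The simplified assertion works because chai's `.exist` only checks that the error is neither `null` nor `undefined`, so the test no longer depends on environment-specific error strings. A small illustration:

```js
// `.to.exist` passes for any non-null error, regardless of its message,
// so browser ('key not found') and Node ('no such file or directory')
// failures are both covered by the same assertion.
const { expect } = require('chai')

expect(new Error('key not found')).to.exist
expect(new Error('no such file or directory')).to.exist
```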