This repository has been archived by the owner on Aug 12, 2020. It is now read-only.

Merge pull request #8 from noffle/readme
README and API improvements
daviddias committed Mar 22, 2016
2 parents 5a400b4 + b9cd36b commit 782c862
Showing 7 changed files with 110 additions and 169 deletions.
80 changes: 79 additions & 1 deletion README.md
@@ -1,12 +1,90 @@
IPFS Data Importing
===================

> Import data into an IPFS DAG Service.
[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io)
[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs)
[![Build Status](https://travis-ci.org/ipfs/js-ipfs-data-importing.svg?style=flat-square)](https://travis-ci.org/ipfs/js-ipfs-data-importing)
![](https://img.shields.io/badge/coverage-%3F-yellow.svg?style=flat-square)
[![Dependency Status](https://david-dm.org/ipfs/js-ipfs-data-importing.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-data-importing)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard)

> JavaScript implementation of the layout and chunking mechanisms used by IPFS

## Example

Let's create a little directory to import:
```sh
$ cd /tmp
$ mkdir foo
$ echo 'hello' > foo/bar
$ echo 'world' > foo/quux
```

And write the importing logic:
```js
// Dependencies to create a DAG Service (where the dir will be imported into)
var memStore = require('abstract-blob-store')
var ipfsRepo = require('ipfs-repo')
var ipfsBlocks = require('ipfs-blocks')
var ipfsMerkleDag = require('ipfs-merkle-dag')

var repo = new ipfsRepo('', { stores: memStore })
var blocks = new ipfsBlocks.BlockService(repo)
var dag = new ipfsMerkleDag.DAGService(blocks)


var ipfsData = require('ipfs-data-importing')

// Import /tmp/foo
ipfsData.import('/tmp/foo', dag, {
recursive: true
}, done)

// A root DAG Node is received upon completion
function done (err, rootStat) {
if (err) { throw err }
console.log(rootStat)
}
```

When run, the stat of the root DAG Node is printed:

```
{ Hash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
Size: 59843,
Name: 'foo' }
```

## API

```js
var importer = require('ipfs-data-importing')
```

### importer.import(target, dagService, opts, cb)

`target` can be a `string`, `Buffer`, or `Stream`. When it is a string, the
file or directory structure rooted at `target` on the filesystem is imported,
with the hierarchy preserved. When it is a Buffer or Stream, a single DAG node
representing its contents is imported.

Uses the [DAG Service](https://github.com/vijayee/js-ipfs-merkle-dag/) instance
`dagService`. Accepts the following `opts`:

- `recursive`: whether to recurse into directories. Defaults to `false`.

Calls the callback `cb(err, stat)` on completion or error, where `stat` is an
object with the `Hash`, `Size`, and `Name` of the root
[`DAGNode`](https://github.com/vijayee/js-ipfs-merkle-dag/).
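
For example, importing raw data from a Buffer (a minimal sketch; `dag` is a
DAG Service instance, set up as in the Example section above, and `opts` is
omitted, which is allowed):

```js
var importer = require('ipfs-data-importing')

// Import the contents of a Buffer as a single DAG node.
// `dag` is assumed to be a DAGService instance, as in the Example above.
importer.import(new Buffer('hello world\n'), dag, function (err, stat) {
  if (err) { throw err }
  console.log(stat.Hash, stat.Size)
})
```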

## install

With [npm](https://npmjs.org/) installed, run

```
$ npm install ipfs-data-importing
```

## license

ISC
1 change: 1 addition & 0 deletions package.json
@@ -27,6 +27,7 @@
   },
   "homepage": "https://github.com/diasdavid/js-ipfs-data-importing#readme",
   "devDependencies": {
+    "block-stream2": "^1.1.0",
     "brfs": "^1.4.3",
     "bs58": "^3.0.0",
     "buffer-loader": "0.0.1",
46 changes: 3 additions & 43 deletions src/chunker-fixed-size.js
@@ -1,45 +1,5 @@
-var through2 = require('through2')
+var chunker = require('block-stream2')
 
-exports = module.exports = FixedSizeChunker
-
-// The difference of this chunker compared to other fixed size chunkers
-// available, is that it doesn't add padding the last chunk
-
-function FixedSizeChunker (size) {
-  var stream = through2(transform, flush)
-
-  var buf = new Buffer(0)
-
-  function transform (chunk, enc, cb) {
-    var that = this
-
-    buf = Buffer.concat([buf, chunk])
-
-    if (buf.length >= size) {
-      slice()
-    }
-
-    function slice () {
-      var chunk = new Buffer(size, 'binary')
-      var newBuf = new Buffer(buf.length - size, 'binary')
-      buf.copy(chunk, 0, 0, size)
-      buf.copy(newBuf, 0, size, buf.length)
-      buf = newBuf
-      that.push(chunk)
-
-      if (buf.length >= size) {
-        return slice()
-      }
-    }
-
-    cb()
-  }
-
-  function flush (cb) {
-    // last chunk
-    this.push(buf)
-    cb()
-  }
-
-  return stream
+exports = module.exports = function (size) {
+  return chunker({ size: size, zeroPadding: false })
 }
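
The new implementation delegates chunking to block-stream2, keeping the old
behaviour of leaving the final chunk unpadded. For reference, a minimal usage
sketch (assuming a local checkout, since the chunker is an internal module
required by relative path):

```js
var fs = require('fs')
// hypothetical relative path into a local checkout of this repo
var fixedSizeChunker = require('./src/chunker-fixed-size')

fs.createReadStream('/tmp/foo/bar')
  .pipe(fixedSizeChunker(256))
  .on('data', function (chunk) {
    // every chunk is 256 bytes, except possibly the last,
    // which is emitted as-is rather than zero-padded
    console.log(chunk.length)
  })
```
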
43 changes: 17 additions & 26 deletions src/index.js
@@ -12,34 +12,29 @@ exports = module.exports
 const CHUNK_SIZE = 262144
 
 // Use a layout + chunkers to convert a directory (or file) to the layout format
-exports.import = function (options, callback) {
-  // options.path : what to import
-  // options.buffer : import a buffer
-  // options.filename : optional file name for buffer
-  // options.stream : import a stream
+exports.import = (target, dagService, options, callback) => {
+  if (typeof options === 'function') { callback = options; options = {} }
+
+  if (!target) { return callback(new Error('must specify target')) }
+  if (!dagService) { return callback(new Error('must specify dag service')) }
+
   // options.recursive : follow dirs
   // options.chunkers : obj with chunkers to each type of data, { default: dumb-chunker }
-  // options.dag-service : instance of block service
-  const dagService = options.dagService
 
-  if (options.buffer) {
-    if (!Buffer.isBuffer(options.buffer)) {
-      return callback(new Error('buffer importer must take a buffer'))
-    }
-    bufferImporter(options.buffer, callback)
-  } else if (options.stream) {
-    if (!(typeof options.stream.on === 'function')) {
-      return callback(new Error('stream importer must take a readable stream'))
-    }
+  options = options || {}
+
+  if (Buffer.isBuffer(target)) {
+    bufferImporter(target, callback)
+  } else if (typeof target.on === 'function') {
     // TODO Create Stream Importer
     // streamImporter(options.stream, callback)
     return callback(new Error('stream importer has not been built yet'))
-  } else if (options.path) {
-    const stats = fs.statSync(options.path)
+  } else if (typeof target === 'string') {
+    const stats = fs.statSync(target)
     if (stats.isFile()) {
-      fileImporter(options.path, callback)
+      fileImporter(target, callback)
     } else if (stats.isDirectory() && options.recursive) {
-      dirImporter(options.path, callback)
+      dirImporter(target, callback)
     } else {
       return callback(new Error('recursive must be true to add a directory'))
     }

@@ -219,13 +214,10 @@ exports.import = function (options, callback) {
       if (err) {
         return log.err(err)
       }
-      // an optional file name provided
-      const fileName = options.filename
-
       callback(null, {
         Hash: parentNode.multihash(),
-        Size: parentNode.size(),
-        Name: fileName
+        Size: parentNode.size()
       }) && cb()
     })
   }))

@@ -241,8 +233,7 @@ exports.import = function (options, callback) {
 
     callback(null, {
      Hash: fileNode.multihash(),
-      Size: fileNode.size(),
-      Name: options.filename
+      Size: fileNode.size()
     })
   })
 }
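
For reference, a minimal sketch of how the new argument validation behaves
(`dagService` here stands for any DAG Service instance, as in the README
example; the error strings come from the diff above):

```js
var importer = require('ipfs-data-importing')

// a missing target is reported through the callback
importer.import(null, dagService, function (err) {
  console.log(err.message) // 'must specify target'
})

// directories are rejected unless opts.recursive is true
importer.import('/tmp/foo', dagService, function (err) {
  console.log(err.message) // 'recursive must be true to add a directory'
})
```
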
12 changes: 3 additions & 9 deletions tests/buffer-test.js
@@ -24,6 +24,7 @@ const marbuf = require('buffer!./test-data/200Bytes.txt.block')
 module.exports = function (repo) {
   describe('chunker: fixed size', function () {
     this.timeout(10000)
+
     it('256 Bytes chunks', function (done) {
       var counter = 0
       fileStream()
@@ -84,10 +85,7 @@ module.exports = function (repo) {
       var bs = new BlockService(repo)
       var ds = new DAGService(bs)
       var buf = smallBuf
-      importer.import({
-        buffer: buf,
-        dagService: ds
-      }, function (err, stat) {
+      importer.import(buf, ds, function (err, stat) {
         expect(err).to.not.exist
         ds.get(stat.Hash, function (err, node) {
           expect(err).to.not.exist
@@ -105,11 +103,7 @@ module.exports = function (repo) {
       var buf = bigBuf
       var bs = new BlockService(repo)
       var ds = new DAGService(bs)
-      importer.import({
-        buffer: buf,
-        dagService: ds,
-        filename: 'Test.txt'
-      }, function (err, stat) {
+      importer.import(buf, ds, function (err, stat) {
         expect(err).to.not.exist
         ds.get(stat.Hash, function (err, node) {
           expect(err).to.not.exist
64 changes: 0 additions & 64 deletions tests/test-fixed-size-chunker.js

This file was deleted.
