streaming implementation of the dat replication protocol
JavaScript
Fetching latest commit…
Cannot retrieve the latest commit at this time.
Permalink
Failed to load latest commit information.
messages
test
.gitignore
.travis.yml
LICENSE
README.md
decode.js
encode.js
example.js
index.js
package.json

README.md

dat-replication-protocol

Streaming implementation of the dat replication protocol

npm install dat-replication-protocol

build status dat

Usage

var protocol = require('dat-replication-protocol')

var decode = protocol.decode()
var encode = protocol.encode()

decode.change(function(change, cb) {
  // received a change
  cb()
})

decode.blob(function(blob, cb) {
  // received a blob stream
  blob.on('data', function(data) {
    console.log(data)
  })
  blob.on('end', function() {
    cb()
  })
})

decode.finalize(function(cb) {
  // should finalize stuff
  cb()
})

// write changes data
encode.change({
  key: 'some-row-key',
  change: 0,
  from: 0,
  to: 1,
  value: new Buffer('some binary value')
}, function() {
  console.log('change was flushed')
})

var blob = encode.blob(12) // 12 is the length of the blob

blob.write('hello ')
blob.write('world\n')
blob.end()

encode.finalize() // end the encode stream

// set up the pipeline
encode.pipe(decode)

Wire format

Basically all changes and blobs are sent as multibuffers (varint prefixed).

--------------------------------------------------
|  varint length  |  single byte id  |  payload  |
--------------------------------------------------

Since blobs can be large they are treated as streams.

License

MIT