Skip to content
This repository was archived by the owner on Mar 10, 2020. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
"require-dir": "^0.3.0",
"rimraf": "^2.4.3",
"run-sequence": "^1.1.4",
"stream-equal": "^0.1.7",
"stream-http": "^2.0.2",
"uglify-js": "^2.4.24",
"vinyl-buffer": "^1.0.0",
Expand Down
27 changes: 17 additions & 10 deletions src/request-api.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,18 @@ const isNode = !global.window

// -- Internal

// Collects the JSON objects of a chunked HTTP response into an array and
// hands them to `cb` once the stream ends. Chunks that fail to parse are
// dropped on purpose: some browsers emit extra, non-JSON data on chunked
// responses, and a best-effort collection is the desired behavior here.
function parseChunkedJson (res, cb) {
  const objects = []
  res.on('data', (piece) => {
    try {
      objects.push(JSON.parse(piece))
    } catch (parseErr) {
      // Browser quirks emit more than needed sometimes — skip bad chunks
    }
  })
  res.on('end', () => {
    cb(null, objects)
  })
}

function onRes (buffer, cb) {
return (err, res) => {
if (err) {
Expand All @@ -30,19 +42,14 @@ function onRes (buffer, cb) {
})
}

// console.log('stream:', stream, ' chunked:', chunkedObjects)

if (stream && !buffer) return cb(null, res)

if (chunkedObjects) {
const parsed = []
res.on('data', chunk => {
try {
parsed.push(JSON.parse(chunk))
} catch (err) {
// Browser quirks emit more than needed sometimes
}
})
res.on('end', () => cb(null, parsed))
return
if (isJson) return parseChunkedJson(res, cb)

return Wreck.read(res, null, cb)
}

Wreck.read(res, {json: isJson}, cb)
Expand Down
Binary file added test/15mb.random
Binary file not shown.
46 changes: 45 additions & 1 deletion test/tests.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
/* global describe it before */

const ipfsAPI = require('../src/index.js')
const streamEqual = require('stream-equal')
const assert = require('assert')
const path = require('path')
const File = require('vinyl')
Expand All @@ -12,11 +13,16 @@ const isNode = !global.window

const testfilePath = __dirname + '/testfile.txt'
let testfile
let testfileBig

if (isNode) {
testfile = require('fs').readFileSync(__dirname + '/testfile.txt')
testfileBig = require('fs').createReadStream(__dirname + '/15mb.random', { bufferSize: 128 })
// testfileBig = require('fs').createReadStream(__dirname + '/100mb.random', { bufferSize: 128 })
} else {
testfile = require('raw!./testfile.txt')
// browser goes nuts with a 100mb in memory
// testfileBig = require('raw!./100mb.random')
}

describe('IPFS Node.js API wrapper tests', () => {
Expand Down Expand Up @@ -116,12 +122,28 @@ describe('IPFS Node.js API wrapper tests', () => {
if (err) throw err

// assert.equal(res.length, 1)
const added = res[0] != null ? res[0] : res
const added = res[0] !== null ? res[0] : res
assert.equal(added.Hash, 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
done()
})
})

it('add BIG buffer', function (done) {
  // The 15mb.random fixture stream is only created under Node (see the
  // top-of-file fixture setup), so skip in the browser.
  if (!isNode) {
    return done()
  }
  this.timeout(10000)

  apiClients['a'].add(testfileBig, (err, res) => {
    if (err) throw err

    // assert.equal(res.length, 1)
    // Loose `!= null` covers both null AND undefined: when the API returns
    // a plain object (no index 0), fall through to `res` itself. Strict
    // `!== null` would wrongly select an undefined `res[0]`.
    const added = res[0] != null ? res[0] : res
    assert.equal(added.Hash, 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq')
    done()
  })
})

it('add path', function (done) {
if (!isNode) {
return done()
Expand Down Expand Up @@ -203,6 +225,28 @@ describe('IPFS Node.js API wrapper tests', () => {
})
})
})

// Round-trips the 15 MB fixture: cat the hash added by the 'add BIG buffer'
// test and compare the returned stream against a fresh read stream of the
// same file on disk, without buffering either fully in memory.
it('cat BIG file', function (done) {
// Only Node has the on-disk fixture; skip in the browser.
if (!isNode) {
return done()
}
// Generous timeout: streaming 15 MB through the daemon can be slow on CI.
this.timeout(1000000)

apiClients['a'].cat('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', (err, res) => {
if (err) {
throw err
}

// Re-open the fixture: the module-level stream was presumably consumed by
// the earlier 'add BIG buffer' test — TODO confirm ordering dependency.
testfileBig = require('fs').createReadStream(__dirname + '/15mb.random', { bufferSize: 128 })

// Do not blow out the memory of nodejs :)
streamEqual(res, testfileBig, (err, equal) => {
if (err) throw err
assert(equal)
done()
})
})
})
})

describe('.ls', function () {
Expand Down