15 changes: 13 additions & 2 deletions src/cli/commands/daemon.js
@@ -10,11 +10,22 @@ module.exports = {

describe: 'Start a long-running daemon process',

handler () {
builder: {
'enable-sharding-experiment': {
type: 'boolean',
default: false
},
'enable-pubsub-experiment': {
type: 'boolean'
default: false
}
},

handler (argv) {
console.log('Initializing daemon...')

const repoPath = utils.getRepoPath()
httpAPI = new HttpAPI(repoPath)
httpAPI = new HttpAPI(process.env.IPFS_PATH, argv)

httpAPI.start((err) => {
if (err && err.code === 'ENOENT') {
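The two new builder options surface on argv in camelCased form, since yargs expands dashed flags by default, and the handler now forwards the whole argv object into HttpAPI. A minimal sketch of that hand-off, assuming the CLI binary is invoked as jsipfs; the flag names come from the builder above, everything else is illustrative:

// Running `jsipfs daemon --enable-sharding-experiment` makes yargs build an argv
// roughly like this (camelCase keys are yargs' default expansion of dashed flags):
const argv = {
  enableShardingExperiment: true, // from --enable-sharding-experiment
  enablePubsubExperiment: false // builder default
}

// The handler then forwards it untouched, so src/http-api can read the flag later:
// httpAPI = new HttpAPI(process.env.IPFS_PATH, argv)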
16 changes: 15 additions & 1 deletion src/cli/commands/files/add.js
@@ -8,6 +8,7 @@ const pull = require('pull-stream')
const paramap = require('pull-paramap')
const zip = require('pull-zip')
const toPull = require('stream-to-pull-stream')
const utils = require('../../utils')

const WRAPPER = 'wrapper/'

@@ -56,14 +57,27 @@ module.exports = {
alias: 'w',
type: 'boolean',
default: false
},
'enable-sharding-experiment': {
type: 'boolean',
default: false
},
'shard-split-threshold': {
type: 'integer',
default: 1000
}
},

handler (argv) {
const inPath = checkPath(argv.file, argv.recursive)
const index = inPath.lastIndexOf('/') + 1
const options = {
strategy: argv.trickle ? 'trickle' : 'balanced'
strategy: argv.trickle ? 'trickle' : 'balanced',
shardSplitThreshold: argv.enableShardingExperiment ? argv.shardSplitThreshold : Infinity
}

if (argv.enableShardingExperiment && utils.isDaemonOn()) {
throw new Error('Error: Enabling the sharding experiment should be done on the daemon')
}
const ipfs = argv.ipfs

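The handler translates the CLI flag into the importer's shardSplitThreshold, using Infinity to keep sharding effectively off when the experiment is disabled, and it refuses to run while a daemon is already up. A hedged programmatic sketch of the same knob through the core API, using createAddPullStream the way the spec at the end of this diff does; the threshold value and file contents are illustrative:

// Assumes `ipfs` is a started js-ipfs node with EXPERIMENTAL.sharding enabled.
const pull = require('pull-stream')

pull(
  pull.values([{ path: 'folder/file.txt', content: new Buffer('hello') }]),
  // An explicit threshold overrides the experiment-derived default (see files.js below);
  // a directory switches to a sharded layout once it exceeds this many entries.
  ipfs.files.createAddPullStream({ shardSplitThreshold: 500 }),
  pull.collect((err, results) => {
    if (err) throw err
    console.log(results[results.length - 1].hash) // hash of the root folder
  })
)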
6 changes: 5 additions & 1 deletion src/core/components/files.js
@@ -18,10 +18,14 @@ const Duplex = require('stream').Duplex

module.exports = function files (self) {
const createAddPullStream = (options) => {
const opts = Object.assign({}, {
shardSplitThreshold: self._options.EXPERIMENTAL.sharding ? 1000 : Infinity
}, options)

return pull(
pull.map(normalizeContent),
pull.flatten(),
importer(self._ipldResolver, options),
importer(self._ipldResolver, opts),
pull.asyncMap(prepareFile.bind(null, self))
)
}
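Because the merge is Object.assign({}, defaults, options), a caller-supplied shardSplitThreshold still wins over the value derived from the EXPERIMENTAL.sharding flag; the flag only changes the default. A small self-contained illustration of that precedence:

// Later sources win in Object.assign, so explicit caller options override defaults.
const defaults = { shardSplitThreshold: Infinity } // sharding experiment disabled
const callerOptions = { shardSplitThreshold: 500 }

const opts = Object.assign({}, defaults, callerOptions)
console.log(opts.shardSplitThreshold) // 500, the caller's value, not Infinity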
3 changes: 3 additions & 0 deletions src/core/index.js
@@ -97,6 +97,9 @@ class IPFS extends EventEmitter {
if (this._options.EXPERIMENTAL.pubsub) {
this.log('EXPERIMENTAL pubsub is enabled')
}
if (this._options.EXPERIMENTAL.sharding) {
this.log('EXPERIMENTAL sharding is enabled')
}
this.state = require('./state')(this)

boot(this)
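The flag checked here comes straight from the constructor options, as the new spec at the bottom of this diff exercises. A minimal sketch of enabling it programmatically, assuming the published ipfs package and an illustrative repo path:

const IPFS = require('ipfs') // in-repo code would require src/core instead

const node = new IPFS({
  repo: '/tmp/ipfs-sharding-demo', // illustrative path
  config: { Bootstrap: [] },
  EXPERIMENTAL: {
    sharding: true // triggers the 'EXPERIMENTAL sharding is enabled' log above
  }
})

node.once('start', () => console.log('node is up with sharding enabled'))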
3 changes: 2 additions & 1 deletion src/http-api/index.js
@@ -40,7 +40,8 @@ function HttpApi (repo, config) {
start: true,
config: config,
EXPERIMENTAL: {
pubsub: true
pubsub: true,
sharding: config && config.enableShardingExperiment
}
})
} catch (err) {
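This is where the daemon flag from daemon.js lands: the CLI's argv arrives here as config, so config.enableShardingExperiment decides whether the daemon's core node runs with EXPERIMENTAL.sharding. A rough sketch of that wiring, with the require path and the ready log being assumptions rather than part of this diff:

// Equivalent of `jsipfs daemon --enable-sharding-experiment`, spelled out by hand.
const HttpAPI = require('./src/http-api') // path is illustrative

const httpAPI = new HttpAPI(process.env.IPFS_PATH, {
  enableShardingExperiment: true // becomes EXPERIMENTAL.sharding on the core node
})

httpAPI.start((err) => {
  if (err) throw err
  console.log('daemon is ready') // assumed log, for illustration only
})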
93 changes: 93 additions & 0 deletions test/core/files-sharding.spec.js
@@ -0,0 +1,93 @@
/* eslint max-nested-callbacks: ["error", 8] */
/* eslint-env mocha */
'use strict'

const chai = require('chai')
const dirtyChai = require('dirty-chai')
const expect = chai.expect
chai.use(dirtyChai)
const pull = require('pull-stream')

const IPFS = require('../../src/core')
const createTempRepo = require('../utils/create-repo-node.js')

describe('files dir', () => {
const files = []
for (let i = 0; i < 1005; i++) {
files.push({
path: 'test-folder/' + i,
content: new Buffer('some content ' + i)
})
}

describe('without sharding', () => {
let ipfs

before((done) => {
ipfs = new IPFS({
repo: createTempRepo(),
config: {
Bootstrap: []
}
})
ipfs.once('start', done)
})

after((done) => {
ipfs.stop(done)
})

it('should be able to add dir without sharding', (done) => {
pull(
pull.values(files),
ipfs.files.createAddPullStream(),
pull.collect((err, results) => {
expect(err).to.not.exist()
const last = results[results.length - 1]
expect(last.path).to.be.eql('test-folder')
expect(last.hash).to.be.eql('QmWWM8ZV6GPhqJ46WtKcUaBPNHN5yQaFsKDSQ1RE73w94Q')
done()
})
)
})
})

describe('with sharding', () => {
let ipfs

before((done) => {
ipfs = new IPFS({
repo: createTempRepo(),
config: {
Bootstrap: []
},
EXPERIMENTAL: {
sharding: true
}
})
ipfs.once('start', done)
})

after((done) => {
ipfs.stop(() => done()) // ignore stop errors
})

it('should be able to add dir with sharding', (done) => {
pull(
pull.values(files),
ipfs.files.createAddPullStream(),
pull.collect((err, results) => {
expect(err).to.not.exist()
const last = results[results.length - 1]
expect(last.path).to.be.eql('test-folder')
expect(last.hash).to.be.eql('QmZjYC1kWrLmiRYbEmGSo2PEpMixzT2k2xoCKSBzt8KDcy')
done()
})
)
})
})
})