
feat(ipfsObject): increase object limit
kenshyx committed Sep 20, 2016
1 parent b922375 commit d2a6a82
Showing 4 changed files with 49,431 additions and 9 deletions.
src/IpfsApiHelper.js (2 changes: 1 addition & 1 deletion)
@@ -4,7 +4,7 @@ const statics_1 = require('./statics');
 const is_ipfs_1 = require('is-ipfs');
 class IpfsApiHelper {
     constructor(provider) {
-        this.OBJECT_MAX_SIZE = 512 * 1024;
+        this.OBJECT_MAX_SIZE = 1.5 * 1024 * 1024;
         this.REQUEST_TIMEOUT = 60 * 1000;
         this.LINK_SYMBOL = '/';
         this.apiClient = provider;
src/IpfsApiHelper.ts (2 changes: 1 addition & 1 deletion)
@@ -6,7 +6,7 @@ import { Readable } from 'stream';
 
 export class IpfsApiHelper {
     public apiClient: any;
-    public OBJECT_MAX_SIZE = 512 * 1024; // 512kb
+    public OBJECT_MAX_SIZE = 1.5 * 1024 * 1024; // 1.5mb
     public REQUEST_TIMEOUT = 60 * 1000; // 60s
     public LINK_SYMBOL = '/';
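
The functional change in both the compiled helper (src/IpfsApiHelper.js) and its TypeScript source is the same single constant: the per-object size limit rises from 512 * 1024 bytes (512kb) to 1.5 * 1024 * 1024 bytes (1.5mb). The diff does not show where the limit is consulted; the sketch below is illustrative only, assuming the helper serializes an object and compares its byte length against OBJECT_MAX_SIZE to decide whether it must be split into linked chunks. The exceedsObjectLimit name is an assumption, not part of this codebase.

// Illustrative sketch, not part of the commit: a byte-size check against
// OBJECT_MAX_SIZE. Only the constant's name and value come from the diff.
const OBJECT_MAX_SIZE = 1.5 * 1024 * 1024; // 1.5mb, the new limit

function exceedsObjectLimit(data: object): boolean {
    // Measure the UTF-8 byte length of the serialized object, not the string length.
    return Buffer.byteLength(JSON.stringify(data)) > OBJECT_MAX_SIZE;
}

// exceedsObjectLimit({ a: 1 })       -> false: small enough to store as one object
// exceedsObjectLimit(hugeJsonObject) -> true:  candidate for splitting into linked chunks (see tests below)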
tests/index.js (17 changes: 10 additions & 7 deletions)
@@ -12,6 +12,7 @@ describe('IpfsConnector', function () {
     let instance = IpfsConnector.getInstance();
     let binTarget = path.join(__dirname, 'bin');
     let filePath = path.join(__dirname, 'stubs', 'example.json');
+    let bigObjHash = '';
     let rootHash = '';
     const logger = {
         info: function () {
@@ -118,29 +119,31 @@ describe('IpfsConnector', function () {
     it('should split when object is too big', function (done) {
         instance.api.add(bigObject)
             .then(hash => {
+                bigObjHash = hash;
                 instance.api._getStats(hash).then((stats) => {
                     expect(stats.NumLinks).to.be.above(0);
                     done();
                 });
             })
             .catch(err => {
                 console.log(err.message);
                 expect(err).to.be.undefined;
                 done();
             });
     });
     it('should read big file', function (done) {
         instance.api
-            .get("QmYZ63vj8KjipwiSKGatx7g8J5sWu6FyNqSUb88MRNAS9N")
+            .get(bigObjHash)
             .then(bigBuffer=> {
                 expect(bigBuffer.length).to.equal(Buffer.from(JSON.stringify(bigObject)).length);
                 done();
             })
     });
     it('should construct object link from hash', function (done) {
         const expected = {};
-        expected[instance.api.LINK_SYMBOL] = "QmYZ63vj8KjipwiSKGatx7g8J5sWu6FyNqSUb88MRNAS9N";
+        expected[instance.api.LINK_SYMBOL] = bigObjHash;
         instance.api
-            .constructObjLink("QmYZ63vj8KjipwiSKGatx7g8J5sWu6FyNqSUb88MRNAS9N")
+            .constructObjLink(bigObjHash)
             .then((result)=> {
                 expect(result).to.deep.equal(expected);
                 done();
@@ -167,10 +170,10 @@ describe('IpfsConnector', function () {
             a: 1,
             b: 2
         };
-        const inputLink = { c: '', d: '', e: 'QmYZ63vj8KjipwiSKGatx7g8J5sWu6FyNqSUb88MRNAS9N' };
+        const inputLink = { c: '', d: '', e: bigObjHash };
         const subLevels = [{ c1: 5, c2: 6 }, {
             d1: 'sdasdsadsad',
-            d2: 'QmYZ63vj8KjipwiSKGatx7g8J5sWu6FyNqSUb88MRNAS9N'
+            d2: bigObjHash
         }];
         let pool = subLevels.map(
             (plainObj) => {
@@ -181,8 +184,8 @@
             a: 1,
             b: 2,
             c: { '/': 'QmTCMGWApewThNp64JBg9yzhiZGKKDHigS2Y45Tyg1HG8r' },
-            d: { '/': 'QmV3SDTMn98nvzPTWmgzGDxc8AcYsrKRQ2zG5Ck6cuC2QY' },
-            e: 'QmYZ63vj8KjipwiSKGatx7g8J5sWu6FyNqSUb88MRNAS9N'
+            d: { '/': 'QmQZe3rajd2VVF4vX8oaZCZBw1YLhH916L1xNjGVd9B8E4' },
+            e: bigObjHash
         };
         const runChecks = (hash) => {
             const steps = [];
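
The test updates replace the hard-coded CID QmYZ63vj8KjipwiSKGatx7g8J5sWu6FyNqSUb88MRNAS9N with bigObjHash, the hash actually returned by add() for the oversized fixture, so the read and link assertions track whatever hash the splitter produces instead of a value that shifts with the limit. Below is a condensed, hypothetical sketch of the flow these tests exercise, assuming an async context; the roundTripBigObject name is illustrative, while add, _getStats, get, constructObjLink and LINK_SYMBOL come from the code shown above.

// Hypothetical condensation of the updated tests; `instance` and `bigObject`
// stand for the same objects the test suite sets up.
async function roundTripBigObject(instance: any, bigObject: object) {
    const hash = await instance.api.add(bigObject);          // splits when the serialized object is too big
    const stats = await instance.api._getStats(hash);        // stats.NumLinks > 0 once split
    const buffer = await instance.api.get(hash);             // reassembled content, same byte length as the input
    const link = await instance.api.constructObjLink(hash);  // { [instance.api.LINK_SYMBOL]: hash }, i.e. { '/': hash }
    return { hash, stats, buffer, link };
}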
