Skip to content
This repository has been archived by the owner on Jan 9, 2023. It is now read-only.

Commit

Permalink
Bug fixes, tests almost all green, removed unused/old code, etc.
Browse files Browse the repository at this point in the history
  • Loading branch information
stephen-palmer committed Nov 8, 2017
1 parent cd55c19 commit 2194795
Show file tree
Hide file tree
Showing 17 changed files with 1,696 additions and 1,602 deletions.
1 change: 1 addition & 0 deletions .nvmrc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
v8.9.1
52 changes: 0 additions & 52 deletions lib/base_protocol_transform.js

This file was deleted.

30 changes: 6 additions & 24 deletions lib/cache/cache.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,34 +2,16 @@

/**
 * Abstract base class for cache backends.
 *
 * Concrete implementations (e.g. the debug and in-memory caches elsewhere in
 * this module) must override every method below; each base implementation
 * simply throws to signal a missing override.
 */
class Cache {
    constructor() {}

    /**
     * Retrieve a readable stream for a cached file.
     * @param {string} type - file type tag ('a', 'i', or 'r' in subclasses)
     * @param {Buffer|string} guid - asset GUID
     * @param {Buffer|string} hash - asset hash
     * @param {Function} callback - callback(err, result)
     */
    getFileStream(type, guid, hash, callback) {
        throw new Error("Not implemented!");
    }

    /**
     * Begin a put transaction for the given GUID/hash pair.
     * @param {Buffer|string} guid - asset GUID
     * @param {Buffer|string} hash - asset hash
     * @param {Function} callback - callback(err, transaction)
     */
    createPutTransaction(guid, hash, callback) {
        throw new Error("Not implemented!");
    }

    /**
     * Commit a previously created put transaction.
     * @param {PutTransaction} transaction - transaction to finalize
     * @param {Function} callback - callback(err)
     */
    endPutTransaction(transaction, callback) {
        throw new Error("Not implemented!");
    }

    /**
     * Verify cache integrity, optionally repairing problems found.
     * @param {boolean} doFix - whether to repair detected issues
     * @param {Function} callback - callback(err, errorCount)
     */
    integrityCheck(doFix, callback) {
        throw new Error("Not implemented!");
    }

    /**
     * Hook a cluster worker into this cache (for multi-process setups).
     * @param {Object} worker - cluster worker handle
     */
    registerClusterWorker(worker) {
        throw new Error("Not implemented!");
    }
}

/**
 * Base class for cache put transactions.
 *
 * Fix: the rendered diff fused the deleted old class (empty constructor plus
 * an orphaned, unterminated `getWriteStream` throw) with the added new one,
 * producing invalid JS with two constructors. This keeps the post-commit
 * form: the constructor stores the GUID/hash pair and exposes them as
 * read-only getters for subclasses and for `endPutTransaction` callers.
 */
class PutTransaction {
    /**
     * @param {Buffer|string} guid - asset GUID for this transaction
     * @param {Buffer|string} hash - asset hash for this transaction
     */
    constructor(guid, hash) {
        this._guid = guid;
        this._hash = hash;
    }

    /** @returns {Buffer|string} the GUID this transaction was created with */
    get guid() { return this._guid; }

    /** @returns {Buffer|string} the hash this transaction was created with */
    get hash() { return this._hash; }
}

module.exports = {
Expand Down
20 changes: 9 additions & 11 deletions lib/cache/cache_debug.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,10 @@ class CacheDebug extends Cache {
}

getFileStream(type, guid, hash, callback) {
var size = Math.trunc(Math.random() * this[kOptions].minFileSize + this[kOptions].maxFileSize);
var slice = this[kBuffer].slice(0, size);
const size = Math.trunc(Math.random() * this[kOptions].minFileSize + this[kOptions].maxFileSize);
const slice = this[kBuffer].slice(0, size);

var stream = new Readable({
const stream = new Readable({
read() {
this.push(slice);
this.push(null);
Expand All @@ -37,21 +37,19 @@ class CacheDebug extends Cache {
callback();
}

// Debug stub: immediately reports zero integrity errors without scanning.
// NOTE(review): these lines are part of a rendered diff; they appear to be
// removed from cache_debug.js by this commit — confirm against the repository.
integrityCheck(doFix, callback) {
callback(null, 0);
}

// No-op: the debug cache keeps no per-worker state to register.
registerClusterWorker(worker) {}
}

class PutTransactionDebug extends PutTransaction {
constructor() {
super();
constructor(guid, hash) {
super(guid, hash);
}

getWriteStream(type, size, callback) {
var stream = new Writable({
write(chunk, encoding, callback) { callback(); }
const stream = new Writable({
write(chunk, encoding, callback) {
callback();
}
});

callback(null, stream);
Expand Down
87 changes: 33 additions & 54 deletions lib/cache/cache_membuf.js
Original file line number Diff line number Diff line change
Expand Up @@ -40,18 +40,18 @@ class CacheMembuf extends Cache {
}

static _calcIndexKey(type, guid, hash) {
var h = crypto.createHash('sha256');
const h = crypto.createHash('sha256');
h.update(type);
h.update(guid);
h.update(hash);
return h.digest('hex');
}

static _findFreeBlockIndex(size) {
var best = -1;
var min = 0;
var max = CacheMembuf._freeBlocks.length - 1;
var guess;
let best = -1;
let min = 0;
let max = CacheMembuf._freeBlocks.length - 1;
let guess;

while (min <= max) {
guess = (min + max) >> 1;
Expand Down Expand Up @@ -88,9 +88,9 @@ class CacheMembuf extends Cache {
CacheMembuf._freeBlock(key);

// Find the best free block to use
var i = CacheMembuf._findFreeBlockIndex(size);
const i = CacheMembuf._findFreeBlockIndex(size);
if(i >= 0) {
var block = CacheMembuf._freeBlocks[i];
const block = CacheMembuf._freeBlocks[i];
CacheMembuf._index[key] = Object.assign({}, block);
CacheMembuf._index[key].size = size;

Expand Down Expand Up @@ -122,30 +122,30 @@ class CacheMembuf extends Cache {
}

static _addFileToCache(type, guid, hash, buffer) {
var key = CacheMembuf._calcIndexKey(type, guid, hash);
var fileSize = buffer.length;
var entry = CacheMembuf._reserveBlock(key, fileSize);
const key = CacheMembuf._calcIndexKey(type, guid, hash);
const entry = CacheMembuf._reserveBlock(key, buffer.length);

helpers.log(consts.LOG_DBG, "Saving file: pageIndex = " + entry.pageIndex + " pageOffset = " + entry.pageOffset + " size = " + entry.size);

buffer.copy(CacheMembuf._pages[entry.pageIndex], 0, entry.pageOffset, fileSize);
buffer.copy(CacheMembuf._pages[entry.pageIndex], entry.pageOffset, 0, buffer.length);
}

getFileStream(type, guid, hash, callback) {
var key = CacheMembuf._calcIndexKey(type, guid, hash);
const key = CacheMembuf._calcIndexKey(type, guid, hash);
if(CacheMembuf._index.hasOwnProperty(key)) {
var entry = CacheMembuf._index[key];
var slice = CacheMembuf._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size);
var stream = new Readable({
const entry = CacheMembuf._index[key];
const slice = CacheMembuf._pages[entry.pageIndex].slice(entry.pageOffset, entry.pageOffset + entry.size);
const stream = new Readable({
read() {
this.push(slice);
this.push(null);
}
});

callback(null, {size: entry.size, stream: stream});
}
else {
callback(null, null);
callback(new Error("File not found for (" + type + ") " + guid.toString('hex') + "-" + hash.toString('hex')));
}
}

Expand All @@ -154,71 +154,50 @@ class CacheMembuf extends Cache {
}

endPutTransaction(transaction, callback) {
var files = transaction.getFiles();
const files = transaction.getFiles();
files.forEach(function(file) {
CacheMembuf._addFileToCache.call(this, file.type, transaction.guid, transaction.hash, file.buffer);
});

callback();
}


integrityCheck(doFix, callback) {
return super.integrityCheck(doFix, callback);
}

registerClusterWorker(worker) {
return super.registerClusterWorker(worker);
// Not implemented
}
}

class PutTransactionMembuf extends PutTransaction {
constructor(guid, hash) {
super();
this._buffers = {
a: null,
i: null,
r: null
};

super(guid, hash);
this._files = { a: {}, i: {}, r: {} };
this._finished = [];

this._guid = guid;
this._hash = hash;
}

// Returns the {type, buffer} entries whose write streams completed in full;
// consumed by CacheMembuf.endPutTransaction when committing the transaction.
getFiles() {
return this._finished;
}

// NOTE(review): these getters duplicate the guid/hash accessors this commit
// adds to the PutTransaction base class — they appear to be deleted lines of
// the rendered diff; confirm against the repository.
get guid() {
return this._guid;
}

get hash() {
return this._hash;
}

getWriteStream(type, size, callback) {
var self = this;
const self = this;

if(type !== 'a' && type !== 'i' && type !== 'r') {
return callback(new Error("Unrecognized type '" + type + "' for transaction."));
}

this._buffers[type] = Buffer.alloc(size, 0, 'ascii');
this._bufferPos = 0;

var buffer = this._buffers[type];
this._files[type].buffer = Buffer.alloc(size, 0, 'ascii');
this._files[type].pos = 0;

var stream = new Writable({
const stream = new Writable({
write(chunk, encoding, callback) {
if(buffer.length - self._bufferPos >= chunk.length) {
chunk.copy(buffer, self._bufferPos, 0, chunk.length);
self._bufferPos += chunk.length;
const file = self._files[type];

if (file.buffer.length - file.pos >= chunk.length) {
chunk.copy(file.buffer, file.pos, 0, chunk.length);
file.pos += chunk.length;

if(self._bufferPos === size) {
self._finished.push({type: type, buffer: self._buffers[type]});
if (file.pos === size) {
self._finished.push({type: type, buffer: file.buffer});
}
}
else {
Expand Down

0 comments on commit 2194795

Please sign in to comment.