Skip to content
This repository has been archived by the owner on Jan 9, 2023. It is now read-only.

Commit

Permalink
Backing out logging changes
Browse files Browse the repository at this point in the history
  • Loading branch information
stephen-palmer committed Apr 23, 2019
1 parent 442e58c commit ee0eb97
Show file tree
Hide file tree
Showing 7 changed files with 21 additions and 38 deletions.
2 changes: 1 addition & 1 deletion import.js
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ async function importTransactionFile(filePath, addressString, defaultPort) {
}

try {
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Begin transaction for ${helpers.GUIDBufferToString(guid)}-${hash.toString('hex')}`);
helpers.log(consts.LOG_DBG, `Begin transaction for ${helpers.GUIDBufferToString(guid)}-${hash.toString('hex')}`);
await client.beginTransaction(guid, hash);
}
catch (err) {
Expand Down
2 changes: 1 addition & 1 deletion lib/cache/cache_fs.js
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ class CacheFS extends CacheBase {

const promises = transaction.files.map((file) =>
self._writeFileToCache(file.type, transaction.guid, transaction.hash, file.file)
.then(filePath => helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Added file to cache: ${file.size} ${filePath}`)));
.then(filePath => helpers.log(consts.LOG_TEST, `Added file to cache: ${file.size} ${filePath}`)));

return Promise.all(promises);
}
Expand Down
8 changes: 4 additions & 4 deletions lib/cache/cache_ram.js
Original file line number Diff line number Diff line change
Expand Up @@ -110,10 +110,10 @@ class CacheRAM extends CacheBase {
}

if(freeBlock.fileId) {
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Allocated existing block of size ${freeBlock.size} for ${key}, last accessed ${freeBlock.lastAccessTime}`);
helpers.log(consts.LOG_DBG, `Allocated existing block of size ${freeBlock.size} for ${key}, last accessed ${freeBlock.lastAccessTime}`);
}
else {
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Allocated free block of size ${freeBlock.size} for key ${key}`);
helpers.log(consts.LOG_DBG, `Allocated free block of size ${freeBlock.size} for key ${key}`);
}

// Clone the free block, then set its file id and size
Expand Down Expand Up @@ -173,7 +173,7 @@ class CacheRAM extends CacheBase {

const entry = this._reserveBlock(key, buffer.length);

helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Saving file key: ${key} pageIndex: ${entry.pageIndex} pageOffset: ${entry.pageOffset} size: ${entry.size}`);
helpers.log(consts.LOG_TEST, `Saving file key: ${key} pageIndex: ${entry.pageIndex} pageOffset: ${entry.pageOffset} size: ${entry.size}`);

buffer.copy(this._pages[entry.pageIndex], entry.pageOffset, 0, buffer.length);

Expand Down Expand Up @@ -215,7 +215,7 @@ class CacheRAM extends CacheBase {

const promises = pages.map(async page => {
const file = path.join(cachePath, page.index);
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Loading page file at ${file}`);
helpers.log(consts.LOG_DBG, `Loading page file at ${file}`);

const stats = await fs.stat(file);
if(stats.size !== page.size) throw new Error(`Unrecognized/invalid page file '${file}'`);
Expand Down
8 changes: 4 additions & 4 deletions lib/cache/reliability_manager.js
Original file line number Diff line number Diff line change
Expand Up @@ -67,15 +67,15 @@ class ReliabilityManager {
}

if(this._options.multiClient && params.clientId === entry.clientId) {
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Ignoring duplicate transaction for GUID: ${params.guidStr} Hash: ${params.hashStr} from previous client (multiClient = true)`);
helpers.log(consts.LOG_DBG, `Ignoring duplicate transaction for GUID: ${params.guidStr} Hash: ${params.hashStr} from previous client (multiClient = true)`);
return entry;
}

entry.clientId = params.clientId;

if(entry.versionHash === params.versionHashStr) {
entry.factor += 1;
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `GUID: ${params.guidStr} Hash: ${params.hashStr} ReliabilityFactor: ${entry.factor}`);
helpers.log(consts.LOG_DBG, `GUID: ${params.guidStr} Hash: ${params.hashStr} ReliabilityFactor: ${entry.factor}`);
}
else {
entry.state = ReliabilityManager.reliabilityStates.Unreliable;
Expand Down Expand Up @@ -116,11 +116,11 @@ class ReliabilityManager {
if(info.state === ReliabilityManager.reliabilityStates.Unreliable && this._options.saveUnreliableVersionArtifacts) {
const unreliableFilePath = path.join(this._cachePath, kUnreliableRootDir, params.guidStr, params.hashStr);
await trx.writeFilesToPath(unreliableFilePath);
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Unreliable version artifacts saved to ${unreliableFilePath}`);
helpers.log(consts.LOG_DBG, `Unreliable version artifacts saved to ${unreliableFilePath}`);
}

if(info.state !== ReliabilityManager.reliabilityStates.ReliableNew) {
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Invalidating transaction from client at ${trx.clientAddress} for GUID: ${params.guidStr} Hash: ${params.hashStr} ReliabilityState: ${info.state.toString()}`);
helpers.log(consts.LOG_DBG, `Invalidating transaction from client at ${trx.clientAddress} for GUID: ${params.guidStr} Hash: ${params.hashStr} ReliabilityState: ${info.state.toString()}`);
await trx.invalidate();
}
}
Expand Down
25 changes: 4 additions & 21 deletions lib/helpers.js
Original file line number Diff line number Diff line change
Expand Up @@ -136,44 +136,27 @@ async function readDir(dir, fileCallback) {

exports.readDir = readDir;

function shouldLog(lvl) { return lvl <= logLevel; }
exports.shouldLog = shouldLog;

/**
*
* @param {Number} lvl
* @param {String} msg
*/
exports.log = exports.defaultLogger = (lvl, msg) => {
if(!shouldLog(lvl)) return;
if(lvl > logLevel) return;
console.log(msg);
};


const kMasterLogPrefix = "[Cluster:M] ";
const workerLogPrefix = {};

/**
*
* @param {Number} lvl
* @param {String} msg
*/
exports.defaultClusterLogger = (lvl, msg) => {
if (!shouldLog(lvl)) return;

if(cluster.isMaster) {
process.stdout.write(kMasterLogPrefix);
} else {
const id = cluster.worker.id;

if(!workerLogPrefix.hasOwnProperty(id)) {
workerLogPrefix[id] = `[Cluster:${id}] `;
}

process.stdout.write(workerLogPrefix[id]);
if (lvl <= logLevel) {
const prefix = cluster.isMaster ? "[Cluster:M] " : `[Cluster:${cluster.worker.id}] `;
console.log(`${prefix}${msg}`);
}

console.log(msg);
};

/**
Expand Down
10 changes: 5 additions & 5 deletions lib/server/command_processor.js
Original file line number Diff line number Diff line change
Expand Up @@ -327,7 +327,7 @@ class CommandProcessor extends Duplex {
item.exists = true;
item.size = info.size;
this._sendFileQueueCount++;
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Adding file to send queue, size ${info.size}`);
helpers.log(consts.LOG_DBG, `Adding file to send queue, size ${info.size}`);
}
catch(err) {
// Ignore error
Expand All @@ -349,13 +349,13 @@ class CommandProcessor extends Duplex {
*/
async _onTransactionStart(guid, hash) {
if(this._trx !== null) {
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, "Cancel previous transaction");
helpers.log(consts.LOG_DBG, "Cancel previous transaction");
this._trx = null;
}

this._trx = await this[kCache].createPutTransaction(guid, hash);
this._trx.clientAddress = this[kSource].clientAddress;
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `Start transaction for GUID: ${helpers.GUIDBufferToString(guid)} Hash: ${hash.toString('hex')}`);
helpers.log(consts.LOG_DBG, `Start transaction for GUID: ${helpers.GUIDBufferToString(guid)} Hash: ${hash.toString('hex')}`);
}

/**
Expand All @@ -370,7 +370,7 @@ class CommandProcessor extends Duplex {

await this[kCache].endPutTransaction(this._trx);
this.emit('onTransactionEnd', this._trx);
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `End transaction for GUID: ${helpers.GUIDBufferToString(this._trx.guid)} Hash: ${this._trx.hash.toString('hex')}`);
helpers.log(consts.LOG_DBG, `End transaction for GUID: ${helpers.GUIDBufferToString(this._trx.guid)} Hash: ${this._trx.hash.toString('hex')}`);
this._trx = null;
}

Expand All @@ -395,7 +395,7 @@ class CommandProcessor extends Duplex {
}
});

helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, `PUT rejected from non-whitelisted IP: ${this._trx.clientAddress}`);
helpers.log(consts.LOG_DBG, `PUT rejected from non-whitelisted IP: ${this._trx.clientAddress}`);
}

this._putStream.promiseWrite = promisify(this._putStream.write).bind(this._putStream);
Expand Down
4 changes: 2 additions & 2 deletions lib/unity_cache_server.js
Original file line number Diff line number Diff line change
Expand Up @@ -232,7 +232,7 @@ class UnityCacheServer {
}

cache.on('cleanup_delete_item', item => {
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, item);
helpers.log(consts.LOG_DBG, item);
});

cache.on('cleanup_delete_finish', data => {
Expand Down Expand Up @@ -280,7 +280,7 @@ class UnityCacheServer {

class FakeSpinner {
set text(msg) {
helpers.shouldLog(consts.LOG_DBG) && helpers.log(consts.LOG_DBG, msg);
helpers.log(consts.LOG_DBG, msg);
}

start(msg) { this.text = msg; };
Expand Down

0 comments on commit ee0eb97

Please sign in to comment.