From cd69c93b1764a46cf0b17ff050f8896c0c217850 Mon Sep 17 00:00:00 2001
From: Marcus Cavanaugh
Date: Tue, 31 Mar 2015 13:53:20 -0700
Subject: [PATCH] Bug 976850 - [email/backend] Refactor GELAM tests. r=asuth

---
 apps/email/js/ext/accountcommon.js | 94 +-
 apps/email/js/ext/accountmixins.js | 26 +-
 apps/email/js/ext/activesync/account.js | 100 +-
 apps/email/js/ext/activesync/configurator.js | 47 +-
 apps/email/js/ext/activesync/folder.js | 71 +-
 apps/email/js/ext/activesync/jobs.js | 30 +-
 apps/email/js/ext/composite/account.js | 23 +-
 apps/email/js/ext/composite/configurator.js | 6 +-
 apps/email/js/ext/composite/incoming.js | 72 +-
 apps/email/js/ext/cronsync.js | 92 +-
 apps/email/js/ext/disaster-recovery.js | 10 +-
 apps/email/js/ext/errbackoff.js | 37 +-
 .../js/ext/ext/activesync-lib/protocol.js | 2 +-
 apps/email/js/ext/ext/axeshim-browserbox.js | 27 +-
 apps/email/js/ext/ext/axeshim-smtpclient.js | 17 +-
 apps/email/js/ext/ext/browserbox.js | 2 +-
 apps/email/js/ext/ext/co.js | 236 ++
 apps/email/js/ext/ext/equal.js | 120 ++
 apps/email/js/ext/ext/mailbuild.js | 2 +-
 apps/email/js/ext/ext/mimefuncs.js | 2 +-
 apps/email/js/ext/ext/rdcommon/deferred.js | 68 -
 apps/email/js/ext/ext/rdcommon/extransform.js | 213 --
 apps/email/js/ext/ext/rdcommon/log.js | 1911 -----------------
 apps/email/js/ext/ext/rdcommon/logreaper.js | 169 --
 apps/email/js/ext/ext/rdcommon/microtime.js | 16 -
 apps/email/js/ext/ext/rdcommon/testcontext.js | 741 -------
 apps/email/js/ext/ext/rdcommon/testdriver.js | 779 -------
 apps/email/js/ext/imap/account.js | 83 +-
 apps/email/js/ext/imap/client.js | 12 +-
 apps/email/js/ext/imap/folder.js | 103 +-
 apps/email/js/ext/imap/imapchew.js | 5 +-
 apps/email/js/ext/imap/jobs.js | 45 +-
 apps/email/js/ext/imap/probe.js | 17 +-
 apps/email/js/ext/jobmixins.js | 21 +-
 apps/email/js/ext/logic.js | 895 ++++++++
 apps/email/js/ext/mailapi.js | 4 +
 apps/email/js/ext/mailbridge.js | 50 +-
 apps/email/js/ext/mailchew.js | 13 +-
 apps/email/js/ext/mailslice.js | 216 +-
 apps/email/js/ext/mailuniverse.js | 286 ++-
 apps/email/js/ext/oauth.js | 24 +-
 apps/email/js/ext/pop3/account.js | 33 +-
 apps/email/js/ext/pop3/jobs.js | 32 +-
 apps/email/js/ext/pop3/pop3.js | 31 +-
 apps/email/js/ext/pop3/probe.js | 25 +-
 apps/email/js/ext/pop3/sync.js | 61 +-
 apps/email/js/ext/searchfilter.js | 42 +-
 apps/email/js/ext/slog.js | 264 ---
 apps/email/js/ext/smtp/account.js | 30 +-
 apps/email/js/ext/smtp/client.js | 12 +-
 apps/email/js/ext/smtp/probe.js | 18 +-
 apps/email/js/ext/worker-config.js | 3 +-
 apps/email/js/ext/worker-setup.js | 4 +-
 53 files changed, 2047 insertions(+), 5195 deletions(-)
 create mode 100644 apps/email/js/ext/ext/co.js
 create mode 100644 apps/email/js/ext/ext/equal.js
 delete mode 100644 apps/email/js/ext/ext/rdcommon/deferred.js
 delete mode 100644 apps/email/js/ext/ext/rdcommon/extransform.js
 delete mode 100644 apps/email/js/ext/ext/rdcommon/log.js
 delete mode 100644 apps/email/js/ext/ext/rdcommon/logreaper.js
 delete mode 100644 apps/email/js/ext/ext/rdcommon/microtime.js
 delete mode 100644 apps/email/js/ext/ext/rdcommon/testcontext.js
 delete mode 100644 apps/email/js/ext/ext/rdcommon/testdriver.js
 create mode 100644 apps/email/js/ext/logic.js
 delete mode 100644 apps/email/js/ext/slog.js

diff --git a/apps/email/js/ext/accountcommon.js b/apps/email/js/ext/accountcommon.js
index 1f2cb842730a..9d7367d159ad 100644
--- a/apps/email/js/ext/accountcommon.js
+++ b/apps/email/js/ext/accountcommon.js
@@ -5,7 +5,7 @@
 define(
   [
     './a64',
-    './slog',
+    'logic',
     './allback',
     'require',
     'module',
@@ -13,7 +13,7 @@
define( ], function( $a64, - slog, + logic, allback, require, $module, @@ -309,9 +309,9 @@ exports.fillConfigPlaceholders = fillConfigPlaceholders; * }, * } */ -function Autoconfigurator(_LOG) { - this._LOG = _LOG; +function Autoconfigurator() { this.timeout = AUTOCONFIG_TIMEOUT_MS; + logic.defineScope(this, 'Autoconfigurator'); } exports.Autoconfigurator = Autoconfigurator; Autoconfigurator.prototype = { @@ -352,14 +352,15 @@ Autoconfigurator.prototype = { */ _getXmlConfig: function getXmlConfig(url) { return new Promise(function(resolve, reject) { - slog.log('autoconfig.xhr:start', { method: 'GET', url: url }); + + var scope = logic.subscope(this, { method: 'GET', url: url }); + logic(scope, 'xhr:start'); var xhr = new XMLHttpRequest({mozSystem: true}); xhr.open('GET', url, true); xhr.timeout = this.timeout; xhr.onload = function() { - slog.log('autoconfig.xhr:end', { method: 'GET', url: url, - status: xhr.status }); + logic(scope, 'xhr:end', { status: xhr.status }); if (xhr.status < 200 || xhr.status >= 300) { reject('status' + xhr.status); return; @@ -391,14 +392,12 @@ Autoconfigurator.prototype = { // to only assign that once until is fixed. xhr.ontimeout = function() { - slog.log('autoconfig.xhr:end', { method: 'GET', url: url, - status: 'timeout' }); + logic(scope, 'xhr:end', { status: 'timeout' }); reject('timeout'); }; xhr.onerror = function() { - slog.log('autoconfig.xhr:end', { method: 'GET', url: url, - status: 'error' }); + logic(scope, 'xhr:end', { status: 'error' }); reject('error'); }; @@ -409,8 +408,7 @@ Autoconfigurator.prototype = { xhr.send(); } catch(e) { - slog.log('autoconfig.xhr:end', { method: 'GET', url: url, - status: 'sync-error' }); + logic(scope, 'xhr:end', { status: 'sync-error' }); reject('status404'); } }.bind(this)); @@ -436,8 +434,8 @@ Autoconfigurator.prototype = { */ _checkAutodiscoverUrl: function(url) { return new Promise(function(resolve, reject) { - slog.log('autoconfig.autodiscoverProbe:start', - { method: 'POST', url: url }); + var scope = logic.subscope(this, { method: 'POST', url: url }); + logic(scope, 'autodiscoverProbe:start'); var xhr = new XMLHttpRequest({mozSystem: true}); xhr.open('POST', url, true); xhr.timeout = this.timeout; @@ -452,8 +450,7 @@ Autoconfigurator.prototype = { }.bind(this); xhr.onload = function() { - slog.log('autoconfig.autodiscoverProbe:end', - { method: 'POST', url: url, status: xhr.status }); + logic(scope, 'autodiscoverProbe:end', { status: xhr.status }); if (xhr.status === 401) { victory(); return; @@ -462,14 +459,12 @@ Autoconfigurator.prototype = { }; xhr.ontimeout = function() { - slog.log('autoconfig.autodiscoverProbe:end', - { method: 'POST', url: url, status: 'timeout' }); + logic(scope, 'autodiscoverProbe:end', { status: 'timeout' }); reject('timeout'); }; xhr.onerror = function() { - slog.log('autoconfig.autodiscoverProbe:end', - { method: 'POST', url: url, status: 'error' }); + logic(scope, 'autodiscoverProbe:end', { status: 'error' }); reject('error'); }; @@ -477,8 +472,7 @@ Autoconfigurator.prototype = { xhr.send(null); } catch(e) { - slog.log('autoconfig.autodiscoverProbe:end', - { method: 'POST', url: url, status: 'sync-error' }); + logic(scope, 'autodiscoverProbe:end', { status: 'sync-error' }); reject('status404'); } }.bind(this)); @@ -536,7 +530,9 @@ Autoconfigurator.prototype = { */ _getMX: function getMX(domain) { return new Promise(function(resolve, reject) { - slog.log('autoconfig.mxLookup:begin', { domain: domain }); + + var scope = logic.subscope(this, { domain: domain }); + logic(scope, 
'mxLookup:begin'); var xhr = new XMLHttpRequest({mozSystem: true}); xhr.open('GET', 'https://live.mozillamessaging.com/dns/mx/' + encodeURIComponent(domain), true); @@ -558,20 +554,17 @@ Autoconfigurator.prototype = { } } } - slog.log('autoconfig.mxLookup:end', - { domain: domain, 'raw': normStr, normalized: mxDomain, - reporting: reportDomain }); + logic(scope, 'mxLookup:end', + { 'raw': normStr, normalized: mxDomain, reporting: reportDomain }); resolve(reportDomain); }; xhr.ontimeout = function() { - slog.log('autoconfig.mxLookup:end', - { domain: domain, status: 'timeout' }); + logic(scope, 'mxLookup:end', { status: 'timeout' }); reject('timeout'); }; xhr.onerror = function() { - slog.log('autoconfig.mxLookup:end', - { domain: domain, status: 'error' }); + logic(scope, 'mxLookup:end', { status: 'error' }); reject('error'); }; @@ -683,7 +676,8 @@ Autoconfigurator.prototype = { var emailParts = emailAddress.split('@'); var emailLocalPart = emailParts[0], emailDomainPart = emailParts[1]; var domain = emailDomainPart.toLowerCase(); - slog.log('autoconfig:begin', { domain: domain }); + var scope = logic.subscope(this, { domain: domain }); + logic(scope, 'autoconfig:begin'); // Call this when we find a usable config setting to perform appropriate // normalization, logging, and promise resolution. @@ -700,31 +694,25 @@ Autoconfigurator.prototype = { } else { result = 'no-config-info'; } - slog.log( - 'autoconfig:end', - { - domain: domain, result: result, source: source, - configInfo: configInfo - }); + logic(scope, 'autoconfig:end', { + result: result, + source: source, + configInfo: configInfo + }); resolve({ result: result, source: source, configInfo: configInfo }); }.bind(this); // Call this if we can't find a configuration. var failsafeFailure = function(err) { - slog.error( - 'autoconfig:end', - { - domain: domain, - err: { - message: err && err.message, - stack: err && err.stack - } - }); + logic(this, 'autoconfig:end', { err: { + message: err && err.message, + stack: err && err.stack + }}); resolve({ result: 'no-config-info', configInfo: null }); }.bind(this); // Helper that turns a rejection into a null and outputs a log entry. var coerceRejectionToNull = function(err) { - slog.log('autoconfig:coerceRejection', { err: err }); + logic(scope, 'autoconfig:coerceRejection', { err: err }); return null; }.bind(this); @@ -834,12 +822,12 @@ Autoconfigurator.prototype = { var config = results.configInfo; requireConfigurator(config.type, function (mod) { mod.configurator.tryToCreateAccount(universe, userDetails, config, - callback, this._LOG); + callback); }); return; } - slog.warn('autoconfig.legacyCreateFail', { result: results.result }); + logic(this, 'legacyCreateFail', { result: results.result }); // need-oauth2 is not supported via this code-path; coerce to a config // failure... 
callback('no-config-info'); @@ -867,11 +855,11 @@ function recreateAccount(universe, oldVersion, accountInfo, callback) { } exports.recreateAccount = recreateAccount; -function tryToManuallyCreateAccount(universe, userDetails, domainInfo, callback, - _LOG) { +function tryToManuallyCreateAccount(universe, userDetails, domainInfo, + callback) { requireConfigurator(domainInfo.type, function (mod) { mod.configurator.tryToCreateAccount(universe, userDetails, domainInfo, - callback, _LOG); + callback); }); } exports.tryToManuallyCreateAccount = tryToManuallyCreateAccount; diff --git a/apps/email/js/ext/accountmixins.js b/apps/email/js/ext/accountmixins.js index 205c479328f0..dbea76f42608 100644 --- a/apps/email/js/ext/accountmixins.js +++ b/apps/email/js/ext/accountmixins.js @@ -1,6 +1,7 @@ define(function(require, exports) { var DisasterRecovery = require('./disaster-recovery'); +var logic = require('logic'); /** * The no-op operation for job operations that are not implemented. @@ -106,15 +107,24 @@ exports.runOp = function runOp(op, mode, callback) { }.bind(this); DisasterRecovery.setCurrentAccountOp(this, op, jobCompletedCallback); - this._LOG.runOp_begin(mode, op.type, null, op); - // _LOG supports wrapping calls, but we want to be able to strip out all - // logging, and that wouldn't work. + + // Legacy tests: + logic(this, 'runOp_begin', { mode: mode, type: op.type, op: op }); + // New-style tests: + Object.defineProperty(op, '_logicAsyncEvent', { + configurable: true, + enumerable: false, // So that we don't try to JSONify it. + value: logic.startAsync(this, 'runOp', { + mode: mode, type: op.type, op: op + }) + }); + try { method.call(this._jobDriver, op, jobCompletedCallback); } catch (ex) { DisasterRecovery.clearCurrentAccountOp(this); - this._LOG.opError(mode, op.type, ex); + logic(this, 'opError', { mode: mode, type: op.type, ex: ex }); } }; @@ -218,11 +228,12 @@ exports.normalizeFolderHierarchy = function() { */ exports.saveAccountState = function(reuseTrans, callback, reason) { if (!this._alive) { - this._LOG.accountDeleted('saveAccountState'); + logic(this, 'accountDeleted', { reason: 'saveAccountState' }); return null; } - this._LOG.saveAccountState_begin(reason, null); + logic(this, 'saveAccountState_begin', { reason: reason, + folderSaveCount: null }); // Indicate save is active, in case something, like // signaling the end of a sync, needs to run after @@ -249,7 +260,8 @@ exports.saveAccountState = function(reuseTrans, callback, reason) { this._deadFolderIds, function stateSaved() { this._saveAccountStateActive = false; - this._LOG.saveAccountState_end(reason, folderSaveCount); + logic(this, 'saveAccountState_end', { reason: reason, + folderSaveCount: folderSaveCount }); // NB: we used to log when the save completed, but it ended up being // annoying to the unit tests since we don't block our actions on diff --git a/apps/email/js/ext/activesync/account.js b/apps/email/js/ext/activesync/account.js index dae61986607a..911a1020de24 100644 --- a/apps/email/js/ext/activesync/account.js +++ b/apps/email/js/ext/activesync/account.js @@ -4,7 +4,7 @@ define( [ - 'rdcommon/log', + 'logic', '../a64', '../accountmixins', '../mailslice', @@ -21,7 +21,7 @@ define( 'exports' ], function( - $log, + logic, $a64, $acctmixins, $mailslice, @@ -77,7 +77,7 @@ function lazyConnection(cbIndex, fn, failString) { } function ActiveSyncAccount(universe, accountDef, folderInfos, dbConn, - receiveProtoConn, _parentLog) { + receiveProtoConn) { this.universe = universe; this.id = accountDef.id; 
this.accountDef = accountDef; @@ -95,7 +95,8 @@ function ActiveSyncAccount(universe, accountDef, folderInfos, dbConn, this._db = dbConn; - this._LOG = LOGFAB.ActiveSyncAccount(this, _parentLog, this.id); + logic.defineScope(this, 'Account', { accountId: this.id, + accountType: 'activesync' }); if (receiveProtoConn) { this.conn = receiveProtoConn; @@ -132,7 +133,7 @@ function ActiveSyncAccount(universe, accountDef, folderInfos, dbConn, this._folderStorages[folderId] = new $mailslice.FolderStorage(this, folderId, folderInfo, this._db, - $asfolder.ActiveSyncFolderSyncer, this._LOG); + $asfolder.ActiveSyncFolderSyncer); this._serverIdToFolderId[folderInfo.$meta.serverId] = folderId; this.folders.push(folderInfo.$meta); } @@ -235,15 +236,12 @@ ActiveSyncAccount.prototype = { _attachLoggerToConnection: function(conn) { - // Use a somewhat unique-ish value for the id so that if we re-create the - // connection it's obvious it's different from the previous connection. - var logger = LOGFAB.ActiveSyncConnection(conn, this._LOG, - Date.now() % 1000); - if (logger.logLevel === 'safe') { - conn.onmessage = this._onmessage_safe.bind(this, logger); - } - else if (logger.logLevel === 'dangerous') { - conn.onmessage = this._onmessage_dangerous.bind(this, logger); + logic.defineScope(conn, 'ActiveSyncConnection', + { connectionId: logic.uniqueId() }); + if (!logic.isCensored) { + conn.onmessage = this._onmessage_dangerous.bind(this, conn); + } else { + conn.onmessage = this._onmessage_safe.bind(this, conn); } }, @@ -251,13 +249,17 @@ ActiveSyncAccount.prototype = { * Basic onmessage ActiveSync protocol logging function. This does not * include user data and is intended for safe circular logging purposes. */ - _onmessage_safe: function onmessage(logger, + _onmessage_safe: function onmessage(conn, type, special, xhr, params, extraHeaders, sentData, response) { if (type === 'options') { - logger.options(special, xhr.status, response); + logic(conn, 'options', { special: special, + status: xhr.status, + response: response }); } else { - logger.command(type, special, xhr.status); + logic(conn, 'command', { type: type, + special: special, + status: xhr.status }); } }, @@ -266,10 +268,12 @@ ActiveSyncAccount.prototype = { * intended to log user data for unit testing purposes or very specialized * debugging only. 
*/ - _onmessage_dangerous: function onmessage(logger, + _onmessage_dangerous: function onmessage(conn, type, special, xhr, params, extraHeaders, sentData, response) { if (type === 'options') { - logger.options(special, xhr.status, response); + logic(conn, 'options', { special: special, + status: xhr.status, + response: response }); } else { var sentXML, receivedXML; @@ -291,8 +295,14 @@ ActiveSyncAccount.prototype = { receivedXML = 'parse problem'; } } - logger.command(type, special, xhr.status, params, extraHeaders, sentXML, - receivedXML); + + logic(conn, 'command', { type: type, + special: special, + status: xhr.status, + params: params, + extraHeaders: extraHeaders, + sentXML: sentXML, + receivedXML: receivedXML }); } }, @@ -368,7 +378,6 @@ ActiveSyncAccount.prototype = { }, shutdown: function asa_shutdown(callback) { - this._LOG.__die(); if (callback) callback(); }, @@ -381,7 +390,7 @@ ActiveSyncAccount.prototype = { sliceFolderMessages: function asa_sliceFolderMessages(folderId, bridgeHandle) { var storage = this._folderStorages[folderId], - slice = new $mailslice.MailSlice(bridgeHandle, storage, this._LOG); + slice = new $mailslice.MailSlice(bridgeHandle, storage); storage.sliceOpenMostRecent(slice); }, @@ -389,7 +398,7 @@ ActiveSyncAccount.prototype = { searchFolderMessages: function(folderId, bridgeHandle, phrase, whatToSearch) { var storage = this._folderStorages[folderId], slice = new $searchfilter.SearchSlice(bridgeHandle, storage, phrase, - whatToSearch, this._LOG); + whatToSearch); storage.sliceOpenSearch(slice); return slice; }, @@ -602,7 +611,7 @@ ActiveSyncAccount.prototype = { console.log('Added folder ' + displayName + ' (' + folderId + ')'); this._folderStorages[folderId] = new $mailslice.FolderStorage(this, folderId, folderInfo, this._db, - $asfolder.ActiveSyncFolderSyncer, this._LOG); + $asfolder.ActiveSyncFolderSyncer); this._serverIdToFolderId[serverId] = folderId; var folderMeta = folderInfo.$meta; @@ -658,7 +667,7 @@ ActiveSyncAccount.prototype = { * complete, taking the new folder storage */ _recreateFolder: function asa__recreateFolder(folderId, callback) { - this._LOG.recreateFolder(folderId); + logic(this, 'recreateFolder', { folderId: folderId }); var folderInfo = this._folderInfos[folderId]; folderInfo.$impl = { nextId: 0, @@ -678,8 +687,7 @@ ActiveSyncAccount.prototype = { this.saveAccountState(null, function() { var newStorage = new $mailslice.FolderStorage(self, folderId, folderInfo, self._db, - $asfolder.ActiveSyncFolderSyncer, - self._LOG); + $asfolder.ActiveSyncFolderSyncer); for (var iter in Iterator(self._folderStorages[folderId]._slices)) { var slice = iter[1]; slice._storage = newStorage; @@ -1021,40 +1029,4 @@ ActiveSyncAccount.prototype = { } }; -var LOGFAB = exports.LOGFAB = $log.register($module, { - ActiveSyncAccount: { - type: $log.ACCOUNT, - events: { - createFolder: {}, - deleteFolder: {}, - recreateFolder: { id: false }, - /** - * XXX: this is really an error/warning, but to make the logging less - * confusing, treat it as an event. 
- */ - accountDeleted: { where: false }, - }, - asyncJobs: { - runOp: { mode: true, type: true, error: true, op: false }, - saveAccountState: { reason: true, folderSaveCount: true }, - }, - errors: { - opError: { mode: false, type: false, ex: $log.EXCEPTION }, - } - }, - - ActiveSyncConnection: { - type: $log.CONNECTION, - events: { - options: { special: false, status: false, result: false }, - command: { name: false, special: false, status: false }, - }, - TEST_ONLY_events: { - options: {}, - command: { params: false, extraHeaders: false, sent: false, - response: false }, - }, - }, -}); - }); // end define diff --git a/apps/email/js/ext/activesync/configurator.js b/apps/email/js/ext/activesync/configurator.js index 91ae94844c6d..df44b2c679f8 100644 --- a/apps/email/js/ext/activesync/configurator.js +++ b/apps/email/js/ext/activesync/configurator.js @@ -4,8 +4,7 @@ define( [ - 'rdcommon/log', - 'slog', + 'logic', '../accountcommon', '../a64', './account', @@ -15,8 +14,7 @@ define( 'exports' ], function( - $log, - slog, + logic, $accountcommon, $a64, $asacct, @@ -72,12 +70,14 @@ function checkServerCertificate(url, callback) { }; } +var scope = logic.scope('ActivesyncConfigurator'); + exports.account = $asacct; exports.configurator = { timeout: 30 * 1000, _getFullDetailsFromAutodiscover: function($asproto, userDetails, url, callback) { - slog.log('activesync.configurator.autodiscover:begin', { url: url }); + logic(scope, 'autodiscover:begin', { url: url }); $asproto.raw_autodiscover( url, userDetails.emailAddress, userDetails.password, self.timeout, /* redirects are okay */ false, @@ -95,16 +95,14 @@ exports.configurator = { failureDetails.status = error.status; } else if (error instanceof $asproto.AutodiscoverDomainError) { - slog.log('activesync.configurator.autodiscover.error', - { message: error.message }); + logic(scope, 'autodiscover.error', { message: error.message }); } - slog.log('activesync.configurator.autodiscover:end', - { url: url, err: failureType }); + logic(scope, 'autodiscover:end', { url: url, err: failureType }); callback(failureType, null, failureDetails); return; } - slog.log('activesync.configurator.autodiscover:end', - { url: url, server: config.mobileSyncServer.url }); + logic(scope, 'autodiscover:end', + { url: url, server: config.mobileSyncServer.url }); var autoconfig = { type: 'activesync', @@ -130,8 +128,7 @@ exports.configurator = { * conceivable that in the future the manual config mode could use this * path. */ - tryToCreateAccount: function(universe, userDetails, domainInfo, callback, - _LOG) { + tryToCreateAccount: function(universe, userDetails, domainInfo, callback) { require(['activesync/protocol'], function ($asproto) { if (domainInfo.incoming.autodiscoverEndpoint) { this._getFullDetailsFromAutodiscover( @@ -145,21 +142,19 @@ exports.configurator = { // Otherwise we have a config and should continue the creation // process. this._createAccountUsingFullInfo( - universe, userDetails, fullConfigInfo, callback, $asproto, - _LOG); + universe, userDetails, fullConfigInfo, callback, $asproto); }.bind(this)); return; } // We should have full config info then. Just call direct in. 
this._createAccountUsingFullInfo(universe, userDetails, domainInfo, - callback, $asproto, _LOG); + callback, $asproto); }.bind(this)); }, _createAccountUsingFullInfo: function(universe, userDetails, domainInfo, - callback, $asproto, _LOG) { - slog.log('activesync.configurator.create:begin', - { server: domainInfo.incoming.server }); + callback, $asproto) { + logic(scope, 'create:begin', { server: domainInfo.incoming.server }); var credentials = { username: domainInfo.incoming.username, password: userDetails.password, @@ -212,8 +207,11 @@ exports.configurator = { }); return; } - slog.log('activesync.configurator.create:end', - { server: domainInfo.incoming.server, err: failureType }); + logic(scope, 'create:end', { + server: domainInfo.incoming.server, + err: failureType + }); + callback(failureType, null, failureDetails); return; } @@ -251,8 +249,11 @@ exports.configurator = { ] }; - slog.log('activesync.configurator.create:end', - { server: domainInfo.incoming.server, id: accountId }); + logic(scope, 'create:end', { + server: domainInfo.incoming.server, + id: accountId + }); + self._loadAccount(universe, accountDef, conn, function (account) { callback(null, account, null); }); diff --git a/apps/email/js/ext/activesync/folder.js b/apps/email/js/ext/activesync/folder.js index 9f04fe9083a2..54550641b612 100644 --- a/apps/email/js/ext/activesync/folder.js +++ b/apps/email/js/ext/activesync/folder.js @@ -1,6 +1,6 @@ define( [ - 'rdcommon/log', + 'logic', '../date', '../syncbase', '../allback', @@ -17,7 +17,7 @@ define( 'exports' ], function( - $log, + logic, $date, $sync, allback, @@ -110,10 +110,12 @@ function lazyConnection(cbIndex, fn, failString) { } -function ActiveSyncFolderConn(account, storage, _parentLog) { +function ActiveSyncFolderConn(account, storage) { this._account = account; this._storage = storage; - this._LOG = LOGFAB.ActiveSyncFolderConn(this, _parentLog, storage.folderId); + logic.defineScope(this, 'ActiveSyncFolderConn', + { folderId: storage.folderId, + accountId: account.id }); this.folderMeta = storage.folderMeta; @@ -364,7 +366,7 @@ ActiveSyncFolderConn.prototype = { else { filterType = Type.NoFilter; } - folderConn._LOG.inferFilterType(filterType); + logic(folderConn, 'inferFilterType', { filterType: filterType }); callback(null, filterType); }); return; @@ -375,7 +377,7 @@ ActiveSyncFolderConn.prototype = { // round-trip where we'd normally get a zero syncKey from the server. folderConn.syncKey = '0'; } - folderConn._LOG.inferFilterType(filterType); + logic(folderConn, 'inferFilterType', { filterType: filterType }); callback(null, filterType); }); }), @@ -1067,7 +1069,7 @@ ActiveSyncFolderConn.prototype = { var type = snippetOnly ? 'plain' : bodyRep.type; var data = $mailchew.processMessageContent(bodyContent, type, !snippetOnly, - true, this._LOG); + true); header.snippet = data.snippet; bodyRep.isDownloaded = !snippetOnly; @@ -1096,7 +1098,7 @@ ActiveSyncFolderConn.prototype = { changedMessages = 0, deletedMessages = 0; - this._LOG.sync_begin(null, null, null); + logic(this, 'sync_begin'); var self = this; this._enumerateFolderChanges(function (error, added, changed, deleted, moreAvailable) { @@ -1107,13 +1109,17 @@ ActiveSyncFolderConn.prototype = { // If we got a bad sync key, we'll end up creating a new connection, // so just clear out the old storage to make this connection unusable. 
folderConn._storage = null; - folderConn._LOG.sync_end(null, null, null); + logic(folderConn, 'sync_end', { + full: null, changed: null, deleted: null + }); }); return; } else if (error) { // Sync is over! - folderConn._LOG.sync_end(null, null, null); + logic(folderConn, 'sync_end', { + full: null, changed: null, deleted: null + }); doneCallback(error); return; } @@ -1180,8 +1186,11 @@ ActiveSyncFolderConn.prototype = { // Note: For the second argument here, we report the number of // messages we saw that *changed*. This differs from IMAP, which // reports the number of messages it *saw*. - folderConn._LOG.sync_end(addedMessages, changedMessages, - deletedMessages); + logic(folderConn, 'sync_end', { + full: addedMessages, + changed: changedMessages, + deleted: deletedMessages + }); storage.markSyncRange($sync.OLDEST_SYNC_DATE, accuracyStamp, 'XXX', accuracyStamp); doneCallback(null, null, messagesSeen); @@ -1368,14 +1377,15 @@ ActiveSyncFolderConn.prototype = { }), }; -function ActiveSyncFolderSyncer(account, folderStorage, _parentLog) { +function ActiveSyncFolderSyncer(account, folderStorage) { this._account = account; this.folderStorage = folderStorage; - this._LOG = LOGFAB.ActiveSyncFolderSyncer(this, _parentLog, - folderStorage.folderId); + logic.defineScope(this, 'ActiveSyncFolderSyncer', + { accountId: account.id, + folderId: folderStorage.folderId }); - this.folderConn = new ActiveSyncFolderConn(account, folderStorage, this._LOG); + this.folderConn = new ActiveSyncFolderConn(account, folderStorage); } exports.ActiveSyncFolderSyncer = ActiveSyncFolderSyncer; ActiveSyncFolderSyncer.prototype = { @@ -1472,34 +1482,7 @@ ActiveSyncFolderSyncer.prototype = { shutdown: function() { this.folderConn.shutdown(); - this._LOG.__die(); - }, + } }; -var LOGFAB = exports.LOGFAB = $log.register($module, { - ActiveSyncFolderConn: { - type: $log.CONNECTION, - subtype: $log.CLIENT, - events: { - inferFilterType: { filterType: false }, - }, - asyncJobs: { - sync: { - newMessages: true, changedMessages: true, deletedMessages: true, - }, - }, - errors: { - htmlParseError: { ex: $log.EXCEPTION }, - htmlSnippetError: { ex: $log.EXCEPTION }, - textChewError: { ex: $log.EXCEPTION }, - textSnippetError: { ex: $log.EXCEPTION }, - }, - }, - ActiveSyncFolderSyncer: { - type: $log.DATABASE, - events: { - } - }, -}); - }); // end define diff --git a/apps/email/js/ext/activesync/jobs.js b/apps/email/js/ext/activesync/jobs.js index 57bda3b0a3fc..628ee0a69898 100644 --- a/apps/email/js/ext/activesync/jobs.js +++ b/apps/email/js/ext/activesync/jobs.js @@ -1,6 +1,6 @@ define( [ - 'rdcommon/log', + 'logic', 'mix', '../jobmixins', '../drafts/jobs', @@ -12,7 +12,7 @@ define( 'exports' ], function( - $log, + logic, mix, $jobmixins, draftsJobs, @@ -45,14 +45,15 @@ function lazyConnection(cbIndex, fn, failString) { }; } -function ActiveSyncJobDriver(account, state, _parentLog) { +function ActiveSyncJobDriver(account, state) { this.account = account; // XXX for simplicity for now, let's assume that ActiveSync GUID's are // maintained across folder moves. 
this.resilientServerIds = true; this._heldMutexReleasers = []; - this._LOG = LOGFAB.ActiveSyncJobDriver(this, _parentLog, this.account.id); + logic.defineScope(this, 'ActiveSyncJobDriver', + { accountId: this.account.id }); this._state = state; // (we only need to use one as a proxy for initialization) @@ -89,7 +90,7 @@ ActiveSyncJobDriver.prototype = { callback(syncer.folderConn, storage); } catch (ex) { - self._LOG.callbackErr(ex); + logic(self, 'callbackErr', { ex: ex }); } }); } else { @@ -97,7 +98,7 @@ ActiveSyncJobDriver.prototype = { callback(syncer.folderConn, storage); } catch (ex) { - self._LOG.callbackErr(ex); + logic(self, 'callbackErr', { ex: ex }); } } }); @@ -468,21 +469,4 @@ ActiveSyncJobDriver.prototype = { mix(ActiveSyncJobDriver.prototype, draftsJobs.draftsMixins); -var LOGFAB = exports.LOGFAB = $log.register($module, { - ActiveSyncJobDriver: { - type: $log.DAEMON, - events: { - savedAttachment: { storage: true, mimeType: true, size: true }, - saveFailure: { storage: false, mimeType: false, error: false }, - }, - TEST_ONLY_events: { - saveFailure: { filename: false }, - }, - errors: { - callbackErr: { ex: $log.EXCEPTION }, - }, - - }, -}); - }); // end define diff --git a/apps/email/js/ext/composite/account.js b/apps/email/js/ext/composite/account.js index 536946a72e27..1a3de8ab9f55 100644 --- a/apps/email/js/ext/composite/account.js +++ b/apps/email/js/ext/composite/account.js @@ -4,7 +4,7 @@ define( [ - 'rdcommon/log', + 'logic', '../accountcommon', '../a64', '../accountmixins', @@ -15,7 +15,7 @@ define( 'exports' ], function( - $log, + logic, $accountcommon, $a64, $acctmixins, @@ -39,11 +39,11 @@ var PIECE_ACCOUNT_TYPE_TO_CLASS = { * fact that IMAP and SMTP are not actually bundled tightly together. */ function CompositeAccount(universe, accountDef, folderInfo, dbConn, - receiveProtoConn, - _LOG) { + receiveProtoConn) { this.universe = universe; this.id = accountDef.id; this.accountDef = accountDef; + logic.defineScope(this, 'Account', { accountId: this.id }); // Currently we don't persist the disabled state of an account because it's // easier for the UI to be edge-triggered right now and ensure that the @@ -63,29 +63,22 @@ function CompositeAccount(universe, accountDef, folderInfo, dbConn, this.identities = accountDef.identities; if (!PIECE_ACCOUNT_TYPE_TO_CLASS.hasOwnProperty(accountDef.receiveType)) { - _LOG.badAccountType(accountDef.receiveType); + logic(this, 'badAccountType', { type: accountDef.receiveType }); } if (!PIECE_ACCOUNT_TYPE_TO_CLASS.hasOwnProperty(accountDef.sendType)) { - _LOG.badAccountType(accountDef.sendType); + logic(this, 'badAccountType', { type: accountDef.sendType }); } this._receivePiece = new PIECE_ACCOUNT_TYPE_TO_CLASS[accountDef.receiveType]( universe, this, accountDef.id, accountDef.credentials, accountDef.receiveConnInfo, - folderInfo, dbConn, _LOG, receiveProtoConn); + folderInfo, dbConn, receiveProtoConn); this._sendPiece = new PIECE_ACCOUNT_TYPE_TO_CLASS[accountDef.sendType]( universe, this, accountDef.id, accountDef.credentials, - accountDef.sendConnInfo, dbConn, _LOG); - - // We used to hold onto the Universe's logger, but that wasn't right. The - // receiving account piece is usually what we want. In this case we're doing - // this so that MailUniverse can report the runOp_end for improved - // correctness. In the "slog" future we'll just use a common log object for - // this CompositeAccount and all the pieces, which will make this non-sketchy. 
- this._LOG = this._receivePiece._LOG; + accountDef.sendConnInfo, dbConn); // expose public lists that are always manipulated in place. this.folders = this._receivePiece.folders; diff --git a/apps/email/js/ext/composite/configurator.js b/apps/email/js/ext/composite/configurator.js index 359ddf6782df..56763b82ce8d 100644 --- a/apps/email/js/ext/composite/configurator.js +++ b/apps/email/js/ext/composite/configurator.js @@ -4,7 +4,7 @@ define( [ - 'rdcommon/log', + 'logic', '../accountcommon', '../a64', '../allback', @@ -14,7 +14,7 @@ define( 'exports' ], function( - $log, + logic, $accountcommon, $a64, $allback, @@ -29,7 +29,7 @@ var allbackMaker = $allback.allbackMaker; exports.account = $account; exports.configurator = { tryToCreateAccount: function(universe, userDetails, domainInfo, - callback, _LOG) { + callback) { var credentials, incomingInfo, smtpConnInfo, incomingType; if (domainInfo) { incomingType = (domainInfo.type === 'imap+smtp' ? 'imap' : 'pop3'); diff --git a/apps/email/js/ext/composite/incoming.js b/apps/email/js/ext/composite/incoming.js index cd14d35a6784..11021b834ee9 100644 --- a/apps/email/js/ext/composite/incoming.js +++ b/apps/email/js/ext/composite/incoming.js @@ -1,7 +1,7 @@ define([ - 'rdcommon/log', '../a64', '../accountmixins', '../mailslice', + 'logic', '../a64', '../accountmixins', '../mailslice', '../searchfilter', '../util', '../db/folder_info_rep', 'require', 'exports'], - function(log, $a64, $acctmixins, $mailslice, + function(logic, $a64, $acctmixins, $mailslice, $searchfilter, $util, $folder_info, require, exports) { var bsearchForInsert = $util.bsearchForInsert; @@ -27,7 +27,7 @@ function cmpFolderPubPath(a, b) { function CompositeIncomingAccount( FolderSyncer, universe, compositeAccount, accountId, credentials, - connInfo, folderInfos, dbConn, _parentLog, existingProtoConn) { + connInfo, folderInfos, dbConn, existingProtoConn) { this.universe = universe; this.compositeAccount = compositeAccount; @@ -115,7 +115,7 @@ function CompositeIncomingAccount( folderStorages[folderId] = new $mailslice.FolderStorage(this, folderId, folderInfo, this._db, - FolderSyncer, this._LOG); + FolderSyncer); folderPubs.push(folderInfo.$meta); } this.folders.sort(function(a, b) { @@ -178,7 +178,7 @@ CompositeIncomingAccount.prototype = { }; this._folderStorages[folderId] = new $mailslice.FolderStorage(this, folderId, folderInfo, this._db, - this.FolderSyncer, this._LOG); + this.FolderSyncer); var folderMeta = folderInfo.$meta; var idx = bsearchForInsert(this.folders, folderMeta, cmpFolderPubPath); @@ -215,7 +215,7 @@ CompositeIncomingAccount.prototype = { * implementation. 
*/ _recreateFolder: function(folderId, callback) { - this._LOG.recreateFolder(folderId); + logic(this, 'recreateFolder', { folderId: folderId }); var folderInfo = this._folderInfos[folderId]; folderInfo.$impl = { nextId: 0, @@ -234,8 +234,7 @@ CompositeIncomingAccount.prototype = { this.saveAccountState(null, function() { var newStorage = new $mailslice.FolderStorage(self, folderId, folderInfo, self._db, - self.FolderSyncer, - self._LOG); + self.FolderSyncer); for (var iter in Iterator(self._folderStorages[folderId]._slices)) { var slice = iter[1]; slice._storage = newStorage; @@ -295,7 +294,7 @@ CompositeIncomingAccount.prototype = { rawConn = null; } if (!errString) { - self._LOG.deleteFolder(folderMeta.path); + logic(self, 'deleteFolder', { path: folderMeta.path }); self._forgetFolder(folderId); } if (callback) @@ -329,7 +328,7 @@ CompositeIncomingAccount.prototype = { */ sliceFolderMessages: function(folderId, bridgeHandle) { var storage = this._folderStorages[folderId], - slice = new $mailslice.MailSlice(bridgeHandle, storage, this._LOG); + slice = new $mailslice.MailSlice(bridgeHandle, storage); storage.sliceOpenMostRecent(slice); }, @@ -337,7 +336,7 @@ CompositeIncomingAccount.prototype = { searchFolderMessages: function(folderId, bridgeHandle, phrase, whatToSearch) { var storage = this._folderStorages[folderId], slice = new $searchfilter.SearchSlice(bridgeHandle, storage, phrase, - whatToSearch, this._LOG); + whatToSearch); storage.sliceOpenSearch(slice); return slice; }, @@ -374,55 +373,4 @@ CompositeIncomingAccount.prototype = { }, }; -exports.LOGFAB_DEFINITION = { - CompositeIncomingAccount: { - type: log.ACCOUNT, - events: { - createFolder: {}, - deleteFolder: {}, - recreateFolder: { id: false }, - - createConnection: {}, - reuseConnection: {}, - releaseConnection: {}, - deadConnection: { why: true }, - unknownDeadConnection: {}, - connectionMismatch: {}, - - /** - * XXX: this is really an error/warning, but to make the logging less - * confusing, treat it as an event. - */ - accountDeleted: { where: false }, - - /** - * The maximum connection limit has been reached, we are intentionally - * not creating an additional one. - */ - maximumConnsNoNew: {}, - }, - TEST_ONLY_events: { - deleteFolder: { path: false }, - - createConnection: { label: false }, - reuseConnection: { label: false }, - releaseConnection: { folderId: false, label: false }, - deadConnection: { folder: false }, - connectionMismatch: {}, - }, - errors: { - connectionError: {}, - folderAlreadyHasConn: { folderId: false }, - opError: { mode: false, type: false, ex: log.EXCEPTION }, - }, - asyncJobs: { - checkAccount: { err: null }, - runOp: { mode: true, type: true, error: true, op: false }, - saveAccountState: { reason: true, folderSaveCount: true }, - }, - TEST_ONLY_asyncJobs: { - }, - }, -}; - }); // end define diff --git a/apps/email/js/ext/cronsync.js b/apps/email/js/ext/cronsync.js index 8c638d4aa733..0ec14a17a7e8 100644 --- a/apps/email/js/ext/cronsync.js +++ b/apps/email/js/ext/cronsync.js @@ -23,7 +23,7 @@ define( [ - 'rdcommon/log', + 'logic', './worker-router', './slice_bridge_proxy', './mailslice', @@ -33,7 +33,7 @@ define( 'exports' ], function( - $log, + logic, $router, $sliceBridgeProxy, $mailslice, @@ -90,12 +90,12 @@ var SliceBridgeProxy = $sliceBridgeProxy.SliceBridgeProxy; * being closed, you want to make sure that if you're doing anything like * scheduling snippet downloads that you do that first. 
*/ -function makeHackedUpSlice(storage, callback, parentLog) { +function makeHackedUpSlice(storage, callback) { var fakeBridgeThatEatsStuff = { __sendMessage: function() {} }, proxy = new SliceBridgeProxy(fakeBridgeThatEatsStuff, 'cron'), - slice = new $mailslice.MailSlice(proxy, storage, parentLog), + slice = new $mailslice.MailSlice(proxy, storage), oldStatusMethod = proxy.sendStatus, newHeaders = []; @@ -144,10 +144,10 @@ function makeHackedUpSlice(storage, callback, parentLog) { * The brains behind periodic account synchronization; only created by the * universe once it has loaded its configuration and accounts. */ -function CronSync(universe, _logParent) { +function CronSync(universe) { this._universe = universe; - this._LOG = LOGFAB.CronSync(this, null, _logParent); + logic.defineScope(this, 'CronSync'); this._activeSlices = []; @@ -184,7 +184,7 @@ function CronSync(universe, _logParent) { exports.CronSync = CronSync; CronSync.prototype = { _killSlices: function() { - this._LOG.killSlices(this._activeSlices.length); + logic(this, 'killSlices', { count: this._activeSlices.length }); this._activeSlices.forEach(function(slice) { slice.die(); }); @@ -200,7 +200,7 @@ CronSync.prototype = { if (!this._completedEnsureSync) return; - this._LOG.ensureSync_begin(); + logic(this, 'ensureSync_begin'); this._completedEnsureSync = false; debug('ensureSync called'); @@ -231,11 +231,13 @@ CronSync.prototype = { * have completed. */ syncAccount: function(account, doneCallback) { + var scope = logic.subscope(this, { accountId: account.id }); + // - Skip syncing if we are offline or the account is disabled if (!this._universe.online || !account.enabled) { - debug('syncAcount early exit: online: ' + + debug('syncAccount early exit: online: ' + this._universe.online + ', enabled: ' + account.enabled); - this._LOG.syncSkipped(account.id); + logic(scope, 'syncSkipped'); doneCallback(); return; } @@ -250,11 +252,11 @@ CronSync.prototype = { // sync if it is sufficiently recent. // - Initiate a sync of the folder covering the desired time range. - this._LOG.syncAccount_begin(account.id); - this._LOG.syncAccountHeaders_begin(account.id, null); + logic(scope, 'syncAccount_begin'); + logic(scope, 'syncAccountHeaders_begin'); var slice = makeHackedUpSlice(storage, function(newHeaders) { - this._LOG.syncAccountHeaders_end(account.id, newHeaders); + logic(scope, 'syncAccountHeaders_end', { headers: newHeaders }); this._activeSlices.splice(this._activeSlices.indexOf(slice), 1); // Reduce headers to the minimum number and data set needed for @@ -278,10 +280,10 @@ CronSync.prototype = { // POP3 downloads snippets as part of the sync process, there is no // need to call downloadBodies. 
if (account.accountDef.type === 'pop3+smtp') { - this._LOG.syncAccount_end(account.id); + logic(scope, 'syncAccount_end'); inboxDone([newHeaders.length, notifyHeaders]); } else if (this._universe.online) { - this._LOG.syncAccountSnippets_begin(account.id); + logic(scope, 'syncAccountSnippets_begin'); this._universe.downloadBodies( newHeaders.slice( 0, $syncbase.CRONSYNC_MAX_SNIPPETS_TO_FETCH_PER_ACCOUNT), @@ -290,25 +292,25 @@ CronSync.prototype = { }, function() { debug('Notifying for ' + newHeaders.length + ' headers'); - this._LOG.syncAccountSnippets_end(account.id); - this._LOG.syncAccount_end(account.id); + logic(scope, 'syncAccountSnippets_end'); + logic(scope, 'syncAccount_end'); inboxDone([newHeaders.length, notifyHeaders]); }.bind(this)); } else { - this._LOG.syncAccount_end(account.id); + logic(scope, 'syncAccount_end'); debug('UNIVERSE OFFLINE. Notifying for ' + newHeaders.length + ' headers'); inboxDone([newHeaders.length, notifyHeaders]); } } else { - this._LOG.syncAccount_end(account.id); + logic(scope, 'syncAccount_end'); inboxDone(); } // Kill the slice. This will release the connection and result in its // death if we didn't schedule snippet downloads above. slice.die(); - }.bind(this), this._LOG); + }.bind(this)); this._activeSlices.push(slice); // Pass true to force contacting the server. @@ -320,14 +322,14 @@ CronSync.prototype = { var outboxStorage = account.getFolderStorageForFolderId(outboxFolder.id); if (outboxStorage.getKnownMessageCount() > 0) { var outboxDone = latch.defer('outbox'); - this._LOG.sendOutbox_begin(account.id); + logic(scope, 'sendOutbox_begin'); this._universe.sendOutboxMessages( account, { reason: 'syncAccount' }, function() { - this._LOG.sendOutbox_end(account.id); + logic(scope, 'sendOutbox_end'); outboxDone(); }.bind(this)); } @@ -353,7 +355,7 @@ CronSync.prototype = { }, onRequestSync: function(accountIds) { - this._LOG.requestSyncFired(accountIds); + logic(this, 'requestSyncFired', { accountIds: accountIds }); if (!accountIds) return; @@ -363,7 +365,7 @@ CronSync.prototype = { ids = []; this._cronsyncing = true; - this._LOG.cronSync_begin(); + logic(this, 'cronSync_begin'); this._universe.__notifyStartedCronSync(accountIds); // Make sure the acount IDs are still valid. 
This is to protect agains @@ -413,7 +415,7 @@ CronSync.prototype = { } this._universe.__notifyStoppedCronSync(accountsResults); - this._LOG.syncAccounts_end(accountsResults); + logic(this, 'syncAccounts_end', { accountsResults: accountsResults }); }.bind(this); this._checkSyncDone(); @@ -426,7 +428,7 @@ CronSync.prototype = { return; } - this._LOG.syncAccounts_begin(); + logic(this, 'syncAccounts_begin'); targetAccounts.forEach(function(account) { this.syncAccount(account, function (result) { if (result) { @@ -457,7 +459,7 @@ CronSync.prototype = { if (this._onSyncDone) { this._onSyncDone(); this._onSyncDone = null; - this._LOG.cronSync_end(); + logic(this, 'cronSync_end'); } }, @@ -470,7 +472,7 @@ CronSync.prototype = { */ onSyncEnsured: function() { this._completedEnsureSync = true; - this._LOG.ensureSync_end(); + logic(this, 'ensureSync_end'); this._checkSyncDone(); }, @@ -480,39 +482,5 @@ CronSync.prototype = { } }; -var LOGFAB = exports.LOGFAB = $log.register($module, { - CronSync: { - type: $log.DAEMON, - events: { - requestSyncFired: { accountIds: false }, - killSlices: { count: false }, - syncSkipped: { id: true }, - }, - TEST_ONLY_events: { - }, - asyncJobs: { - cronSync: {}, - ensureSync: {}, - syncAccounts: { accountsResults: false }, - syncAccount: { id: true }, - // The actual slice refresh, leads to syncAccountSnippets if there were - // any new headers - syncAccountHeaders: { id: true, newHeaders: false }, - // If we have new headers we will fetch snippets. This starts when we - // issue the request and stops when we get our callback, meaning there - // will be an entirely contained downloadBodies job-op. - syncAccountSnippets: { id: true }, - sendOutbox: { id: true }, - }, - TEST_ONLY_asyncJobs: { - }, - errors: { - }, - calls: { - }, - TEST_ONLY_calls: { - }, - }, -}); }); // end define diff --git a/apps/email/js/ext/disaster-recovery.js b/apps/email/js/ext/disaster-recovery.js index 9ee1d8346de8..558da7febca5 100644 --- a/apps/email/js/ext/disaster-recovery.js +++ b/apps/email/js/ext/disaster-recovery.js @@ -19,14 +19,14 @@ define(function(require) { * resources we can. */ - var slog = require('./slog'); + var logic = require('./logic'); var socketToAccountMap = new WeakMap(); var accountToOperationMap = new WeakMap(); - var DisasterRecovery = { + var scope = logic.scope('DisasterRecovery'); - // Monitor in-progress job operations in case we must abort. + var DisasterRecovery = { setCurrentAccountOp: function(account, op, jobCompletedCallback) { accountToOperationMap.set(account, { @@ -88,7 +88,7 @@ define(function(require) { } } - slog.error('disaster-recovery:exception', { + logic(scope, 'exception', { accountId: account && account.id, op: op, error: e, @@ -105,9 +105,9 @@ define(function(require) { // See if we can recover in any way. 
       if (account) {
         if (op) {
+          logic(scope, 'finished-job', { error: e });
           console.warn('Force-completing in-progress op:', op);
           jobDoneCallback('disastrous-error');
-          slog.log('disaster-recovery:finished-job', { error: e });
         } else {
           console.warn('No job operation was currently running.');
         }
       }
diff --git a/apps/email/js/ext/errbackoff.js b/apps/email/js/ext/errbackoff.js
index 276b71991fba..7cb94ddc0fc8 100644
--- a/apps/email/js/ext/errbackoff.js
+++ b/apps/email/js/ext/errbackoff.js
@@ -48,13 +48,13 @@
 define(
   [
     './date',
-    'rdcommon/log',
+    'logic',
     'module',
     'exports'
   ],
   function(
     $date,
-    $log,
+    logic,
     $module,
     exports
   ) {
@@ -89,7 +89,7 @@ exports.TEST_useTimeoutFunc = function(func) {
  * ]]
  * ]
  */
-function BackoffEndpoint(name, listener, parentLog) {
+function BackoffEndpoint(name, listener) {
   /** @oneof[
    * @case['healthy']
    * @case['unreachable']
@@ -102,8 +102,10 @@
    */
   this.state = 'healthy';
   this._iNextBackoff = 0;
-  this._LOG = LOGFAB.BackoffEndpoint(this, parentLog, name);
-  this._LOG.state(this.state);
+
+  logic.defineScope(this, 'BackoffEndpoint', { name: name });
+
+  logic(this, 'state', { state: this.state });
 
   this._badResources = {};
 
@@ -114,7 +116,7 @@ BackoffEndpoint.prototype = {
     if (this.state === newState)
       return;
     this.state = newState;
-    this._LOG.state(newState);
+    logic(this, 'state', { state: newState });
     if (this.listener)
       this.listener.onEndpointStateChange(newState);
   },
@@ -140,7 +142,7 @@
    * }
    */
   noteConnectFailureMaybeRetry: function(reachable) {
-    this._LOG.connectFailure(reachable);
+    logic(this, 'connectFailure', { reachable: reachable });
     if (this.state === 'shutdown')
       return false;
 
@@ -170,7 +172,7 @@
    * requests.
    */
   noteBrokenConnection: function() {
-    this._LOG.connectFailure(true);
+    logic(this, 'connectFailure', { reachable: true });
     this._setState('broken');
     this._iNextBackoff = BACKOFF_DURATIONS.length;
 
@@ -217,23 +219,8 @@
   },
 };
 
-exports.createEndpoint = function(name, listener, parentLog) {
-  return new BackoffEndpoint(name, listener, parentLog);
+exports.createEndpoint = function(name, listener) {
+  return new BackoffEndpoint(name, listener);
 };
 
-var LOGFAB = exports.LOGFAB = $log.register($module, {
-  BackoffEndpoint: {
-    type: $log.TASK,
-    subtype: $log.CLIENT,
-    stateVars: {
-      state: false,
-    },
-    events: {
-      connectFailure: { reachable: true },
-    },
-    errors: {
-    }
-  },
-});
-
 }); // end define
diff --git a/apps/email/js/ext/ext/activesync-lib/protocol.js b/apps/email/js/ext/ext/activesync-lib/protocol.js
index b8643cb1f1db..da89efcd8917 100644
--- a/apps/email/js/ext/ext/activesync-lib/protocol.js
+++ b/apps/email/js/ext/ext/activesync-lib/protocol.js
@@ -629,7 +629,7 @@
     // Add extra headers if we have any.
     if (aExtraHeaders) {
       for (var iter in Iterator(aExtraHeaders)) {
-        var key = iter[0], key = iter[1];
+        var key = iter[0], value = iter[1];
         xhr.setRequestHeader(key, value);
       }
     }
diff --git a/apps/email/js/ext/ext/axeshim-browserbox.js b/apps/email/js/ext/ext/axeshim-browserbox.js
index 2b1e0634eab0..e60424bba539 100644
--- a/apps/email/js/ext/ext/axeshim-browserbox.js
+++ b/apps/email/js/ext/ext/axeshim-browserbox.js
@@ -2,22 +2,35 @@
  * Customized shim for browserbox to use 'slog' with configurable logging level
  * that can be cranked up.
*/ -define(function() { - var slog = require('slog'); - var slogTag = 'browserbox'; +define(function(require) { + var logic = require('logic'); + var scope = logic.scope('BrowserBox'); return { + /** + * Provide a .debug for things that are *only* logged when + * sensitive logging is enabled. This exists right now mainly for + * the benefit of the email.js libs. We're tying "debug" to + * logSensitiveData both because we haven't audited the use of + * debug and also because it is indeed a bit chatty. + * + * TODO: Address the logging detail level as a separate issue, + * ideally while working with whiteout.io to fancify the email.js + * logging slightly. + */ debug: function(ignoredTag, msg) { - slog.debug(slogTag, { msg: msg }); + if (!logic.isCensored) { + logic(scope, 'debug', { msg: msg }); + } }, log: function(ignoredTag, msg) { - slog.log(slogTag, { msg: msg }); + logic(scope, 'log', { msg: msg }); }, warn: function(ignoredTag, msg) { - slog.warn(slogTag, { msg: msg }); + logic(scope, 'warn', { msg: msg }); }, error: function(ignoredTag, msg) { - slog.error(slogTag, { msg: msg }); + logic(scope, 'error', { msg: msg }); } }; }); diff --git a/apps/email/js/ext/ext/axeshim-smtpclient.js b/apps/email/js/ext/ext/axeshim-smtpclient.js index 0e74f9926c80..c0440d8d0290 100644 --- a/apps/email/js/ext/ext/axeshim-smtpclient.js +++ b/apps/email/js/ext/ext/axeshim-smtpclient.js @@ -2,22 +2,25 @@ * Customized shim for browserbox to use 'slog' with configurable logging level * that can be cranked up. */ -define(function() { - var slog = require('slog'); - var slogTag = 'smtpclient'; +define(function(require) { + var logic = require('logic'); + var scope = logic.scope('SmtpClient'); return { + // see axeshim-browserbox's comment about '.debug' debug: function(ignoredTag, msg) { - slog.debug(slogTag, { msg: msg }); + if (!logic.isCensored) { + logic(scope, 'debug', { msg: msg }); + } }, log: function(ignoredTag, msg) { - slog.log(slogTag, { msg: msg }); + logic(scope, 'log', { msg: msg }); }, warn: function(ignoredTag, msg) { - slog.warn(slogTag, { msg: msg }); + logic(scope, 'warn', { msg: msg }); }, error: function(ignoredTag, msg) { - slog.error(slogTag, { msg: msg }); + logic(scope, 'error', { msg: msg }); } }; }); diff --git a/apps/email/js/ext/ext/browserbox.js b/apps/email/js/ext/ext/browserbox.js index f45a2a1fa55b..dda9daab9cbc 100644 --- a/apps/email/js/ext/ext/browserbox.js +++ b/apps/email/js/ext/ext/browserbox.js @@ -2132,4 +2132,4 @@ } return BrowserBox; -})); \ No newline at end of file +})); diff --git a/apps/email/js/ext/ext/co.js b/apps/email/js/ext/ext/co.js new file mode 100644 index 000000000000..06e7f334a9de --- /dev/null +++ b/apps/email/js/ext/ext/co.js @@ -0,0 +1,236 @@ +define(function(require) { +/** + * slice() reference. + */ + +var slice = Array.prototype.slice; + +/** + * Expose `co`. + */ + +co['default'] = co.co = co; + +/** + * Wrap the given generator `fn` into a + * function that returns a promise. + * This is a separate function so that + * every `co()` call doesn't create a new, + * unnecessary closure. + * + * @param {GeneratorFunction} fn + * @return {Function} + * @api public + */ + +co.wrap = function (fn) { + createPromise.__generatorFunction__ = fn; + return createPromise; + function createPromise() { + return co.call(this, fn.apply(this, arguments)); + } +}; + +/** + * Execute the generator function or a generator + * and return a promise. 
+ * + * @param {Function} fn + * @return {Promise} + * @api public + */ + +function co(gen) { + var ctx = this; + if (typeof gen === 'function') gen = gen.call(this); + // we wrap everything in a promise to avoid promise chaining, + // which leads to memory leak errors. + // see https://github.com/tj/co/issues/180 + return new Promise(function(resolve, reject) { + onFulfilled(); + + /** + * @param {Mixed} res + * @return {Promise} + * @api private + */ + + function onFulfilled(res) { + var ret; + try { + ret = gen.next(res); + } catch (e) { + return reject(e); + } + next(ret); + } + + /** + * @param {Error} err + * @return {Promise} + * @api private + */ + + function onRejected(err) { + var ret; + try { + ret = gen.throw(err); + } catch (e) { + return reject(e); + } + next(ret); + } + + /** + * Get the next value in the generator, + * return a promise. + * + * @param {Object} ret + * @return {Promise} + * @api private + */ + + function next(ret) { + if (ret.done) return resolve(ret.value); + var value = toPromise.call(ctx, ret.value); + if (value && isPromise(value)) return value.then(onFulfilled, onRejected); + return onRejected(new TypeError('You may only yield a function, promise, generator, array, or object, ' + + 'but the following object was passed: "' + String(ret.value) + '"')); + } + }); +} + +/** + * Convert a `yield`ed value into a promise. + * + * @param {Mixed} obj + * @return {Promise} + * @api private + */ + +function toPromise(obj) { + if (!obj) return obj; + if (isPromise(obj)) return obj; + if (isGeneratorFunction(obj) || isGenerator(obj)) return co.call(this, obj); + if ('function' == typeof obj) return thunkToPromise.call(this, obj); + if (Array.isArray(obj)) return arrayToPromise.call(this, obj); + if (isObject(obj)) return objectToPromise.call(this, obj); + return obj; +} + +/** + * Convert a thunk to a promise. + * + * @param {Function} + * @return {Promise} + * @api private + */ + +function thunkToPromise(fn) { + var ctx = this; + return new Promise(function (resolve, reject) { + fn.call(ctx, function (err, res) { + if (err) return reject(err); + if (arguments.length > 2) res = slice.call(arguments, 1); + resolve(res); + }); + }); +} + +/** + * Convert an array of "yieldables" to a promise. + * Uses `Promise.all()` internally. + * + * @param {Array} obj + * @return {Promise} + * @api private + */ + +function arrayToPromise(obj) { + return Promise.all(obj.map(toPromise, this)); +} + +/** + * Convert an object of "yieldables" to a promise. + * Uses `Promise.all()` internally. + * + * @param {Object} obj + * @return {Promise} + * @api private + */ + +function objectToPromise(obj){ + var results = new obj.constructor(); + var keys = Object.keys(obj); + var promises = []; + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + var promise = toPromise.call(this, obj[key]); + if (promise && isPromise(promise)) defer(promise, key); + else results[key] = obj[key]; + } + return Promise.all(promises).then(function () { + return results; + }); + + function defer(promise, key) { + // predefine the key in the result + results[key] = undefined; + promises.push(promise.then(function (res) { + results[key] = res; + })); + } +} + +/** + * Check if `obj` is a promise. + * + * @param {Object} obj + * @return {Boolean} + * @api private + */ + +function isPromise(obj) { + return 'function' == typeof obj.then; +} + +/** + * Check if `obj` is a generator. 
+ * + * @param {Mixed} obj + * @return {Boolean} + * @api private + */ + +function isGenerator(obj) { + return 'function' == typeof obj.next && 'function' == typeof obj.throw; +} + +/** + * Check if `obj` is a generator function. + * + * @param {Mixed} obj + * @return {Boolean} + * @api private + */ +function isGeneratorFunction(obj) { + var constructor = obj.constructor; + if (!constructor) return false; + if ('GeneratorFunction' === constructor.name || 'GeneratorFunction' === constructor.displayName) return true; + return isGenerator(constructor.prototype); +} + +/** + * Check for plain object. + * + * @param {Mixed} val + * @return {Boolean} + * @api private + */ + +function isObject(val) { + return Object == val.constructor; +} + + return co; +}); diff --git a/apps/email/js/ext/ext/equal.js b/apps/email/js/ext/ext/equal.js new file mode 100644 index 000000000000..26883094febb --- /dev/null +++ b/apps/email/js/ext/ext/equal.js @@ -0,0 +1,120 @@ +/** + * Sane equivalence checking, originally from loggest's rdcommon/log.js. + */ + +/* ***** BEGIN LICENSE BLOCK ***** + * Version: MPL 1.1/GPL 2.0/LGPL 2.1 + * + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at: + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is Mozilla Raindrop Code. + * + * The Initial Developer of the Original Code is + * The Mozilla Foundation + * Portions created by the Initial Developer are Copyright (C) 2011 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * Andrew Sutherland + * + * Alternatively, the contents of this file may be used under the terms of + * either the GNU General Public License Version 2 or later (the "GPL"), or + * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. + * + * ***** END LICENSE BLOCK ***** */ + +/** + * This module exports a single comparison function: + * + * equal(a, b) -> boolean + * + */ +define(function(require) { + + /** + * Maximum comparison depth for argument equivalence in expectation checking. + * This value gets bumped every time I throw something at it that fails that + * still seems reasonable to me. 
+ */ + var COMPARE_DEPTH = 6; + + function boundedCmpObjs(a, b, depthLeft) { + var aAttrCount = 0, bAttrCount = 0, key, nextDepth = depthLeft - 1; + + if ('toJSON' in a) + a = a.toJSON(); + if ('toJSON' in b) + b = b.toJSON(); + + for (key in a) { + aAttrCount++; + if (!(key in b)) + return false; + + if (depthLeft) { + if (!equal(a[key], b[key], nextDepth)) + return false; + } + else { + if (a[key] !== b[key]) + return false; + } + } + // the theory is that if every key in a is in b and its value is equal, and + // there are the same number of keys in b, then they must be equal. + for (key in b) { + bAttrCount++; + } + if (aAttrCount !== bAttrCount) + return false; + return true; + } + + /** + * @return[Boolean]{ + * True when equivalent, false when not equivalent. + * } + */ + function equal(a, b, depthLeft) { + if (depthLeft === undefined) { + depthLeft = COMPARE_DEPTH; + } + var ta = typeof(a), tb = typeof(b); + if (ta !== 'object' || (tb !== ta) || (a == null) || (b == null)) + return a === b; + // fast-path for identical objects + if (a === b) + return true; + if (Array.isArray(a)) { + if (!Array.isArray(b)) + return false; + if (a.length !== b.length) + return false; + for (var iArr = 0; iArr < a.length; iArr++) { + if (!equal(a[iArr], b[iArr], depthLeft - 1)) + return false; + } + return true; + } + return boundedCmpObjs(a, b, depthLeft); + } + + return equal; + +}); // end define diff --git a/apps/email/js/ext/ext/mailbuild.js b/apps/email/js/ext/ext/mailbuild.js index 5f1622799eba..d4bef3577ac2 100644 --- a/apps/email/js/ext/ext/mailbuild.js +++ b/apps/email/js/ext/ext/mailbuild.js @@ -727,4 +727,4 @@ }; return MimeNode; -})); \ No newline at end of file +})); diff --git a/apps/email/js/ext/ext/mimefuncs.js b/apps/email/js/ext/ext/mimefuncs.js index 426c74f09ad5..474f99d10268 100644 --- a/apps/email/js/ext/ext/mimefuncs.js +++ b/apps/email/js/ext/ext/mimefuncs.js @@ -1174,4 +1174,4 @@ } return mimefuncs; -})); \ No newline at end of file +})); diff --git a/apps/email/js/ext/ext/rdcommon/deferred.js b/apps/email/js/ext/ext/rdcommon/deferred.js deleted file mode 100644 index 2f54ea7e1b2e..000000000000 --- a/apps/email/js/ext/ext/rdcommon/deferred.js +++ /dev/null @@ -1,68 +0,0 @@ -define(function() { - - var nextId = 1; - var activeDeferreds = {}; - var unhandledRejectionHandler = null; - - /** - * Simulate a Deferred in the style of the Q async library, so that - * we don't need to depend on the full Q library. Tracks unhandled - * rejections and unresolved deferreds for debugging. - * - * @param {String} name - * Optional name for debugging. 
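The new equal module above performs bounded structural comparison: objects and arrays are walked key by key down to COMPARE_DEPTH levels, values exposing toJSON() are compared by their JSON form, and non-objects fall back to strict equality. An editorial sketch of those semantics (the values are illustrative only, not from the patch):

equal({ folder: 'INBOX', counts: [1, 2, 3] },
      { folder: 'INBOX', counts: [1, 2, 3] });          // true: deep walk of keys and arrays
equal({ folder: 'INBOX' }, { folder: 'INBOX', x: 1 });  // false: b has an extra key
equal(NaN, NaN);                                        // false: non-objects use ===
equal({ toJSON: function() { return { a: 1 }; } },
      { a: 1 });                                        // true: a is compared via its toJSON()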
- */ - var Deferred = function(name) { - var self = this; - this.id = nextId++; - this.name = name; - this.stack = new Error().stack; - this.promise = new Promise(function(resolve, reject) { - self.resolve = function(val) { - delete activeDeferreds[self.id]; - self.resolved = true; - self.value = val; - resolve(val); - }; - self.reject = function(val) { - self.promise.catch(function(e) { - if (unhandledRejectionHandler) { - unhandledRejectionHandler(e); - } - }); - delete activeDeferreds[self.id]; - self.rejected = true; - self.value = val; - reject(val); - }; - }); - - activeDeferreds[this.id] = this; - - this.value = null; - this.resolved = false; - this.rejected = false; - }; - - Deferred.prototype.toString = function() { - return ''; - }; - - Deferred.setUnhandledRejectionHandler = function(rejectionHandler) { - unhandledRejectionHandler = rejectionHandler; - }; - - Deferred.getAllActiveDeferreds = function() { - var vals = []; - for (var key in activeDeferreds) { - vals.push(activeDeferreds[key]); - } - return vals; - } - - Deferred.clearActiveDeferreds = function() { - activeDeferreds = {}; - } - - return Deferred; -}); diff --git a/apps/email/js/ext/ext/rdcommon/extransform.js b/apps/email/js/ext/ext/rdcommon/extransform.js deleted file mode 100644 index 2a2cedd1ab5a..000000000000 --- a/apps/email/js/ext/ext/rdcommon/extransform.js +++ /dev/null @@ -1,213 +0,0 @@ -/* ***** BEGIN LICENSE BLOCK ***** - * Version: MPL 1.1/GPL 2.0/LGPL 2.1 - * - * The contents of this file are subject to the Mozilla Public License Version - * 1.1 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at: - * http://www.mozilla.org/MPL/ - * - * Software distributed under the License is distributed on an "AS IS" basis, - * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License - * for the specific language governing rights and limitations under the - * License. - * - * The Original Code is Mozilla Raindrop Code. - * - * The Initial Developer of the Original Code is - * The Mozilla Foundation - * Portions created by the Initial Developer are Copyright (C) 2011 - * the Initial Developer. All Rights Reserved. - * - * Contributor(s): - * Andrew Sutherland - * - * Alternatively, the contents of this file may be used under the terms of - * either the GNU General Public License Version 2 or later (the "GPL"), or - * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), - * in which case the provisions of the GPL or the LGPL are applicable instead - * of those above. If you wish to allow use of your version of this file only - * under the terms of either the GPL or the LGPL, and not to allow others to - * use your version of this file under the terms of the MPL, indicate your - * decision by deleting the provisions above and replace them with the notice - * and other provisions required by the GPL or the LGPL. If you do not delete - * the provisions above, a recipient may use your version of this file under - * the terms of any one of the MPL, the GPL or the LGPL. - * - * ***** END LICENSE BLOCK ***** */ - -/** - * Exception transformation/normalization logic from the soon-to-be-dead - * jstut "esther" speculative test framework. (Loggest and ArbPL are descended - * replacements for it.) - * - * This defines a "defineStackTrace" method on Error as a side-effect which - * means no one else but us is allowed to try that trick. It's unclear what - * impact this has on the node default handlers... 
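The deleted deferred.js above wrapped a native Promise to expose resolve/reject externally, Q-style, with extra bookkeeping for debugging. As an editorial aside (not code from this refactor), the core externally-resolvable idiom it provided can be expressed with a bare Promise:

function makeDeferred() {
  var resolve, reject;
  var promise = new Promise(function(res, rej) {
    resolve = res;
    reject = rej;
  });
  return { promise: promise, resolve: resolve, reject: reject };
}

var d = makeDeferred();
d.promise.then(function(val) { console.log('resolved with', val); });
d.resolve(42);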
although I'm sure it will - * become obvious real quick. - **/ - -define( - [ - 'require', - 'exports' - ], - function( - require, - exports - ) { - -var baseUrl; -// XXX previous requirejs web magic... -if (false) { - baseUrl = require.s.contexts._.config.baseUrl; - if (baseUrl.length > 3 && baseUrl.substring(0, 3) === "../") { - var targUrl = document.location.origin + document.location.pathname; - // strip down to the parent directory (lose file or just trailing "/") - targUrl = targUrl.substring(0, targUrl.lastIndexOf("/")); - // eat the relative bits of the baseUrl - while (baseUrl.length >= 3 && baseUrl.substring(0, 3) === "../") { - targUrl = targUrl.substring(0, targUrl.lastIndexOf("/")); - baseUrl = baseUrl.substring(3); - } - baseUrl = targUrl + baseUrl + "/"; - console.log("baseUrl", baseUrl); - } -} -else { - // XXX ALMOND hack; don't even try and find node path where there is none - /* - require(['path'], function($path) { - baseUrl = $path.resolve('../..'); - }); - */ -} - - - -function uneval(x) { - return JSON.stringify(x); -} - -function simplifyFilename(filename) { - if (!filename) - return filename; - // simple hack to eliminate jetpack ridiculousness where we have - // "LONGPATH -> LONGPATH -> LONGPATH -> actualThing.js" - if (filename.length > 96) { - var lastSlash = filename.lastIndexOf('/'); - if (lastSlash !== -1) - return filename.substring(lastSlash+1); - } - // can we reduce it? - if (baseUrl && filename.substring(0, baseUrl.length) === baseUrl) { - // we could take this a step further and do path analysis. - return filename.substring(baseUrl.length); - } - return filename; -} - -// Thunk the stack format in v8 -Error.prepareStackTrace = function(e, frames) { - var o = []; - for (var i = 0; i < frames.length; i++) { - var frame = frames[i]; - o.push({ - filename: simplifyFilename(frame.getFileName()), - lineNo: frame.getLineNumber(), - funcName: frame.getFunctionName(), - }); - } - return o; -}; -// raise the limit in case of super-nested require()s -//Error.stackTraceLimit = 64; - -// XXX not sure if this even works since Error is not supposed to be -// configurable... provide a captureStackTrace method -// nb: and obviously, in independent sandboxes, this does jack... -if (!Error.captureStackTrace) { - Error.captureStackTrace = function(who, errType) { - try { - throw new Error(); - } - catch(ex) { - var sframes = ex.stack.split("\n"), frames = who.stack = [], match; - for (var i = 0; i < sframes.length; i++) { - if ((match = SM_STACK_FORMAT.exec(sframes[i]))) { - frames.push({ - filename: simplifyFilename(match[2]), - lineNo: match[3], - funcName: match[1], - }); - } - } - } - }; -} - -exports.gimmeStack = function() { - var obj = {}; - Error.captureStackTrace(obj); - // pop off captureStackTrace and us. - return obj.stack.slice(2); -} - -var SM_STACK_FORMAT = /^(.*)@(.+):(\d+)$/; - -// this is biased towards v8/chromium for now -/** - * - */ -exports.transformException = function transformException(e) { - // it's conceivable someone - if (!(e instanceof Error) && - // under jetpack, we are losing hard, probably because of the sandbox - // issue where everybody gets their own fundamentals, so check for stack. - (!e || typeof(e) !== "object" || !("stack" in e))) { - return { - n: "Object", - m: "" + e, - f: [], - }; - } - - var stack = e.stack; - // evidence of v8 thunk? 
- if (Array.isArray(stack)) { - return { - n: e.name, - m: e.message, - f: stack, - }; - } - - // handle the spidermonkey case, XXX maybe - var o = { - n: e.name, - m: e.message, - f: [], - }; - if (stack) { - var sframes = stack.split("\n"), frames = o.f, match; - for (var i = 0; i < sframes.length; i++) { - if ((match = SM_STACK_FORMAT.exec(sframes[i]))) { - frames.push({ - filename: simplifyFilename(match[2]), - lineNo: match[3], - funcName: match[1], - }); - } - } - } - // otherwise this is probably an XPConnect exception... - else if (e.filename) { - o.f.push({ - filename: e.filename, - lineNo: e.lineNumber, - funcName: '', - }); - } - return o; -}; - -}); // end define diff --git a/apps/email/js/ext/ext/rdcommon/log.js b/apps/email/js/ext/ext/rdcommon/log.js deleted file mode 100644 index 13dbec358ee7..000000000000 --- a/apps/email/js/ext/ext/rdcommon/log.js +++ /dev/null @@ -1,1911 +0,0 @@ -/* ***** BEGIN LICENSE BLOCK ***** - * Version: MPL 1.1/GPL 2.0/LGPL 2.1 - * - * The contents of this file are subject to the Mozilla Public License Version - * 1.1 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at: - * http://www.mozilla.org/MPL/ - * - * Software distributed under the License is distributed on an "AS IS" basis, - * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License - * for the specific language governing rights and limitations under the - * License. - * - * The Original Code is Mozilla Raindrop Code. - * - * The Initial Developer of the Original Code is - * The Mozilla Foundation - * Portions created by the Initial Developer are Copyright (C) 2011 - * the Initial Developer. All Rights Reserved. - * - * Contributor(s): - * Andrew Sutherland - * - * Alternatively, the contents of this file may be used under the terms of - * either the GNU General Public License Version 2 or later (the "GPL"), or - * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), - * in which case the provisions of the GPL or the LGPL are applicable instead - * of those above. If you wish to allow use of your version of this file only - * under the terms of either the GPL or the LGPL, and not to allow others to - * use your version of this file under the terms of the MPL, indicate your - * decision by deleting the provisions above and replace them with the notice - * and other provisions required by the GPL or the LGPL. If you do not delete - * the provisions above, a recipient may use your version of this file under - * the terms of any one of the MPL, the GPL or the LGPL. - * - * ***** END LICENSE BLOCK ***** */ - -/** - * Raindrop-specific testing/logging setup; right now holds initial 'loggest' - * implementation details that should get refactored out into their own - * thing. - * - * The permutations of logger logic is getting a bit ugly and may be burning - * more cycles than is strictly necessary. The long-term plan is some kind - * of simple (runtime) code generation. The biggest win for that is considered - * that it will simplify our code in here and generate an obvious byproduct - * that is easily understood. In cases where startup time is a concern, the - * generated code can also be persisted (like via RequireJS optimizer stage). - * This is not happening yet. - * - * - * There is a need for raindrop-specific logging logic because names tend to - * be application specific things as well as the determination of what is - * interesting. 
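The deleted extransform.js above normalized exceptions into a compact { n, m, f } record (name, message, simplified stack frames), handling both v8-thunked frame arrays and SpiderMonkey-style "func@file:line" stacks. A rough editorial illustration of the shape it produced; the filename and function name below are hypothetical:

try {
  throw new Error('boom');
} catch (ex) {
  var norm = transformException(ex);
  // norm is roughly:
  // { n: 'Error',
  //   m: 'boom',
  //   f: [ { filename: 'mailslice.js', lineNo: 42, funcName: 'doSomething' },
  //        ... ] }
}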
- * - * @typedef[ListyLogEntry @list[ - * @param[eventName String] - * @rest[Object] - * ]]{ - * The current format is meant to be generally human-readable. We put the - * name of the event at the front because it most concisely expresses what - * is happening. We put the details of the event after that, with the - * timestamp second from last and the global sequence number last. The timing - * information goes last because the timestamp (uS) is going to tend to be a - * big number that is hard for a human to process, but serves as a nice visual - * delimiter for the sequence id that comes after that humans can understand. - * It is not useful to have it earlier because it would offset the details of - * the event too far from the event name. - * } - * @typedef[ActorUniqueName Number]{ - * A positive (> 0) unique value for the effective namespace. - * } - * @typedef[ThingUniqueName Number]{ - * A negative (< 0) unique value for the effective namespace. - * } - * @typedef[UniqueName @oneof[ActorUniqueName ThingUniqueName]]{ - * Actor/logger names are positive, thing names are negative. We do this so - * that even without resolving the identifiers we can present a human - * comprehensible understanding of semantic identifiers. - * } - * @typedef[SemanticIdent @oneof[ - * @case[String]{ - * A human readable string with no special significance. - * } - * @case[@listof[@oneof[UniqueName String]]]{ - * A list containing human-readable strings with interspersed references to - * loggers/actors and things. When displayed, the unique name references - * should be replaced with custom display objects (possibly just hyperlinks) - * which should include a human-understandable representation of what the - * name is referencing. Entries in the list should be joined so that - * whitespace is inserted if the adjacent object is not a string or the - * string does not already contain whitespace or punctuation that does not - * require whitespace at the given point. More specifically, the "inside" - * of parentheses/brackets/braces and the left side of - * colons/semicolons/commas do not require whitespace. We also - * automatically insert commas-with-whitespace between consecutive named - * references. - * - * String literals must not be adjacent to other string literals; you must - * coalesce them. The whitespace logic can optimize based on this - * assumption. - * } - * ]] - * @typedef[HierLogFrag @dict[ - * @key[loggerIdent String]{ - * The schema name that defines this logger; the key in the dictionary - * passed to `register`. - * } - * @key[semanticIdent SemanticIdent]{ - * Explains to humans what this logger is about. It is not required to be - * unique, but if code always passes in the same constant string, it's - * probably not being super helpful. - * - * Examples include: - * - Test case names. - * - Parameterized test steps. (Client A sending a message to Client B.) - * - Parameterized connections. (Server A talking to Server B.) - * } - * @key[uniqueName UniqueName]{ - * A unique identifier not previously used in the effective namespace - * of the root HierLogFrag for this tree and all its descendents. - * } - * @key[born #:optional TimestampUS]{ - * Timestamp of when this logger was instantiated. - * } - * @key[died #:optional TimestampUS]{ - * Timestamp of when this logger was marked dead. - * } - * @key[entries @listof[ListyLogEntry]]{ - * The log entries for this logger this time-slice. 
- * } - * @key[kids #:optional @listof[HierLogFrag]]{ - * Log fragments of loggers deemed to be conceptually children of the logger - * that produced this logger. For example, an HTTP server would have a - * logger and its connection workers would be loggers that are children of - * the server. - * } - * ]]{ - * Loggers are organized into hierarchies - * } - * @typedef[HierLogTimeSlice @dict[ - * @key[begin TimestampUS] - * @key[end TimestampUS] - * @key[logFrag HierLogFrag] - * ]]{ - * - * } - * - * @typedef[ActorLifecycleNotifFunc @func[ - * @args[ - * @param[event @oneof["attach" "dead"]] - * @param[instance Object]{ - * The instance associated with the logger. - * } - * @param[logger Logger] - * ] - * ]]{ - * Notification function to be invoked when an actor gets attached to its - * matching logger. - * } - * - * == Original Brainstorming == - * + Unit Test Understanding - * - Want to know what the participants are and the high-level messages that - * are being exchanged, plus the ability to drill down into the messages. - * => logging should expose the actor (with type available) - * => message transmission should optionally have high-level logging - * associated in a way that provides us with the message or lets us - * sniff the payload - * + Unit Test Failure Diagnosis - * - Want to know what a good run looked like, and the differences between - * this run and that run. - * => the viewer has access to a data-store. - * + Debugging (General) - * - Want to be able to trace message delivery and related activities - * across the system. - * => Use global names where possible, perhaps identity key and message - * hashes and TCP endpoint identifiers should allow reconstitution. - * x> Having clients pass around extra identifiers seems dangerous. (Do - * not provide attackers with anything they do not already have, - * although debugging tools will of course make making use of that - * info easier.) - * + System Understanding (Initial, non-live, investigative) - * - Likely want what unit test understanding provides but with higher level - * capabilities. - * + System Understanding (Steady-state with testing system) - * - Likely want initial understanding unit test-level data but with only - * the traffic information and no ability to see the (private) data. - * + Automated Performance Runs / Regression Detection - * - Want timestamps of progress of message delivery. - * - Want easily comparable data. - * + At Scale Performance Understanding - * - Want to know throughput, latency of the various parts of the system, - * plus the ability to sample specific trace timelines. - * + At Scale Debugging of specific failures (ex: 1 user having trouble) - * - Want to be able to enable logging for the specific user, trace - * across the system. - * - * + General - * - Want to be able to easily diff for notable changes... - * => Markup or something should indicate values that will vary between - * runs. (Maybe as part of context?) - * - * + Logging efficiency - * - Want minimal impact when not enabled. - * - But willing to accept some hit for the benefit of logging. - * - Assume JITs can try and help us out if we help them. - * - Don't want to clutter up the code with logging code. - * - Don't want debugging logging code that can compromise privacy - * accidentally active. - * => Use decoration/monkeypatching for debugging logging, isolated in - * a sub-tree that can be completely excluded from the production - * build process. 
Have the decoration/monkeypatching be loud - * about what it's doing or able to fail, etc. - * - Nice if it's obvious that we can log/trace at a point. - * => Place always-on event logging in the code at hand. - * => Use (pre-computed) conditionals or maybe alternate classes for - * runtime optional logging. - * - * + Storage / Transit efficiency - * - Want logging for test runs broken up into initialization logging and - * per-test compartments. - * => Time-bucketing (per "channel") likely sufficient for debugging logging - * purposes. - * => Performance stuff that can't be reduced to time-series probably wants - * its own channel, and its data should be strongly biased to aggregates. - **/ - -define( - [ - './deferred', - './microtime', - './extransform', - 'exports' - ], - function( - Deferred, - $microtime, - $extransform, - exports - ) { - -var rawGimmeStack = $extransform.gimmeStack; -var gimmeStack = function() { - // Slice off the logger calling us and ourselves. - return rawGimmeStack().slice(2); -}; - -/** - * Per-thread/process sequence identifier to provide unambiguous ordering of - * logging events in the hopeful event we go faster than the timestamps can - * track. - * - * The long-term idea is that this gets periodically reset in an unambiguous - * fashion. Because we also package timestamps in the logs, right now we - * can get away with just making sure not to reset the sequence more than - * once in a given timestamp unit (currently 1 microsecond). This seems - * quite do-able. - * - * Note: Timestamp granularity was initially millisecond level, which was when - * this really was important. - */ -var gSeq = 0; - -exports.getCurrentSeq = function() { - return gSeq; -}; - -/** - * Per-thread/process next unique actor/logger name to allocate. - */ -var gUniqueActorName = 1; -/** - * Per-thread/process next unique thing name to allocate. - */ -var gUniqueThingName = -1; - -var ThingProto = exports.ThingProto = { - get digitalName() { - return this.__diginame; - }, - set digitalName(val) { - this.__diginame = val; - }, - toString: function() { - return '[Thing:' + this.__type + ']'; - }, - toJSON: function() { - var o = { - type: this.__type, - name: this.__name, - dname: this.__diginame, - uniqueName: this._uniqueName, - }; - if (this.__hardcodedFamily) - o.family = this.__hardcodedFamily; - return o; - }, -}; - -/** - * Create a thing with the given type, name, and prototype hierarchy and which - * is allocated with a unique name. - * - * This should not be called directly by user code; it is being surfaced for use - * by `testcontext.js` in order to define things with names drawn from an - * over-arching global namespace. The caller needs to take on the - * responsibility of exposing the thing via a logger or the like. - */ -exports.__makeThing = function makeThing(type, humanName, digitalName, proto) { - var thing; - if (proto === undefined) - proto = ThingProto; - thing = Object.create(proto); - - thing.__type = type; - thing.__name = humanName; - thing.__diginame = digitalName; - thing.__hardcodedFamily = null; - thing._uniqueName = gUniqueThingName--; - return thing; -}; - -function NOP() { -} - -/** - * Dummy logger prototype; instances gather statistics but do not generate - * detailed log events. - */ -var DummyLogProtoBase = { - _kids: undefined, - logLevel: 'dummy', - toString: function() { - return '[DummyLog]'; - }, - toJSON: function() { - // will this actually break JSON.stringify or just cause it to not use us? 
- throw new Error("I WAS NOT PLANNING ON BEING SERIALIZED"); - }, - __die: NOP, - __updateIdent: NOP, -}; - -/** - * Full logger prototype; instances accumulate log details but are intended by - * policy to not log anything considered user-private. This differs from - * `TestLogProtoBase` which, in the name of debugging and system understanding - * can capture private data but which should accordingly be test data. - */ -var LogProtoBase = { - /** - * For use by `TestContext` to poke things' names in. Actors'/loggers' names - * are derived from the list of kids. An alternate mechanism might be in - * order for this, since it is so extremely specialized. This was - * determined better than adding yet another generic logger mechanism until - * a need is shown or doing monkeypatching; at least for the time-being. - */ - _named: null, - logLevel: 'safe', - toJSON: function() { - var jo = { - loggerIdent: this.__defName, - semanticIdent: this._ident, - uniqueName: this._uniqueName, - born: this._born, - died: this._died, - events: this._eventMap, - entries: this._entries, - kids: this._kids - }; - if (this.__latchedVars.length) { - var latchedVars = this.__latchedVars, olv = {}; - for (var i = 0; i < latchedVars.length; i++) { - olv[latchedVars[i]] = this[':' + latchedVars[i]]; - } - jo.latched = olv; - } - if (this._named) - jo.named = this._named; - return jo; - }, - __die: function() { - this._died = $microtime.now(); - if (this.__FAB._onDeath) - this.__FAB._onDeath(this); - }, - __updateIdent: function(ident) { - // NOTE: you need to update useSemanticIdent if you change this. - // normalize all object references to unique name references. - if (Array.isArray(ident)) { - var normIdent = []; - for (var i = 0; i < ident.length; i++) { - var identBit = ident[i]; - if (typeof(identBit) !== "object" || identBit == null) - normIdent.push(identBit); - else - normIdent.push(identBit._uniqueName); - } - ident = normIdent; - } - this._ident = ident; - }, -}; - -/** - * Test (full) logger prototype; instances generate notifications for actor - * expectation checking on all calls and observe arguments that may contain - * user-private data (but which should only contain definitively non-private - * test data.) - * - * For simplicity of implementation, this class currently just takes the - * functions implemented by LogProtoBase and wraps them with a parameterized - * decorator. 
- */ -var TestLogProtoBase = Object.create(LogProtoBase); -TestLogProtoBase.logLevel = 'dangerous'; -TestLogProtoBase.__unexpectedEntry = function(iEntry, unexpEntry) { - var entry = ['!unexpected', unexpEntry]; - this._entries[iEntry] = entry; -}; - -TestLogProtoBase.__mismatchEntry = function(iEntry, expected, actual) { - var entry = ['!mismatch', expected, actual]; - this._entries[iEntry] = entry; -}; - -TestLogProtoBase.__failedExpectation = function(exp) { - var entry = ['!failedexp', exp, $microtime.now(), gSeq++]; - this._entries.push(entry); -}; - -TestLogProtoBase.__die = function() { - this._died = $microtime.now(); - var testActor = this._actor; - if (testActor) { - if (testActor._expectDeath) { - testActor._expectDeath = false; - testActor.__loggerFired(); - } - if (testActor._lifecycleListener) - testActor._lifecycleListener.call(null, 'dead', this.__instance, this); - } -}; - -var DIED_EVENTNAME = '(died)', DIED_EXP = [DIED_EVENTNAME]; - -var TestActorProtoBase = { - toString: function() { - return '[Actor ' + this.__defName + ': ' + this.__name + ']'; - }, - toJSON: function() { - return { - actorIdent: this.__defName, - semanticIdent: this.__name, - uniqueName: this._uniqueName, - parentUniqueName: this._parentUniqueName, - loggerUniqueName: this._logger ? this._logger._uniqueName : null, - }; - }, - - /** - * Invoked to attach a logger to an instance; exists to provide the - * possibility to generate a notification event. - */ - __attachToLogger: function(logger) { - logger._actor = this; - this._logger = logger; - if (this._lifecycleListener) - this._lifecycleListener.call(null, 'attach', logger.__instance, logger); - }, - - /** - * Invoke a notification function when this actor gets attached to its - * matching logger. This function should be invoked as soon as possible - * after the creation of the actor. - * - * @args[ - * @param[func ActorLifecycleNotifFunc] - * ] - */ - attachLifecycleListener: function(func) { - this._lifecycleListener = func; - }, - - /** - * Indicate that the caller is going to schedule some test events - * asynchronously while the step is running, so we should make sure to - * forbid our actor from resolving itself before a matching call to - * `asyncEventsAllDoneDoResolve` is made. - */ - asyncEventsAreComingDoNotResolve: function() { - if (!this._activeForTestStep) - throw new Error("Attempt to set expectations on an actor (" + - this.__defName + ": " + this.__name + ") that is not " + - "participating in this test step!"); - if (this._resolved) - throw new Error("Attempt to add expectations when already resolved!"); - - // (sorta evil-hack) - // We can reuse the _expectDeath flag as a means to ensure that we don't - // resolve the promise prematurely, although it's semantically suspect. - // (And bad things will happen if the test logger does actually die...) - if (this._expectDeath) - throw new Error("death expectation incompatible with async events"); - this._expectDeath = true; - }, - - /** - * Indiate that the caller is all done dynamically scheduling test events - * while a test step is running, and that accordingly we can allow our - * test actor to resolve its promise when all the events have completed. - */ - asyncEventsAllDoneDoResolve: function() { - // stop saying we are expecting our death; new events will trigger - // resolution - this._expectDeath = false; - // pretend something happened to potentially trigger things now. 
- this.__loggerFired(); - }, - - /** - * Expect nothing to be logged this turn, and therefore also that no - * expectations will be added. - */ - expectNothing: function() { - if (this._expectations.length) - throw new Error("Already expecting something this turn! " + - JSON.stringify(this._expectations[0])); - this._expectNothing = true; - }, - - /** - * Indicate that the only expectation we have on this actor is that its - * logger will die during this step. - */ - expectOnly__die: function() { - if (!this._activeForTestStep) - throw new Error("Attempt to set expectations on an actor (" + - this.__defName + ": " + this.__name + ") that is not " + - "participating in this test step!"); - if (this._resolved) - throw new Error("Attempt to add expectations when already resolved!"); - - if (this._expectDeath) - throw new Error("Already expecting our death! " + - "Are you using asyncEventsAreComingDoNotResolve?"); - this._expectDeath = true; - }, - - /** - * Set this actor to use 'set' matching for only this round; the list of - * expectations will be treated as an unordered set of expectations to - * match instead of an ordered list that must be matched exactly in order. - * Failures will still be generated if an entry is encountered that does not - * have a corresponding entry in the expectation list. - * - * One side-effect of this mode is that we no longer can detect what - * constitutes a mismatch, so we call everything unexpected that doesn't - * match. - */ - expectUseSetMatching: function() { - this._unorderedSetMode = true; - }, - - /** - * Prepare for activity in a test step. If we do not already have a paired - * logger, this will push us onto the tracking list so we will be paired when - * the logger is created. - */ - __prepForTestStep: function(testRuntimeContext) { - if (!this._logger) - testRuntimeContext.reportPendingActor(this); - // we should have no expectations going into a test step. - if (this._activeForTestStep) - this.__resetExpectations(); - this._activeForTestStep = true; - // and also all current entries should not be considered for expectations - // (We originally considered that we could let loggers accumulate entries - // in the background and then specify expectations about them in a - // subsequent step. That seems confusing. Seems far better for us to - // just slice a single step into multiple perspectives...) - if (this._logger) - this._iEntry = this._logger._entries.length; - }, - - /** - * Issue a promise that will be resolved when all expectations of this actor - * have been resolved. If no expectations have been issued, just return - * null. - */ - __waitForExpectations: function() { - if (this._expectNothing && - (this._expectations.length || this._iExpectation)) - return false; - // Fail immediately if a synchronous check already failed. (It would - // have tried to generate a rejection, but there was no deferral at the - // time.) - if (!this._expectationsMetSoFar) - return false; - if ((this._iExpectation >= this._expectations.length) && - (this._expectDeath ? (this._logger && this._logger._died) : true)) { - this._resolved = true; - return this._expectationsMetSoFar; - } - - if (!this._deferred) { - this._deferred = new Deferred(); - } - return this._deferred.promise; - }, - - __stepCleanup: null, - - /** - * Cleanup state at the end of the step; also, check if we moved into a - * failure state after resolving our promise. - * - * @return["success" Boolean]{ - * True if everything is (still) satisfied, false if a failure occurred - * at some point. 
- * } - */ - __resetExpectations: function() { - if (this.__stepCleanup) - this.__stepCleanup(); - - var expectationsWereMet = this._expectationsMetSoFar; - this._expectationsMetSoFar = true; - // kill all processed entries. - this._iExpectation = 0; - this._ignore = null; - this._expectations.splice(0, this._expectations.length); - this._expectNothing = false; - this._expectDeath = false; - this._unorderedSetMode = false; - this._deferred = null; - this._resolved = false; - this._activeForTestStep = false; - return expectationsWereMet; - }, - - __failUnmetExpectations: function() { - if (this._iExpectation < this._expectations.length && this._logger) { - for (var i = this._iExpectation; i < this._expectations.length; i++) { - this._logger.__failedExpectation(this._expectations[i]); - } - } - if (this._expectDeath && !this._logger._died) - this._logger.__failedExpectation(DIED_EXP); - }, - - /** - * Invoked by the test-logger associated with this actor to let us know that - * something has been logged so that we can perform an expectation check and - * fulfill our promise/reject our promise, as appropriate. - */ - __loggerFired: function() { - // we can't do anything if we don't have an actor. - var entries = this._logger._entries, expy, entry; - // -- unordered mode - if (this._unorderedSetMode) { - - while (this._iExpectation < this._expectations.length && - this._iEntry < entries.length) { - entry = entries[this._iEntry++]; - // ignore meta-entries (which are prefixed with a '!') - if (entry[0][0] === "!") - continue; - // ignore ignored entries - if (this._ignore && this._ignore.hasOwnProperty(entry[0])) - continue; - - // - try all the expectations for a match - var foundMatch = false; - for (var iExp = this._iExpectation; iExp < this._expectations.length; - iExp++) { - expy = this._expectations[iExp]; - - // - on matches, reorder the expectation and bump our pointer - if (expy[0] === entry[0] && - this['_verify_' + expy[0]](expy, entry)) { - if (iExp !== this._iExpectation) { - this._expectations[iExp] = this._expectations[this._iExpectation]; - this._expectations[this._iExpectation] = expy; - } - this._iExpectation++; - foundMatch = true; - break; - } - } - if (!foundMatch) { - this._logger.__unexpectedEntry(this._iEntry - 1, entry); - this._expectationsMetSoFar = false; - if (this._deferred) - this._deferred.reject([this.__defName, expy, entry]); - } - } - - // - generate an unexpected failure if we ran out of expectations - if ((this._iExpectation === this._expectations.length) && - (entries.length > this._iEntry)) { - // note: as below, there is no point trying to generate a rejection - // at this stage. - this._expectationsMetSoFar = false; - // no need to -1 because we haven't incremented past the entry. - this._logger.__unexpectedEntry(this._iEntry, entries[this._iEntry]); - // do increment past... - this._iEntry++; - } - // - generate success if we have used up our expectations - else if ((this._iExpectation >= this._expectations.length) && - this._deferred && - (this._expectDeath ? 
(this._logger && this._logger._died) - : true)) { - this._resolved = true; - this._deferred.resolve(); - } - return; - } - - // -- ordered mode - while (this._iExpectation < this._expectations.length && - this._iEntry < entries.length) { - expy = this._expectations[this._iExpectation]; - entry = entries[this._iEntry++]; - - // ignore meta-entries (which are prefixed with a '!') - if (entry[0][0] === "!") - continue; - // ignore ignored entries - if (this._ignore && this._ignore.hasOwnProperty(entry[0])) - continue; - - // Currently, require exact pairwise matching between entries and - // expectations. - if (expy[0] !== entry[0]) { - this._logger.__unexpectedEntry(this._iEntry - 1, entry); - // (fallout, triggers error) - } - else if (!this['_verify_' + expy[0]](expy, entry)) { - this._logger.__mismatchEntry(this._iEntry - 1, expy, entry); - // things did line up correctly though, so boost the expecation number - // so we don't convert subsequent expectations into unexpected ones. - this._iExpectation++; - // (fallout, triggers error) - } - else { - this._iExpectation++; - continue; - } - // (only bad cases fall out without hitting a continue) - if (this._expectationsMetSoFar) { - this._expectationsMetSoFar = false; - if (this._deferred) - this._deferred.reject([this.__defName, expy, entry]); - } - return; - } - // - unexpected log events should count as failure - // We only care if: 1) we were marked active, 2) we had at least one - // expectation this step OR we were explicitly marked to have no - // expectations this step. - // Because we will already have resolved() our promise if we get here, - // it's up to the test driver to come back and check us for this weird - // failure, possibly after waiting a tick to see if any additional events - // come in. - if (this._activeForTestStep && - ((this._expectations.length && - (this._iExpectation === this._expectations.length) && - (entries.length > this._iEntry)) || - (!this._expectations.length && - this._expectNothing))) { - // Only get upset if this is not an ignored event. - if (!this._ignore || - !this._ignore.hasOwnProperty(entries[this._iEntry][0])) { - this._expectationsMetSoFar = false; - this._logger.__unexpectedEntry(this._iEntry, entries[this._iEntry]); - } - // We intentionally increment iEntry because otherwise we'll keep marking - // the same entry as unexpected when that is in fact not what we desire. - // In previous parts of this function it made sense not to increment, but - // here it just causes confusion. - this._iEntry++; - } - - if ((this._iExpectation >= this._expectations.length) && this._deferred && - (this._expectDeath ? (this._logger && this._logger._died) : true)) { - this._resolved = true; - this._deferred.resolve(); - } - }, -}; -exports.TestActorProtoBase = TestActorProtoBase; - -/** - * Recursive traverse objects looking for (and eliding) very long strings. We - * do this because our logs are getting really large (6 megs!), and a likely - * source of useless bloat are the encrypted message strings. Although we - * care how big the strings get, the reality is that until we switch to - * avro/a binary encoding, they are going to bloat horribly under JSON, - * especially when nested levels of encryption and JSON enter the picture. - * - * We will go a maximum of 3 layers deep. Because this complicates having an - * efficient fast-path where we detect that we don't need to clone-and-modify, - * we currently always just clone-and-modify. 
- */ -function simplifyInsaneObjects(obj, dtype, curDepth) { - if (obj == null || typeof(obj) !== "object") - return obj; - if (!curDepth) - curDepth = 0; - var nextDepth = curDepth + 1; - var limitStrings = 64; - - if (dtype) { - if (dtype === 'tostring') { - if (Array.isArray(obj)) - return obj.join(''); - else if (typeof(obj) !== 'string') - return obj.toString(); - } - } - - var oot = {}; - for (var key in obj) { - var val = obj[key]; - switch (typeof(val)) { - case "string": - if (limitStrings && val.length > limitStrings) { - oot[key] = "OMITTED STRING, originally " + val.length + - " bytes long"; - } - else { - oot[key] = val; - } - break; - case "object": - if (val == null || - Array.isArray(val) || - ("toJSON" in val) || - curDepth >= 2) { - oot[key] = val; - } - else { - oot[key] = simplifyInsaneObjects(val, null, nextDepth); - } - break; - default: - oot[key] = val; - break; - } - } - return oot; -} - -/** - * Maximum comparison depth for argument equivalence in expectation checking. - * This value gets bumped every time I throw something at it that fails that - * still seems reasonable to me. - */ -var COMPARE_DEPTH = 6; -function boundedCmpObjs(a, b, depthLeft) { - var aAttrCount = 0, bAttrCount = 0, key, nextDepth = depthLeft - 1; - - if ('toJSON' in a) - a = a.toJSON(); - if ('toJSON' in b) - b = b.toJSON(); - - for (key in a) { - aAttrCount++; - if (!(key in b)) - return false; - - if (depthLeft) { - if (!smartCompareEquiv(a[key], b[key], nextDepth)) - return false; - } - else { - if (a[key] !== b[key]) - return false; - } - } - // the theory is that if every key in a is in b and its value is equal, and - // there are the same number of keys in b, then they must be equal. - for (key in b) { - bAttrCount++; - } - if (aAttrCount !== bAttrCount) - return false; - return true; -} - -/** - * @return[Boolean]{ - * True when equivalent, false when not equivalent. - * } - */ -function smartCompareEquiv(a, b, depthLeft) { - var ta = typeof(a), tb = typeof(b); - if (ta !== 'object' || (tb !== ta) || (a == null) || (b == null)) - return a === b; - // fast-path for identical objects - if (a === b) - return true; - if (Array.isArray(a)) { - if (!Array.isArray(b)) - return false; - if (a.length !== b.length) - return false; - for (var iArr = 0; iArr < a.length; iArr++) { - if (!smartCompareEquiv(a[iArr], b[iArr], depthLeft - 1)) - return false; - } - return true; - } - return boundedCmpObjs(a, b, depthLeft); -} -exports.smartCompareEquiv = smartCompareEquiv; - -function makeIgnoreFunc(name) { - return function ignoreFunc() { - if (!this._ignore) - this._ignore = {}; - this._ignore[name] = true; - }; -}; - -/** - * Builds the logging and testing helper classes for the `register` driver. - * - * It operates in a similar fashion to wmsy's ProtoFab mechanism; state is - * provided to helpers by lexically closed over functions. No code generation - * is used, but it's intended to be an option. - */ -function LoggestClassMaker(moduleFab, name) { - this.moduleFab = moduleFab; - this.name = name; - - this._latchedVars = []; - - // steady-state minimal logging logger (we always want statistics!) 
- var dummyProto = this.dummyProto = Object.create(DummyLogProtoBase); - dummyProto.__defName = name; - dummyProto.__latchedVars = this._latchedVars; - dummyProto.__FAB = this.moduleFab; - - // full-logging logger - var logProto = this.logProto = Object.create(LogProtoBase); - logProto.__defName = name; - logProto.__latchedVars = this._latchedVars; - logProto.__FAB = this.moduleFab; - - // testing full-logging logger - var testLogProto = this.testLogProto = Object.create(TestLogProtoBase); - testLogProto.__defName = name; - testLogProto.__latchedVars = this._latchedVars; - testLogProto.__FAB = this.moduleFab; - - // testing actor for expectations, etc. - var testActorProto = this.testActorProto = Object.create(TestActorProtoBase); - testActorProto.__defName = name; - - /** Maps helper names to their type for collision reporting by `_define`. */ - this._definedAs = {}; -} -LoggestClassMaker.prototype = { - /** - * Name collision detection helper; to be invoked prior to defining a name - * with the type of name being defined so we can tell you both types that - * are colliding. - */ - _define: function(name, type) { - if (this._definedAs.hasOwnProperty(name)) { - throw new Error("Attempt to define '" + name + "' as a " + type + - " when it is already defined as a " + - this._definedAs[name] + "!"); - } - this._definedAs[name] = type; - }, - - /** - * Wrap a logProto method to be a testLogProto invocation that generates a - * constraint checking thing. - */ - _wrapLogProtoForTest: function(name) { - var logFunc = this.logProto[name]; - this.testLogProto[name] = function() { - var rval = logFunc.apply(this, arguments); - var testActor = this._actor; - if (testActor) - testActor.__loggerFired(); - return rval; - }; - }, - - addStateVar: function(name) { - this._define(name, 'state'); - - this.dummyProto[name] = NOP; - - var stateStashName = ':' + name; - this.logProto[name] = function(val) { - var oldVal = this[stateStashName]; - // only log the transition if it's an actual transition - if (oldVal === val) - return; - this[stateStashName] = val; - this._entries.push([name, val, $microtime.now(), gSeq++]); - }; - - this._wrapLogProtoForTest(name); - - this.testActorProto['expect_' + name] = function(val) { - if (!this._activeForTestStep) - throw new Error("Attempt to set expectations on an actor (" + - this.__defName + ": " + this.__name + ") that is not " + - "participating in this test step!"); - if (this._resolved) - throw new Error("Attempt to add expectations when already resolved!"); - // If we are being told to ignore stuff this round, eat the expectation. - if (!this._ignore || !this._ignore[name]) - this._expectations.push([name, gimmeStack(), val]); - return this; - }; - this.testActorProto['ignore_' + name] = makeIgnoreFunc(name); - this.testActorProto['_verify_' + name] = function(exp, entry) { - return smartCompareEquiv(exp[2], entry[1], COMPARE_DEPTH); - }; - }, - /** - * Dubious mechanism to allow logger objects to be used like a task - * construct that can track success/failure or some other terminal state. - * Contrast with state-vars which are intended to track an internal state - * for analysis but not to serve as a summarization of the application - * object's life. - * This is being brought into being for the unit testing framework so that - * we can just use the logger hierarchy as the actual result hierarchy. - * This may be a horrible idea. 
- * - * This currently does not generate or support the expectation subsystem - * since the only use right now is the testing subsystem. - */ - addLatchedState: function(name) { - this._define(name, 'latchedState'); - this._latchedVars.push(name); - var latchedName = ':' + name; - - this.testLogProto[name] = this.logProto[name] = this.dummyProto[name] = - function(val) { - this[latchedName] = val; - }; - }, - addEvent: function(name, args, testOnlyLogArgs) { - this._define(name, 'event'); - - var numArgs = 0, useArgs = []; - for (var key in args) { - numArgs++; - useArgs.push(args[key]); - } - - this.dummyProto[name] = function() { - this._eventMap[name] = (this._eventMap[name] || 0) + 1; - }; - - this.logProto[name] = function() { - this._eventMap[name] = (this._eventMap[name] || 0) + 1; - var entry = [name]; - for (var iArg = 0; iArg < numArgs; iArg++) { - if (useArgs[iArg] === EXCEPTION) { - var arg = arguments[iArg]; - entry.push($extransform.transformException(arg)); - } - else { - entry.push(arguments[iArg]); - } - } - entry.push($microtime.now()); - entry.push(gSeq++); - this._entries.push(entry); - }; - - if (!testOnlyLogArgs) { - this._wrapLogProtoForTest(name); - } - else { - var numTestOnlyArgs = 0, useTestArgs = []; - for (key in testOnlyLogArgs) { - numTestOnlyArgs++; - useTestArgs.push(testOnlyLogArgs[key]); - } - this.testLogProto[name] = function() { - this._eventMap[name] = (this._eventMap[name] || 0) + 1; - var entry = [name], iArg; - for (iArg = 0; iArg < numArgs; iArg++) { - if (useArgs[iArg] === EXCEPTION) { - var arg = arguments[iArg]; - entry.push($extransform.transformException(arg)); - } - else { - entry.push(arguments[iArg]); - } - } - entry.push($microtime.now()); - entry.push(gSeq++); - // ++ new bit - for (var iEat=0; iEat < numTestOnlyArgs; iEat++, iArg++) { - entry.push(simplifyInsaneObjects(arguments[iArg], useTestArgs[iEat])); - } - // -- end new bit - this._entries.push(entry); - // ++ firing bit... - var testActor = this._actor; - if (testActor) - testActor.__loggerFired(); - }; - } - - this.testActorProto['expect_' + name] = function() { - if (!this._activeForTestStep) - throw new Error("Attempt to set expectations on an actor (" + - this.__defName + ": " + this.__name + ") that is not " + - "participating in this test step!"); - if (this._resolved) - throw new Error("Attempt to add expectations when already resolved!"); - - var exp = [name, gimmeStack()]; - for (var iArg = 0; iArg < arguments.length; iArg++) { - if (useArgs[iArg] && useArgs[iArg] !== EXCEPTION) { - exp.push(arguments[iArg]); - } - } - // If we are being told to ignore stuff this round, eat the expectation. - if (!this._ignore || !this._ignore[name]) - this._expectations.push(exp); - return this; - }; - this.testActorProto['ignore_' + name] = makeIgnoreFunc(name); - this.testActorProto['_verify_' + name] = function(tupe, entry) { - // only check arguments we had expectations for. 
- for (var iArg = 2; iArg < tupe.length; iArg++) { - if (!smartCompareEquiv(tupe[iArg], entry[iArg - 1], COMPARE_DEPTH)) - return false; - } - return true; - }; - }, - addAsyncJob: function(name, args, testOnlyLogArgs) { - var name_begin = name + '_begin', name_end = name + '_end'; - this.dummyProto[name_begin] = NOP; - this.dummyProto[name_end] = NOP; - - var numArgs = 0, numTestOnlyArgs = 0, useArgs = [], useTestArgs = []; - for (var key in args) { - numArgs++; - useArgs.push(args[key]); - } - - this.logProto[name_begin] = function() { - this._eventMap[name_begin] = (this._eventMap[name_begin] || 0) + 1; - var entry = [name_begin]; - for (var iArg = 0; iArg < numArgs; iArg++) { - if (useArgs[iArg] === EXCEPTION) { - var arg = arguments[iArg]; - entry.push($extransform.transformException(arg)); - } - else { - entry.push(arguments[iArg]); - } - } - entry.push($microtime.now()); - entry.push(gSeq++); - this._entries.push(entry); - }; - this.logProto[name_end] = function() { - this._eventMap[name_end] = (this._eventMap[name_end] || 0) + 1; - var entry = [name_end]; - for (var iArg = 0; iArg < numArgs; iArg++) { - if (useArgs[iArg] === EXCEPTION) { - var arg = arguments[iArg]; - entry.push($extransform.transformException(arg)); - } - else { - entry.push(arguments[iArg]); - } - } - entry.push($microtime.now()); - entry.push(gSeq++); - this._entries.push(entry); - }; - - if (!testOnlyLogArgs) { - this._wrapLogProtoForTest(name_begin); - this._wrapLogProtoForTest(name_end); - } - else { - for (key in testOnlyLogArgs) { - numTestOnlyArgs++; - useTestArgs.push(testOnlyLogArgs[key]); - } - // cut-paste-modify of the above... - this.testLogProto[name_begin] = function() { - this._eventMap[name_begin] = (this._eventMap[name_begin] || 0) + 1; - var entry = [name_begin]; - for (var iArg = 0; iArg < numArgs; iArg++) { - if (useArgs[iArg] === EXCEPTION) { - var arg = arguments[iArg]; - entry.push($extransform.transformException(arg)); - } - else { - entry.push(arguments[iArg]); - } - } - entry.push($microtime.now()); - entry.push(gSeq++); - // ++ new bit - for (var iEat=0; iEat < numTestOnlyArgs; iEat++, iArg++) { - entry.push(simplifyInsaneObjects(arguments[iArg], useTestArgs[iEat])); - } - // -- end new bit - this._entries.push(entry); - // ++ firing bit... - var testActor = this._actor; - if (testActor) - testActor.__loggerFired(); - }; - this.testLogProto[name_end] = function() { - this._eventMap[name_end] = (this._eventMap[name_end] || 0) + 1; - var entry = [name_end]; - for (var iArg = 0; iArg < numArgs; iArg++) { - if (useArgs[iArg] === EXCEPTION) { - var arg = arguments[iArg]; - entry.push($extransform.transformException(arg)); - } - else { - entry.push(arguments[iArg]); - } - } - entry.push($microtime.now()); - entry.push(gSeq++); - // ++ new bit - for (var iEat=0; iEat < numTestOnlyArgs; iEat++, iArg++) { - entry.push(simplifyInsaneObjects(arguments[iArg], useTestArgs[iEat])); - } - // -- end new bit - this._entries.push(entry); - // ++ firing bit... 
- var testActor = this._actor; - if (testActor) - testActor.__loggerFired(); - }; - } - - this.testActorProto['expect_' + name_begin] = function() { - if (!this._activeForTestStep) - throw new Error("Attempt to set expectations on an actor (" + - this.__defName + ": " + this.__name + ") that is not " + - "participating in this test step!"); - if (this._resolved) - throw new Error("Attempt to add expectations when already resolved!"); - - var exp = [name_begin, gimmeStack()]; - for (var iArg = 0; iArg < arguments.length; iArg++) { - if (useArgs[iArg] && useArgs[iArg] !== EXCEPTION) - exp.push(arguments[iArg]); - } - // If we are being told to ignore stuff this round, eat the expectation. - if (!this._ignore || !this._ignore[name_begin]) - this._expectations.push(exp); - return this; - }; - this.testActorProto['ignore_' + name_begin] = makeIgnoreFunc(name_begin); - this.testActorProto['expect_' + name_end] = function() { - if (!this._activeForTestStep) - throw new Error("Attempt to set expectations on an actor (" + - this.__defName + ": " + this.__name + ") that is not " + - "participating in this test step!"); - if (this._resolved) - throw new Error("Attempt to add expectations when already resolved!"); - - var exp = [name_end, gimmeStack()]; - for (var iArg = 0; iArg < arguments.length; iArg++) { - if (useArgs[iArg] && useArgs[iArg] !== EXCEPTION) - exp.push(arguments[iArg]); - } - // If we are being told to ignore stuff this round, eat the expectation. - if (!this._ignore || !this._ignore[name_end]) - this._expectations.push(exp); - return this; - }; - this.testActorProto['ignore_' + name_end] = makeIgnoreFunc(name_end); - this.testActorProto['_verify_' + name_begin] = - this.testActorProto['_verify_' + name_end] = function(tupe, entry) { - // only check arguments we had expectations for. - for (var iArg = 2; iArg < tupe.length; iArg++) { - if (!smartCompareEquiv(tupe[iArg], entry[iArg - 1], COMPARE_DEPTH)) - return false; - } - return true; - }; - }, - /** - * Call like: loggedCall(logArg1, ..., logArgN, useAsThis, func, - * callArg1, ... callArgN); - */ - addCall: function(name, logArgs, testOnlyLogArgs) { - this._define(name, 'call'); - - var numLogArgs = 0, numTestOnlyArgs = 0, useArgs = [], useTestArgs = []; - for (var key in logArgs) { - numLogArgs++; - useArgs.push(logArgs[key]); - } - - this.dummyProto[name] = function() { - var rval; - try { - rval = arguments[numLogArgs+1].apply( - arguments[numLogArgs], Array.prototype.slice.call(arguments, - numLogArgs+2)); - } - catch(ex) { - // (call errors are events) - this._eventMap[name] = (this._eventMap[name] || 0) + 1; - rval = ex; - } - return rval; - }; - - this.logProto[name] = function() { - var rval, iArg; - var entry = [name]; - for (iArg = 0; iArg < numLogArgs; iArg++) { - entry.push(arguments[iArg]); - } - entry.push($microtime.now()); - entry.push(gSeq++); - // push this prior to the call for ordering reasons (the call can log - // entries too!) - this._entries.push(entry); - try { - rval = arguments[numLogArgs+1].apply( - arguments[numLogArgs], Array.prototype.slice.call(arguments, iArg+2)); - entry.push($microtime.now()); - entry.push(gSeq++); - entry.push(null); - } - catch(ex) { - entry.push($microtime.now()); - entry.push(gSeq++); - // We can't push the exception directly because its "arguments" payload - // can have rich object references that will cause issues during JSON - // serialization. 
We most care that it can create circular references, - // but also are not crazy about serializing potentially huge object - // graphs. This might be a great place to perform some logHelper - // style transformations. - entry.push($extransform.transformException(ex)); - // (call errors are events) - this._eventMap[name] = (this._eventMap[name] || 0) + 1; - rval = ex; - } - - return rval; - }; - - if (!testOnlyLogArgs) { - this._wrapLogProtoForTest(name); - } - else { - for (key in testOnlyLogArgs) { - numTestOnlyArgs++; - useTestArgs.push(testOnlyLogArgs[key]); - } - // cut-paste-modify of the above... - this.testLogProto[name] = function() { - var rval, iArg; - var entry = [name]; - for (iArg = 0; iArg < numLogArgs; iArg++) { - entry.push(arguments[iArg]); - } - entry.push($microtime.now()); - entry.push(gSeq++); - // push this prior to the call for ordering reasons (the call can log - // entries too!) - this._entries.push(entry); - try { - rval = arguments[numLogArgs+1].apply( - arguments[numLogArgs], Array.prototype.slice.call(arguments, iArg+2)); - entry.push($microtime.now()); - entry.push(gSeq++); - entry.push(null); - // ++ new bit - iArg += 2; - for (var iEat=0; iEat < numTestOnlyArgs; iEat++, iArg++) { - entry.push(simplifyInsaneObjects(arguments[iArg], useTestArgs[iEat])); - } - // -- end new bit - } - catch(ex) { - entry.push($microtime.now()); - entry.push(gSeq++); - // We can't push the exception directly because its "arguments" payload - // can have rich object references that will cause issues during JSON - // serialization. We most care that it can create circular references, - // but also are not crazy about serializing potentially huge object - // graphs. This might be a great place to perform some logHelper - // style transformations. - entry.push($extransform.transformException(ex)); - // ++ new bit - iArg += 2; - for (var iEat=0; iEat < numTestOnlyArgs; iEat++, iArg++) { - entry.push(simplifyInsaneObjects(arguments[iArg], useTestArgs[iEat])); - } - // -- end new bit - // (call errors are events) - this._eventMap[name] = (this._eventMap[name] || 0) + 1; - rval = ex; - } - - // ++ firing bit... - var testActor = this._actor; - if (testActor) - testActor.__loggerFired(); - return rval; - }; - } - - // XXX we have no way to indicate we expect/desire an assertion - // (we will just explode on any logged exception) - this.testActorProto['expect_' + name] = function() { - if (!this._activeForTestStep) - throw new Error("Attempt to set expectations on an actor (" + - this.__defName + ": " + this.__name + ") that is not " + - "participating in this test step!"); - if (this._resolved) - throw new Error("Attempt to add expectations when already resolved!"); - - var exp = [name, gimmeStack()]; - for (var iArg = 0; iArg < arguments.length; iArg++) { - if (useArgs[iArg]) - exp.push(arguments[iArg]); - } - // If we are being told to ignore stuff this round, eat the expectation. - if (!this._ignore || !this._ignore[name]) - this._expectations.push(exp); - return this; - }; - this.testActorProto['ignore_' + name] = makeIgnoreFunc(name); - this.testActorProto['_verify_' + name] = function(tupe, entry) { - // report failure if an exception was returned! - if (entry.length > numLogArgs + numTestOnlyArgs + 6) { - return false; - } - // only check arguments we had expectations for. 
- for (var iArg = 2; iArg < tupe.length; iArg++) { - if (!smartCompareEquiv(tupe[iArg], entry[iArg - 1], COMPARE_DEPTH)) - return false; - } - return true; - }; - }, - addError: function(name, args) { - this._define(name, 'error'); - - var numArgs = 0, useArgs = []; - for (var key in args) { - numArgs++; - useArgs.push(args[key]); - } - - this.dummyProto[name] = function() { - this._eventMap[name] = (this._eventMap[name] || 0) + 1; - }; - - this.logProto[name] = function() { - this._eventMap[name] = (this._eventMap[name] || 0) + 1; - var entry = [name]; - for (var iArg = 0; iArg < numArgs; iArg++) { - if (useArgs[iArg] === EXCEPTION) { - var arg = arguments[iArg]; - entry.push($extransform.transformException(arg)); - } - else { - entry.push(arguments[iArg]); - } - } - entry.push($microtime.now()); - entry.push(gSeq++); - this._entries.push(entry); - }; - - this._wrapLogProtoForTest(name); - - this.testActorProto['expect_' + name] = function() { - if (!this._activeForTestStep) - throw new Error("Attempt to set expectations on an actor (" + - this.__defName + ": " + this.__name + ") that is not " + - "participating in this test step!"); - if (this._resolved) - throw new Error("Attempt to add expectations when already resolved!"); - - var exp = [name]; - for (var iArg = 0; iArg < arguments.length; iArg++) { - if (useArgs[iArg] && useArgs[iArg] !== EXCEPTION) - exp.push(arguments[iArg]); - } - // If we are being told to ignore stuff this round, eat the expectation. - if (!this._ignore || !this._ignore[name]) - this._expectations.push(exp); - return this; - }; - this.testActorProto['ignore_' + name] = makeIgnoreFunc(name); - this.testActorProto['_verify_' + name] = function(tupe, entry) { - // only check arguments we had expectations for. - for (var iArg = 2; iArg < tupe.length; iArg++) { - if (!smartCompareEquiv(tupe[iArg], entry[iArg - 1], COMPARE_DEPTH)) - return false; - } - return true; - }; - }, - /** - * Process the description of how to map the semantic ident list. Currently - * we do absolutely nothing with this on the generation side, but the blob - * is used by log processing logic to stitch stuff together in the UI. - * - * We might end up using this on the generation side when under test so - * that we can better link loggers with actors in the face of potential - * ambiguity about who goes with which actor. The counter-argument to that - * idea is that during functional testing we don't want that much activity - * going on. When performance testing, we would want that, but in that - * case we won't be running with actors anyways. 
- */ - useSemanticIdent: function(args) { - }, - - makeFabs: function() { - var moduleFab = this.moduleFab; - - var dummyCon = function dummyConstructor() { - this._eventMap = {}; - }; - dummyCon.prototype = this.dummyProto; - - var loggerCon = function loggerConstructor(ident) { - this.__updateIdent(ident); - this._uniqueName = gUniqueActorName++; - this._eventMap = {}; - this._entries = []; - this._born = $microtime.now(); - this._died = null; - this._kids = null; - }; - loggerCon.prototype = this.logProto; - - var testerCon = function testerLoggerConstructor(ident) { - loggerCon.call(this, ident); - this._actor = null; - }; - testerCon.prototype = this.testLogProto; - - var testActorCon = function testActorConstructor(name, _parentUniqueName) { - this.__name = name; - this._uniqueName = gUniqueActorName++; - this._parentUniqueName = _parentUniqueName; - // initially undefined, goes null when we register for pairing, goes to - // the logger instance when paired. - this._logger = undefined; - this._ignore = null; - this._expectations = []; - this._expectationsMetSoFar = true; - this._expectNothing = false; - this._expectDeath = false; - this._unorderedSetMode = false; - this._activeForTestStep = false; - this._iEntry = this._iExpectation = 0; - this._lifecycleListener = null; - }; - testActorCon.prototype = this.testActorProto; - this.moduleFab._actorCons[this.name] = testActorCon; - - /** - * Determine what type of logger to create, whether to tell other things - * in the system about it, etc. - */ - var loggerDecisionFab = function loggerDecisionFab(implInstance, - parentLogger, ident) { - var logger, tester; - // - Testing - if ((tester = (moduleFab._underTest || loggerDecisionFab._underTest))) { -//console.error("MODULE IS UNDER TEST FOR: " + testerCon.prototype.__defName); - if (typeof(parentLogger) === "string") - throw new Error("A string can't be a logger => not a valid parent"); - logger = new testerCon(ident); - logger.__instance = implInstance; - parentLogger = tester.reportNewLogger(logger, parentLogger); - } - // - Logging - else if (moduleFab._generalLog || testerCon._generalLog) { -//console.error("general logger for: " + testerCon.prototype.__defName); - logger = new loggerCon(ident); - } - // - Statistics Only - else { -//console.error("statistics only for: " + testerCon.prototype.__defName); - return new dummyCon(); - } - - if (parentLogger) { - if (parentLogger._kids === undefined) { - } - else if (parentLogger._kids === null) { - parentLogger._kids = [logger]; - } - else { - parentLogger._kids.push(logger); - } - } - return logger; - }; - this.moduleFab[this.name] = loggerDecisionFab; - }, -}; - -var LEGAL_FABDEF_KEYS = [ - 'implClass', 'type', 'subtype', 'topBilling', 'semanticIdent', 'dicing', - 'stateVars', 'latchState', 'events', 'asyncJobs', 'calls', 'errors', - 'TEST_ONLY_calls', 'TEST_ONLY_events', 'TEST_ONLY_asyncJobs', - 'LAYER_MAPPING', -]; - -function augmentFab(mod, fab, defs) { - var testActors = fab._testActors, rawDefs = fab._rawDefs; - - for (var defName in defs) { - var key, loggerDef = defs[defName], testOnlyMeta; - rawDefs[defName] = loggerDef; - - for (key in loggerDef) { - if (LEGAL_FABDEF_KEYS.indexOf(key) === -1) { - throw new Error("key '" + key + "' is not a legal log def key"); - } - } - - var maker = new LoggestClassMaker(fab, defName); - - if ("semanticIdent" in loggerDef) { - maker.useSemanticIdent(loggerDef.semanticIdent); - } - if ("stateVars" in loggerDef) { - for (key in loggerDef.stateVars) { - maker.addStateVar(key); - } - } - if 
("latchState" in loggerDef) { - for (key in loggerDef.latchState) { - maker.addLatchedState(key); - } - } - if ("events" in loggerDef) { - var testOnlyEventsDef = null; - if ("TEST_ONLY_events" in loggerDef) - testOnlyEventsDef = loggerDef.TEST_ONLY_events; - for (key in loggerDef.events) { - testOnlyMeta = null; - if (testOnlyEventsDef && testOnlyEventsDef.hasOwnProperty(key)) - testOnlyMeta = testOnlyEventsDef[key]; - maker.addEvent(key, loggerDef.events[key], testOnlyMeta); - } - } - if ("asyncJobs" in loggerDef) { - var testOnlyAsyncJobsDef = null; - if ("TEST_ONLY_asyncJobs" in loggerDef) - testOnlyAsyncJobsDef = loggerDef.TEST_ONLY_asyncJobs; - for (key in loggerDef.asyncJobs) { - testOnlyMeta = null; - if (testOnlyAsyncJobsDef && testOnlyAsyncJobsDef.hasOwnProperty(key)) - testOnlyMeta = testOnlyAsyncJobsDef[key]; - maker.addAsyncJob(key, loggerDef.asyncJobs[key], testOnlyMeta); - } - } - if ("calls" in loggerDef) { - var testOnlyCallsDef = null; - if ("TEST_ONLY_calls" in loggerDef) - testOnlyCallsDef = loggerDef.TEST_ONLY_calls; - for (key in loggerDef.calls) { - testOnlyMeta = null; - if (testOnlyCallsDef && testOnlyCallsDef.hasOwnProperty(key)) - testOnlyMeta = testOnlyCallsDef[key]; - maker.addCall(key, loggerDef.calls[key], testOnlyMeta); - } - } - if ("errors" in loggerDef) { - for (key in loggerDef.errors) { - maker.addError(key, loggerDef.errors[key]); - } - } - - maker.makeFabs(); - } - - return fab; -}; -exports.__augmentFab = augmentFab; - -var ALL_KNOWN_FABS = []; - -/** - * Do not turn on event-logging without an explicit call to - * `enableGeneralLogging`. This is done because logging is a memory leak - * without a known consumer. - */ -var GENERAL_LOG_DEFAULT = false; -var UNDER_TEST_DEFAULT = false; - -exports.register = function register(mod, defs) { - var fab = { - _generalLog: GENERAL_LOG_DEFAULT, - _underTest: UNDER_TEST_DEFAULT, - _actorCons: {}, - _rawDefs: {}, - _onDeath: null - }; - ALL_KNOWN_FABS.push(fab); - return augmentFab(mod, fab, defs); -}; - -/** - * Provide schemas for every logger that has been registered. - */ -exports.provideSchemaForAllKnownFabs = function schemaForAllKnownFabs() { - var schema = { $v: 2 }; - for (var i = 0; i < ALL_KNOWN_FABS.length; i++) { - var rawDefs = ALL_KNOWN_FABS[i]._rawDefs; - for (var key in rawDefs) { - schema[key] = rawDefs[key]; - } - } - return schema; -}; - -var BogusTester = { - reportNewLogger: function(logger, parentLogger) { - // No one cares, this is just a way to get the tester constructors - // triggered. - return parentLogger; - }, -}; - -/** - * Turn on logging at an event granularity. - */ -exports.enableGeneralLogging = function() { - GENERAL_LOG_DEFAULT = true; - for (var i = 0; i < ALL_KNOWN_FABS.length; i++) { - var logfab = ALL_KNOWN_FABS[i]; - logfab._generalLog = true; - } -}; - -/** - * Mark all logfabs under test so we get full log data; DO NOT USE THIS UNDER - * NON-DEVELOPMENT PURPOSES BECAUSE USER DATA CAN BE ENTRAINED AND THAT IS VERY - * BAD. - * - * Note: No effort is made to avoid marking any logfabs as under test. This - * would be a problem if used while the testing subsystem is active, but you - * shouldn't do that. 
- */ -exports.DEBUG_markAllFabsUnderTest = function() { - UNDER_TEST_DEFAULT = BogusTester; - for (var i = 0; i < ALL_KNOWN_FABS.length; i++) { - var logfab = ALL_KNOWN_FABS[i]; - - logfab._underTest = BogusTester; - } -}; - -/** - * For EXTREME debugging similar to DEBUG_markAllFabsUnderTest; log entries are - * both: - * 1) Passed to dumpFunc as they are logged so we get the logs in the "adb - * logcat" output. - * 2) Retained so you better have hooked up circular logging or you better have - * infinite memory. - * - * @param {Function} dumpFunc - * This should resemble the standard mozilla dump() func wherein we must - * provide newlines. (And we definitely will provide them.) - */ -exports.DEBUG_realtimeLogEverything = function(dumpFunc) { - var EverythingTester = { - reportNewLogger: function(logger, parentLogger) { - logger._actor = { - __loggerFired: function() { - var entry = logger._entries[logger._entries.length - 1]; - // Let's look like: LoggerType(semanticIdent)["name", ...] - dumpFunc(logger.__defName + '(' + logger._ident + ')' + - JSON.stringify(entry) + '\n'); - } - }; - return parentLogger; - } - }; - UNDER_TEST_DEFAULT = EverythingTester; - for (var i = 0; i < ALL_KNOWN_FABS.length; i++) { - var logfab = ALL_KNOWN_FABS[i]; - - logfab._underTest = EverythingTester; - } -}; - - -/** - * Evolutionary stopgap debugging helper to be able to put a module/logfab into - * a mode of operation where it dumps all of its loggers' entries to - * console.log when they die. - */ -exports.DEBUG_dumpEntriesOnDeath = function(logfab) { - logfab._generalLog = true; - logfab._onDeath = function(logger) { - console.log("!! DIED:", logger.__defName, logger._ident); - console.log(JSON.stringify(logger._entries, null, 2)); - }; -}; - -exports.DEBUG_dumpAllFabEntriesOnDeath = function() { - for (var i = 0; i < ALL_KNOWN_FABS.length; i++) { - var logfab = ALL_KNOWN_FABS[i]; - exports.DEBUG_dumpEntriesOnDeath(logfab); - } -}; - -// role information -exports.CONNECTION = 'connection'; -exports.SERVER = 'server'; -exports.CLIENT = 'client'; -exports.TASK = 'task'; -exports.DAEMON = 'daemon'; -exports.DATABASE = 'database'; -exports.CRYPTO = 'crypto'; -exports.QUERY = 'query'; -exports.ACCOUNT = 'account'; -exports.LOGGING = 'log'; - -exports.TEST_DRIVER = 'testdriver'; -exports.TEST_GROUP = 'testgroup'; -exports.TEST_CASE = 'testcase'; -exports.TEST_PERMUTATION = 'testperm'; -exports.TEST_STEP = 'teststep'; -exports.TEST_LAZY = 'testlazy'; - -exports.TEST_SYNTHETIC_ACTOR = 'test:synthactor'; - -// argument information -var EXCEPTION = exports.EXCEPTION = 'exception'; -/** - * In short, something that we can JSON.stringify without throwing an exception - * and that is strongly expected to have a reasonable, bounded size. This - * value is *not* snapshotted when it is provided, and so should be immutable - * for this to not turn out confusing. - */ -var JSONABLE = exports.JSONABLE = 'jsonable'; -var TOSTRING = exports.TOSTRING = 'tostring'; -/** - * XXX speculative, we currently are just using JSON.stringify and putting - * toJSON methods on complex objects that there is no benefit from recursively - * traversing. - * - * An object that could be anything, including resulting in deep or cyclic - * data structures. We will serialize type information where available. This - * will necessarily be more expensive to serialize than a `JSONABLE` data - * structure. This type of data *is snapshotted* when logged, allowing it to - * be used on mutable data structures. 
- * - * A data-biased raw-object will just report the type of instances it encounters - * unless they have a toJSON method, in which case it will invoke that. - */ -var RAWOBJ_DATABIAS = exports.RAWOBJ_DATABIAS = 'jsonable'; //'rawobj:databias'; - -//////////////////////////////////////////////////////////////////////////////// -// State/Delta Representation Support -// -// Specialized schema support to allow, by convention, the log viewer to -// visualize simple containment hierarchies and display annotations on those -// hierarchies. Each entry in the hierarchy requires a unique name. -// -// The reconstruction mechanism works like so: -// - For each logger, we latch any STATEREP we observe as the current state. -// - Statereps are visualized as a simple hierarchy. -// - Annotations (STATEANNO) affect display by colorizing/exposing a string on -// the object indexed by name. For now, we use numbers to convey -// semantic colorization desires: -1 is deletion/red, 0 is notable/yellow, -// 1 is addition/green. -// - Deltas combine an annotation entry relevant to the prior state, the new -// state, and annotations relevant to the new state. For example, -// expressing a deletion and an addition would have us annotate the -// deleted item in the pre-state and the added item in the post-state. - -/** - * Simple state representation. - */ -var STATEREP = exports.STATEREP = 'staterep'; -var STATEANNO = exports.STATEANNO = 'stateanno'; -var STATEDELTA = exports.STATEDELTA = 'statedelta'; - -//////////////////////////////////////////////////////////////////////////////// - -}); // end define diff --git a/apps/email/js/ext/ext/rdcommon/logreaper.js b/apps/email/js/ext/ext/rdcommon/logreaper.js deleted file mode 100644 index 77a75abc8e75..000000000000 --- a/apps/email/js/ext/ext/rdcommon/logreaper.js +++ /dev/null @@ -1,169 +0,0 @@ -/* ***** BEGIN LICENSE BLOCK ***** - * Version: MPL 1.1/GPL 2.0/LGPL 2.1 - * - * The contents of this file are subject to the Mozilla Public License Version - * 1.1 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at: - * http://www.mozilla.org/MPL/ - * - * Software distributed under the License is distributed on an "AS IS" basis, - * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License - * for the specific language governing rights and limitations under the - * License. - * - * The Original Code is Mozilla Raindrop Code. - * - * The Initial Developer of the Original Code is - * The Mozilla Foundation - * Portions created by the Initial Developer are Copyright (C) 2011 - * the Initial Developer. All Rights Reserved. - * - * Contributor(s): - * Andrew Sutherland - * - * Alternatively, the contents of this file may be used under the terms of - * either the GNU General Public License Version 2 or later (the "GPL"), or - * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), - * in which case the provisions of the GPL or the LGPL are applicable instead - * of those above. If you wish to allow use of your version of this file only - * under the terms of either the GPL or the LGPL, and not to allow others to - * use your version of this file under the terms of the MPL, indicate your - * decision by deleting the provisions above and replace them with the notice - * and other provisions required by the GPL or the LGPL. 
If you do not delete - * the provisions above, a recipient may use your version of this file under - * the terms of any one of the MPL, the GPL or the LGPL. - * - * ***** END LICENSE BLOCK ***** */ - -/** - * Mechanism for periodic log hierarchy traversal and transmission of the - * serialized data, forgetting about the logging entries after transmitted. We - * additionally may perform interesting-ness analysis and only transmit data - * or send an out-of-band notification if something interesting has happened, - * such as an error being reported. - * - * Log transmission and reconstruction is slightly more complicated than just - * serializing a hierarchy because the lifetime of the loggers is expected to - * be much longer than our log transmission interval. - **/ - -define( - [ - './log', - './microtime', - 'exports' - ], - function( - $log, - $microtime, - exports - ) { - -var EMPTY = []; - -function LogReaper(rootLogger) { - this._rootLogger = rootLogger; - this._lastTimestamp = null; - this._lastSeq = null; -} -exports.LogReaper = LogReaper; -LogReaper.prototype = { - /** - * Process a logger, producing a time slice representation. - * - * Our strategy is roughly to manually traverse the logger hiearchy and: - * - Ignore loggers with no entries/events and no notably active children that - * were already alive at the last reaping and have not died, not mentioning - * them at all in the output fragment. This can also be thought of as: - * - Emit loggers that have been born. - * - Emit loggers that have died. - * - Emit loggers with entries/events. - * - Emit loggers whose children have had notable activity so that the - * hierarchy can be known. - * - Emit loggers that have experienced a semantic ident change. - * - * Potential future optimizations: - */ - reapHierLogTimeSlice: function() { - var rootLogger = this._rootLogger, - startSeq, startTimestamp; - if (this._lastTimestamp === null) { - startSeq = 0; - startTimestamp = rootLogger._born; - } - else { - startSeq = this._lastSeq + 1; - startTimestamp = this._lastTimestamp; - } - var endSeq = $log.getCurrentSeq(), - endTimestamp = this._lastTimestamp = $microtime.now(); - - function traverseLogger(logger) { - var empty = true; - // speculatively start populating an output representation - var outrep = logger.toJSON(); - outrep.events = null; - outrep.kids = null; - - // - check born/death - // actually, being born doesn't generate an event, so ignore. - //if (logger._born >= startTimestamp) - // empty = false; - if (logger._died !== null) - empty = false; - - // - check events - var outEvents = null; - for (var eventKey in logger._eventMap) { - var eventVal = logger._eventMap[eventKey]; - if (eventVal) { - empty = false; - if (outEvents === null) - outrep.events = outEvents = {}; - outEvents[eventKey] = eventVal; - logger._eventMap[eventKey] = 0; - } - } - - // - check and reap entries - if (outrep.entries.length) { - empty = false; - // (we keep/use outrep.entries, and zero the logger's entries) - logger._entries = []; - } - else { - // Avoid subsequent mutation of the list mutating our representation - // and without creating gratuitous garbage by using a shared empty - // list for such cases. 
- outrep.entries = EMPTY; - } - - // - check and reap children - if (logger._kids && logger._kids.length) { - for (var iKid = 0; iKid < logger._kids.length; iKid++) { - var kidLogger = logger._kids[iKid]; - var kidrep = traverseLogger(kidLogger); - if (kidrep) { - if (!outrep.kids) - outrep.kids = []; - outrep.kids.push(kidrep); - empty = false; - } - // reap (and adjust iteration) - if (kidLogger._died !== null) - logger._kids.splice(iKid--, 1); - } - } - - return (empty ? null : outrep); - } - - return { - begin: startTimestamp, - end: endTimestamp, - logFrag: traverseLogger(rootLogger), - }; - }, -}; - -}); // end define diff --git a/apps/email/js/ext/ext/rdcommon/microtime.js b/apps/email/js/ext/ext/rdcommon/microtime.js deleted file mode 100644 index bfe787eb4a00..000000000000 --- a/apps/email/js/ext/ext/rdcommon/microtime.js +++ /dev/null @@ -1,16 +0,0 @@ -define(function (require) { - // workers won't have this, of course... - if (window && window.performance && window.performance.now) { - return { - now: function () { - return window.performance.now() * 1000; - } - }; - } - - return { - now: function () { - return Date.now() * 1000; - } - }; -}); diff --git a/apps/email/js/ext/ext/rdcommon/testcontext.js b/apps/email/js/ext/ext/rdcommon/testcontext.js deleted file mode 100644 index 12d683b23449..000000000000 --- a/apps/email/js/ext/ext/rdcommon/testcontext.js +++ /dev/null @@ -1,741 +0,0 @@ -/* ***** BEGIN LICENSE BLOCK ***** - * Version: MPL 1.1/GPL 2.0/LGPL 2.1 - * - * The contents of this file are subject to the Mozilla Public License Version - * 1.1 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at: - * http://www.mozilla.org/MPL/ - * - * Software distributed under the License is distributed on an "AS IS" basis, - * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License - * for the specific language governing rights and limitations under the - * License. - * - * The Original Code is Mozilla Raindrop Code. - * - * The Initial Developer of the Original Code is - * The Mozilla Foundation - * Portions created by the Initial Developer are Copyright (C) 2011 - * the Initial Developer. All Rights Reserved. - * - * Contributor(s): - * Andrew Sutherland - * - * Alternatively, the contents of this file may be used under the terms of - * either the GNU General Public License Version 2 or later (the "GPL"), or - * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), - * in which case the provisions of the GPL or the LGPL are applicable instead - * of those above. If you wish to allow use of your version of this file only - * under the terms of either the GPL or the LGPL, and not to allow others to - * use your version of this file under the terms of the MPL, indicate your - * decision by deleting the provisions above and replace them with the notice - * and other provisions required by the GPL or the LGPL. If you do not delete - * the provisions above, a recipient may use your version of this file under - * the terms of any one of the MPL, the GPL or the LGPL. - * - * ***** END LICENSE BLOCK ***** */ - -/** - * Raindrop-specific testing setup, friends with log.js; right now holds parts - * of the 'loggest' implementation involving only testing (and which should - * end up in their own project initially.) 
- * - * All classes in this file are definition-support and data structures only; - * they do not directly run the tests themselves, although some data-structures - * are only populated as a byproduct of function execution. Namely, - * TestContexts are populated and fed to `TestCase` functions during the - * execution phase, producing test step definitions as a byproduct. The - * actual run-logic lives in `testdriver.js`. - * - * Note, however, that the classes in this file do hold the loggers associated - * with their runtime execution. - **/ - -define( - [ - './log', - 'exports' - ], - function( - $log, - exports - ) { - -const UNSPECIFIED_STEP_TIMEOUT_MS = exports.UNSPECIFIED_STEP_TIMEOUT_MS = null; -const STEP_TIMEOUT_MS = exports.STEP_TIMEOUT_MS = 1000; - -/** - * Data-record class for test steps; no built-in logic. - */ -function TestStep(_log, kind, descBits, actors, testFunc, isBoring, groupName) { - this.kind = kind; - this.descBits = descBits; - this.actors = actors; - this.testFunc = testFunc; - this.timeoutMS = UNSPECIFIED_STEP_TIMEOUT_MS; - - this.log = LOGFAB.testStep(this, _log, descBits); - this.log.boring(isBoring); - if (groupName) - this.log.group(groupName); -} -TestStep.prototype = { - toString: function() { - return '[TestStep]'; - }, - toJSON: function() { - return {type: 'TestStep'}; - }, -}; - -/** - * TestContexts are used to create actors and define the actions that define - * the steps of the test. Each context corresponds with a specific run of a - * test case. In a test case with only 1 permutation, there will be just one - * `TestContext`, but in a case with N permutations, there will be N - * `TestContext`s. - * - * There is some wastefulness to this approach since all of the steps are - * re-defined and the step functions get new closures, etc. This is done in - * the name of safety (no accidental object re-use) and consistency with the - * Jasmine idiom. - */ -function TestContext(testCase, permutationIndex) { - this.__testCase = testCase; - this._permIdx = permutationIndex; - this._permutations = 1; - this.__steps = []; - this._deferredSteps = null; - - this._log = LOGFAB.testCasePermutation(this, testCase.log, - permutationIndex); - // this is a known-but-null-by-default thing that gets copied to the JSON - // blob when present. - this._log._named = {}; - - /** - * The name of the step group we are currently defining. This is intended - * as a lightweight tagging mechanism for steps so that we can use a wmsy - * interposing viewslice to delineate separate groups without having to - * add another layer of explicit hierarchy. - */ - this._definingGroup = null; - - this._actors = []; -} -TestContext.prototype = { - toString: function() { - return '[TestContext]'; - }, - toJSON: function() { - return {type: 'TestContext'}; - }, - - /** - * Allow the test to explicitly set the latched 'variant' on the - * TestCasePermutation logger for log consumers. This is being introduced - * so GELAM tests can identify if a run was for an IMAP run or an AS run. - */ - setPermutationVariant: function(variant) { - this._log.variant(variant); - }, - - /** - * Mix-in contributions from testhelper actorMixins or thingMixins entries. 
- */ - _mixinFromHelperDefs: function(target, what, type, invokeConstructor, - constructorArgs) { - var useDict = what + 'Mixins'; - - var helperDefs = this.__testCase.definer.__testHelperDefs; - if (helperDefs) { - for (var iHelpDef = 0; iHelpDef < helperDefs.length; iHelpDef++) { - var helperDef = helperDefs[iHelpDef]; - - if (!(useDict in helperDef) || - !helperDef[useDict].hasOwnProperty(type)) - continue; - var mixyBits = helperDef[useDict][type]; - for (var key in mixyBits) { - target[key] = mixyBits[key]; - } - } - - if (invokeConstructor && '__constructor' in target) - target.__constructor.apply(target, constructorArgs); - } - }, - - /** - * A testing stand-in for a player in the test that does stuff; for example, a - * client or a server. An actor correlates with and is associated with - * exactly one logger. You use the actor to specify expectations about - * what that logger will log for the implementing class that is driving it. - * Actors may also expose convenience functions that directly manipulate the - * underlying implementation class. The convenience functions may - * automatically generate expectations. - * - * Actors are paired with their logger at logger creation time. You define - * the actor to the testing framework using this method AND name it in a test - * step in order to get it pushed on the watch-list prior to causing the - * associated logger to be created. Convenience functions can automate this - * process but still need to abide by it. - * - * An actor itself is not a logger and by default does not contain a secret - * internal logger. However, testhelper implementations tend to create - * synthetic actors that self-create a logger implementation of their own - * defined in the same file. This allows the testhelper to define events - * that can be waited on. - */ - actor: function actor(type, name, opts, optionalParentActor) { - var fabs = this.__testCase.definer.__logfabs; - for (var iFab = 0; iFab < fabs.length; iFab++) { - var actorDir = fabs[iFab]._actorCons; - if (actorDir.hasOwnProperty(type)) { - // - create the actor - var actor = new actorDir[type]( - name, optionalParentActor ? optionalParentActor._uniqueName : null); - // tell it about us, the operational context - actor.T = this; - actor.RT = this.__testCase.definer._runtimeContext; - - // - augment with test helpers - // (from an efficiency perspective, we might be better off creating a - // parameterized prototype descendent during the defineTestsFor call - // since we can establish linkages at that point.) - this._mixinFromHelperDefs(actor, 'actor', type, false); - - // - poke it into our logger for reporting. - this._log._named[actor._uniqueName] = actor; - - // - invoke the constructor helper if it has one - if ("__constructor" in actor) { - this._log.actorConstructor(type, name, - actor, actor.__constructor, - actor, opts); - } - - return actor; - } - } - throw new Error("Unknown actor type '" + type + "'"); - }, - - /** - * Create a actor/logger combo that only has a single event type "event" with - * a single checked argument. Intended to be an alternative to creating your - * own custom logger or complicating the test framework. - */ - lazyLogger: function lazyLogger(name) { - // create the actor - var actor = new LAZYLOGFAB._actorCons.lazyLogger(name); - actor.T = this; - actor.RT = this.__testCase.definer._runtimeContext; - this._log._named[actor._uniqueName] = actor; - // set our global to that when we create the logger, it gets linked up... 
- // (this happens at the bottom of this file, and the global gets cleared) - gNextLazyLoggerActor = actor; - // figure out the parent logger by getting at the TestRuntimeContext which - // we can find on the definer - var parentLogger = this.__testCase.definer._runtimeContext.peekLogger(); - // create the logger so it immediately bonds with the actor - var logger = LAZYLOGFAB.lazyLogger(null, parentLogger, name); - - // directly copy across/bind the logger's event method for simplicity - // XXX this is brittle if we add other methods - actor.event = logger.event.bind(logger); - actor.eventD = logger.eventD.bind(logger); - actor.value = logger.value.bind(logger); - actor.namedValue = logger.namedValue.bind(logger); - actor.namedValueD = logger.namedValueD.bind(logger); - actor.error = logger.error.bind(logger); - - return actor; - }, - - /** - * An conceptual object in the test, usually represented as relatively inert - * data structures that the actors create/modify/etc. Things do not have - * associated loggers but are sufficiently notable that they will be named by - * (test) loggers and their movement throughout a distributed system can be - * derived. A thing may have multiple names/representations throughout its - * life cycle. Much of the point of the thing abstraction is to allow us to - * tie all those representations together. - * - * Simple thing naming just lets us bind a name to a public key or the like. - * - * Complex thing naming and reconstruction is accomplished by using consistent - * argument names across logging layers that are made known to the - * reconstruction layer. Message layering/containment is accomplished - * by logging an event when the encapsulation/decapsulation occurs that - * contains both identifiers. - * - * Because complex things can be exist and may need to be named prior to the - * true name they will eventually know, they are given unique identifiers - * within their containing namespaces. Simple things are just reusing the - * infrastructure and don't really need the unique name support. - * - * Things, like actors, can have convenience functions placed onto their - * prototype chain. - * - * @args[ - * @param[type String] - * @param[humanName String] - * @param[digitalName #:optional String]{ - * If the thing is a crypto key, the public key which we should map to the - * human name when we see it. - * } - * ] - */ - thing: function thing(type, humanName, digitalName) { - var thang = $log.__makeThing(type, humanName, digitalName); - this._mixinFromHelperDefs(thang, 'thing', type, true, []); - // poke it into our logger for reporting. - this._log._named[thang._uniqueName] = thang; - return thang; - }, - - ownedThing: function ownedThing(actor, type, humanName, digitalName) { - var thang = $log.__makeThing(type, humanName, digitalName); - this._mixinFromHelperDefs(thang, 'thing', type, true, []); - if (!actor._logger._named) - actor._logger._named = {}; - actor._logger._named[thang._uniqueName] = thang; - return thang; - }, - - _newStep: function(kind, args, isBoring) { - var actors = [], descBits = []; - // args[:-1] are actors/description intermixed, args[-1] is the testfunc - var iArg; - for (iArg = 0; iArg < args.length - 1; iArg++) { - var arg = args[iArg]; - // we allow the contents of arrays to be spliced in for the benefit of - // test helper functions that get mixed in. 
- if (Array.isArray(arg)) { - for (var iNestedArg = 0; iNestedArg < arg.length; iNestedArg++) { - var nestedArg = arg[iNestedArg]; - if ($log.TestActorProtoBase.isPrototypeOf(nestedArg)) - actors.push(nestedArg); - descBits.push(nestedArg); - } - } - else { - if ($log.TestActorProtoBase.isPrototypeOf(arg)) - actors.push(arg); - descBits.push(arg); - } - } - var testFunc = args[iArg]; - var step = new TestStep(this._log, kind, descBits, actors, testFunc, - isBoring, this._definingGroup); - this.__steps.push(step); - return step; - }, - - _newDeferredStep: function(kind, args, isBoring) { - if (!this._deferredSteps) - this._deferredSteps = []; - this._deferredSteps.push([kind, args, isBoring]); - return null; - }, - - __postSetupFunc: function() { - if (this._deferredSteps) { - for (var i = 0; i < this._deferredSteps.length; i++) { - var stepDef = this._deferredSteps[i]; - this._newStep(stepDef[0], stepDef[1], stepDef[2]); - } - this._deferredSteps = null; - } - }, - - group: function group(groupName) { - this._definingGroup = groupName; - }, - - /** - * Defines a test step/action. Each action has a description that is made - * up of strings and actors (defined via `entity`). All actors - * participating in/relevant to the test step must be named. The last - * argument is always the test function to run to initiate the step/action. - * - * The step/action is marked complete when all of the expectations have been - * correctly satisfied. The step fails and the test is aborted if unexpected - * non-boring logging invocations occur for the actors involved in the - * step. - * - * Actors defined in a test-case that are not involved in the step/action - * accumulate their entries which will be considered in the next step they - * are involved in, save for any entries filtered to be boring during that - * step. This is intended to allow actions that have side-effects that - * affect multiple actors to be decomposed into specific pairwise - * interactions for clarity. - */ - action: function action() { - return this._newStep('action', arguments, false); - }, - - /** - * Defines a test step that just checks the state of things and does not - * affect anything. - */ - check: function action() { - return this._newStep('check', arguments, false); - }, - - /** - * Defines a step where two or more alternative actions should be run. - * Implicitly results in the test case as a whole being run a sufficient - * number of times to satisfy all contained permutations. - */ - permutation: function permutation(variesDesc, variants) { - var numVariants = variants.length; - this._permutations *= numVariants; - - // The last numVariants steps should be what is handed to us. If this - // is not the case, we are boned. - var baseStep = this.__steps.length - numVariants; - for (var i = 0; i < numVariants.length; i++) { - if (variants[i] !== this.__steps[baseStep]) - throw new Error("Step sequence invariant violation"); - } - // (use the splice retval rather than the passed in for extra safety) - var saferVariants = this.__steps.splice(baseStep, numVariants); - this.__steps.push(saferVariants); - }, - - /** - * Define a setup test step. While operationally the same as an action, - * setup steps are treated specially for reporting and aggregation purposes. - * Setup steps have less focus in the reporting UI, and a test that fails - * during its setup steps is treated differently than a test that fails - * during an action step. 
The theory is that you should look at the tests - * that are failing during an action step before tests failing during a setup - * step because the setup failures are likely an outgrowth of the action - * failures of lower level tests. - */ - setup: function() { - return this._newStep('setup', arguments, true); - }, - - /** - * Setup test step defined by a convenience helper and which should - * accordingly be marked as boring. - */ - convenienceSetup: function() { - return this._newStep('setup', arguments, true); - }, - - /** - * Define a cleanup test step to perform any shutdown procedures to cleanup - * after a test that garbage collection would not take care of on its own. - * These steps should usually be automatically generated by testhelper - * logic for entities to match automatically generated setup steps. They - * should also preferably be synchronous/fast. - * - * In the event that any step in a test fails, we still attempt to run all of - * the cleanup steps, even though they may also experience failures. - */ - cleanup: function() { - return this._newStep('cleanup', arguments, true); - }, - - /** - * A cleanup step defined by a convenience helper that is added to the current - * list of steps as it stands right now. Contrast with - * `convenienceDeferredCleanup` which defines a step that - */ - convenienceCleanup: function() { - return this._newStep('cleanup', arguments, true); - }, - - /** - * A cleanup step defined by a convenience helper which is only added to the - * list of steps after the function defining the test case has finished - * executing. - */ - convenienceDeferredCleanup: function() { - return this._newDeferredStep('cleanup', arguments, true); - }, -}; -exports.TestContext = TestContext; - -function TestCase(definer, kind, desc, setupFunc) { - this.definer = definer; - this.kind = kind; - this.desc = desc; - this.setupFunc = setupFunc; - - this.log = LOGFAB.testCase(this, definer._log, desc); - - this.context = null; -} -TestCase.prototype = { - toString: function() { - return '[TestCase]'; - }, - toJSON: function() { - return {type: 'TestCase'}; - }, -}; - -function TestDefiner(modname, logfabs, testHelpers, tags) { - this.__logfabs = logfabs; - this.__testHelperDefs = testHelpers; - this.__tags = tags; - - this._log = LOGFAB.testDefiner(this, null, modname); - this._runtimeContext = null; - - this.__testCases = []; -} -TestDefiner.prototype = { - toString: function() { - return '[TestDefine]'; - }, - toJSON: function() { - return {type: 'TestDefiner'}; - }, - - _newCase: function(kind, desc, setupFunc) { - var testCase = new TestCase(this, kind, desc, setupFunc); - this.__testCases.push(testCase); - }, - - _newSimpleCase: function(kind, desc, testFunc) { - var testCase = new TestCase(this, kind, desc, function(T) { - if (testFunc.length === 0) { - T.action(desc, testFunc); - } - else { - var lazy = T.lazyLogger('lazy'); - T.action(desc, lazy, function() { - testFunc(lazy); - }); - } - }); - this.__testCases.push(testCase); - }, - - /** - * Something that does not happen outside of a unit testing environment but - * serves as a useful functional test. - */ - artificialCase: function artificialCase(desc, setupFunc) { - this._newCase('artificial', desc, setupFunc); - }, - - /** - * Something realistic that is expected to happen a lot. - */ - commonCase: function commonCase(desc, setupFunc) { - this._newCase('common', desc, setupFunc); - }, - - /** - * Something realistic that is expected to happen rarely. 
- */ - edgeCase: function edgeCase(desc, setupFunc) { - this._newCase('edge', desc, setupFunc); - }, - - /** - * A single-step test case; appropriate for simple unit tests. - */ - commonSimple: function commonSimple(desc, testFunc) { - this._newSimpleCase('common', desc, testFunc); - }, - - DISABLED_artificialCase: function() { - }, - DISABLED_commonCase: function() { - }, - DISABLED_edgeCase: function() { - }, - DISABLED_commonSimple: function() { - }, -}; - -exports.defineTestsFor = function defineTestsFor(testModule, logfabs, - testHelpers, tags) { - if (logfabs == null) - logfabs = []; - else if (!Array.isArray(logfabs)) - logfabs = [logfabs]; - else // need to be able to mutate it - logfabs = logfabs.concat(); - if (testHelpers == null) - testHelpers = []; - else if (!Array.isArray(testHelpers)) - testHelpers = [testHelpers]; - // smoosh any testhelper logfab deps in. - for (var iHelper = 0; iHelper < testHelpers.length; iHelper++) { - var testHelper = testHelpers[iHelper]; - if ("LOGFAB_DEPS" in testHelper) { - // want to eliminate dupes, so we can't just concat - for (var iFab = 0; iFab < testHelper.LOGFAB_DEPS.length; iFab++) { - var logfab = testHelper.LOGFAB_DEPS[iFab]; - if (logfabs.indexOf(logfab) === -1) - logfabs.push(logfab); - } - } - // transitively traverse/(idempotent) merge testhelpers; works because - // we're adding stuff to the outer loop as we go and length is not cached - if ('TESTHELPER_DEPS' in testHelper) { - for (var iSub = 0; iSub < testHelper.TESTHELPER_DEPS.length; iSub++) { - var subHelper = testHelper.TESTHELPER_DEPS[iSub]; - if (testHelpers.indexOf(subHelper) === -1) - testHelpers.push(subHelper); - } - } - } -console.log("defining tests for", testModule.id); - return new TestDefiner(testModule.id, logfabs, testHelpers, tags); -}; - -var LOGFAB = exports.LOGFAB = $log.register(null, { - testDefiner: { - //implClass: TestDefiner, - type: $log.TEST_DRIVER, - subtype: $log.TEST_GROUP, - asyncJobs: { - run: {}, - }, - latchState: { - result: false, - } - }, - testCase: { - //implClass: TestCase, - type: $log.TEST_DRIVER, - subtype: $log.TEST_CASE, - asyncJobs: { - run: {}, - }, - latchState: { - result: false, - /** - * Optional string that identifies the variant of the test. For example, - * "imap" for a test case run against an IMAP server, "activesync" for - * the same test run against an activesync server, etc. - */ - variant: false - }, - }, - testCasePermutation: { - //implClass: TestContext, - type: $log.TEST_DRIVER, - subtype: $log.TEST_PERMUTATION, - asyncJobs: { - run: {}, - }, - calls: { - setupFunc: {}, - actorConstructor: {actorType: false, actorName: false}, - }, - latchState: { - result: false, - /** Same as on testCase. */ - variant: false - } - }, - /** - * Log container for the execution of the test step that is expected to get - * exposed to the UI, but separately from its descendents which end up - * flattened and (probably) visualized like a sequence diagram. - * - * For a healthy test run we expect the 'run' async job to bracket the - * stepFunc invocation and that's it. For an unhealthy run any of the - * errors defined below can show up. - */ - testStep: { - //implClass: TestStep, - type: $log.TEST_DRIVER, - subtype: $log.TEST_STEP, - - asyncJobs: { - run: {}, - }, - calls: { - stepFunc: {}, - }, - latchState: { - boring: false, - result: false, - group: false, - }, - errors: { - /** - * The test step did not complete (because not all actor expectations - * were fulfilled) within the allowed time duration. 
This will also - * be accompanied by 1) expectation failures being generated on all the - * loggers whose actors still had pending expectations, and 2) unresolved - * promise errors for all outstanding promises here on the step as - * "unresolvedPromise" errors. - */ - timeout: {}, - /** - * An actor that was expected to be active this test step never had a - * logger get created that ended up associated with it. - */ - actorNeverGotLogger: { type: false, name: false }, - /** - * We heard about an uncaught exception via global means: node's - * "uncaughException" event, RequireJS' "require.onError" handler, or - * a Q (promise) rejection that no one listened for. The Q case could - * be an explicit rejection or an exception that the Q internals - * converted into a rejection. We may end up breaking out non-expection - * rejections into their own handler. - */ - uncaughtException: { ex: $log.EXCEPTION }, - /** - * Generated on timeout using Q introspection capabilities (when - * available) - */ - unresolvedPromise: { annotation: false }, - }, - }, -}); -// Test contexts always want logging. -LOGFAB._generalLog = true; -// other people should stay away from this dude -var LAZYLOGFAB = exports.__LAZYLOGFAB = $log.register(null, { - /** - * Very generic logger to simplify test development... - */ - lazyLogger: { - type: $log.TEST_LAZY, - subtype: $log.TEST_LAZY, - events: { - event: { name: true }, - eventD: { name: true, detail: false }, - value: { value: true }, - namedValue: { name: true, value: true }, - // provide detail that should not be part of the expectation - namedValueD: { name: true, value: true, detail: false }, - }, - errors: { - error: {what: $log.EXCEPTION}, - }, - }, -}); - -var gNextLazyLoggerActor = null; -// lazy loggers are always under test! -LAZYLOGFAB.lazyLogger._underTest = { - reportNewLogger: function(logger, currentParent) { - if (gNextLazyLoggerActor) { - gNextLazyLoggerActor._logger = logger; - logger._actor = gNextLazyLoggerActor; - if (!currentParent && gNextLazyLoggerActor.RT._loggerStack.length) { - currentParent = gNextLazyLoggerActor.RT._loggerStack[ - gNextLazyLoggerActor.RT._loggerStack.length - 1]; - } - gNextLazyLoggerActor = null; - } - return currentParent; - } -}; - -}); // end define diff --git a/apps/email/js/ext/ext/rdcommon/testdriver.js b/apps/email/js/ext/ext/rdcommon/testdriver.js deleted file mode 100644 index 63da8d978e3c..000000000000 --- a/apps/email/js/ext/ext/rdcommon/testdriver.js +++ /dev/null @@ -1,779 +0,0 @@ -/* ***** BEGIN LICENSE BLOCK ***** - * Version: MPL 1.1/GPL 2.0/LGPL 2.1 - * - * The contents of this file are subject to the Mozilla Public License Version - * 1.1 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at: - * http://www.mozilla.org/MPL/ - * - * Software distributed under the License is distributed on an "AS IS" basis, - * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License - * for the specific language governing rights and limitations under the - * License. - * - * The Original Code is Mozilla Raindrop Code. - * - * The Initial Developer of the Original Code is - * The Mozilla Foundation - * Portions created by the Initial Developer are Copyright (C) 2011 - * the Initial Developer. All Rights Reserved. 
- * - * Contributor(s): - * Andrew Sutherland - * - * Alternatively, the contents of this file may be used under the terms of - * either the GNU General Public License Version 2 or later (the "GPL"), or - * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), - * in which case the provisions of the GPL or the LGPL are applicable instead - * of those above. If you wish to allow use of your version of this file only - * under the terms of either the GPL or the LGPL, and not to allow others to - * use your version of this file under the terms of the MPL, indicate your - * decision by deleting the provisions above and replace them with the notice - * and other provisions required by the GPL or the LGPL. If you do not delete - * the provisions above, a recipient may use your version of this file under - * the terms of any one of the MPL, the GPL or the LGPL. - * - * ***** END LICENSE BLOCK ***** */ - -/** - * - **/ - -define( - [ - './deferred', - './testcontext', - './extransform', - 'require', - 'exports' - ], - function( - Deferred, - $testcontext, - $extransform, - require, - exports - ) { - -/** - * What should be the timeout for test steps where an explicit duration has - * not been specified? This can currently be clobbered by the test runner, - * which is why it's not a constant. - */ -var DEFAULT_STEP_TIMEOUT_MS = $testcontext.STEP_TIMEOUT_MS; - -/** - * The runtime context interacts with the log fab subsystem to indicate that we - * are in a testing mode and to associate actors with loggers. - */ -function TestRuntimeContext(envOptions, fileBlackboard) { - this._loggerStack = []; - this._pendingActorsByLoggerType = {}; - this._captureAllLoggersByType = {}; - this.envOptions = envOptions || {}; - // Scratch space (aka blackboard) for the current test case. - this.caseBlackboard = {}; - // Scratch space for the current test file; intended to be used for test - // resources that might get spun up and left up for efficiency, legacy, or - // other reasons. - this.fileBlackboard = fileBlackboard; - - /** - * Strictly increasing value for use in tests that want a relative time - * ordering for comparison purposes. Intentionally separate from the logging - * subsystem's global sequence identifier because they have different - * purposes and scopes. - */ - this.testDomainSeq = 0; - - this._liveActors = null; -} -TestRuntimeContext.prototype = { - toString: function() { - return '[TestRuntimeContext]'; - }, - toJSON: function() { - return {type: 'TestRuntimeContext'}; - }, - - /** - * Push a logger onto the logger stack; the top of the stack becomes the - * parent logger for loggers that do not have an explicit parent logger at - * creation time. - */ - pushLogger: function(logger) { - this._loggerStack.push(logger); - }, - - /** - * Remove a specific logger from the logger stack. While the caller should - * be confident they are at the top of the stack, it's not required for - * data-structure correctness. (We should possibly be asserting in that - * case...) - */ - popLogger: function(logger) { - var idx = this._loggerStack.lastIndexOf(logger); - if (idx !== -1) - this._loggerStack.splice(idx, 1); - }, - - /** - * Used by actors preparing for a test step to register themselves for - * association with a logger of the matching type. 
- */ - reportPendingActor: function(actor) { - var type = actor.__defName; - if (!this._pendingActorsByLoggerType.hasOwnProperty(type)) - this._pendingActorsByLoggerType[type] = [actor]; - else - this._pendingActorsByLoggerType[type].push(actor); - }, - - /** - * Hackish mechanism to deal with the case where a bunch of loggers may be - * created all at once and where our code only wants a subset of them, - * indexed by name. We stash the loggers by their name into the dict IFF - * their names are simple (string or number). We should probably also support - * a list so that more complex names could also be inspected... - */ - captureAllLoggersByType: function(type, dict) { - if (dict) - this._captureAllLoggersByType[type] = dict; - else - delete this._captureAllLoggersByType[type]; - }, - - /** - * Logfabs that are told about this context invoke this method when creating a - * new logger so that we can hook up actors and insert containing parents. - * - * @args[ - * @param[logger Logger] - * @param[curParentLogger @oneof[null Logger]]{ - * The explicit parent of this logger, if one was provided to the logfab. - * } - * ] - * @return[@oneof[null Logger]]{ - * The parent to use for this logger. This will replace whatever value - * was passed in via `curParentLogger`, so `curParentLogger` should be - * returned in the intent is not to override the value. - * } - */ - reportNewLogger: function(logger, curParentLogger) { - // - associate with any pending actors - var type = logger.__defName; - if (this._pendingActorsByLoggerType.hasOwnProperty(type) && - this._pendingActorsByLoggerType[type].length) { - var actor = this._pendingActorsByLoggerType[type].shift(); - actor.__attachToLogger(logger); - // There is no need to generate a fake __loggerFired notification because - // the logger is brand new and cannot have any entries at this point. - } - else if (this._captureAllLoggersByType.hasOwnProperty(type) && - typeof(logger._ident) !== 'object') { - this._captureAllLoggersByType[type][logger._ident] = logger; - } - - // - if there is no explicit parent, use the top of the logger stack - if (!curParentLogger && this._loggerStack.length) - return this._loggerStack[this._loggerStack.length - 1]; - return curParentLogger; - }, - - /** - * Allows actor mix-in methods that contain nested sub-actors to report their - * sub-actors as active this step, allowing them to be used for expectations. - */ - reportActiveActorThisStep: function(actor) { - if (!actor) - throw new Error("You are passing in a null actor!"); - if (this._liveActors === null) - throw new Error("We are not in a step!"); - if (actor._activeForTestStep) - return; - this._liveActors.push(actor); - actor.__prepForTestStep(this); - }, - - peekLogger: function() { - if (this._loggerStack.length) - return this._loggerStack[this._loggerStack.length - 1]; - return null; - }, - -}; - -/** - * Consolidates the logic to run tests. 
- */ -function TestDefinerRunner(testDefiner, superDebug, exposeToTestOptions, - resultsReporter) { - if (!testDefiner) - throw new Error("No test definer provided!"); - this._testDefiner = testDefiner; - // Dictionary passed in from higher up the stack to expose to the tests - this._exposeToTestOptions = exposeToTestOptions; - // Scratchpad that lasts for the duration of all included tests - this._fileBlackboard = {}; - this._resultsReporter = resultsReporter; - // created before each test case is run - this._runtimeContext = null; - this._superDebug = superDebug; - - this._logBadThingsToLogger = null; -} -exports.TestDefinerRunner = TestDefinerRunner; -TestDefinerRunner.prototype = { - toString: function() { - return '[TestDefinerRunner]'; - }, - toJSON: function() { - return {type: 'TestDefinerRunner'}; - }, - - /** - * Asynchronously run a test step, non-rejecting promise-style. - * - * @return[Boolean]{ - * A boolean indicator of whether the step passed. - * } - */ - runTestStep: function(step) { - const superDebug = this._superDebug; - if (superDebug) - superDebug("====== Running Step: " + step.log._ident); - var iActor, actor; - - this._logBadThingsToLogger = step.log; - - var liveActors = this._runtimeContext._liveActors = step.actors.concat(); - - // -- notify the actors about their imminent use in a step - // (intentionally step.actors rather that liveActors) - for (iActor = 0; iActor < step.actors.length; iActor++) { - actor = step.actors[iActor]; - actor.__prepForTestStep(this._runtimeContext); - } - - // -- initiate the test function - step.log.run_begin(); - // (this wraps and handles failures!) - var rval = step.log.stepFunc(null, step.testFunc); - // any kind of exception in the function is a failure. - if (rval instanceof Error) { - if (superDebug) - superDebug(" :( encountered an error in the step func:", rval); - step.log.run_end(); - step.log.result('fail'); - return Promise.resolve(false); - } - - // -- wait on actors' expectations (if any) promise-style - if (superDebug) - superDebug(" there are", liveActors.length, "live actors this step", - "up from", step.actors.length, "step-defined actors"); - var promises = [], allGood = true; - for (iActor = 0; iActor < liveActors.length; iActor++) { - actor = liveActors[iActor]; - var waitVal = actor.__waitForExpectations(); - if (waitVal.then) { - promises.push(waitVal); - if (superDebug) - superDebug(" actor", actor.__defName, actor.__name, - "generated a promise"); - } - // if it's not a promise, it must be a boolean - else if (!waitVal) { - if (superDebug) { - var whySad; - if (actor._expectNothing && - (actor._expectations.length || actor._iExpectation)) - whySad = 'expected nothing, got something'; - else if (!actor._expectationsMetSoFar) - whySad = 'expectations not met after ' + actor._iExpectation; - else - whySad = 'unsure'; - superDebug(" :( waitVal synchronously resolved to false on " + actor + - " because: " + whySad); - } - allGood = false; - } - } - - if (!promises.length) { - step.log.run_end(); - step.log.result(allGood ? 'pass' : 'fail'); - - // clear out all the actors, however! - for (iActor = 0; iActor < liveActors.length; iActor++) { - actor = liveActors[iActor]; - actor.__resetExpectations(); - } - this._runtimeContext._liveActors = null; - - return Promise.resolve(allGood); - } - else { - // create a deferred so we can generate a timeout. 
- var deferred = new Deferred(), self = this; - - function failStep() { - // - tell the actors to fail any remaining expectations - for (var iActor = 0; iActor < liveActors.length; iActor++) { - actor = liveActors[iActor]; - if (!actor._logger) - step.log.actorNeverGotLogger(actor.__defName, actor.__name); - else - actor.__failUnmetExpectations(); - actor.__resetExpectations(); - } - self._runtimeContext._liveActors = null; - - // - generate errors for outstanding promises... - Deferred.getAllActiveDeferreds().forEach(function(deferred) { - step.log.unresolvedPromise(deferred); - }); - - if (superDebug) - superDebug(' :( timeout, fail'); - step.log.timeout(); - step.log.result('fail'); - deferred.resolve(false); - deferred = null; - } - - // -- timeout handler - var countdownTimer = setTimeout(function() { - if (self._superDebug) - self._superDebug("!! timeout fired, deferred?", deferred !== null); - if (!deferred) return; - failStep(); - }, step.timeoutMS || DEFAULT_STEP_TIMEOUT_MS); - // -- promise resolution/rejection handler - if (this._superDebug) - this._superDebug("waiting on", promises.length, "promises"); - Promise.all(promises).then(function passed() { - if (self._superDebug) - self._superDebug("!! all resolved, deferred?", deferred !== null); - if (!deferred) return; - clearTimeout(countdownTimer); - - // We should have passed, but it's possible that some logger generated - // events after the list of expectations. It was too late for it to - // generate a rejection at that point, so we need to check now. - var passed = true; - // - tell the actors we are done with this round - for (var iActor = 0; iActor < liveActors.length; iActor++) { - actor = liveActors[iActor]; - // detect if we ended up with a weird error. - if (!actor.__resetExpectations()) { - passed = false; - if (superDebug) - superDebug(' :( weird actor error on: ' + actor); - } - } - self._runtimeContext._liveActors = null; - - step.log.run_end(); - step.log.result(passed ? 'pass' : 'fail'); - deferred.resolve(allGood); - deferred = null; - }, function failed(expPair) { - if (self._superDebug) - self._superDebug("!! failed, deferred?", deferred !== null); - if (!deferred) return; - // XXX we should do something with the failed expectation pair... - clearTimeout(countdownTimer); - - failStep(); - }); - return deferred.promise; - } - }, - - /** - * Synchronously skip a test step, generating appropriate logging/reporting - * byproducts so it's clear the step was skipped rather than disappearing - * from the radar. - */ - skipTestStep: function(step) { - step.log.result('skip'); - return Promise.resolve(true); - }, - - /** - * Run a specific permutation of a test-case. The zeroth case of a - * permutation is special as it is also when the number of permutations is - * actually determined. - * XXX we don't actually do anything with permutations right now. - * - * @return[Boolean]{ - * A boolean indicator of whether the test passed. - * } - */ - runTestCasePermutation: function(testCase, permutationNum) { - var self = this; - return new Promise(function(resolve, reject) { - - if (self._superDebug) - self._superDebug("========= Begin Case: " + testCase.desc + "\n"); - - // -- create / setup the context - testCase.log.run_begin(); - var defContext = new $testcontext.TestContext(testCase, 0); - // Expose test variants at the testCase and testCasePermutation levels - // (which have always been and likely will continue to remain equivalent.) 
- if (this._exposeToTestOptions && this._exposeToTestOptions.variant) { - testCase.log.variant(this._exposeToTestOptions.variant); - defContext.setPermutationVariant(this._exposeToTestOptions.variant); - } - defContext._log.run_begin(); - - // - push the context's logger on the runtime logging stack - // (We want all new logged objects to be associated with the context since - // it should bound their lifetimes. Although it is interesting to know - // what specific step a logger came-to-life, we expect that to occur via - // cross-referencing. If we anchored loggers in their creating step then - // the hierarchy would be extremely confusing.) - self._runtimeContext.pushLogger(defContext._log); - - // - execute the test-case definition function with the context - var rval = defContext._log.setupFunc({}, testCase.setupFunc, defContext, - self._runtimeContext); - if (rval instanceof Error) { - // in the event we threw during the case setup phase, it's a failure. - if (self._superDebug) - self._superDebug(' :( setup func error thrown! ' + rval); - defContext._log.result('fail'); - testCase.log.result('fail'); - reject(false); - } - defContext.__postSetupFunc(); - - // -- process the steps - // In event of a setup/action failure, change to only running cleanup steps. - var allPassed = true, iStep = 0; - function runNextStep(passed) { - if (!passed) - allPassed = false; - // -- done case - if (iStep >= defContext.__steps.length) { - // - pop the test-case logger from the logging context stack - self._runtimeContext.popLogger(defContext._log); - - if (self._superDebug) - self._superDebug("========= Done Case: " + testCase.desc + "\n"); - // - resolve! - defContext._log.result(allPassed ? 'pass' : 'fail'); - defContext._log.run_end(); - testCase.log.result(allPassed ? 'pass' : 'fail'); - testCase.log.run_end(); - resolve(allPassed); - return; - } - - // -- yet another step case - var step = defContext.__steps[iStep++]; - var runIt = allPassed || (step.kind === 'cleanup'); - if (runIt) - self.runTestStep(step).then(runNextStep); - else // for stack simplicity, run the skip in a when, but not required. 
- self.skipTestStep(step).then(runNextStep); - } - runNextStep(true); - }.bind(this)); - }, - - runTestCase: function(testCase) { - // create a fresh context every time - this._runtimeContext = new TestRuntimeContext(this._exposeToTestOptions, - this._fileBlackboard); - // mark things as under test, and tell them about the new context - this._markDefinerUnderTest(this._testDefiner); - return this.runTestCasePermutation(testCase, 0); - }, - - _markDefinerUnderTest: function(definer) { - definer._runtimeContext = this._runtimeContext; - for (var iFab = 0; iFab < definer.__logfabs.length; iFab++) { - definer.__logfabs[iFab]._underTest = this._runtimeContext; - } - }, - - _clearDefinerUnderTest: function(definer) { - definer._runtimeContext = null; - for (var iFab = 0; iFab < definer.__logfabs.length; iFab++) { - definer.__logfabs[iFab]._underTest = null; - } - }, - - runAll: function(errorTrapper, overrideStepDuration) { - if (overrideStepDuration) - DEFAULT_STEP_TIMEOUT_MS = overrideStepDuration; - -//console.error(" runAll()"); - var deferred = new Deferred("TestDefinerRunner.runAll"), - iTestCase = 0, definer = this._testDefiner, - self = this; - - definer._log.run_begin(); - // -- next case - function runNextTestCase() { -//console.error(" runNextTestCase()"); - // - all done - if (iTestCase >= definer.__testCases.length) { - errorTrapper.removeListener('exit', earlyBailHandler); - errorTrapper.removeListener('uncaughtException', - uncaughtExceptionHandler); - - definer._log.run_end(); - self._clearDefinerUnderTest(definer); - - Deferred.clearActiveDeferreds(); - -//console.error(" resolving!"); - deferred.resolve(self); - return; - } - var testCase = definer.__testCases[iTestCase++]; - self.runTestCase(testCase).then(runNextTestCase); - } - - // node.js will automatically terminate when the event loop says there is - // nothing left to do. We register a listener to detect this and promote - // it to a last-ditch failure case. Note that this is not a recoverable - // state; there will be no more event loop ticks in an auto-termination - // and so we can't depend on promises, etc. Buffers will be flushed, - // however. - function earlyBailHandler() { - console.error("IMMINENT EVENT LOOP TERMINATION IMPLYING BAD TEST, " + - "DUMPING LOG."); - self.reportResults(); - } - errorTrapper.once('exit', earlyBailHandler); - - /** - * Log uncaught exceptions to the currently active test step. - */ - function uncaughtExceptionHandler(ex) { - if (self._logBadThingsToLogger) - self._logBadThingsToLogger.uncaughtException(ex); - } - errorTrapper.on('uncaughtException', uncaughtExceptionHandler); - - // Spit out some logs when a Deferred fails without being - // intercepted by a rejection handler. We used to do some fancy - // stuff with Q here, attempting to gather the relevant frame from - // a stack trace. - Deferred.setUnhandledRejectionHandler(uncaughtExceptionHandler); - - runNextTestCase(); - return deferred.promise; - }, - - /** - * Trigger immediate reporting of the results to the result reporter supplied - * to our constructor. This exists at all because in node.js and xpcshell - * style modes of operation, it's possible for our event loop to terminate - * prematurely in a way that we can't really stop, so we need to get our - * results out to stderr and be done. We don't want to eliminate this - * functionality, but it's more generic now and the stream stuff is not - * required. 
- */ - reportResults: function() { - var definer = this._testDefiner; - // - accumulate the schemas of all the (potentially) involved schema dudes. - var schema = {}, key, rawDef; - // populate the schema with the test logger schemas - rawDef = $testcontext.LOGFAB._rawDefs; - for (key in rawDef) { - schema[key] = rawDef[key]; - } - rawDef = $testcontext.__LAZYLOGFAB._rawDefs; - for (key in rawDef) { - schema[key] = rawDef[key]; - } - - // and now add in the schemas used by the test - for (var iFab = 0; iFab < definer.__logfabs.length; iFab++) { - rawDef = definer.__logfabs[iFab]._rawDefs; - for (key in rawDef) { - schema[key] = rawDef[key]; - } - } - var dumpObj = { - schema: schema, - log: definer._log, - }; - this._resultsReporter(dumpObj); - } -}; - - -function detectAndReportJsonCycles(obj) { - - var objStack = []; - var traverseStack = []; - function recurse(what) { - if (what == null || typeof(what) !== 'object') - return; - - // - cycle? - if (objStack.indexOf(what) !== -1) { - console.error("CYCLE with traversal", traverseStack); - return; - } - objStack.push(what); - traverseStack.push("."); - var level = traverseStack.length - 1; - - var use; - if ("toJSON" in what) - use = what.toJSON(); - else - use = what; - - for (var key in use) { - // JSON traversal is shallow; nb: we could use ES5 instead of this hack - if (!use.hasOwnProperty(key)) - continue; - var val = use[key]; - traverseStack[level] = key; - recurse(val); - } - - objStack.pop(); - traverseStack.pop(); - } - recurse(obj); -} - -/** - * In the event require()ing a test module fails, we want to report this - * so it's not just like the test disappears from the radar. - */ -function reportTestModuleRequireFailures(testModuleName, moduleName, variant, - exceptions, resultsReporter) { - var dumpObj = { - schema: $testcontext.LOGFAB._rawDefs, - fileFailure: { - fileName: testModuleName, - moduleName: moduleName, - variant: variant, - exceptions: exceptions.map($extransform.transformException), - } - }; - resultsReporter(dumpObj); -} - -/** - * Run the tests defined in a single module that we require (so that we can - * handle errors in the require() process). - * - * @return[success Boolean] - */ -exports.runTestsFromModule = function runTestsFromModule(testModuleName, - runOptions, - ErrorTrapper, - superDebug) { - var deferred = new Deferred("runTestsFromModule:" + testModuleName); - var runner; - function itAllGood() { - if (superDebug) - superDebug('All tests in "' + testModuleName + '" run, ' + - 'generating results.'); - runner.reportResults(); - deferred.resolve(true); - }; - - var resultsReporter = - runOptions.resultsReporter || - makeStreamResultsReporter(ErrorTrapper.reliableOutput); - - var variant = null; - if (runOptions && runOptions.variant) - variant = runOptions.variant; - - // nutshell: - // * r.js previously would still invoke our require callback function in - // the event of a failure because our error handler did not actually - // throw, but just ate the error. So we would generate errors at that - // point. - // * now r.js no longer issues the callback because it performs a return when - // invoking the callback, so we generate the error when the error happens. - // This does mean that if there are multiple errors, we will only see one - // of them before giving up, but many times the subsequent errors were - // just fall-out from modules' evaluating to null. 
- var alreadyBailed = false; - ErrorTrapper.callbackOnError(function explodey(err, moduleName) { -//console.error("ERROR TRAPPAH"); - if (alreadyBailed) - return; - reportTestModuleRequireFailures(testModuleName, moduleName, variant, - [err], resultsReporter); - deferred.resolve(true); - alreadyBailed = true; -//console.error("ERROR TRAPPAH2"); - }); - require([testModuleName], function(tmod) { -//console.error("IN TEST MODULE INVOC"); - // XXX per the above, this bit is moot now and should be removed unless - // r.js changes behaviour (from our perspective) again. - // If there was a problem, tmod will be null (and we will have trapped - // an error.) - var trappedErrors = ErrorTrapper.gobbleAndStopTrappingErrors(); - if (alreadyBailed) - return; - if (trappedErrors.length) { - reportTestModuleRequireFailures(testModuleName, '', variant, - trappedErrors, resultsReporter); - deferred.resolve(true); - return; - } - if (!tmod.TD) { - var fakeError = new Error("Test module: '" + testModuleName + - "' does not export a 'TD' symbol!"); - reportTestModuleRequireFailures(testModuleName, testModuleName, variant, - [fakeError], resultsReporter); - deferred.resolve(true); - return; - } - - // now that it is loaded, run it - if (runOptions.hasOwnProperty('defaultStepDuration')) - DEFAULT_STEP_TIMEOUT_MS = runOptions.defaultStepDuration; - runner = new TestDefinerRunner( - tmod.TD, superDebug, runOptions.exposeToTest, - resultsReporter); - runner.runAll(ErrorTrapper).then(itAllGood, itAllGood); - }); - return deferred.promise; -}; - -/** - * Make a result reporting function that logs to the provided output function - * (Which should be console.error on node and something dump/print-ish on - * xpcshell.) - */ -function makeStreamResultsReporter(outputFunc) { - return function reportToStream(jsonnableObj) { - // - dump - outputFunc("##### LOGGEST-TEST-RUN-BEGIN #####"); - try { - outputFunc(JSON.stringify(jsonnableObj)); - } - catch (ex) { - console.error("JSON problem:", ex.message, ex.stack, ex); - try { - detectAndReportJsonCycles(jsonnableObj.log); - } - catch(exx) { - console.error("exx y", exx); - } - } - outputFunc("##### LOGGEST-TEST-RUN-END #####"); - }; -} - - -}); // end define diff --git a/apps/email/js/ext/imap/account.js b/apps/email/js/ext/imap/account.js index 6905e43ed590..92e205ab344d 100644 --- a/apps/email/js/ext/imap/account.js +++ b/apps/email/js/ext/imap/account.js @@ -1,7 +1,6 @@ define( [ - 'rdcommon/log', - 'slog', + 'logic', '../a64', '../accountmixins', '../allback', @@ -21,8 +20,7 @@ define( 'exports' ], function( - $log, - slog, + logic, $a64, $acctmixins, $allback, @@ -59,9 +57,13 @@ function cmpFolderPubPath(a, b) { */ function ImapAccount(universe, compositeAccount, accountId, credentials, connInfo, folderInfos, - dbConn, - _parentLog, existingProtoConn) { - this._LOG = LOGFAB.ImapAccount(this, _parentLog, accountId); + dbConn, existingProtoConn) { + + // Using the generic 'Account' here, as current tests don't + // distinguish between events on ImapAccount vs. CompositeAccount. 
+ logic.defineScope(this, 'Account', { accountId: accountId, + accountType: 'imap' }); + CompositeIncomingAccount.apply( this, [$imapfolder.ImapFolderSyncer].concat(Array.slice(arguments))); @@ -100,14 +102,13 @@ function ImapAccount(universe, compositeAccount, accountId, credentials, * } */ this._demandedConns = []; - this._backoffEndpoint = $errbackoff.createEndpoint('imap:' + this.id, this, - this._LOG); + this._backoffEndpoint = $errbackoff.createEndpoint('imap:' + this.id, this); if (existingProtoConn) this._reuseConnection(existingProtoConn); this._jobDriver = new $imapjobs.ImapJobDriver( - this, this._folderInfos.$mutationState, this._LOG); + this, this._folderInfos.$mutationState); /** * Flag to allow us to avoid calling closeBox to close a folder. This avoids @@ -274,14 +275,15 @@ var properties = { var connInfo = this._ownedConns[i]; // It's concerning if the folder already has a connection... if (demandInfo.folderId && connInfo.folderId === demandInfo.folderId) - this._LOG.folderAlreadyHasConn(demandInfo.folderId); + logic(this, 'folderAlreadyHasConn', { folderId: demandInfo.folderId }); if (connInfo.inUseBy) continue; connInfo.inUseBy = demandInfo; this._demandedConns.shift(); - this._LOG.reuseConnection(demandInfo.folderId, demandInfo.label); + logic(this, 'reuseConnection', + { folderId: demandInfo.folderId, label: demandInfo.label }); demandInfo.callback(connInfo.conn); return true; } @@ -326,13 +328,13 @@ var properties = { // this eats all future notifications, so we need to splice... this._ownedConns.splice(i, 1); connInfo.conn.client.close(); - this._LOG.deadConnection('unused', null); + logic(this, 'deadConnection', { reason: 'unused' }); } }, _makeConnectionIfPossible: function() { if (this._ownedConns.length >= this._maxConnsAllowed) { - this._LOG.maximumConnsNoNew(); + logic(this, 'maximumConnsNoNew'); return; } if (this._pendingConn) { @@ -350,7 +352,10 @@ var properties = { this._pendingConn = true; // Dynamically load the probe/imap code to speed up startup. 
require(['./client'], function ($imapclient) { - this._LOG.createConnection(whyFolderId, whyLabel); + logic(this, 'createConnection', { + folderId: whyFolderId, + label: whyLabel + }); $imapclient.createImapConnection( this._credentials, @@ -386,7 +391,10 @@ var properties = { callback && callback(null); }.bind(this)) .catch(function(err) { - this._LOG.deadConnection('connect-error', whyFolderId); + logic(this, 'deadConnection', { + reason: 'connect-error', + folderId: whyFolderId + }); if (errorutils.shouldReportProblem(err)) { this.universe.__reportAccountProblem( @@ -448,9 +456,11 @@ var properties = { for (var i = 0; i < this._ownedConns.length; i++) { var connInfo = this._ownedConns[i]; if (connInfo.conn === conn) { - this._LOG.deadConnection('closed', - connInfo.inUseBy && - connInfo.inUseBy.folderId); + logic(this, 'deadConnection', { + reason: 'closed', + folderId: connInfo.inUseBy && + connInfo.inUseBy.folderId + }); if (connInfo.inUseBy && connInfo.inUseBy.deathback) connInfo.inUseBy.deathback(conn); connInfo.inUseBy = null; @@ -462,7 +472,7 @@ var properties = { conn.onerror = function(err) { err = $imapclient.normalizeImapError(conn, err); - this._LOG.connectionError(err); + logic(this, 'connectionError', { error: err }); console.error('imap:onerror', JSON.stringify({ error: err, host: this._connInfo.hostname, @@ -477,8 +487,10 @@ var properties = { if (connInfo.conn === conn) { if (resourceProblem) this._backoffEndpoint(connInfo.inUseBy.folderId); - this._LOG.releaseConnection(connInfo.inUseBy.folderId, - connInfo.inUseBy.label); + logic(this, 'releaseConnection', { + folderId: connInfo.inUseBy.folderId, + label: connInfo.inUseBy.label + }); connInfo.inUseBy = null; // We just freed up a connection, it may be appropriate to close it. @@ -486,7 +498,7 @@ var properties = { return; } } - this._LOG.connectionMismatch(); + logic(this, 'connectionMismatch'); }, ////////////////////////////////////////////////////////////////////////////// @@ -644,7 +656,7 @@ var properties = { self._namespaces.provisional = false; - slog.log('imap:list-namespaces', { + logic(self, 'list-namespaces', { namespaces: namespaces }); @@ -661,6 +673,8 @@ var properties = { folderPubsByPath[folderPub.path] = folderPub; } + var syncScope = logic.scope('ImapFolderSync'); + // - walk the boxes function walkBoxes(boxLevel, pathDepth, parentId) { boxLevel.forEach(function(box) { @@ -691,7 +705,7 @@ var properties = { meta.name = box.name; meta.delim = delim; - slog.log('imap:folder-sync:existing', { + logic(syncScope, 'folder-sync:existing', { type: type, name: box.name, path: path, @@ -703,7 +717,7 @@ var properties = { } // - new to us! else { - slog.log('imap:folder-sync:add', { + logic(syncScope, 'folder-sync:add', { type: type, name: box.name, path: path, @@ -731,9 +745,9 @@ var properties = { // Never delete our localdrafts or outbox folder. if ($mailslice.FolderStorage.isTypeLocalOnly(folderPub.type)) continue; - slog.log('imap:delete-dead-folder', { - type: folderPub.type, - id: folderPub.id + logic(syncScope, 'delete-dead-folder', { + folderType: folderPub.type, + folderId: folderPub.id }); // It must have gotten deleted! 
this._forgetFolder(folderPub.id); @@ -881,15 +895,14 @@ var properties = { } } - this._LOG.__die(); if (!liveConns && callback) callback(); }, checkAccount: function(listener) { - this._LOG.checkAccount_begin(null); + logic(this, 'checkAccount_begin'); this._makeConnection(function(err) { - this._LOG.checkAccount_end(err); + logic(this, 'checkAccount_end', { error: err }); listener(err); }.bind(this), null, 'check'); }, @@ -910,10 +923,4 @@ for (var k in properties) { Object.getOwnPropertyDescriptor(properties, k)); } -// Share the log configuration with composite, since we desire general -// parity between IMAP and POP3 for simplicity when possible. -var LOGFAB = exports.LOGFAB = $log.register($module, { - ImapAccount: incoming.LOGFAB_DEFINITION.CompositeIncomingAccount -}); - }); // end define diff --git a/apps/email/js/ext/imap/client.js b/apps/email/js/ext/imap/client.js index dce67dc3a7d3..a1652d6a321c 100644 --- a/apps/email/js/ext/imap/client.js +++ b/apps/email/js/ext/imap/client.js @@ -8,7 +8,7 @@ define(function(require, exports) { var BrowserBox = require('browserbox'); var ImapClient = require('browserbox-imap'); var imapHandler = require('imap-handler'); - var slog = require('slog'); + var logic = require('logic'); var syncbase = require('../syncbase'); var errorutils = require('../errorutils'); var oauth = require('../oauth'); @@ -25,6 +25,8 @@ define(function(require, exports) { // nothing } + var scope = logic.scope('ImapClient'); + /** * Open a connection to an IMAP server. * @@ -81,7 +83,7 @@ define(function(require, exports) { conn.onauth = function() { clearTimeout(connectTimeout); - slog.info('imap:connected', connInfo); + logic(scope, 'connected', { connInfo: connInfo }); conn.onauth = conn.onerror = noop; resolve(conn); }; @@ -110,7 +112,7 @@ define(function(require, exports) { credsUpdatedCallback); }); } else { - slog.error('imap:connect-error', { + logic(scope, 'connect-error', { error: errorString }); throw errorString; @@ -138,7 +140,7 @@ define(function(require, exports) { .toUpperCase().trim(); if (['NO', 'BAD'].indexOf(cmd) !== -1) { - slog.log('imap:protocol-error', { + logic(scope, 'protocol-error', { humanReadable: response.humanReadable, responseCode: response.code, // Include the command structure @@ -280,7 +282,7 @@ define(function(require, exports) { protocolLevelError || 'unknown'); - slog.error('imap:normalized-error', { + logic(scope, 'normalized-error', { error: err, errorName: err && err.name, errorMessage: err && err.message, diff --git a/apps/email/js/ext/imap/folder.js b/apps/email/js/ext/imap/folder.js index 51aface8fe18..3c999023093e 100644 --- a/apps/email/js/ext/imap/folder.js +++ b/apps/email/js/ext/imap/folder.js @@ -1,24 +1,22 @@ define( [ - 'rdcommon/log', + 'logic', '../a64', '../allback', '../date', '../syncbase', '../util', - '../slog', 'module', 'require', 'exports' ], function( - $log, + logic, $a64, $allback, $date, $sync, $util, - slog, $module, require, exports @@ -115,10 +113,14 @@ var MAX_FETCH_BYTES = (Math.pow(2, 32) - 1); * we should do a SEARCH for new messages. It is that search that will update * our accuracy information and only that. 
*/ -function ImapFolderConn(account, storage, _parentLog) { +function ImapFolderConn(account, storage) { this._account = account; this._storage = storage; - this._LOG = LOGFAB.ImapFolderConn(this, _parentLog, storage.folderId); + + logic.defineScope(this, 'ImapFolderConn', { + accountId: account.id, + folderId: storage.folderId + }); this._conn = null; this.box = null; @@ -399,9 +401,12 @@ ImapFolderConn.prototype = { * ] */ _lazySyncDateRange: function(startTS, endTS, accuracyStamp, - doneCallback, progressCallback) { + doneCallback, progressCallback) { + + var scope = logic.subscope(this, { startTS: startTS, endTS: endTS }); + if (startTS && endTS && SINCE(startTS, endTS)) { - this._LOG.illegalSync(startTS, endTS); + logic(scope, 'illegalSync'); doneCallback('invariant'); return; } @@ -411,7 +416,7 @@ ImapFolderConn.prototype = { var completed = false; console.log('syncDateRange:', startTS, endTS); - this._LOG.syncDateRange_begin(null, null, null, startTS, endTS); + logic(scope, 'syncDateRange_begin'); // IMAP Search @@ -436,7 +441,9 @@ ImapFolderConn.prototype = { if (completed) return; completed = true; - this._LOG.syncDateRange_end(0, 0, 0, startTS, endTS); + logic(scope, 'syncDateRange_end', { + full: 0, flags: 0, deleted: 0 + }); doneCallback('aborted'); }.bind(this), progressCallback, @@ -450,7 +457,7 @@ ImapFolderConn.prototype = { var dbStartTS = (startTS ? startTS - $sync.IMAP_SEARCH_AMBIGUITY_MS : null); var dbEndTS = (endTS ? endTS + $sync.IMAP_SEARCH_AMBIGUITY_MS : null); - slog.log('imap:database-lookup', { + logic(scope, 'database-lookup', { dbStartTS: dbStartTS, dbEndTS: dbEndTS }); @@ -485,7 +492,7 @@ ImapFolderConn.prototype = { if (shouldBisect) { // mark the bisection abort... - self._LOG.syncDateRange_end(null, null, null, startTS, endTS); + logic(scope, 'syncDateRange_end'); var bisectInfo = { oldStartTS: startTS, oldEndTS: endTS, @@ -558,7 +565,7 @@ ImapFolderConn.prototype = { // New if (!localHeader && hasServer) { imapSyncOptions.newUIDs.push(uid); - slog.log('imap:new-uid', { uid: uid }); + logic(scope, 'new-uid', { uid: uid }); } // Updated else if (localHeader && hasServer) { @@ -567,13 +574,13 @@ ImapFolderConn.prototype = { if (localHeader.imapMissingInSyncRange) { localHeader.imapMissingInSyncRange = null; - slog.log('imap:found-missing-uid', { uid: uid }); + logic(scope, 'found-missing-uid', { uid: uid }); storage.updateMessageHeader( localHeader.date, localHeader.id, true, localHeader, /* body hint */ null, latch.defer(), { silent: true }); } - slog.log('imap:updated-uid', { uid: uid }); + logic(scope, 'updated-uid', { uid: uid }); } // Deleted or Ambiguously Deleted else if (localHeader && !hasServer) { @@ -626,18 +633,18 @@ ImapFolderConn.prototype = { // a single date and a range.) if (missingRange.startTS <= date - fuzz && missingRange.endTS >= date + fuzz) { - slog.log('imap:unambiguously-deleted-uid', - { uid: uid, missingRange: missingRange}); + logic(scope, 'unambiguously-deleted-uid', + { uid: uid, missingRange: missingRange }); storage.deleteMessageHeaderAndBodyUsingHeader(localHeader); numDeleted++; } // Or we haven't looked far enough... maybe it will show up // later. We've already marked the updated "missing" range above. 
else { - slog.log('imap:ambiguously-missing-uid', - { uid: uid, missingRange: missingRange, - rangeToDelete: { startTS: date - fuzz, endTS: date + fuzz }, - syncRange: { startTS: startTS, endTS: endTS }}); + logic(scope, 'ambiguously-missing-uid', + { uid: uid, missingRange: missingRange, + rangeToDelete: { startTS: date - fuzz, endTS: date + fuzz }, + syncRange: { startTS: startTS, endTS: endTS }}); storage.updateMessageHeader( localHeader.date, localHeader.id, true, localHeader, /* body hint */ null, latch.defer(), { silent: true }); @@ -652,8 +659,11 @@ ImapFolderConn.prototype = { var uidSync = new $imapsync.Sync(imapSyncOptions); uidSync.onprogress = progressCallback; uidSync.oncomplete = function(newCount, knownCount) { - self._LOG.syncDateRange_end(newCount, knownCount, numDeleted, - startTS, endTS); + logic(scope, 'syncDateRange_end', { + full: newCount, + flags: knownCount, + deleted: numDeleted + }); // BrowserBox returns an integer modseq, but it's opaque and // we already deal with strings, so cast it here. @@ -840,7 +850,7 @@ ImapFolderConn.prototype = { return; } - $imapchew.updateMessageWithFetch(header, body, req, resp, this._LOG); + $imapchew.updateMessageWithFetch(header, body, req, resp); header.bytesToDownloadForBodyDisplay = $imapchew.calculateBytesToDownloadForImapBodyDisplay(body); @@ -1006,16 +1016,17 @@ ImapFolderConn.prototype = { }, shutdown: function() { - this._LOG.__die(); }, }; -function ImapFolderSyncer(account, folderStorage, _parentLog) { +function ImapFolderSyncer(account, folderStorage) { this._account = account; this.folderStorage = folderStorage; - this._LOG = LOGFAB.ImapFolderSyncer(this, _parentLog, folderStorage.folderId); - + logic.defineScope(this, 'ImapFolderSyncer', { + accountId: account.id, + folderId: folderStorage.folderId + }); this._syncSlice = null; /** @@ -1078,7 +1089,7 @@ function ImapFolderSyncer(account, folderStorage, _parentLog) { */ this._curSyncDoneCallback = null; - this.folderConn = new ImapFolderConn(account, folderStorage, this._LOG); + this.folderConn = new ImapFolderConn(account, folderStorage); } exports.ImapFolderSyncer = ImapFolderSyncer; ImapFolderSyncer.prototype = { @@ -1502,7 +1513,6 @@ console.log("folder message count", folderMessageCount, shutdown: function() { this.folderConn.shutdown(); - this._LOG.__die(); }, }; @@ -1522,37 +1532,4 @@ function GmailMessageStorage() { GmailMessageStorage.prototype = { }; -var LOGFAB = exports.LOGFAB = $log.register($module, { - ImapFolderConn: { - type: $log.CONNECTION, - subtype: $log.CLIENT, - events: { - }, - TEST_ONLY_events: { - }, - errors: { - callbackErr: { ex: $log.EXCEPTION }, - - htmlParseError: { ex: $log.EXCEPTION }, - htmlSnippetError: { ex: $log.EXCEPTION }, - textChewError: { ex: $log.EXCEPTION }, - textSnippetError: { ex: $log.EXCEPTION }, - - // Attempted to sync with an empty or inverted range. - illegalSync: { startTS: false, endTS: false }, - }, - asyncJobs: { - syncDateRange: { - newMessages: true, existingMessages: true, deletedMessages: true, - start: false, end: false, - }, - }, - }, - ImapFolderSyncer: { - type: $log.DATABASE, - events: { - } - }, -}); // end LOGFAB - }); // end define diff --git a/apps/email/js/ext/imap/imapchew.js b/apps/email/js/ext/imap/imapchew.js index df7b3efabbae..3711b7bf5fab 100644 --- a/apps/email/js/ext/imap/imapchew.js +++ b/apps/email/js/ext/imap/imapchew.js @@ -445,7 +445,7 @@ exports.chewHeaderAndBodyStructure = function(msg, folderId, newMsgId) { * // and set its value. 
* */ -exports.updateMessageWithFetch = function(header, body, req, res, _LOG) { +exports.updateMessageWithFetch = function(header, body, req, res) { var bodyRep = body.bodyReps[req.bodyRepIndex]; // check if the request was unbounded or we got back less bytes then we @@ -464,8 +464,7 @@ exports.updateMessageWithFetch = function(header, body, req, res, _LOG) { bodyRep.amountDownloaded += res.bytesFetched; var data = $mailchew.processMessageContent( - res.text, bodyRep.type, bodyRep.isDownloaded, req.createSnippet, _LOG - ); + res.text, bodyRep.type, bodyRep.isDownloaded, req.createSnippet); if (req.createSnippet) { header.snippet = data.snippet; diff --git a/apps/email/js/ext/imap/jobs.js b/apps/email/js/ext/imap/jobs.js index 382bbf63f5af..933d6eaabbb7 100644 --- a/apps/email/js/ext/imap/jobs.js +++ b/apps/email/js/ext/imap/jobs.js @@ -65,8 +65,7 @@ define( [ - 'rdcommon/log', - 'slog', + 'logic', 'mix', '../jobmixins', '../drafts/jobs', @@ -76,8 +75,7 @@ define( 'exports' ], function( - $log, - slog, + logic, mix, $jobmixins, draftsJobs, @@ -252,12 +250,12 @@ function(name, containOtherFolders, parentFolderInfo, personalNamespace) { * } **/ -function ImapJobDriver(account, state, _parentLog) { +function ImapJobDriver(account, state) { this.account = account; this.resilientServerIds = false; this._heldMutexReleasers = []; - this._LOG = LOGFAB.ImapJobDriver(this, _parentLog, this.account.id); + logic.defineScope(this, 'ImapJobDriver', { accountId: this.account.id }); this._state = state; // (we only need to use one as a proxy for initialization) @@ -317,7 +315,7 @@ ImapJobDriver.prototype = { callback(syncer.folderConn, storage); } catch (ex) { - self._LOG.callbackErr(ex); + logic(self, 'callbackErr', { ex: ex }); } }; @@ -362,12 +360,12 @@ ImapJobDriver.prototype = { * there is no need to release it directly. */ _acquireConnWithoutFolder: function(label, callback, deathback) { - this._LOG.acquireConnWithoutFolder_begin(label); + logic(this, 'acquireConnWithoutFolder_begin', { label: label }); var self = this; this.account.__folderDemandsConnection( null, label, function(conn) { - self._LOG.acquireConnWithoutFolder_end(label); + logic(self, 'acquireConnWithoutFolder_end', { label: label }); self._heldMutexReleasers.push(function() { self.account.__folderDoneWithConnection(conn, false, false); }); @@ -375,7 +373,7 @@ ImapJobDriver.prototype = { callback(conn); } catch (ex) { - self._LOG.callbackErr(ex); + logic(self, 'callbackErr', { ex: ex }); } }, deathback @@ -1019,16 +1017,18 @@ ImapJobDriver.prototype = { personalNamespace); var path = derivedInfo.path; + var scope = logic.subscope(this, { _path: path }); + var gotConn = function(conn) { // - create the box // Paths are private. - slog.log('imap:creatingFolder', { _path: path }); + logic(scope, 'creatingFolder', { _path: path }); conn.createMailbox(path, addBoxCallback); }.bind(this); var addBoxCallback = function(err, alreadyExists) { if (err) { - slog.error('imap:createFolderErr', { _path: path, err: err }); + logic(scope, 'createFolderErr', { err: err }); // TODO: do something clever in terms of making sure the folder didn't // already exist and the server just doesn't like to provide the // ALREADYEXISTS response. 
@@ -1041,8 +1041,7 @@ ImapJobDriver.prototype = { return; } - slog.log('imap:createdFolder', - { _path: path, alreadyExists: alreadyExists }); + logic(scope, 'createdFolder', { alreadyExists: alreadyExists }); // We originally (under imap.js) would do a LIST against the folder for // the path we thought we just created and then we would use that to @@ -1196,23 +1195,5 @@ HighLevelJobDriver.prototype = { mix(ImapJobDriver.prototype, draftsJobs.draftsMixins); -var LOGFAB = exports.LOGFAB = $log.register($module, { - ImapJobDriver: { - type: $log.DAEMON, - events: { - savedAttachment: { storage: true, mimeType: true, size: true }, - saveFailure: { storage: false, mimeType: false, error: false }, - }, - TEST_ONLY_events: { - saveFailure: { filename: false }, - }, - asyncJobs: { - acquireConnWithoutFolder: { label: false }, - }, - errors: { - callbackErr: { ex: $log.EXCEPTION }, - }, - }, -}); }); // end define diff --git a/apps/email/js/ext/imap/probe.js b/apps/email/js/ext/imap/probe.js index 7af861039aa2..01a847c74ff2 100644 --- a/apps/email/js/ext/imap/probe.js +++ b/apps/email/js/ext/imap/probe.js @@ -6,11 +6,11 @@ define([ 'browserbox', - 'slog', + 'logic', './client', '../syncbase', 'exports' -], function(BrowserBox, slog, imapclient, syncbase, exports) { +], function(BrowserBox, logic, imapclient, syncbase, exports) { /** * Log in to test credentials, passing the established connection @@ -25,9 +25,10 @@ define([ * reject => String (normalized) */ exports.probeAccount = function(credentials, connInfo) { - slog.info('probe:imap:connecting', { - connInfo: connInfo - }); + + var scope = logic.scope('ImapProber'); + + logic(scope, 'connecting', { connInfo: connInfo }); var conn; return imapclient.createImapConnection( @@ -38,17 +39,17 @@ define([ // here, as the caller should have already passed a valid // accessToken during account setup. This might indicate a // problem with our OAUTH handling, so log it just in case. - slog.warn('probe:imap:credentials-updated'); + logic(scope, 'credentials-updated'); } ).then(function(newConn) { conn = newConn; - slog.info('probe:imap:success'); + logic(scope, 'success'); return { conn: conn }; }) .catch(function(err) { // Normalize the error before passing it on. err = imapclient.normalizeImapError(conn, err); - slog.error('probe:imap:error', { error: err }); + logic(scope, 'error', { error: err }); if (conn) { conn.close(); } diff --git a/apps/email/js/ext/jobmixins.js b/apps/email/js/ext/jobmixins.js index 49988e9a4911..367a5328f75c 100644 --- a/apps/email/js/ext/jobmixins.js +++ b/apps/email/js/ext/jobmixins.js @@ -12,6 +12,7 @@ define( './syncbase', './mailslice', './headerCounter', + 'logic', 'exports', 'require' ], @@ -24,6 +25,7 @@ define( $sync, $mailslice, $count, + logic, exports, require ) { @@ -327,7 +329,7 @@ exports.do_download = function(op, callback) { } else { pendingCbs++; saveToDeviceStorage( - self._LOG, blob, storeTo, registerDownload[i], + self, blob, storeTo, registerDownload[i], partInfo.name, partInfo, next); } } @@ -358,18 +360,24 @@ exports.do_download = function(op, callback) { * encounter a collision. 
*/ var saveToDeviceStorage = exports.saveToDeviceStorage = -function(_LOG, blob, storeTo, registerDownload, filename, partInfo, cb, +function(scope, blob, storeTo, registerDownload, filename, partInfo, cb, isRetry) { var self = this; var callback = function(success, error, savedFilename, registered) { if (success) { - _LOG.savedAttachment(storeTo, blob.type, blob.size); + logic(scope, 'savedAttachment', { storeTo: storeTo, + type: blob.type, + size: blob.size }); console.log('saved attachment to', storeTo, savedFilename, 'type:', blob.type, 'registered:', registered); partInfo.file = [storeTo, savedFilename]; cb(); } else { - _LOG.saveFailure(storeTo, blob.type, error, filename); + logic(scope, 'saveFailure', { storeTo: storeTo, + type: blob.type, + size: blob.size, + error: error, + filename: filename }); console.warn('failed to save attachment to', storeTo, filename, 'type:', blob.type); // if we failed to unique the file after appending junk, just give up @@ -384,7 +392,8 @@ function(_LOG, blob, storeTo, registerDownload, filename, partInfo, cb, idxLastPeriod = filename.length; filename = filename.substring(0, idxLastPeriod) + '-' + $date.NOW() + filename.substring(idxLastPeriod); - saveToDeviceStorage(_LOG, blob, storeTo, registerDownload, + + saveToDeviceStorage(scope, blob, storeTo, registerDownload, filename, partInfo, cb, true); } }; @@ -810,7 +819,7 @@ exports._partitionAndAccessFoldersSequentially = function( callOnConnLoss(); } catch (ex) { - self._LOG.callbackErr(ex); + self.log.error('callbackErr', { ex: ex }); } } terminated = true; diff --git a/apps/email/js/ext/logic.js b/apps/email/js/ext/logic.js new file mode 100644 index 000000000000..3a3f0b50f608 --- /dev/null +++ b/apps/email/js/ext/logic.js @@ -0,0 +1,895 @@ +/** + * Logic is a structured logging system with bonus features for tracking + * asynchronous code flow and simple unit testing. + * + * This docstring is a quick tutorial. + * + ******************************************************************************* + * SCOPES + * + * Every log must be associated with a Scope. + * + * A Scope is just a wrapper around a namespace and a set of default arguments. + * When you hear "Scope", think "Logger". You could create a scope like so: + * + * var scope = logic.scope('Animal'); + * + * Then, you'd do this to log events (see below for more on logging): + * + * logic(scope, 'createdAnimal'); // shorthand for logic.event(...) + * + * However, it's inconvenient to pass around scopes, just like it's inconvenient + * to pass around loggers. So Logic allows you to associate a Scope with an + * object, like a class instance, and use that object in place of the Scope: + * + * function Animal(name) { + * logic.defineScope(this, 'Animal'); + * logic(this, 'createdAnimal'); + * } + * + * Scopes have two properties: a namespace and default details. When you log + * an event, it absorbs these things from its associated scope. + * + * More about Scopes later; let's talk Events. + * + ******************************************************************************* + * EVENT BASICS + * + * Logic operates under the principle that "everything is an event", and your + * program's execution flow can be encoded into a big list of events. Rather + * than tracking hierarchical relationships in logged events itself, Logic + * stores relevant information (such as a namespace) on each and every event. + * + * Every event, when serialized, is just a simple self-describing JSON payload. 
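+ *
+ * For example, a single serialized event might look roughly like this (the
+ * namespace, type, details and timestamp shown are made up for illustration;
+ * the field names come from LogicEvent's JSON representation below):
+ *
+ *   { "namespace": "Account", "type": "syncStarted",
+ *     "details": { "accountId": "0" },
+ *     "time": 1427839200000, "id": 42 }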
+ * + * By distilling program execution into a linear sequence of Events, we can + * later reconstruct additional hierarchy or metadata by analyzing the resulting + * stream, rather than pushing the "burden of understanding" onto the logging + * framework itself. + * + * While you can log events with "logic.event(...)", we also expose "logic(...)" + * as a convenient shorthand. + * + * Events consist of the following: + * + * scope: + * The associated scope, which lends a 'namespace' and default details. + * type: + * The type of the event. You define what it means. + * details: + * A key-value object of additional details. Any default details associated + * with the scope are merged into these details. + * + * So given the following code: + * + * function Animal(name) { + * logic.defineScope('Animal', { name: name }); + * logic(this, 'animalCreated', { why: 'because' }); + * } + * Animal.prototype.say = function(what) { + * logic(this, 'said', { what: what }); + * } + * new Animal('Riker').say('sup'); + * + * Logic would output something like the following: + * + * [ + * { namespace: 'Animal', + * type: 'animalCreated', + * details: { name: 'Riker', why: 'because' } }, + * { namespace: 'Animal', + * type: 'said', + * details: { name: 'Riker', what: 'sup' } } + * ] + * + * Notice how every event receives a copy of the details it has been passed? + * This makes events self-describing. Note that the 'name' detail, passed in + * logic.defineScope, is also copied to each event. + * + * It's often useful to log several things with a set of additional details, and + * for that, we have subscopes: + * + * var subscope = logic.subscope(animal, { color: 'brown' }) + * logic(subscope, 'run') // ==> { details: { color: 'brown', name: 'Riker' }} + * + * There is no explicit concept of hierarchy. Rather, we expect to reconstruct + * anything we need when viewing the logs later (i.e. in logic-inspector). + * + * There is also no concept of log levels. In practice, the logs we want + * bug-reporters to see are console logs, not logic events, and only we can + * understand what the chain of complex events means in context. For instance, + * errors are often expected in unit tests, where it doesn't make sense to + * categorically treat them as bright-red errors. (The distinction between + * log/info/warn events is often unclear, but perhaps a case could be made for + * distinguishing errors.) + * + * In general, our logs should just objectively report what happens, leaving + * logic-inspector to decide what's important. + * + ******************************************************************************* + * ASYNC and AWAIT + * + * Tracking events _within_ an individual scope is nice, but often we need to + * track asynchronous events that get passed around. For that, Logic provides + * 'logic.async' and 'logic.await', two primitives to annotate async behavior. + * + * var promise = logic.async(this, 'does a job', (resolve) => {...}) + * + * logic.await(otherScope, 'waiting for job done', promise) + * .then(...) + * + * Logic will then log events corresponding to the Promise's resolution and + * state (such as which events depend on other events) so that we can later + * reconstruct graphs of the code flow and dependencies. 
With those two + * primitives, we could construct a graph like the following: + * + * [ Animal ] [ Owner ] + * __________________ + * ________________ | ASYNC | + * | AWAIT dog bowl | | Filling dog bowl | + * | | | | + * | | | | + * |________________| |__________________| + * done <--------------------/ + * + * Unfortunately, it's hard to display all that information such that it doesn't + * get in the way. :mcav attempted to add digraph-like views to logic-inspector, + * but didn't have much success making it usable yet. + * + ******************************************************************************* + * TESTING + * + * To write tests against your logic logs, Logic provides the 'logic.match' + * function. + * + * var promise = logic + * .match('Animal', 'animalCreated', { name: 'Riker' }) + * .match('Animal', 'say') + * .failIfMatched('Animal', 'died'); + * + * In the snippet above, the first logic.match call returns an object that has + * `.then` and `.match`, so that you can treat it like a Promise as well as + * easily chain further expectations. The promise chain will resolve after all + * of those conditions have matched, or a timeout has been reached. + * + * See test_disaster_recovery.js for an example test using these primitives. + */ +define(function(require) { + var evt = require('evt'); + var equal = require('equal'); + + /** + * The `logic` module is callable, as a shorthand for `logic.event()`. + */ + function logic() { + return logic.event.apply(logic, arguments); + } + + evt.mix(logic); + + /** + * Create a new Scope with the given namespace and default details. + * + * @param {string} namespace + * @param {object|null} defaultDetails + */ + logic.scope = function(namespace, defaultDetails) { + return new Scope(namespace, defaultDetails); + }; + + var objectToScope = new WeakMap(); + + function toScope(scope) { + if (!(scope instanceof Scope)) { + scope = objectToScope.get(scope); + if (!scope) { + throw new Error('Invalid scope ' + scope + + ' passed to logic.event(); ' + + 'did you remember to call logic.defineScope()? ' + + new Error().stack); + } + } + return scope; + } + + /** + * Most often, scopes and namespaces map one-to-one with class instances. With + * defineScope(), you can associate a Scope with an object, and then use that + * object in place of the scope. For instance: + * + * function MyClass() { + * logic.defineScope(this, 'MyClass'); + * logic.event(this, 'initialized'); + * } + */ + logic.defineScope = function(obj, namespace, defaultDetails) { + // Default to the object's class name, if available. + if (!namespace && obj && obj.constructor && obj.constructor.name) { + namespace = obj.constructor.name; + } + var scope = new Scope(namespace, defaultDetails); + objectToScope.set(obj, scope); + return scope; + }; + + /** + * Sometimes, you may want to log several events, each with shared + * details. With logic.subscope(), you can create a child scope that + * shares the same namespace, but adds additional default details + * onto each message. For instance: + * + * logic.defineScope(this, 'Account', { accountId: 1 }); + * var scope = logic.subscope(this, { action: 'move' }); + * logic.log(scope, 'start'); + * // event: Account/start { accountId: 1, action: 'move' } + */ + logic.subscope = function(scope, defaultDetails) { + scope = toScope(scope); + return new Scope(scope.namespace, into(shallowClone(scope.defaultDetails), + shallowClone(defaultDetails))); + }; + + /** + * Emit an event. `logic(...)` is shorthand for `logic.event(...)`. 
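+   *
+   * For instance, inside an object that has called logic.defineScope(this,
+   * ...), a call like the following logs a single event (the event type and
+   * details here are illustrative only):
+   *
+   *   logic(this, 'connectionOpened', { folderId: folderId });
+   *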
+ * See the module docs for more about events. + * + * @param {Scope} scope + * The scope (i.e. "namespace") for this event. + * @param {string} type + * A string, typically camelCased, describing the event taking place. + * @param {object} details + * Optional details about this event, such as identifiers or parameters. + * These details will be mixed in with any default details specified + * by the Scope. + */ + logic.event = function(scope, type, details) { + scope = toScope(scope); + + // Give others a chance to intercept this event before we do lots of hard + // JSON object work. + var isDefaultPrevented = false; + var preprocessEvent = { + scope: scope, + namespace: scope.namespace, + type: type, + details: details, + preventDefault: function() { + isDefaultPrevented = true; + } + }; + logic.emit('preprocessEvent', preprocessEvent); + + if (isDefaultPrevented) { + return { id: 0 }; // async/await require a return object regardless. + } + + type = preprocessEvent.type; + details = preprocessEvent.details; + + if (typeof type !== 'string') { + throw new Error('Invalid "type" passed to logic.event(); ' + + 'expected a string, got "' + type + '"'); + } + + if (scope.defaultDetails) { + if(isPlainObject(details)) { + details = into(shallowClone(scope.defaultDetails), + shallowClone(details)); + } else { + details = shallowClone(scope.defaultDetails); + } + } else { + details = shallowClone(details); + } + + var event = new LogicEvent(scope, type, details); + logic.emit('censorEvent', event); + logic.emit('event', event); + + if (logic.realtimeLogEverything) { + dump('logic: ' + event.toString() + '\n'); + } + + return event; + }; + + + // True when being run within a test. + logic.underTest = false; + + /** + * Immediately fail the current test with the given exception. If no test is + * in progress, an error is logged, but no exception is thrown. In other + * words, logic.fail() will NOT throw on you. + * + * @param {object} ex + * Exception object, as with Promise.reject() + */ + logic.fail = function(ex) { + console.error('Not in a test, cannot logic.fail(' + ex + ')'); + }; + + + var nextId = 1; + + /** + * Return a sequential unique identifier, unique for users of this module + * instance. + */ + logic.uniqueId = function() { + return nextId++; + }; + + // Hacky way to pass around a global config: + logic.isCensored = false; + logic.realtimeLogEverything = false; + + var interceptions = {}; + + /** + * Provide a named hook which can be intercepted by tests. + */ + logic.interceptable = function(type, fn) { + if (interceptions[type]) { + return interceptions[type](); + } else { + return fn(); + } + }; + + /** + * Intercept a named logic.interceptable by calling your function instead. + */ + logic.interceptOnce = function(type, replacementFn) { + var prevFn = interceptions[type]; + interceptions[type] = function() { + interceptions[type] = prevFn; + return replacementFn(); + }; + } + + /** + * Return a Promise-like object that is fulfilled when an event + * matching the given details is logged. Chainable. + * + * detailPredicate is optional and can be any of the following: + * + * an object: + * Checks to see if the given object is a SUBSET of the event's details. + * + * a function: + * The event matches if detailPredicate(event.details) returns true. 
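+   *
+   * For example, either of the following would match an 'Account' event of
+   * type 'syncComplete' whose details include { folderId: 'inbox' } (the
+   * namespace, type and details here are illustrative only):
+   *
+   *   logic.match('Account', 'syncComplete', { folderId: 'inbox' });
+   *   logic.match('Account', 'syncComplete', function(details) {
+   *     return details.folderId === 'inbox';
+   *   });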
+ * + * @param {string} ns + * @param {string} type + * @param {object|function} detailPredicate + */ + logic.match = function(ns, type, detailPredicate) { + return new LogicMatcher( + LogicMatcher.normalizeMatchArgs(ns, type, detailPredicate)); + } + + + function MismatchError(matcher, event) { + this.matcher = matcher; + this.event = event; + } + + MismatchError.prototype = Object.create(Error.prototype, { + constructor: { value: MismatchError }, + toString: { value: function() { + if (this.matcher.not) { + return 'MismatchError: expected ' + this.event + + ' to not occur (failIfMatched ' + this.matcher + ').'; + } else { + return 'MismatchError: expected ' + this.event + + ' to match ' + this.matcher + '.'; + } + }} + }); + + + /** + * This is the object returned from `logic.match`. It acts as a Promise that + * resolves when a matching event has been logged. + */ + function LogicMatcher(opts) { + this.matchedLogs = opts.prevMatcher ? opts.prevMatcher.matchedLogs : []; + this.capturedLogs = []; + this.ns = opts.ns; + this.type = opts.type; + this.detailPredicate = opts.detailPredicate; + this.failOnMismatchedDetails = true; + this.not = opts.not; + this.timeoutMS = 2000; + this.resolved = false; + this.anotherMatcherNeedsMyLogs = false; + + if (opts.prevMatcher) { + // Tell the previous matcher to not remove its event listener until we've + // had a chance to pull out any logs which occured between its resolution + // and our start. + opts.prevMatcher.anotherMatcherNeedsMyLogs = true; + } + + logic.defineScope(this, 'LogicMatcher'); + + var prevPromise = opts.prevPromise || Promise.resolve(); + + if (this.not) { + this.promise = prevPromise.then(() => { + this.capturedLogs.some((event) => { + if ((!this.ns || event.namespace === this.ns) && + event.matches(this.type, this.detailPredicate)) { + throw new MismatchError(this, event); + } + }); + }); + } else if (this.type) { + this.promise = new Promise((resolve, reject) => { + // Once any previous match has been resolved, + // subscribe to a following match. + var subscribeToNextMatch = () => { + var timeoutId = setTimeout(() => { + reject(new Error('LogicMatcherTimeout: ' + this)); + }, this.timeoutMS); + + // Promise chains have "dead spots" in between resolution + // callbacks. For instance: + // [promise1.then] [promise2.then] + // other events could be logged --^ + // + // We could miss those events in the middle by just setting + // up a new listener for each LogicMatcher. Instead, since + // every matcher has a pointer to its prevMatcher, we can + // just grab the missing logs from there. + var resolveThisMatcher = (event) => { + this.resolved = true; + this.capturedLogs = []; // Extra events will go here. + if (!this.anotherMatcherNeedsMyLogs) { + this.removeMatchListener(); + } + }; + + var matchFn = (event) => { + this.capturedLogs.push(event); + if (this.resolved) { + return; + } + + if (this.ns && event.namespace !== this.ns || + event.type !== this.type) { + return false; // did not match + } + if (event.matches(this.type, this.detailPredicate)) { + resolveThisMatcher(event); + this.matchedLogs.push(event); + clearTimeout(timeoutId); + logic(this, 'match', { ns: this.ns, + type: this.type, + event: event }); + resolve(event); + return true; + } else { + if (this.failOnMismatchedDetails) { + resolveThisMatcher(event); + reject(new MismatchError(this, event)); + return true; // matched + } else { + // Ignore mismatched events; maybe we'll match later. 
+ } + } + return false; // not done yet, didn't find a match + }; + + this.removeMatchListener = () => { + logic.removeListener('event', matchFn); + }; + + logic.on('event', matchFn); + + if (opts.prevMatcher) { + var prevLogs = opts.prevMatcher.capturedLogs; + // Run matchFn on prevLogs until one of them matches. + var matchIndex = prevLogs.findIndex(matchFn); + // Then, we get to start by capturing all logs that have occured in + // the intervening time: + if (matchIndex !== -1) { + this.capturedLogs = prevLogs.slice(matchIndex + 1); + } + // Now that we're done with the previous matcher, it doesn't need to + // listen to events any more. + opts.prevMatcher.removeMatchListener(); + } + } + + if (prevPromise) { + prevPromise.then(subscribeToNextMatch, (e) => reject(e) ); + } else { + try { + subscribeToNextMatch(); + } catch(e) { + reject(e); + } + } + }); + } else { + // This is the '.then()' case; we still want to return a + // LogicMatcher so they can chain, but without any further expectations. + this.promise = prevPromise; + } + } + + LogicMatcher.normalizeMatchArgs = function(ns, type, details) { + // 'ns' is optional + if (typeof type === 'object') { + details = type; + type = ns; + ns = null; + } + return { ns: ns, type: type, detailPredicate: details }; + } + + LogicMatcher.prototype = { + + /** + * Same as `logic.match`. + */ + match(ns, type, details) { + var args = LogicMatcher.normalizeMatchArgs(ns, type, details); + args.prevMatcher = this; + args.prevPromise = this.promise; + return new LogicMatcher(args); + }, + + /** + * Look at THE LOGS ALREADY CAPTURED by this LogicMatcher, and fail if any + * of them match this one. + */ + failIfMatched(ns, type, details) { + var args = LogicMatcher.normalizeMatchArgs(ns, type, details); + args.not = true; + args.prevMatcher = this; + args.prevPromise = this.promise; + return new LogicMatcher(args); + }, + + /** + * Like Promise.then(); resolves with an array of matched logs. 
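+     *
+     * For example (the namespace and event type are illustrative only):
+     *
+     *   logic.match('Account', 'syncComplete')
+     *     .then(function(logs) {
+     *       console.log('Matched', logs.length, 'events.');
+     *     });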
+ */ + then(fn, catchFn) { + return new LogicMatcher({ + prevPromise: this.promise.then(() => { + var ret = fn(this.matchedLogs.slice()); + if (ret instanceof Promise) { + ret = new LogicMatcher({ + prevPromise: ret + }); + } + return ret; + }, catchFn) + }); + }, + + toString() { + return ''; + } + } + + function Scope(namespace, defaultDetails) { + this.namespace = namespace; + + if (defaultDetails && !isPlainObject(defaultDetails)) { + throw new Error('Invalid defaultDetails; expected a plain-old object: ' + + defaultDetails); + } + this.defaultDetails = defaultDetails; + } + + function ObjectSimplifier(opts) { + opts = opts || {}; + this.maxDepth = opts.maxDepth || 10; + this.maxStringLength = opts.maxStringLength || 1000; + this.maxArrayLength = opts.maxArrayLength || 1000; + this.maxObjectLength = opts.maxObjectLength || 10; + } + + ObjectSimplifier.prototype = { + simplify: function(x) { + return this._simplify(x, 0, new WeakSet()); + }, + + _simplify: function(x, depth, cacheSet) { + if (cacheSet.has(x)) { + return '(cycle)'; + } + if (typeof x === 'number') { + return x; + } else if (typeof x === 'string') { + return x.slice(0, this.maxStringLength); + } else if (x && x.BYTES_PER_ELEMENT) { + // TypedArray + return x.slice(0, this.maxArrayLength); + } else if (Array.isArray(x)) { + if (depth < this.maxDepth) { + return x.slice(0, this.maxArrayLength) + .map((element) => this._simplify(element, depth + 1, cacheSet)); + } else { + return '[Array length=' + x.length + ']'; + } + } else if (x && typeof x === 'object') { + cacheSet.add(x); + if (!isPlainObject(x)) { + if (x.toJSON) { + return this._simplify(x.toJSON(), depth, cacheSet); + } else if (x.toString) { + return this._simplify(x.toString(), depth, cacheSet); + } else { + return '(?)'; + } + } else { + if (depth < this.maxDepth) { + var retObj = {}; + var idx = 0; + for (var key in x) { + if (idx > this.maxObjectLength) { + break; + } + retObj[key] = this._simplify(x[key], depth + 1, cacheSet); + idx++; + } + return retObj; + } else if (x.toString) { + return this._simplify(x.toString(), depth, cacheSet); + } else { + return '(object?)'; + } + } + } else if (typeof x === 'function') { + return '(function)'; + } else { + return x; + } + } + } + + function LogicEvent(scope, type, details) { + if (!(scope instanceof Scope)) { + throw new Error('Invalid "scope" passed to LogicEvent(); ' + + 'did you remember to call logic.defineScope()?'); + } + + this.scope = scope; + this.type = type; + this.details = details; + this.time = Date.now(); + this.id = logic.uniqueId(); + this.jsonRepresentation = { + namespace: this.scope.namespace, + type: this.type, + details: new ObjectSimplifier().simplify(this.details), + time: this.time, + id: this.id + }; + } + + LogicEvent.fromJSON = function(data) { + var event = new LogicEvent(new Scope(data.namespace), + data.type, + data.details); + event.time = data.time; + event.id = data.id; + return event; + } + + LogicEvent.prototype = { + get namespace() { + return this.scope.namespace; + }, + + toJSON: function() { + return this.jsonRepresentation; + }, + + toString: function() { + return ''; + }, + + /** + * Return true if this event matches the given predicate, using the same + * rules as `logic.match()`. 
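+     *
+     * For instance, an event logged as logic(scope, 'open', { id: 1 })
+     * satisfies event.matches('open', { id: 1 }), since an object predicate
+     * only needs to be a subset of the event's details. (The type and details
+     * here are illustrative only.)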
+ * + * @param {string} type + * @param {object|function|null} detailPredicate + */ + matches: function(type, detailPredicate) { + if (this.type !== type) { + return false; + } + + if (typeof detailPredicate === 'function') { + return !!detailPredicate(this.details); + } else if (isPlainObject(detailPredicate)) { + for (var key in detailPredicate) { + var expected = detailPredicate && detailPredicate[key]; + var actual = this.details && this.details[key]; + if (actual === undefined) { + actual = null; // For actual comparison, undefined equates to null. + } + + if (expected === undefined) { + continue; // We don't care about these. + } else if (!this.details || + !equal(expected, actual)) { + return false; + } + } + return true; + } else if (detailPredicate != null) { + return equal(this.details, detailPredicate); + } else { + return true; + } + } + }; + + function isPlainObject(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + // Object.create(null) has no .toString(). + if (obj.toString && (obj.toString() !== '[object Object]')) { + return false; + } + for (var k in obj) { + if (typeof k === 'function') { + return false; + } + } + return true; + } + + logic.isPlainObject = isPlainObject; + + //---------------------------------------------------------------- + // Promises + + var promiseToStartEventMap = new WeakMap(); + var promiseToResultEventMap = new WeakMap(); + + /** + * For those cases when your logic starts in one place but ends in + * another, logic.async is slightly inconvenient. This function + * tracks an async event much like `logic.async`, except that this + * helper pulls out 'resolve' and 'reject' to allow you to log + * completion elsewhere. + * + * @return An object with 'resolve' and 'reject' properties. + */ + logic.startAsync = function(scope, type, details) { + var resolve, reject; + var promise = logic.async(scope, type, details, (_resolve, _reject) => { + resolve = _resolve; + reject = _reject; + }); + return { + resolve: resolve, + reject: reject + }; + } + + /** + * A tracked version of `new Promise()`, where `fn` here is your promise + * executor function. As with `logic.event()`, details is optional, but type + * is required. Events will be logged to track the promise's resolution. + */ + logic.async = function(scope, type, details, fn) { + if (!fn && typeof details === 'function') { + fn = details; + details = null; + } + + scope = logic.subscope(scope, details); + + var startEvent; + var promise = new Promise((resolve, reject) => { + startEvent = logic(scope, 'begin ' + type, { + asyncStatus: 0, // 'pending', as per Promise's private 'status' property. + asyncName: type + }); + + fn((result) => { + promiseToResultEventMap.set(promise, logic(scope, type, { + asyncStatus: 1, // 'resolved' + sourceEventIds: [startEvent.id], + result: result + })); + resolve(result); + }, (error) => { + promiseToResultEventMap.set(promise, logic(scope, type, { + asyncStatus: 2, // 'rejected' + sourceEventIds: [startEvent.id], + error: error + })); + reject(error); + }); + }); + + promiseToStartEventMap.set(promise, startEvent); + return promise; + }; + + /** + * Wraps a Promise, logging events that say "I'm waiting for this Promise" and + * "I finally got this Promise's result". If the originating promise was + * created with `logic.async`, we can link the two semantically. 
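+   *
+   * A minimal sketch (the scopes, event names and helper functions here are
+   * illustrative, not taken from real callers):
+   *
+   *   var saved = logic.async(dbScope, 'save message', function(resolve) {
+   *     saveMessage(resolve);
+   *   });
+   *   logic.await(syncScope, 'message saved', saved).then(onSaved);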
+ */ + logic.await = function(scope, type, details, promise) { + if (!promise && details.then) { + promise = details; + details = null; + } + + scope = logic.subscope(scope, details).subscope(scope); + + var startEvent = promiseToStartEventMap.get(promise); + var awaitEvent = logic.event(scope, 'await ' + type, { + awaitStatus: 0, // 'pending', as per Promise's private 'status' property. + sourceEventIds: startEvent ? [startEvent.id] : null, + awaitName: type + }); + + return promise.then((result) => { + var resultEvent = promiseToResultEventMap.get(promise); + logic(scope, type, { + awaitStatus: 1, // 'resolved' + result: result, + sourceEventIds: (resultEvent + ? [resultEvent.id, awaitEvent.id] + : [awaitEvent.id]) + }); + return result; + }, (error) => { + var resultEvent = promiseToResultEventMap.get(promise); + logic(scope, type, { + awaitStatus: 2, // 'rejected' + error: error, + sourceEventIds: (resultEvent + ? [resultEvent.id, awaitEvent.id] + : [awaitEvent.id]) + }); + throw error; + }); + }; + + function shallowClone(x) { + if (isPlainObject(x)) { + var ret = {}; + for (var key in x) { + ret[key] = x[key]; + } + return ret; + } else { + return x; + } + } + + /** + * Merge `source` into `target`. + */ + function into(target, source) { + if (!target) { + target = {}; + } + for (var key in source) { + target[key] = source[key]; + } + return target; + } + + + return logic; +}); diff --git a/apps/email/js/ext/mailapi.js b/apps/email/js/ext/mailapi.js index c53f065e1b7c..bbbd8c4f9989 100644 --- a/apps/email/js/ext/mailapi.js +++ b/apps/email/js/ext/mailapi.js @@ -661,6 +661,10 @@ var ContactCache = exports.ContactCache = { * generate N callbacks when 1 will do. */ resolvePeep: function(addressPair) { + if (!addressPair) { + console.error("NO ADDRESS PAIR?", new Error().stack); + return; + } var emailAddress = addressPair.address; var entry = this._contactCache[emailAddress], contact, peep; var contactsAPI = navigator.mozContacts; diff --git a/apps/email/js/ext/mailbridge.js b/apps/email/js/ext/mailbridge.js index ff1d51bb9f95..d9f5389afe3b 100644 --- a/apps/email/js/ext/mailbridge.js +++ b/apps/email/js/ext/mailbridge.js @@ -4,7 +4,7 @@ define( [ - 'rdcommon/log', + 'logic', './util', './mailchew-strings', './date', @@ -14,7 +14,7 @@ define( 'exports' ], function( - $log, + logic, $imaputil, $mailchewStrings, $date, @@ -97,7 +97,8 @@ function MailBridge(universe, name) { this.universe = universe; this.universe.registerBridge(this); - this._LOG = LOGFAB.MailBridge(this, universe._LOG, name); + logic.defineScope(this, 'MailBridge', { name: name }); + /** @dictof[@key[handle] @value[BridgedViewSlice]]{ live slices } */ this._slices = {}; /** @dictof[@key[namespace] @value[@listof[BridgedViewSlice]]] */ @@ -138,10 +139,19 @@ MailBridge.prototype = { __receiveMessage: function mb___receiveMessage(msg) { var implCmdName = '_cmd_' + msg.type; if (!(implCmdName in this)) { - this._LOG.badMessageType(msg.type); + logic(this, 'badMessageType', { type: msg.type }); return; } - var rval = this._LOG.cmd(msg.type, this, this[implCmdName], msg); + logic(this, 'cmd', { + type: msg.type, + msg: msg + }); + try { + this[implCmdName](msg); + } catch(ex) { + logic.fail(ex); + return; // note that we did not throw + } }, _cmd_ping: function mb__cmd_ping(msg) { @@ -686,7 +696,7 @@ MailBridge.prototype = { _cmd_refreshHeaders: function mb__cmd_refreshHeaders(msg) { var proxy = this._slices[msg.handle]; if (!proxy) { - this._LOG.badSliceHandle(msg.handle); + logic(this, 'badSliceHandle', { handle: msg.handle 
}); return; } @@ -697,7 +707,7 @@ MailBridge.prototype = { _cmd_growSlice: function mb__cmd_growSlice(msg) { var proxy = this._slices[msg.handle]; if (!proxy) { - this._LOG.badSliceHandle(msg.handle); + logic(this, 'badSliceHandle', { handle: msg.handle }); return; } @@ -708,7 +718,7 @@ MailBridge.prototype = { _cmd_shrinkSlice: function mb__cmd_shrinkSlice(msg) { var proxy = this._slices[msg.handle]; if (!proxy) { - this._LOG.badSliceHandle(msg.handle); + logic(this, 'badSliceHandle', { handle: msg.handle }); return; } @@ -720,7 +730,7 @@ MailBridge.prototype = { _cmd_killSlice: function mb__cmd_killSlice(msg) { var proxy = this._slices[msg.handle]; if (!proxy) { - this._LOG.badSliceHandle(msg.handle); + logic(this, 'badSliceHandle', { handle: msg.handle }); return; } @@ -1343,26 +1353,4 @@ MailBridge.prototype = { }; -var LOGFAB = exports.LOGFAB = $log.register($module, { - MailBridge: { - type: $log.DAEMON, - events: { - // NB: under unit test, this is not used and bridgeSnoop is used instead. - send: { type: true }, - }, - TEST_ONLY_events: { - send: { msg: false }, - }, - errors: { - badMessageType: { type: true }, - badSliceHandle: { handle: true }, - }, - calls: { - cmd: { command: true }, - }, - TEST_ONLY_calls: { - }, - }, -}); - }); // end define diff --git a/apps/email/js/ext/mailchew.js b/apps/email/js/ext/mailchew.js index 564a65b4d04c..0415fb5ee4a0 100644 --- a/apps/email/js/ext/mailchew.js +++ b/apps/email/js/ext/mailchew.js @@ -15,6 +15,7 @@ define( [ 'exports', + 'logic', './util', './mailchew-strings', './quotechew', @@ -22,6 +23,7 @@ define( ], function( exports, + logic, $util, $mailchewStrings, $quotechew, @@ -30,6 +32,7 @@ define( var DESIRED_SNIPPET_LENGTH = 100; +var scope = logic.scope('MailChew'); /** * Generate the default compose body for a new e-mail @@ -320,7 +323,7 @@ exports.mergeUserTextWithHTML = function mergeReplyTextWithHTML(text, html) { * Generate the snippet and parsed body from the message body's content. */ exports.processMessageContent = function processMessageContent( - content, type, isDownloaded, generateSnippet, _LOG) { + content, type, isDownloaded, generateSnippet) { // Strip any trailing newline. if (content.slice(-1) === '\n') { @@ -334,7 +337,7 @@ exports.processMessageContent = function processMessageContent( parsedContent = $quotechew.quoteProcessTextBody(content); } catch (ex) { - _LOG.textChewError(ex); + logic(scope, 'textChewError', { ex: ex }); // An empty content rep is better than nothing. 
parsedContent = []; } @@ -346,7 +349,7 @@ exports.processMessageContent = function processMessageContent( ); } catch (ex) { - _LOG.textSnippetError(ex); + logic(scope, 'textSnippetError', { ex: ex }); snippet = ''; } } @@ -357,7 +360,7 @@ exports.processMessageContent = function processMessageContent( snippet = $htmlchew.generateSnippet(content); } catch (ex) { - _LOG.htmlSnippetError(ex); + logic(scope, 'htmlSnippetError', { ex: ex }); snippet = ''; } } @@ -366,7 +369,7 @@ exports.processMessageContent = function processMessageContent( parsedContent = $htmlchew.sanitizeAndNormalizeHtml(content); } catch (ex) { - _LOG.htmlParseError(ex); + logic(scope, 'htmlParseError', { ex: ex }); parsedContent = ''; } } diff --git a/apps/email/js/ext/mailslice.js b/apps/email/js/ext/mailslice.js index b553ed75e153..3b4d990e6190 100755 --- a/apps/email/js/ext/mailslice.js +++ b/apps/email/js/ext/mailslice.js @@ -52,8 +52,7 @@ define(function(require, exports, module) { -var $log = require('rdcommon/log'); -var slog = require('./slog'); +var logic = require('logic'); var $util = require('./util'); var $a64 = require('./a64'); var $allback = require('./allback'); @@ -128,11 +127,12 @@ var SYNC_START_MINIMUM_PROGRESS = 0.02; * a zeroTimeout can fire on the event loop. In order to keep the UI responsive, * We force flushes if we have more than 5 pending slices to send. */ -function MailSlice(bridgeHandle, storage, _parentLog) { +function MailSlice(bridgeHandle, storage) { this._bridgeHandle = bridgeHandle; bridgeHandle.__listener = this; this._storage = storage; - this._LOG = LOGFAB.MailSlice(this, _parentLog, bridgeHandle._handle); + + logic.defineScope(this, 'MailSlice', { bridgeHandle: bridgeHandle._handle }); // The time range of the headers we are looking at right now. this.startTS = null; @@ -352,7 +352,7 @@ MailSlice.prototype = { if (!this._bridgeHandle) return; - this._LOG.headersAppended(headers); + logic(this, 'headersAppended', { headers: headers }); if (insertAt === -1) insertAt = this.headers.length; this.headers.splice.apply(this.headers, [insertAt, 0].concat(headers)); @@ -429,7 +429,7 @@ MailSlice.prototype = { this.endUID = header.id; } - this._LOG.headerAdded(idx, header); + logic(this, 'headerAdded', { index: idx, header: header }); this._bridgeHandle.sendSplice(idx, 0, [header], Boolean(this.waitingOnData), Boolean(this.waitingOnData)); @@ -449,7 +449,7 @@ MailSlice.prototype = { if (idx !== null) { // There is no identity invariant to ensure this is already true. this.headers[idx] = header; - this._LOG.headerModified(idx, header); + logic(this, 'headerModified', { index: idx, header: header }); this._bridgeHandle.sendUpdate([idx, header]); } }, @@ -463,7 +463,7 @@ MailSlice.prototype = { var idx = bsearchMaybeExists(this.headers, header, cmpHeaderYoungToOld); if (idx !== null) { - this._LOG.headerRemoved(idx, header); + logic(this, 'headerRemoved', { index: idx, header: header }); this._bridgeHandle.sendSplice(idx, 1, [], Boolean(this.waitingOnData), Boolean(this.waitingOnData)); @@ -498,7 +498,6 @@ MailSlice.prototype = { this._bridgeHandle = null; this.desiredHeaders = 0; this._storage.dyingSlice(this); - this._LOG.__die(); }, get isDead() { @@ -840,7 +839,7 @@ var FOLDER_DB_VERSION = exports.FOLDER_DB_VERSION = 3; * ]] */ function FolderStorage(account, folderId, persistedFolderInfo, dbConn, - FolderSyncer, _parentLog) { + FolderSyncer) { /** Our owning account. 
*/ this._account = account; this._imapDb = dbConn; @@ -849,7 +848,10 @@ function FolderStorage(account, folderId, persistedFolderInfo, dbConn, this.folderMeta = persistedFolderInfo.$meta; this._folderImpl = persistedFolderInfo.$impl; - this._LOG = LOGFAB.FolderStorage(this, _parentLog, folderId); + logic.defineScope(this, 'FolderStorage', { + accountId: account.id, + folderId: folderId + }); /** * @listof[AccuracyRangeInfo]{ @@ -1001,8 +1003,7 @@ function FolderStorage(account, folderId, persistedFolderInfo, dbConn, this._curSyncSlice = null; this._messagePurgeScheduled = false; - this.folderSyncer = FolderSyncer && new FolderSyncer(account, this, - this._LOG); + this.folderSyncer = FolderSyncer && new FolderSyncer(account, this); } exports.FolderStorage = FolderStorage; @@ -1064,7 +1065,7 @@ FolderStorage.prototype = { headerBlocks: this._dirtyHeaderBlocks, bodyBlocks: this._dirtyBodyBlocks, }; - this._LOG.generatePersistenceInfo(pinfo); + logic(this, 'generatePersistenceInfo', { info: pinfo }); this._dirtyHeaderBlocks = {}; this._dirtyBodyBlocks = {}; this._dirty = false; @@ -1087,20 +1088,24 @@ FolderStorage.prototype = { _invokeNextMutexedCall: function() { var callInfo = this._mutexQueue[0], self = this, done = false; this._mutexedCallInProgress = true; - this._LOG.mutexedCall_begin(callInfo.name); + logic(this, 'mutexedCall_begin', { name: callInfo.name }); try { var mutexedOpDone = function(err) { if (done) { - self._LOG.tooManyCallbacks(callInfo.name); + logic(self, 'tooManyCallbacks', { name: callInfo.name }); return; } - self._LOG.mutexedCall_end(callInfo.name); - slog.log('mailslice:mutex-released', - { folderId: self.folderId, err: err }); + logic(self, 'mutexedCall_end', { name: callInfo.name }); + logic(self, 'mailslice:mutex-released', + { folderId: self.folderId, err: err }); + done = true; if (self._mutexQueue[0] !== callInfo) { - self._LOG.mutexInvariantFail(callInfo.name, self._mutexQueue[0].name); + logic(self, 'mutexInvariantFail', { + callName: callInfo.name, + mutexName: self._mutexQueue[0].name + }); return; } self._mutexQueue.shift(); @@ -1116,7 +1121,7 @@ FolderStorage.prototype = { callInfo.func(mutexedOpDone); } catch (ex) { - this._LOG.mutexedOpErr(ex); + logic(this, 'mutexedOpErr', { ex: ex }); } }, @@ -1232,7 +1237,10 @@ FolderStorage.prototype = { // does not. But just silently return since there's little to be gained // from blowing up the world. if (idx === -1) { - this._LOG.badDeletionRequest('header', null, uid); + logic(this, 'badDeletionRequest', { + header: header, + uid: uid + }); return; } header = block.headers[idx]; @@ -1364,7 +1372,7 @@ FolderStorage.prototype = { var idx = block.ids.indexOf(id); var body = block.bodies[id]; if (idx === -1 || !body) { - this._LOG.bodyBlockMissing(id, idx, !!body); + logic(this, 'bodyBlockMissing', { id: id, index: idx, hasBody: !!body }); return; } block.ids.splice(idx, 1); @@ -1579,8 +1587,10 @@ FolderStorage.prototype = { * server-id. */ _discardCachedBlockUsingDateAndID: function(type, date, id) { + var scope = logic.subscope(this, { type: type, date: date, id: id }); + var blockInfoList, loadedBlockInfoList, blockMap, dirtyMap; - this._LOG.discardFromBlock(type, date, id); + logic(scope, 'discardFromBlock'); if (type === 'header') { blockInfoList = this._headerBlockInfos; loadedBlockInfoList = this._loadedHeaderBlockInfos; @@ -1600,7 +1610,7 @@ FolderStorage.prototype = { // Asking to discard something that does not exist in a block is a // violated assumption. Log an error. 
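The FolderStorage changes above show the two scope mechanisms this patch leans on: logic.defineScope attaches default details (accountId, folderId) that ride along with every event logged against the instance, and logic.subscope layers on details shared by a group of related events. A condensed sketch of the same pattern with a made-up class and event names; only the logic calls reflect the API introduced here.

    // Made-up class; only the logic.defineScope / logic.subscope usage is real.
    function CacheManager(accountId) {
      // Every event logged against `this` carries { accountId } by default.
      logic.defineScope(this, 'CacheManager', { accountId: accountId });
    }

    CacheManager.prototype.evict = function(type, id) {
      // Related events share { type, id } without restating them at each call site.
      var scope = logic.subscope(this, { type: type, id: id });
      logic(scope, 'evict:begin');
      if (!this._lookup(type, id)) {   // _lookup is a placeholder helper
        logic(scope, 'evict:missing');
        return;
      }
      logic(scope, 'evict:end');
    };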
if (!info) { - this._LOG.badDiscardRequest(type, date, id); + logic(scope, 'badDiscardRequest'); return; } @@ -1611,7 +1621,7 @@ FolderStorage.prototype = { // Violated assumption if the block is dirty if (dirtyMap.hasOwnProperty(blockId)) { - this._LOG.badDiscardRequest(type, date, id); + logic(scope, 'badDiscardRequest'); return; } @@ -2222,7 +2232,7 @@ FolderStorage.prototype = { toCall(); } catch (ex) { - this._LOG.callbackErr(ex); + logic(this, 'callbackErr', { ex: ex }); } } }, @@ -2261,8 +2271,9 @@ FolderStorage.prototype = { var self = this; function onLoaded(block) { if (!block) - self._LOG.badBlockLoad(type, blockId); - self._LOG.loadBlock_end(type, blockId, block); + logic(self, 'badBlockLoad', { type: type, blockId: blockId }); + logic(self, 'loadBlock_end', + { type: type, blockId: blockId, block: block }); if (type === 'header') { self._headerBlocks[blockId] = block; self._loadedHeaderBlockInfos.push(blockInfo); @@ -2279,7 +2290,7 @@ FolderStorage.prototype = { listeners[i](block); } catch (ex) { - self._LOG.callbackErr(ex); + logic(self, 'callbackErr', { ex: ex }); } } @@ -2300,7 +2311,7 @@ FolderStorage.prototype = { } } - this._LOG.loadBlock_begin(type, blockId); + logic(this, 'loadBlock_begin', { type: type, blockId: blockId }); if (type === 'header') this._imapDb.loadHeaderBlock(this.folderId, blockId, onLoaded); else @@ -2309,7 +2320,8 @@ FolderStorage.prototype = { _deleteFromBlock: function ifs__deleteFromBlock(type, date, id, callback) { var blockInfoList, loadedBlockInfoList, blockMap, deleteFromBlock; - this._LOG.deleteFromBlock(type, date, id); + var scope = logic.subscope(this, { type: type, date: date, id: id }); + logic(scope, 'deleteFromBlock'); if (type === 'header') { blockInfoList = this._headerBlockInfos; loadedBlockInfoList = this._loadedHeaderBlockInfos; @@ -2329,7 +2341,7 @@ FolderStorage.prototype = { // If someone is asking for us to delete something, there should definitely // be a block that includes it! if (!info) { - this._LOG.badDeletionRequest(type, date, id); + log('badDeletionRequest'); return; } @@ -3453,10 +3465,12 @@ FolderStorage.prototype = { var iHeadBlockInfo = headerPair[0]; var headBlockInfo = headerPair[1]; + var scope = logic.subscope(this, { date: date, id: id }); + if (!headBlockInfo) { // The iteration request is somehow not current; log an error and return // an empty result set. - this._LOG.badIterationStart(date, id); + logic(scope, 'badIterationStart'); messageCallback([], false); return; } @@ -3475,7 +3489,7 @@ FolderStorage.prototype = { if (iHeader === null) { iHeader = headerBlock.ids.indexOf(id); if (iHeader === -1) { - self._LOG.badIterationStart(date, id); + logic(scope, 'badIterationStart'); toFill = 0; } iHeader--; @@ -3650,7 +3664,7 @@ FolderStorage.prototype = { * believe it to be safely empty. */ markSyncedToDawnOfTime: function() { - this._LOG.syncedToDawnOfTime(); + logic(this, 'syncedToDawnOfTime'); // We can just expand the first accuracy range structure to stretch to the // dawn of time and nuke the rest. @@ -3684,7 +3698,7 @@ FolderStorage.prototype = { // Otherwise, pop the range to get rid of the info. This is a defensive // programming thing; we do not expect this case to happen, so we log. 
else { - this._LOG.accuracyRangeSuspect(lastRange); + logic(this, 'accuracyRangeSuspect', { lastRange: lastRange }); aranges.pop(); } }, @@ -3830,12 +3844,12 @@ FolderStorage.prototype = { date, id); if (posInfo[1] === null) { - this._LOG.headerNotFound(); + logic(this, 'headerNotFound'); try { callback(null); } catch (ex) { - this._LOG.callbackErr(ex); + logic(this, 'callbackErr', { ex: ex }); } return; } @@ -3845,12 +3859,12 @@ FolderStorage.prototype = { var idx = headerBlock.ids.indexOf(id); var headerInfo = headerBlock.headers[idx] || null; if (!headerInfo) - self._LOG.headerNotFound(); + logic(self, 'headerNotFound'); try { callback(headerInfo); } catch (ex) { - self._LOG.callbackErr(ex); + logic(self, 'callbackErr', { ex: ex }); } }); return; @@ -3859,12 +3873,12 @@ FolderStorage.prototype = { idx = block.ids.indexOf(id), headerInfo = block.headers[idx] || null; if (!headerInfo) - this._LOG.headerNotFound(); + logic(this, 'headerNotFound'); try { callback(headerInfo); } catch (ex) { - this._LOG.callbackErr(ex); + logic(this, 'callbackErr', { ex: ex }); } }, @@ -3914,7 +3928,8 @@ FolderStorage.prototype = { this.folderMeta.unreadCount++; } - this._LOG.addMessageHeader(header.date, header.id, header.srvid); + logic(this, 'addMessageHeader', + { date: header.date, id: header.id, srvid: header.srvid }); this.headerCount += 1; @@ -3986,7 +4001,7 @@ FolderStorage.prototype = { slice._onAddingHeader(header); } catch (ex) { - this._LOG.callbackErr(ex); + logic(this, 'callbackErr', { ex: ex }); } } @@ -3994,7 +4009,7 @@ FolderStorage.prototype = { slice.onHeaderAdded(header, body, false, true); } catch (ex) { - this._LOG.callbackErr(ex); + logic(this, 'callbackErr', { ex: ex }); } } } @@ -4072,7 +4087,8 @@ FolderStorage.prototype = { self._dirty = true; self._dirtyHeaderBlocks[info.blockId] = block; - self._LOG.updateMessageHeader(header.date, header.id, header.srvid); + logic(self, 'updateMessageHeader', + { date: header.date, id: header.id, srvid: header.srvid }); if (self._slices.length > (self._curSyncSlice ? 1 : 0)) { for (var iSlice = 0; iSlice < self._slices.length; iSlice++) { @@ -4094,7 +4110,7 @@ FolderStorage.prototype = { slice.onHeaderModified(header, body); } catch (ex) { - this._LOG.callbackErr(ex); + logic(this, 'callbackErr', { ex: ex }); } } } @@ -4129,7 +4145,7 @@ FolderStorage.prototype = { var blockId = this._serverIdHeaderBlockMapping[srvid]; if (srvid === undefined) { - this._LOG.serverIdMappingMissing(srvid); + logic(this, 'serverIdMappingMissing', { srvid: srvid }); return; } @@ -4176,7 +4192,7 @@ FolderStorage.prototype = { var blockId = this._serverIdHeaderBlockMapping[srvid]; if (srvid === undefined) { - this._LOG.serverIdMappingMissing(srvid); + logic(this, 'serverIdMappingMissing', { srvid: srvid }); return false; } @@ -4276,7 +4292,7 @@ FolderStorage.prototype = { var blockId = this._serverIdHeaderBlockMapping[srvid]; if (srvid === undefined) { - this._LOG.serverIdMappingMissing(srvid); + logic(this, 'serverIdMappingMissing', { srvid: srvid }); return; } @@ -4310,7 +4326,11 @@ FolderStorage.prototype = { this, header, bodyInfo, callback)); return; } - this._LOG.addMessageBody(header.date, header.id, header.srvid, bodyInfo); + logic(this, 'addMessageBody', + { date: header.date, + id: header.id, + srvid: header.srvid, + bodyInfo: bodyInfo }); // crappy size estimates where we assume the world is ASCII and so a UTF-8 // encoding will take exactly 1 byte per character. 
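Several of the events above (addMessageBody in particular) now log whole header/body structures as details. What actually lands in an event's JSON form is bounded by the ObjectSimplifier defined earlier in logic.js; it is private to that module, so the following is only a worked illustration of its rules, with made-up values and non-default limits.

    // Worked illustration of ObjectSimplifier (private to logic.js); each
    // LogicEvent runs its details through an instance of it.
    var s = new ObjectSimplifier({ maxDepth: 1, maxStringLength: 20 });
    s.simplify({
      snippet: 'This snippet is much longer than twenty characters',
      tags: ['a', 'b', 'c'],
      header: { id: 5 }
    });
    // => {
    //      snippet: 'This snippet is much',   // strings cut at maxStringLength
    //      tags: '[Array length=3]',          // arrays past maxDepth are summarized
    //      header: '[object Object]'          // nested objects fall back to toString()
    //    }
    // Self-referencing structures short-circuit to '(cycle)' via the WeakSet.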
@@ -4475,12 +4495,12 @@ FolderStorage.prototype = { posInfo = this._findRangeObjIndexForDateAndID(this._bodyBlockInfos, date, id); if (posInfo[1] === null) { - this._LOG.bodyNotFound(); + logic(this, 'bodyNotFound'); try { callback(null); } catch (ex) { - this._LOG.callbackErr(ex); + logic(this, 'callbackErr', { ex: ex }); } return; } @@ -4489,12 +4509,12 @@ FolderStorage.prototype = { this._loadBlock('body', bodyBlockInfo, function(bodyBlock) { var bodyInfo = bodyBlock.bodies[id] || null; if (!bodyInfo) - self._LOG.bodyNotFound(); + logic(self, 'bodyNotFound'); try { callback(bodyInfo); } catch (ex) { - self._LOG.callbackErr(ex); + logic(self, 'callbackErr', { ex: ex }); } }); return; @@ -4502,12 +4522,12 @@ FolderStorage.prototype = { var block = this._bodyBlocks[bodyBlockInfo.blockId], bodyInfo = block.bodies[id] || null; if (!bodyInfo) - this._LOG.bodyNotFound(); + logic(this, 'bodyNotFound'); try { callback(bodyInfo); } catch (ex) { - this._LOG.callbackErr(ex); + logic(this, 'callbackErr', { ex: ex }); } }, @@ -4662,7 +4682,6 @@ FolderStorage.prototype = { this._slices[i].die(); } this.folderSyncer.shutdown(); - this._LOG.__die(); }, /** @@ -4676,84 +4695,5 @@ FolderStorage.prototype = { }, }; -var LOGFAB = exports.LOGFAB = $log.register(module, { - MailSlice: { - type: $log.QUERY, - events: { - headersAppended: {}, - headerAdded: { index: false }, - headerModified: { index: false }, - headerRemoved: { index: false }, - }, - TEST_ONLY_events: { - headersAppended: { headers: false }, - headerAdded: { header: false }, - headerModified: { header: false }, - headerRemoved: { header: false }, - }, - }, - FolderStorage: { - type: $log.DATABASE, - events: { - addMessageHeader: { date: false, id: false, srvid: false }, - addMessageBody: { date: false, id: false, srvid: false }, - - updateMessageHeader: { date: false, id: false, srvid: false }, - updateMessageBody: { date: false, id: false }, - - generatePersistenceInfo: {}, - - // For now, logging date and uid is useful because the general logging - // level will show us if we are trying to redundantly delete things. - // Also, date and uid are opaque identifiers with very little entropy - // on their own. (The danger is in correlation with known messages, - // but that is likely to be useful in the debugging situations where logs - // will be sufaced.) - deleteFromBlock: { type: false, date: false, id: false }, - - discardFromBlock: { type: false, date: false, id: false }, - - // This was an error but the test results viewer UI is not quite smart - // enough to understand the difference between expected errors and - // unexpected errors, so this is getting downgraded for now. - headerNotFound: {}, - bodyNotFound: {}, - - syncedToDawnOfTime: {}, - }, - TEST_ONLY_events: { - addMessageBody: { body: false }, - generatePersistenceInfo: { details: false } - }, - asyncJobs: { - loadBlock: { type: false, blockId: false }, - mutexedCall: { name: true }, - }, - TEST_ONLY_asyncJobs: { - loadBlock: { block: false }, - }, - errors: { - callbackErr: { ex: $log.EXCEPTION }, - - badBlockLoad: { type: false, blockId: false }, - - // Exposing date/uid at a general level is deemed okay because they are - // opaque identifiers and the most likely failure models involve the - // values being ridiculous (and therefore not legal). 
- badIterationStart: { date: false, id: false }, - badDeletionRequest: { type: false, date: false, id: false }, - badDiscardRequest: { type: false, date: false, id: false }, - bodyBlockMissing: { id: false, idx: false, dict: false }, - serverIdMappingMissing: { srvid: false }, - - accuracyRangeSuspect: { arange: false }, - - mutexedOpErr: { err: $log.EXCEPTION }, - - tooManyCallbacks: { name: false }, - mutexInvariantFail: { fireName: false, curName: false }, - } - }, -}); // end LOGFAB }); // end define diff --git a/apps/email/js/ext/mailuniverse.js b/apps/email/js/ext/mailuniverse.js index 8f70affa87ed..87d3dcb79d3e 100644 --- a/apps/email/js/ext/mailuniverse.js +++ b/apps/email/js/ext/mailuniverse.js @@ -4,9 +4,7 @@ /*global define, console, window, Blob */ define( [ - 'rdcommon/log', - 'rdcommon/logreaper', - 'slog', + 'logic', './a64', './date', './syncbase', @@ -19,9 +17,7 @@ define( 'exports' ], function( - $log, - $logreaper, - slog, + logic, $a64, $date, $syncbase, @@ -46,10 +42,10 @@ define( var MAX_MUTATIONS_FOR_UNDO = 10; /** - * When debug logging is enabled, how many second's worth of samples should - * we keep? + * When debug logging is enabled, how long should we store logs in the + * circular buffer? */ -var MAX_LOG_BACKLOG = 30; +var MAX_LOG_BACKLOG_MS = 30000; /** * Creates a method to add to MailUniverse that calls a method @@ -370,16 +366,59 @@ function MailUniverse(callAfterBigBang, online, testOptions) { this._boundQueueDeferredOps = this._queueDeferredOps.bind(this); this.config = null; - this._logReaper = null; - this._logBacklog = null; + this._logBacklog = []; - this._LOG = null; this._db = new $maildb.MailDB(testOptions); this._cronSync = null; var self = this; this._db.getConfig(function(configObj, accountInfos, lazyCarryover) { function setupLogging(config) { - if (self.config.debugLogging) { + + // To avoid being overly verbose, and to avoid revealing private + // information in logs (unless we've explicitly enabled it), we censor + // event details when in secretDebugMode and for console logs. + function censorLogs() { + logic.isCensored = true; + + function censorValue(value) { + if (value && (value.suid || value.srvid)) { + return { + date: value.date, + suid: value.suid, + srvid: value.srvid + }; + } else if (value && typeof value === 'object') { + return value.toString(); + } else { + return value; + } + } + + // We: + // - Remove properties starting with an underscore. + // - Process one level of Arrays. + // - Allow primitives to pass through. + // - Objects get stringified unless they are a mail header, + // in which case we return just the date/suid/srvid. + logic.on('censorEvent', function(e) { + if (logic.isPlainObject(e.details)) { + for (var key in e.details) { + var value = e.details[key]; + if (key[0] === '_') { + delete e.details[key]; + } else if (Array.isArray(value)) { + // Include one level of arrays. + e.details[key] = value.map(censorValue); + } else { + e.details[key] = censorValue(value); + } + } + } + }); + } + + if (self.config.debugLogging) { + if (self.config.debugLogging === 'realtime-dangerous') { console.warn('!!!'); console.warn('!!! REALTIME USER-DATA ENTRAINING LOGGING ENABLED !!!'); @@ -391,14 +430,12 @@ function MailUniverse(callAfterBigBang, online, testOptions) { console.warn('OF EMAILS, maybe some PASSWORDS. This was turned on'); console.warn('via the secret debug mode UI. 
Use it to turn us off:'); console.warn('https://wiki.mozilla.org/Gaia/Email/SecretDebugMode'); - $log.DEBUG_realtimeLogEverything(dump); - slog.setSensitiveDataLoggingEnabled(true); + logic.realtimeLogEverything = true; } else if (self.config.debugLogging !== 'dangerous') { console.warn('GENERAL LOGGING ENABLED!'); console.warn('(CIRCULAR EVENT LOGGING WITH NON-SENSITIVE DATA)'); - $log.enableGeneralLogging(); - slog.setSensitiveDataLoggingEnabled(false); + censorLogs(); } else { console.warn('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'); @@ -411,9 +448,50 @@ function MailUniverse(callAfterBigBang, online, testOptions) { console.warn('If you forget how to turn us off, see:'); console.warn('https://wiki.mozilla.org/Gaia/Email/SecretDebugMode'); console.warn('...................................................'); - $log.DEBUG_markAllFabsUnderTest(); - slog.setSensitiveDataLoggingEnabled(true); } + } else if (!logic.underTest) { + censorLogs(); + + var NAMESPACES_TO_ALWAYS_LOG = [ + 'BrowserBox', + 'SmtpClient', + 'ActivesyncConfigurator', + 'ImapFolderSync', + 'Pop3Prober', + 'Autoconfigurator', + 'DisasterRecovery', + 'ImapClient', + 'ImapJobDriver', + 'Oauth', + 'Pop3FolderSyncer', + 'SmtpProber' + ]; + + // If we don't have debug logging enabled, bail out early by + // short-circuiting any events that wouldn't be logged anyway. + logic.on('preprocessEvent', function(e) { + var eventShouldBeLogged = ( + NAMESPACES_TO_ALWAYS_LOG.indexOf(e.namespace) !== -1 || + // The smtp portion uses a namespace of 'Account', but we want it. + (e.namespace === 'Account' && + e.details && e.details.accountType === 'smtp') || + // We also want these. + e.type === 'allOpsCompleted' || + e.type === 'mailslice:mutex-released' + ); + + if (!eventShouldBeLogged) { + e.preventDefault(); + } + }); + + // Then, since only the logs we care about make it this far, we can log + // all remaining events here. + logic.on('event', function(e) { + var obj = e.toJSON(); + dump('[' + obj.namespace + '] ' + obj.type + + ' ' + JSON.stringify(obj.details) + '\n'); + }); } } @@ -423,11 +501,14 @@ function MailUniverse(callAfterBigBang, online, testOptions) { if (configObj) { self.config = configObj; setupLogging(); - self._LOG = LOGFAB.MailUniverse(self, null, null); + + logic.defineScope(self, 'MailUniverse'); + if (self.config.debugLogging) self._enableCircularLogging(); - self._LOG.configLoaded(self.config, accountInfos); + logic(self, 'configLoaded', + { config: self.config, accountInfos: accountInfos }); function done() { doneCount += 1; @@ -459,23 +540,28 @@ function MailUniverse(callAfterBigBang, online, testOptions) { debugLogging: lazyCarryover ? lazyCarryover.config.debugLogging : false }; setupLogging(); - self._LOG = LOGFAB.MailUniverse(self, null, null); + + logic.defineScope(self, 'MailUniverse'); + if (self.config.debugLogging) self._enableCircularLogging(); self._db.saveConfig(self.config); // - Try to re-create any accounts using old account infos. 
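The preprocessEvent hook above is how this patch keeps logging cheap when debug logging is off: anything outside a small whitelist is cancelled before it is logged. The same pattern in isolation, reduced to two namespaces from the whitelist above plus the allOpsCompleted event:

    // Drop everything we do not care about before it gets logged.
    var INTERESTING_NAMESPACES = ['ImapClient', 'SmtpClient'];
    logic.on('preprocessEvent', function(e) {
      var keep = INTERESTING_NAMESPACES.indexOf(e.namespace) !== -1 ||
                 e.type === 'allOpsCompleted';
      if (!keep) {
        e.preventDefault(); // short-circuits the event entirely
      }
    });

    // Only the surviving events reach 'event' listeners:
    logic.on('event', function(e) {
      var obj = e.toJSON(); // { namespace, type, details, time, id }
      dump('[' + obj.namespace + '] ' + obj.type + '\n');
    });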
if (lazyCarryover) { - this._LOG.configMigrating_begin(lazyCarryover); + logic(self, 'configMigrating_begin', { lazyCarryover: lazyCarryover }); var waitingCount = lazyCarryover.accountInfos.length; var oldVersion = lazyCarryover.oldVersion; var accountRecreated = function(accountInfo, err) { - this._LOG.recreateAccount_end(accountInfo.type, accountInfo.id, err); + logic(self, 'recreateAccount_end', + { type: accountInfo.type, + id: accountInfo.id, + error: err }); // We don't care how they turn out, just that they get a chance // to run to completion before we call our bootstrap complete. if (--waitingCount === 0) { - this._LOG.configMigrating_end(null); + logic(self, 'configMigrating_end'); this._initFromConfig(); callAfterBigBang(); } @@ -483,8 +569,10 @@ function MailUniverse(callAfterBigBang, online, testOptions) { for (i = 0; i < lazyCarryover.accountInfos.length; i++) { var accountInfo = lazyCarryover.accountInfos[i]; - this._LOG.recreateAccount_begin(accountInfo.type, accountInfo.id, - null); + logic(this, 'recreateAccount_begin', + { type: accountInfo.type, + id: accountInfo.id, + error: null }); $acctcommon.recreateAccount( self, oldVersion, accountInfo, accountRecreated.bind(this, accountInfo)); @@ -493,7 +581,7 @@ function MailUniverse(callAfterBigBang, online, testOptions) { return; } else { - self._LOG.configCreated(self.config); + logic(self, 'configCreated', { config: self.config }); } } self._initFromConfig(); @@ -505,28 +593,22 @@ MailUniverse.prototype = { ////////////////////////////////////////////////////////////////////////////// // Logging _enableCircularLogging: function() { - this._logReaper = new $logreaper.LogReaper(this._LOG); this._logBacklog = []; - window.setInterval( - function() { - var logTimeSlice = this._logReaper.reapHierLogTimeSlice(); - // if nothing interesting happened, this could be empty, yos. - if (logTimeSlice.logFrag) { - this._logBacklog.push(logTimeSlice); - // throw something away if we've got too much stuff already - if (this._logBacklog.length > MAX_LOG_BACKLOG) - this._logBacklog.shift(); - } - }.bind(this), - 1000); + logic.on('event', (event) => { + this._logBacklog.push(event.toJSON()); + // Remove any events we've kept for longer than MAX_LOG_BACKLOG_MS. + var oldestTimeAllowed = Date.now() - MAX_LOG_BACKLOG_MS; + while (this._logBacklog.length && + this._logBacklog[0].time < oldestTimeAllowed) { + this._logBacklog.shift(); + } + }); }, - createLogBacklogRep: function(id) { + createLogBacklogRep: function() { return { - type: 'backlog', - id: id, - schema: $log.provideSchemaForAllKnownFabs(), - backlog: this._logBacklog, + type: 'logic', + events: this._logBacklog }; }, @@ -561,7 +643,7 @@ MailUniverse.prototype = { * Perform initial initialization based on our configuration. */ _initFromConfig: function() { - this._cronSync = new $cronsync.CronSync(this, this._LOG); + this._cronSync = new $cronsync.CronSync(this); }, /** @@ -740,7 +822,7 @@ MailUniverse.prototype = { }, learnAboutAccount: function(details) { - var configurator = new $acctcommon.Autoconfigurator(this._LOG); + var configurator = new $acctcommon.Autoconfigurator(); return configurator.learnAboutAccount(details); }, @@ -762,12 +844,12 @@ MailUniverse.prototype = { if (domainInfo) { $acctcommon.tryToManuallyCreateAccount(this, userDetails, domainInfo, - callback, this._LOG); + callback); } else { // XXX: store configurator on this object so we can abort the connections // if necessary. 
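_enableCircularLogging above replaces the old fixed-count backlog with a time-window cap: events older than MAX_LOG_BACKLOG_MS are dropped from the front of the array each time a new event arrives. Restated as a standalone helper (the helper name and test values are made up; the pruning logic matches the patch):

    // Standalone restatement of the age-based pruning in _enableCircularLogging.
    function pruneBacklog(backlog, nowMS, maxAgeMS) {
      var oldestTimeAllowed = nowMS - maxAgeMS;
      // Events are appended in time order, so the oldest is always at index 0.
      while (backlog.length && backlog[0].time < oldestTimeAllowed) {
        backlog.shift();
      }
      return backlog;
    }

    // With the 30-second window (MAX_LOG_BACKLOG_MS = 30000):
    //   pruneBacklog([{ time: 0 }, { time: 25000 }], 31000, 30000)
    //     => [{ time: 25000 }]   // the event from t=0 has aged out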
- var configurator = new $acctcommon.Autoconfigurator(this._LOG); + var configurator = new $acctcommon.Autoconfigurator(); configurator.tryToCreateAccount(this, userDetails, callback); } }, @@ -831,11 +913,11 @@ MailUniverse.prototype = { receiveProtoConn, callback) { $acctcommon.accountTypeToClass(accountDef.type, function (constructor) { if (!constructor) { - this._LOG.badAccountType(accountDef.type); + logic(this, 'badAccountType', { type: accountDef.type }); return; } var account = new constructor(this, accountDef, folderInfo, this._db, - receiveProtoConn, this._LOG); + receiveProtoConn); this.accounts.push(account); this._accountsById[account.id] = account; @@ -901,7 +983,8 @@ MailUniverse.prototype = { if (account.problems.indexOf(problem) !== -1) { suppress = true; } - this._LOG.reportProblem(problem, suppress, account.id); + logic(this, 'reportProblem', + { problem: problem, suppress: suppress, accountId: account.id }); if (suppress) { return; } @@ -935,7 +1018,7 @@ MailUniverse.prototype = { }, clearAccountProblems: function(account) { - this._LOG.clearAccountProblems(account.id); + logic(this, 'clearAccountProblems', { accountId: account.id }); // TODO: this would be a great time to have any slices that had stalled // syncs do whatever it takes to make them happen again. account.enabled = true; @@ -998,14 +1081,14 @@ MailUniverse.prototype = { var curTrans = null; var latch = $allback.latch(); - this._LOG.saveUniverseState_begin(); + logic(this, 'saveUniverseState_begin'); for (var iAcct = 0; iAcct < this.accounts.length; iAcct++) { var account = this.accounts[iAcct]; curTrans = account.saveAccountState(curTrans, latch.defer(account.id), 'saveUniverse'); } latch.then(function() { - this._LOG.saveUniverseState_end(); + logic(this, 'saveUniverseState_end'); if (callback) { callback(); }; @@ -1040,8 +1123,6 @@ MailUniverse.prototype = { this._cronSync.shutdown(); } this._db.close(); - if (this._LOG) - this._LOG.__die(); if (!this.accounts.length) callback(); @@ -1199,14 +1280,16 @@ MailUniverse.prototype = { // type. case 'defer': if (++op.tryCount < $syncbase.MAX_OP_TRY_COUNT) { - this._LOG.opDeferred(op.type, op.longtermId); + logic(this, 'opDeferred', { type: op.type, + longtermId: op.longtermId }); this._deferOp(account, op); removeFromServerQueue = true; break; } // fall-through to an error default: - this._LOG.opGaveUp(op.type, op.longtermId); + logic(this, 'opGaveUp', { type: op.type, + longtermId: op.longtermId }); op.lifecycle = 'moot'; op.localStatus = 'unknown'; op.serverStatus = 'moot'; @@ -1265,7 +1348,20 @@ MailUniverse.prototype = { console.log('runOp_end(' + wasMode + ': ' + JSON.stringify(op).substring(0, 160) + ')\n'); - account._LOG.runOp_end(wasMode, op.type, err, op); + logic(account, 'runOp_end', + { mode: wasMode, + type: op.type, + error: err, + op: op }); + + // Complete the asynchronous log event pertaining to 'runOp'. + if (op._logicAsyncEvent) { + if (err) { + op._logicAsyncEvent.reject(err); + } else { + op._logicAsyncEvent.resolve(); + } + } var callback; if (completeOp) { @@ -1352,8 +1448,11 @@ MailUniverse.prototype = { serverQueue = queues.server, localQueue = queues.local; + var scope = logic.subscope(this, { type: op.type, + longtermId: op.longtermId }); + if (serverQueue[0] !== op) - this._LOG.opInvariantFailure(); + logic(scope, 'opInvariantFailure'); // Should we attempt to retry (but fail if tryCount is reached)? 
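The runOp_end block above completes op._logicAsyncEvent if one is present. Where that handle gets created is not part of this excerpt; the natural pairing, given the logic.startAsync helper defined in logic.js, would look something like the sketch below (accountScope is a placeholder for whatever scope the op-running code owns).

    // Presumed start side (not shown in this excerpt): stash a startAsync handle
    // on the op so runOp_end can complete it later.
    op._logicAsyncEvent = logic.startAsync(accountScope, 'runOp',
                                           { type: op.type,
                                             longtermId: op.longtermId });

    // Completion side, as in runOp_end above:
    if (op._logicAsyncEvent) {
      if (err) {
        op._logicAsyncEvent.reject(err);
      } else {
        op._logicAsyncEvent.resolve();
      }
    }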
var maybeRetry = false; @@ -1369,7 +1468,7 @@ MailUniverse.prototype = { // Defer the operation if we still want to do the thing, but skip // deferring if we are now trying to undo the thing. if (op.serverStatus === 'doing' && op.lifecycle === 'do') { - this._LOG.opDeferred(op.type, op.longtermId); + logic(scope, 'opDeferred'); this._deferOp(account, op); } // remove the op from the queue, but don't mark it completed @@ -1389,13 +1488,13 @@ MailUniverse.prototype = { maybeRetry = true; break; case 'failure-give-up': - this._LOG.opGaveUp(op.type, op.longtermId); + logic(scope, 'opGaveUp'); // we complete the op, but the error flag is propagated op.lifecycle = 'moot'; op.serverStatus = 'moot'; break; case 'moot': - this._LOG.opMooted(op.type, op.longtermId); + logic(scope, 'opMooted'); // we complete the op, but the error flag is propagated op.lifecycle = 'moot'; op.serverStatus = 'moot'; @@ -1427,7 +1526,7 @@ MailUniverse.prototype = { break; // this is the same thing as defer. case 'bailed': - this._LOG.opDeferred(op.type, op.longtermId); + logic(scope, 'opDeferred'); this._deferOp(account, op); completeOp = false; break; @@ -1459,7 +1558,7 @@ MailUniverse.prototype = { consumeOp = false; } else { - this._LOG.opTryLimitReached(op.type, op.longtermId); + logic(scope, 'opTryLimitReached'); // we complete the op, but the error flag is propagated op.lifecycle = 'moot'; op.serverStatus = 'moot'; @@ -1471,8 +1570,19 @@ MailUniverse.prototype = { console.log('runOp_end(' + wasMode + ': ' + JSON.stringify(op).substring(0, 160) + ')\n'); - account._LOG.runOp_end(wasMode, op.type, err, op); - + logic(account, 'runOp_end', { mode: wasMode, + type: op.type, + error: err, + op: op }); + + // Complete the asynchronous log event pertaining to 'runOp'. + if (op._logicAsyncEvent) { + if (err) { + op._logicAsyncEvent.reject(err); + } else { + op._logicAsyncEvent.resolve(); + } + } // Some completeOp callbacks want to wait for account // save but they are triggered before save is attempted, @@ -1540,7 +1650,7 @@ MailUniverse.prototype = { } catch(ex) { console.log(ex.message, ex.stack); - this._LOG.opCallbackErr(lastOp.type); + logic(this, 'opCallbackErr', { type: lastOp.type }); } } @@ -1566,7 +1676,8 @@ MailUniverse.prototype = { this._opCompletionListenersByAccount[account.id](account); this._opCompletionListenersByAccount[account.id] = null; } - slog.log('allOpsCompleted', { account: account.id }); + logic(this, 'allOpsCompleted', { accountId: account.id }); + // - Tell the account so it can clean-up its connections, etc. 
// (We do this after notifying listeners for the connection cleanup case @@ -2318,37 +2429,4 @@ MailUniverse.prototype = { ////////////////////////////////////////////////////////////////////////////// }; -var LOGFAB = exports.LOGFAB = $log.register($module, { - MailUniverse: { - type: $log.ACCOUNT, - events: { - configCreated: {}, - configLoaded: {}, - createAccount: { type: true, id: false }, - reportProblem: { type: true, suppressed: true, id: false }, - clearAccountProblems: { id: false }, - opDeferred: { type: true, id: false }, - opTryLimitReached: { type: true, id: false }, - opGaveUp: { type: true, id: false }, - opMooted: { type: true, id: false }, - }, - TEST_ONLY_events: { - configCreated: { config: false }, - configMigrating: { lazyCarryover: false }, - configLoaded: { config: false, accounts: false }, - createAccount: { name: false }, - }, - asyncJobs: { - configMigrating: {}, - recreateAccount: { type: true, id: false, err: false }, - saveUniverseState: {} - }, - errors: { - badAccountType: { type: true }, - opCallbackErr: { type: false }, - opInvariantFailure: {}, - }, - }, -}); - }); // end define diff --git a/apps/email/js/ext/oauth.js b/apps/email/js/ext/oauth.js index 36877d8345af..4e630bcff148 100644 --- a/apps/email/js/ext/oauth.js +++ b/apps/email/js/ext/oauth.js @@ -2,7 +2,7 @@ define(function(require, exports) { var errorutils = require('./errorutils'); var syncbase = require('./syncbase'); - var slog = require('./slog'); + var logic = require('logic'); var date = require('./date'); /** @@ -21,6 +21,8 @@ define(function(require, exports) { // Extra timeout padding for oauth tokens. var TIMEOUT_MS = 30 * 1000; + var scope = logic.scope('Oauth'); + /** * Decides if a renew may be feasible to do. Does not allow renew within a * time window. This kind of renew is only used as a last ditch effort to get @@ -83,7 +85,7 @@ define(function(require, exports) { oauth2.accessToken = newTokenData.accessToken; oauth2.expireTimeMS = newTokenData.expireTimeMS; - slog.log('oauth:credentials-changed', { + logic(scope, 'credentials-changed', { _accessToken: oauth2.accessToken, expireTimeMS: oauth2.expireTimeMS }); @@ -93,7 +95,7 @@ define(function(require, exports) { } }); } else { - slog.log('oauth:credentials-ok'); + logic(scope, 'credentials-ok'); // Not OAUTH; everything is fine. 
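The oauth events above deliberately log the access token under an underscore-prefixed key (_accessToken). Combined with the censorEvent handler installed in mailuniverse.js earlier in this patch, such keys are stripped from event details unless one of the dangerous logging modes is explicitly enabled. An illustration with fake values, using the module-level Oauth scope defined above:

    // Fake values; only the underscore convention and the event shape are real.
    logic(scope, 'credentials-changed', {
      _accessToken: 'ya29.fake-token',  // underscore => removed by the censor hook
      expireTimeMS: 1700000000000       // non-sensitive, kept
    });

    // With censorLogs() active (general debug logging or normal operation), the
    // 'censorEvent' handler deletes keys starting with '_', so the logged details
    // come out as just:
    //   { expireTimeMS: 1700000000000 }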
return Promise.resolve(false); } @@ -112,10 +114,10 @@ define(function(require, exports) { * failure: {String} normalized errorString */ function renewAccessToken(oauthInfo) { - slog.log('oauth:renewing-access-token'); + logic(scope, 'renewing-access-token'); return new Promise(function(resolve, reject) { oauthInfo._transientLastRenew = date.PERFNOW(); - var xhr = slog.interceptable('oauth:renew-xhr', function() { + var xhr = logic.interceptable('oauth:renew-xhr', function() { return new XMLHttpRequest({ mozSystem: true }); }); xhr.open('POST', oauthInfo.tokenEndpoint, true); @@ -141,15 +143,15 @@ define(function(require, exports) { } catch (ex) { } - slog.error('oauth:xhr-fail', - { tokenEndpoint: oauthInfo.tokenEndpoint, - status: xhr.status, errResp: errResp }); + logic(scope, 'xhr-fail', + { tokenEndpoint: oauthInfo.tokenEndpoint, + status: xhr.status, errResp: errResp }); reject('needs-oauth-reauth'); } else { try { var data = JSON.parse(xhr.responseText); if (data && data.access_token) { - slog.log('oauth:got-access-token', { + logic(scope, 'got-access-token', { _accessToken: data.access_token }); // OAUTH returns an expire time as "seconds from now"; @@ -164,13 +166,13 @@ define(function(require, exports) { expireTimeMS: expireTimeMS }); } else { - slog.error('oauth:no-access-token', { + logic(scope, 'no-access-token', { data: xhr.responseText }); reject('needs-oauth-reauth'); } } catch(e) { - slog.error('oauth:bad-json', { + logic(scope, 'bad-json', { error: e, data: xhr.responseText }); diff --git a/apps/email/js/ext/pop3/account.js b/apps/email/js/ext/pop3/account.js index 64da29c476db..9c4c5f718b0a 100644 --- a/apps/email/js/ext/pop3/account.js +++ b/apps/email/js/ext/pop3/account.js @@ -1,5 +1,5 @@ define([ - 'rdcommon/log', + 'logic', '../errbackoff', '../composite/incoming', './sync', @@ -11,7 +11,7 @@ define([ 'require', 'exports'], function( - log, + logic, errbackoff, incoming, pop3sync, @@ -32,9 +32,10 @@ var CompositeIncomingAccount = incoming.CompositeIncomingAccount; * CompositeIncomingAccount. */ function Pop3Account(universe, compositeAccount, accountId, credentials, - connInfo, folderInfos, dbConn, _parentLog, - existingProtoConn) { - this._LOG = LOGFAB.Pop3Account(this, _parentLog, accountId); + connInfo, folderInfos, dbConn, existingProtoConn) { + logic.defineScope(this, 'Account', { accountId: accountId, + accountType: 'pop3' }); + CompositeIncomingAccount.apply( this, [pop3sync.Pop3FolderSyncer].concat(Array.slice(arguments))); @@ -43,8 +44,7 @@ function Pop3Account(universe, compositeAccount, accountId, credentials, // to access a mailbox at a given time, so there's no connection pool. this._conn = null; this._pendingConnectionRequests = []; - this._backoffEndpoint = - errbackoff.createEndpoint('pop3:' + this.id, this, this._LOG); + this._backoffEndpoint = errbackoff.createEndpoint('pop3:' + this.id, this); // If we have an existing connection from setting up the account, we // can reuse that during the first sync. @@ -59,7 +59,7 @@ function Pop3Account(universe, compositeAccount, accountId, credentials, this.ensureEssentialOfflineFolders(); this._jobDriver = new pop3jobs.Pop3JobDriver( - this, this._folderInfos.$mutationState, this._LOG); + this, this._folderInfos.$mutationState); } exports.Account = exports.Pop3Account = Pop3Account; Pop3Account.prototype = Object.create(CompositeIncomingAccount.prototype); @@ -133,7 +133,7 @@ var properties = { this._conn = true; // Dynamically load the probe/pop3 code to speed up startup. 
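logic.interceptable above takes over the role of slog.interceptable for the token-renewal XHR: in production it simply invokes the factory, while a test can arrange for one interception to return a mock instead. The old slog LogChecker exposed interceptOnce for the test side (see the deleted slog.js later in this patch); the equivalent hook in the new logic-based test helpers is not shown in this excerpt, so the test half below is only a guess at its shape, and FakeXHR is hypothetical.

    // Production side, as used in renewAccessToken above:
    var xhr = logic.interceptable('oauth:renew-xhr', function() {
      return new XMLHttpRequest({ mozSystem: true });
    });

    // Test side (shape guessed from the old slog API; names hypothetical):
    logChecker.interceptOnce('oauth:renew-xhr', function() {
      return new FakeXHR({ status: 200,
                           responseText: '{"access_token":"fake"}' });
    });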
require(['./pop3', './probe'], function(pop3, pop3probe) { - this._LOG.createConnection(whyLabel); + logic(this, 'createConnection', { label: whyLabel }); var opts = { host: this._connInfo.hostname, port: this._connInfo.port, @@ -144,7 +144,7 @@ var properties = { username: this._credentials.username, password: this._credentials.password, }; - if (this._LOG) opts._logParent = this._LOG; + var conn = this._conn = new pop3.Pop3Client(opts, function(err) { if (err) { // Failed to get the connection: @@ -213,7 +213,7 @@ var properties = { throw new Error("No such folder: " + folderId); } var folderMeta = this._folderInfos[folderId].$meta; - self._LOG.deleteFolder(folderMeta.path); + logic(self, 'deleteFolder', { path: folderMeta.path }); self._forgetFolder(folderId); callback && callback(null, folderMeta); }, @@ -229,7 +229,6 @@ var properties = { if (this._conn && this._conn.close) { this._conn.close(); } - this._LOG.__die(); callback && callback(); }, @@ -248,9 +247,9 @@ var properties = { } this._conn = null; } - this._LOG.checkAccount_begin(null); + logic(this, 'checkAccount_begin'); this.withConnection(function(err) { - this._LOG.checkAccount_end(err); + logic(this, 'checkAccount_end', { error: err }); callback(err); }.bind(this), 'checkAccount'); }, @@ -307,11 +306,5 @@ for (var k in properties) { Object.getOwnPropertyDescriptor(properties, k)); } -// Share the log configuration with CompositeIncomingAccount, since we -// desire general parity between IMAP and POP3 for simplicity. -var LOGFAB = exports.LOGFAB = log.register(module, { - Pop3Account: incoming.LOGFAB_DEFINITION.CompositeIncomingAccount -}); - }); // end define diff --git a/apps/email/js/ext/pop3/jobs.js b/apps/email/js/ext/pop3/jobs.js index 70546f0ad284..c9a435c25324 100644 --- a/apps/email/js/ext/pop3/jobs.js +++ b/apps/email/js/ext/pop3/jobs.js @@ -1,6 +1,6 @@ -define(['module', 'exports', 'rdcommon/log', '../allback', 'mix', +define(['module', 'exports', 'logic', '../allback', 'mix', '../jobmixins', '../drafts/jobs', './pop3'], - function(module, exports, log, allback, mix, + function(module, exports, logic, allback, mix, jobmixins, draftsJobs, pop3) { /** @@ -11,13 +11,13 @@ define(['module', 'exports', 'rdcommon/log', '../allback', 'mix', * execute local_do_move, but not do_move. It is assumed that unit * tests will ensure we've implemented all required jobs. */ -function Pop3JobDriver(account, state, _parentLog) { - this._LOG = LOGFAB.Pop3JobDriver(this, _parentLog, account.id); - +function Pop3JobDriver(account, state) { this.account = account; this.resilientServerIds = true; // once assigned, the server never changes IDs this._heldMutexReleasers = []; + logic.defineScope(this, 'Pop3JobDriver', { accountId: account.id }); + // For tracking state as used in jobmixins: this._stateDelta = {}; this._state = state; @@ -49,7 +49,7 @@ Pop3JobDriver.prototype = { // The folderSyncer is like IMAP/ActiveSync's folderConn. 
callback(storage.folderSyncer, storage); } catch (ex) { - this._LOG.callbackErr(ex); + logic(this, 'callbackErr', { ex: ex }); } }.bind(this)); }, @@ -185,24 +185,4 @@ Pop3JobDriver.prototype = { mix(Pop3JobDriver.prototype, draftsJobs.draftsMixins); - -var LOGFAB = exports.LOGFAB = log.register(module, { - Pop3JobDriver: { - type: log.DAEMON, - events: { - savedAttachment: { storage: true, mimeType: true, size: true }, - saveFailure: { storage: false, mimeType: false, error: false }, - }, - TEST_ONLY_events: { - saveFailure: { filename: false }, - }, - asyncJobs: { - acquireConnWithoutFolder: { label: false }, - }, - errors: { - callbackErr: { ex: log.EXCEPTION }, - }, - }, -}); - }); // end define diff --git a/apps/email/js/ext/pop3/pop3.js b/apps/email/js/ext/pop3/pop3.js index 0bfd2660675d..10c42513d208 100644 --- a/apps/email/js/ext/pop3/pop3.js +++ b/apps/email/js/ext/pop3/pop3.js @@ -1,9 +1,9 @@ -define(['module', 'exports', 'rdcommon/log', 'tcp-socket', 'md5', +define(['module', 'exports', 'logic', 'tcp-socket', 'md5', './transport', 'mimeparser', 'imap/imapchew', 'syncbase', 'date', 'mimefuncs', './mime_mapper', 'allback'], -function(module, exports, log, tcpSocket, md5, +function(module, exports, logic, tcpSocket, md5, transport, MimeParser, imapchew, syncbase, dateMod, mimefuncs, mimeMapper, allback) { @@ -94,8 +94,7 @@ function(module, exports, log, tcpSocket, md5, options.debug = options.debug || false; options.authMethods = ['apop', 'sasl', 'user-pass']; - this._LOG = options._logParent ? - LOGFAB.Pop3Client(this, options._logParent, Date.now() % 1000) : null; + logic.defineScope(this, 'Pop3Client'); if (options.preferredAuthMethod) { // if we prefer a certain auth method, try that first. @@ -884,8 +883,7 @@ function(module, exports, log, tcpSocket, md5, bytesFetched: content.length, text: content }; - imapchew.updateMessageWithFetch( - rep.header, rep.bodyInfo, req, res, this._LOG); + imapchew.updateMessageWithFetch(rep.header, rep.bodyInfo, req, res); } } @@ -971,25 +969,4 @@ function(module, exports, log, tcpSocket, md5, return s; } -var LOGFAB = exports.LOGFAB = log.register(module, { - Pop3Client: { - type: log.CONNECTION, - subtype: log.CLIENT, - events: { - }, - TEST_ONLY_events: { - }, - errors: { - htmlParseError: { ex: log.EXCEPTION }, - htmlSnippetError: { ex: log.EXCEPTION }, - textChewError: { ex: log.EXCEPTION }, - textSnippetError: { ex: log.EXCEPTION }, - }, - asyncJobs: { - }, - }, -}); // end LOGFAB - -Pop3Client._LOG = LOGFAB.Pop3Client(); - }); // end define diff --git a/apps/email/js/ext/pop3/probe.js b/apps/email/js/ext/pop3/probe.js index 9aafc05ca89f..8cc87e88f6af 100644 --- a/apps/email/js/ext/pop3/probe.js +++ b/apps/email/js/ext/pop3/probe.js @@ -1,10 +1,13 @@ define([ './pop3', '../syncbase', - 'slog', + 'logic', '../errorutils', 'exports' -], function(pop3, syncbase, slog, errorutils, exports) { +], function(pop3, syncbase, logic, errorutils, exports) { + +var scope = logic.scope('Pop3Prober'); + /** * Validate connection information for an account and verify that the @@ -24,9 +27,7 @@ exports.probeAccount = function(credentials, connInfo) { connTimeout: syncbase.CONNECT_TIMEOUT_MS }; - slog.info('probe:pop3:connecting', { - connInfo: connInfo - }); + logic(scope, 'connecting', { connInfo: connInfo }); var resolve, reject; var promise = new Promise(function(_resolve, _reject) { @@ -50,7 +51,7 @@ exports.probeAccount = function(credentials, connInfo) { resolve(conn); } else if (err.err) { // Uh, this server must not support TOP. That sucks. 
- slog.error('probe:pop3:server-not-great', { why: 'no TOP' }); + logic(scope, 'server-not-great', { why: 'no TOP' }); reject('pop-server-not-great'); } else { // if the error was socket-level or something, let it pass @@ -66,7 +67,7 @@ exports.probeAccount = function(credentials, connInfo) { resolve(conn); } else if (err.err) { // They must not support UIDL. Not good enough. - slog.error('probe:pop3:server-not-great', { why: 'no UIDL' }); + logic(scope, 'server-not-great', { why: 'no UIDL' }); reject('pop-server-not-great'); } else { // if the error was socket-level or something, let it pass @@ -80,7 +81,7 @@ exports.probeAccount = function(credentials, connInfo) { return promise .then(function(conn) { - slog.info('probe:pop3:success'); + logic(scope, 'success'); return { conn: conn, timezoneOffset: null @@ -88,7 +89,7 @@ exports.probeAccount = function(credentials, connInfo) { }) .catch(function(err) { err = normalizePop3Error(err); - slog.info('probe:pop3:error', { error: err }); + logic(scope, 'error', { error: err }); if (conn) { conn.close(); } @@ -150,11 +151,7 @@ var normalizePop3Error = exports.normalizePop3Error = function(err) { errorutils.analyzeException(err) || 'unknown'); - slog.log('probe:pop3:normalized-error', { - error: err, - reportAs: reportAs - }); - + logic(scope, 'normalized-error', { error: err, reportAs: reportAs }); return reportAs; }; diff --git a/apps/email/js/ext/pop3/sync.js b/apps/email/js/ext/pop3/sync.js index 8fcb05b53efd..031320560faf 100644 --- a/apps/email/js/ext/pop3/sync.js +++ b/apps/email/js/ext/pop3/sync.js @@ -1,7 +1,7 @@ -define(['rdcommon/log', 'slog', '../util', 'module', 'require', 'exports', +define(['logic', '../util', 'module', 'require', 'exports', '../mailchew', '../syncbase', '../date', '../jobmixins', '../allback', './pop3'], -function(log, slog, util, module, require, exports, +function(logic, util, module, require, exports, mailchew, sync, date, jobmixins, allback, pop3) { @@ -17,10 +17,15 @@ var PASTWARDS = 1; * IMAP/ActiveSync, but we fast-path out of sync operations if the * folder we're looking at isn't the inbox. */ -function Pop3FolderSyncer(account, storage, _parentLog) { - this._LOG = LOGFAB.Pop3FolderSyncer(this, _parentLog, storage.folderId); +function Pop3FolderSyncer(account, storage) { this.account = account; this.storage = storage; + + logic.defineScope(this, 'Pop3FolderSyncer', { + accountId: account.id, + folderId: storage.folderId + }); + // Only sync folders if this is the inbox. Other folders are client-side only. this.isInbox = (storage.folderMeta.type === 'inbox'); } @@ -219,7 +224,7 @@ Pop3FolderSyncer.prototype = { // attachment. var registerDownload = true; jobmixins.saveToDeviceStorage( - this._LOG, att.file, 'sdcard', registerDownload, att.name, att, + self, att.file, 'sdcard', registerDownload, att.name, att, latch.defer()); // When saveToDeviceStorage completes, att.file will // be a reference to the file on the sdcard. @@ -409,8 +414,7 @@ Pop3FolderSyncer.prototype = { }, shutdown: function() { - // No real cleanup necessary here; just log that we died. - this._LOG.__die(); + // Nothing to do here either. }, /** @@ -460,17 +464,17 @@ Pop3FolderSyncer.prototype = { function(conn, syncType, slice, realDoneCallback, progressCallback) { // if we could not establish a connection, abort the sync. var self = this; - slog.log('pop3.sync:begin', { syncType: syncType }); + logic(self, 'sync:begin', { syncType: syncType }); // Avoid invoking realDoneCallback multiple times. 
Cleanup when we switch // sync to promises/tasks. var doneFired = false; var doneCallback = function(err) { if (doneFired) { - slog.log('pop3.sync:duplicateDone', { syncType: syncType, err: err }); + logic(self, 'sync:duplicateDone', { syncType: syncType, err: err }); return; } - slog.log('pop3.sync:end', { syncType: syncType, err: err }); + logic(self, 'sync:end', { syncType: syncType, err: err }); doneFired = true; // coerce the rich error object to a string error code; currently // refreshSlice only likes 'unknown' and 'aborted' so just run with @@ -507,7 +511,7 @@ Pop3FolderSyncer.prototype = { saveNeeded = this._performTestAdditionsAndDeletions(latch.defer()); } else { saveNeeded = true; - this._LOG.sync_begin(); + logic(this, 'sync_begin'); var fetchDoneCb = latch.defer(); var closeExpected = false; @@ -578,7 +582,7 @@ Pop3FolderSyncer.prototype = { overflowMessages.forEach(function(message) { this.storeOverflowMessageUidl(message.uidl, message.size); }, this); - this._LOG.overflowMessages(overflowMessages.length); + logic(this, 'overflowMessages', { count: overflowMessages.length }); } // When all of the messages have been persisted to disk, indicate @@ -604,7 +608,7 @@ Pop3FolderSyncer.prototype = { } if (this.isInbox) { - this._LOG.sync_end(); + logic(this, 'sync_end'); } // Don't notify completion until the save completes, if relevant. if (saveNeeded) { @@ -654,36 +658,5 @@ function range(end) { return ret; } -var LOGFAB = exports.LOGFAB = log.register(module, { - Pop3FolderSyncer: { - type: log.CONNECTION, - subtype: log.CLIENT, - events: { - savedAttachment: { storage: true, mimeType: true, size: true }, - saveFailure: { storage: false, mimeType: false, error: false }, - overflowMessages: { count: true }, - }, - TEST_ONLY_events: { - }, - errors: { - callbackErr: { ex: log.EXCEPTION }, - - htmlParseError: { ex: log.EXCEPTION }, - htmlSnippetError: { ex: log.EXCEPTION }, - textChewError: { ex: log.EXCEPTION }, - textSnippetError: { ex: log.EXCEPTION }, - - // Attempted to sync with an empty or inverted range. - illegalSync: { startTS: false, endTS: false }, - }, - asyncJobs: { - sync: {}, - syncDateRange: { - newMessages: true, existingMessages: true, deletedMessages: true, - start: false, end: false, - }, - }, - }, -}); // end LOGFAB }); // end define diff --git a/apps/email/js/ext/searchfilter.js b/apps/email/js/ext/searchfilter.js index d5087fd8a01b..e49fdc0eafda 100755 --- a/apps/email/js/ext/searchfilter.js +++ b/apps/email/js/ext/searchfilter.js @@ -77,7 +77,7 @@ define( [ - 'rdcommon/log', + 'logic', './util', './allback', './syncbase', @@ -87,7 +87,7 @@ define( 'exports' ], function( - $log, + logic, $util, allback, $syncbase, @@ -506,7 +506,7 @@ var CONTEXT_CHARS_AFTER = 40; /** * */ -function SearchSlice(bridgeHandle, storage, phrase, whatToSearch, _parentLog) { +function SearchSlice(bridgeHandle, storage, phrase, whatToSearch) { console.log('sf: creating SearchSlice:', phrase); this._bridgeHandle = bridgeHandle; bridgeHandle.__listener = this; @@ -514,7 +514,11 @@ console.log('sf: creating SearchSlice:', phrase); bridgeHandle.userCanGrowDownwards = false; this._storage = storage; - this._LOG = LOGFAB.SearchSlice(this, _parentLog, bridgeHandle._handle); + logic.defineScope(this, 'SearchSlice'); + + // XXX: This helps test_search_slice do its job, in a world where + // we no longer have loggers associated with specific instances. 
+ SearchSlice._TEST_latestInstance = this; // These correspond to the range of headers that we have searched to generate // the current set of matched headers. Our matches will always be fully @@ -722,7 +726,8 @@ SearchSlice.prototype = { console.log(logPrefix, 'willHave', willHave, 'of', this.desiredHeaders, 'want more?', wantMore); var insertAt = dir === -1 ? 0 : this.headers.length; - this._LOG.headersAppended(insertAt, matchPairs); + logic(this, 'headersAppended', { insertAt: insertAt, + matchPairs: matchPairs }); this.headers.splice.apply(this.headers, [insertAt, 0].concat(matchPairs)); @@ -875,7 +880,7 @@ SearchSlice.prototype = { // though. this.desiredHeaders = this.headers.length; - this._LOG.headerAdded(idx, wrappedHeader); + logic(this, 'headerAdded', { index: idx, header: wrappedHeader }); this.headers.splice(idx, 0, wrappedHeader); this.headerCount = this.headers.length + (this.atBottom ? 0 : this.IMAGINARY_MESSAGE_COUNT_WHEN_NOT_AT_BOTTOM); @@ -911,7 +916,8 @@ SearchSlice.prototype = { // Update the header in the match and send it out. var existingMatch = this.headers[idx]; existingMatch.header = header; - this._LOG.headerModified(idx, existingMatch); + logic(this, 'headerModified', { index: idx, + existingMatch: existingMatch }); this._bridgeHandle.sendUpdate([idx, existingMatch]); return; } @@ -972,7 +978,7 @@ SearchSlice.prototype = { var idx = bsearchMaybeExists(this.headers, wrappedHeader, cmpMatchHeadersYoungToOld); if (idx !== null) { - this._LOG.headerRemoved(idx, wrappedHeader); + logic(this, 'headerRemoved', { index: idx, header: wrappedHeader }); this.headers.splice(idx, 1); this.headerCount = this.headers.length + (this.atBottom ? 0 : this.IMAGINARY_MESSAGE_COUNT_WHEN_NOT_AT_BOTTOM); @@ -1095,27 +1101,7 @@ SearchSlice.prototype = { die: function() { this._storage.dyingSlice(this); this._bridgeHandle = null; - this._LOG.__die(); }, }; -var LOGFAB = exports.LOGFAB = $log.register($module, { - SearchSlice: { - type: $log.QUERY, - events: { - headersAppended: { index: false }, - headerAdded: { index: false }, - headerModified: { index: false }, - headerRemoved: { index: false }, - }, - TEST_ONLY_events: { - headersAppended: { headers: false }, - headerAdded: { header: false }, - headerModified: { header: false }, - headerRemoved: { header: false }, - }, - }, -}); // end LOGFAB - - }); // end define diff --git a/apps/email/js/ext/slog.js b/apps/email/js/ext/slog.js deleted file mode 100644 index 3da9047c566e..000000000000 --- a/apps/email/js/ext/slog.js +++ /dev/null @@ -1,264 +0,0 @@ -/** - * slog: Structured Logging (WIP/Exploratory) - * - * A precursor to the future described in ; - * WIP and intended to be exploratory as we figure out how to actually - * implement the full-on test refactoring. - * - * Behaves similarly to console.log and friends, with some - * enhancements and conventions: - * - * - JSON objects passed in are stringified automatically. - * - * - Using the slog.log(errorName, errorDetails) format, you can - * integrate assertions into unit tests (more below). - * - * - Private keys (right now, those with an underscore, but welcome to - * change) are hidden from the JSON representation by default, - * unless super-secret debug mode is enabled. - * - * Usage: - * - * slog.log('imap:error', { - * user: 'foo', - * _pass: 'bar' // Private, due to the underscore. - * }); - * - * The LogChecker for unit tests allows you to assert on logged events or that - * events should not be logged. Things are a little hacky right now. 
- * - * Current each LogChecker uses one lazyLogger to track the things that must be - * logged and one lazyLogger to track the things that must not be logged. - * - * The "must" subscribes to logs with that name type until all of its "musts" - * have been resolved, then it unsubscribes. There is currently no way to - * express that after those things are logged that we should never see any - * more logs of that type. (But based on the lazyLogger semantics if we - * didn't remove our event listener, it would do what we want.) - * - * The "must not" creates a lazy logger that is supposed to expect nothing - * to be logged and subscribes to that log type, logging it if it sees it. - * - * Sequence-wise, each LogChecker expects everything it is told to happen - * sequentially. If you don't want this requirement enforced, then use - * separate LogChecker instances, one for each sequential thread of execution - * you want. - * - * var log = new LogChecker(T, RT); - * log.mustLog('imap:error', function(details) { - * return details.user === 'foo'; - * }); - */ -define('slog', function(require, exports, module) { - var $log = require('rdcommon/log'); - var evt = require('evt'); - - var logSensitiveData = false; - exports.setSensitiveDataLoggingEnabled = function(enabled) { - logSensitiveData = enabled; - exports.log('meta:sensitive-logging', { enabled: enabled }); - }; - - var logEmitter = new evt.Emitter(); - - exports.resetEmitter = function() { - logEmitter = new evt.Emitter(); - }; - - - var LogChecker = exports.LogChecker = function(T, RT, name) { - this.T = T; - this.RT = RT; - this.eLazy = this.T.lazyLogger(name); - this.eNotLogLazy = null; - this._subscribedTo = {}; - this._interceptions = {}; - }; - - - //////////////////////////////////////////////////////////////// - // Interceptions: Hook into predefined 'intercept' log events, - // allowing you to dynamically "mock" internal details without - // a lot of boilerplate. - - var interceptions = { }; // Map of { logName: handler } - - /** - * Intercept one instance of an interceptible log event, causing the - * corresponding `slog.interceptable()` call to return the value of - * replaceFn instead (one time). The interception _must_ occur, i.e. - * this function also calls `mustLog(name)` itself. - */ - LogChecker.prototype.interceptOnce = function(name, replaceFn) { - this.mustLog(name); - var handlers = interceptions[name] = (interceptions[name] || []); - handlers.push(replaceFn); - }; - - /** - * Log an event designed to provide a "hook" for tests to modify - * internal behavior and/or mocks. Usage: - * - * var addFn = slog.interceptable('my-xhr', function() { - * return new XMLHttpRequest(); - * }); - * - * // by default, addFn ==> the XHR object, unless you intercept - * // the behavior in a test: - * - * logChecker.intercept('my-xhr', function() { - * return new SomeMockXHR(); - * }); - * - * // (now addFn ==> SomeMockXHR instance) - */ - exports.interceptable = function(name, fn) { - var handler; - if (interceptions[name]) { - handler = interceptions[name].shift(); - } - - if (handler) { - exports.log(name); - return handler(fn); - } else { - return fn(); - } - }; - - /** - * Assert that a log with the given name, and optionally matching - * the given predicate function, is logged during this test step. - * - * @param {String} name - * @param {function(details) => boolean} [predicate] - * Optional predicate; called with the 'details' (second argument) - * of the slog.log() call. Return true if the log matched. 
Alternately, - * if this is an object, we will use the loggest nested equivalence - * checking logic. - */ - LogChecker.prototype.mustLog = function(name, /* optional */ predicate) { - var eLazy = this.eLazy; - - var queued = this._subscribedTo[name]; - if (queued === undefined) { - queued = this._subscribedTo[name] = []; - logEmitter.on(name, function(details) { - var predicate = queued.shift(); - try { - if (predicate === null) { - eLazy.namedValue(name, details); - } else { - var result = true; - if (predicate) { - result = predicate(details); - } - eLazy.namedValueD(name, result, details); - } - } catch(e) { - console.error('Exception running LogChecker predicate:', e); - } - // When we run out of things that must be logged, stop listening. - if (queued.length === 0) { - logEmitter.removeListener(name); - } - }); - } - - this.RT.reportActiveActorThisStep(eLazy); - if (typeof(predicate) === 'object') { - // If it's an object, just expect that as the payload - eLazy.expect_namedValue(name, predicate); - queued.push(null); - } else { - // But for a predicate (or omitted predicate), expect it to return - // true. But also pass the value through as a detail - eLazy.expect_namedValueD(name, true); - queued.push(predicate); - } - }; - - /** - * Assert that a log with the given name, and optionally matching - * the given predicate function, is NOT logged during this test - * step. This is the inverse of `mustLog`. - * - * @param {String} name - * @param {function(details) => boolean} [predicate] - * Optional predicate; called with the 'details' (second argument) - * of the slog.log() call. Return true if the log matched. - */ - LogChecker.prototype.mustNotLog = function(name, /* optional */ predicate) { - var notLogLazy = this.eNotLogLazy; - if (!notLogLazy) { - notLogLazy = this.eNotLogLazy = this.T.lazyLogger('slog'); - } - this.RT.reportActiveActorThisStep(notLogLazy); - notLogLazy.expectNothing(); - - logEmitter.once(name, function(details) { - try { - var result = true; - if (predicate) { - result = predicate(details); - } - notLogLazy.namedValue(name, JSON.stringify(details)); - } catch(e) { - console.error('Exception running LogChecker predicate:', e); - } - }.bind(this)); - }; - - /** - * Provides slog.log(), slog.info(), slog.warn(), and slog.error(). - * Call these methods with a conventional string as the first argument, - * and JSONifiable details in the second argument. - * - * Mark sensitive details with an underscore-prefixed top-level key; - * these fields will be stripped from the log output unless - * sensitive debug logging is enabled. - */ - ['log', 'info', 'warn', 'error'].forEach(function(name) { - exports[name] = function(logName, details) { - var orig = console[name].bind(console, '[slog]'); - - logEmitter.emit(logName, details); - - orig.apply(console, Array.slice(arguments).map(function(arg) { - - if (typeof arg === 'object') { - // Remove private properties - var publicKeys = {}; - for (var key in arg) { - if (logSensitiveData || key[0] !== '_') { - publicKeys[key] = arg[key]; - } - } - try { - return JSON.stringify(publicKeys); - } catch(e) { - return '[un-JSONifiable ' + arg + ']'; - } - } else { - return arg; - } - })); - }; - }); - - /** - * Provide a slog.debug for things that are *only* logged when sensitive - * logging is enabled. This exists right now mainly for the benefit of the - * email.js libs. We're tying "debug" to logSensitiveData both because we - * haven't audited the use of debug and also because it is indeed a bit - * chatty. 
- * - * TODO: Address the logging detail level as a separate issue, ideally while - * working with whiteout.io to fancify the email.js logging slightly. - */ - exports.debug = function(logName, details) { - if (logSensitiveData) { - exports.log(logName, details); - } - }; -}); diff --git a/apps/email/js/ext/smtp/account.js b/apps/email/js/ext/smtp/account.js index edadbbec6c69..33ad5f1cce2d 100644 --- a/apps/email/js/ext/smtp/account.js +++ b/apps/email/js/ext/smtp/account.js @@ -1,12 +1,14 @@ define(function(require) { -var slog = require('slog'); +var logic = require('logic'); var client = require('./client'); var DisasterRecovery = require('../disaster-recovery'); function SmtpAccount(universe, compositeAccount, accountId, credentials, connInfo) { this.universe = universe; + logic.defineScope(this, 'Account', { accountId: accountId, + accountType: 'smtp' }); this.compositeAccount = compositeAccount; this.accountId = accountId; this.credentials = credentials; @@ -91,6 +93,7 @@ SmtpAccount.prototype = { * ] */ sendMessage: function(composer, callback) { + var scope = this; this.establishConnection({ /** * Send the envelope. @@ -100,9 +103,7 @@ SmtpAccount.prototype = { */ sendEnvelope: function(conn) { var envelope = composer.getEnvelope(); - slog.log('smtp:sendEnvelope', { - _envelope: envelope - }); + logic(scope, 'sendEnvelope', { _envelope: envelope }); conn.useEnvelope(envelope); }, @@ -116,9 +117,9 @@ SmtpAccount.prototype = { */ sendMessage: function(conn) { // Then send the actual message if everything was cool - slog.log('smtp:building-blob'); + logic(scope, 'building-blob'); composer.withMessageBlob({ includeBcc: false }, function(blob) { - slog.log('smtp:sending-blob', { size: blob.size }); + logic(scope, 'sending-blob', { size: blob.size }); // simplesmtp's SMTPClient does not understand Blobs, so we // issue the write directly. All that it cares about is // knowing whether our data payload included a trailing @@ -141,14 +142,14 @@ SmtpAccount.prototype = { * The send succeeded. */ onSendComplete: function(conn) { - slog.log('smtp:sent'); + logic(scope, 'smtp:sent'); callback(null); }, /** * The send failed. */ onError: function(err, badAddresses) { - slog.error('smtp:error', { + logic(scope, 'smtp:error', { error: err, badAddresses: badAddresses }); @@ -211,6 +212,7 @@ SmtpAccount.prototype = { * onError(err, badAddresses) -- send failed (or connection error) */ establishConnection: function(callbacks) { + var scope = this; var conn; var sendingMessage = false; client.createSmtpConnection( @@ -245,11 +247,11 @@ SmtpAccount.prototype = { // We sent the envelope; see if we can now send the message. conn.onready = function(badRecipients) { - slog.log('smtp:onready'); + logic(scope, 'onready'); if (badRecipients.length) { conn.close(); - slog.warn('smtp:bad-recipients', { badRecipients: badRecipients }); + logic(scope, 'bad-recipients', { badRecipients: badRecipients }); callbacks.onError('bad-recipient', badRecipients); } else { sendingMessage = true; @@ -262,10 +264,10 @@ SmtpAccount.prototype = { conn.close(); if (success) { - slog.log('smtp:sent'); + logic(scope, 'sent'); callbacks.onSendComplete(conn); } else { - slog.error('smtp:send-failed'); + logic(scope, 'send-failed'); // We don't have an error to reference here, but we stored // the most recent SMTP error, which should tell us why the // server rejected the message. 
@@ -282,13 +284,13 @@ SmtpAccount.prototype = { }; conn.onclose = function() { - slog.log('smtp:onclose'); + logic(scope, 'onclose'); var idx = this._activeConnections.indexOf(conn); if (idx !== -1) { this._activeConnections.splice(idx, 1); } else { - slog.error('smtp:dead-unknown-connection'); + logic(scope, 'dead-unknown-connection'); } }.bind(this); }.bind(this)) diff --git a/apps/email/js/ext/smtp/client.js b/apps/email/js/ext/smtp/client.js index aa2e2de74dbb..fbedfa854fac 100644 --- a/apps/email/js/ext/smtp/client.js +++ b/apps/email/js/ext/smtp/client.js @@ -5,7 +5,7 @@ */ define(function(require, exports) { - var slog = require('slog'); + var logic = require('logic'); var SmtpClient = require('smtpclient'); var syncbase = require('../syncbase'); var oauth = require('../oauth'); @@ -17,6 +17,8 @@ define(function(require, exports) { clearTimeout = clearFn; }; + var scope = logic.scope('SmtpClient'); + /** * Create an SMTP connection using the given credentials and * connection info, returning a Promise. @@ -52,7 +54,7 @@ define(function(require, exports) { xoauth2: credentials.oauth2 ? credentials.oauth2.accessToken : null }; - slog.log('smtp:connect', { + logic(scope, 'connect', { _auth: auth, usingOauth2: !!credentials.oauth2, connInfo: connInfo @@ -86,7 +88,7 @@ define(function(require, exports) { conn.onidle = function() { clearConnectTimeout(); - slog.info('smtp:connected', connInfo); + logic(scope, 'connected', connInfo); conn.onidle = conn.onclose = conn.onerror = function() { /* noop */ }; resolve(conn); }; @@ -120,7 +122,7 @@ define(function(require, exports) { credsUpdatedCallback); }); } else { - slog.error('smtp:connect-error', { + logic(scope, 'connect-error', { error: errorString, connInfo: connInfo }); @@ -275,7 +277,7 @@ define(function(require, exports) { normalizedError = err; } - slog.log('smtp:analyzed-error', { + logic(scope, 'analyzed-error', { statusCode: err.statusCode, enhancedStatus: err.enhancedStatus, rawError: rawError, diff --git a/apps/email/js/ext/smtp/probe.js b/apps/email/js/ext/smtp/probe.js index 673781f47a5d..ca57f1f29929 100644 --- a/apps/email/js/ext/smtp/probe.js +++ b/apps/email/js/ext/smtp/probe.js @@ -1,4 +1,6 @@ -define(['slog', './client', 'exports'], function(slog, client, exports) { +define(['logic', './client', 'exports'], function(logic, client, exports) { + + var scope = logic.scope('SmtpProber'); /** * Validate that we find an SMTP server using the connection info @@ -16,7 +18,8 @@ define(['slog', './client', 'exports'], function(slog, client, exports) { * change their address after setup. */ exports.probeAccount = function(credentials, connInfo) { - slog.info('probe:smtp:connecting', { + + logic(scope, 'connecting', { _credentials: credentials, connInfo: connInfo }); @@ -30,13 +33,13 @@ define(['slog', './client', 'exports'], function(slog, client, exports) { // here, as the caller should have already passed a valid // accessToken during account setup. This might indicate a // problem with our OAUTH handling, so log it just in case. 
- slog.warn('probe:smtp:credentials-updated'); + logic(scope, 'credentials-updated'); } ).then(function(newConn) { conn = newConn; return verifyAddress(conn, connInfo.emailAddress); }).then(function() { - slog.info('probe:smtp:success'); + logic(scope, 'success'); conn.close(); return conn; }).catch(function(err) { @@ -47,7 +50,7 @@ define(['slog', './client', 'exports'], function(slog, client, exports) { conn.close(); } - slog.error('probe:smtp:error', { + logic(scope, 'error', { error: errorString, connInfo: connInfo }); @@ -65,7 +68,10 @@ define(['slog', './client', 'exports'], function(slog, client, exports) { * reject => {Object} some sort of error */ function verifyAddress(conn, emailAddress) { - slog.log('probe:smtp:checking-address-validity'); + logic(scope, 'checking-address-validity', { + ns: 'SmtpProber', + _address: emailAddress + }); return new Promise(function(resolve, reject) { conn.useEnvelope({ from: emailAddress, diff --git a/apps/email/js/ext/worker-config.js b/apps/email/js/ext/worker-config.js index c99c3ddde44b..68d03c38a5db 100644 --- a/apps/email/js/ext/worker-config.js +++ b/apps/email/js/ext/worker-config.js @@ -49,6 +49,8 @@ 'bleach.js': 'ext/bleach.js', 'browserbox': 'ext/browserbox', 'browserbox-imap': 'ext/browserbox-imap', + 'co': 'ext/co', + 'equal': 'ext/equal', 'evt': 'ext/evt', 'imap-handler': 'ext/imap-handler', 'mailbuild': 'ext/mailbuild', @@ -59,7 +61,6 @@ 'mimetypes': 'ext/mimetypes', 'mix': 'ext/mix', 'punycode': 'ext/punycode', - 'rdcommon': 'ext/rdcommon', 'safe-base64': 'ext/safe-base64', 'smtpclient': 'ext/smtpclient', 'stringencoding': 'ext/stringencoding', diff --git a/apps/email/js/ext/worker-setup.js b/apps/email/js/ext/worker-setup.js index fb6c3b0032b0..453196c4963a 100644 --- a/apps/email/js/ext/worker-setup.js +++ b/apps/email/js/ext/worker-setup.js @@ -2,12 +2,14 @@ define( [ './worker-router', './mailbridge', + 'logic', './mailuniverse', 'exports' ], function( $router, $mailbridge, + logic, $mailuniverse, exports ) { @@ -26,7 +28,7 @@ function createBridgePair(universe) { var sendMessage = routerInfo.sendMessage; TMB.__sendMessage = function(msg) { - TMB._LOG.send(msg.type, msg); + logic(TMB, 'send', { type: msg.type, msg: msg }); sendMessage(null, msg); };
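
For readers unfamiliar with the logic module that replaces slog and rdcommon/log throughout this patch, the call sites above use a small set of shapes: logic.defineScope(obj, name, details) binds an object to a named scope, logic.scope(name) creates a standalone scope for modules with no natural instance, and logic(scopeOrObj, eventName, details) emits a structured event; underscore-prefixed keys (e.g. _auth, _credentials) mark sensitive details. The sketch below is illustrative only: it mirrors those call shapes as they appear in this patch, the Pop3Prober name and field values are invented, and it does not show the logic.js implementation.

    // Illustrative sketch only, assuming the call shapes used in this patch;
    // the Pop3Prober name and its details are invented for the example.
    define(function(require) {
      var logic = require('logic');

      function Pop3Prober(connInfo) {
        // Bind `this` to a named scope; the details object presumably rides
        // along on events logged against this instance, as accountId/folderId
        // do for Pop3FolderSyncer above.
        logic.defineScope(this, 'Pop3Prober', { host: connInfo.hostname });
      }

      Pop3Prober.prototype.probe = function() {
        // Emit a structured event: logic(scopeOrObject, eventName, details).
        logic(this, 'probe:begin', { port: 110 });
        // Underscore-prefixed keys flag sensitive data, mirroring _auth and
        // _credentials elsewhere in this patch.
        logic(this, 'auth', { _password: 'hunter2' });
      };

      // Modules without a natural instance use a standalone string scope,
      // as smtp/client.js and smtp/probe.js do.
      var moduleScope = logic.scope('Pop3Prober');
      logic(moduleScope, 'module-loaded');

      return Pop3Prober;
    });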