From ffd9e5f6132ebbdb1485dc731c3a9a14b067920e Mon Sep 17 00:00:00 2001 From: Brett Kercher Date: Mon, 11 Jun 2018 10:09:59 -0500 Subject: [PATCH 1/5] Add an option to run the cleanup algorithm optimized for low memory usage --- README.md | 1 + cleanup.js | 13 +++-- config/default.yml | 1 + lib/cache/cache_fs.js | 121 ++++++++++++++++++++++++++++++++++++++++-- lib/helpers.js | 24 +++++++++ 5 files changed, 151 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 4f401ff..3ad4489 100644 --- a/README.md +++ b/README.md @@ -169,6 +169,7 @@ Command | Description -s, --max-cache-size | Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. A value of 0 disables this check. -d, --delete | Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only. -D, --daemon | Daemon mode. Execute the cleanup script at the given interval in seconds as a foreground process. +-m, --low-memory | Run the cleanup algorithm optimized for low memory usage. -h, --help | Show usage information. ### Notes diff --git a/cleanup.js b/cleanup.js index 210e74c..160f1e4 100644 --- a/cleanup.js +++ b/cleanup.js @@ -37,7 +37,8 @@ program.description("Unity Cache Server - Cache Cleanup\n\n Removes old files f .option('-s, --max-cache-size ', 'Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. A value of 0 disables this check.', myParseInt) .option('-d, --delete', 'Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only.') .option('-D, --daemon ', 'Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process.', myParseInt) - .option('--NODE_CONFIG_DIR=', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used.'); + .option('--NODE_CONFIG_DIR=', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. 
Without this option, the built-in configuration is used.') + .option('-m, --low-memory', 'Run the cleanup algorithm optimized for low memory usage.'); program.parse(process.argv); @@ -66,6 +67,10 @@ if(program.hasOwnProperty('maxCacheSize')) { cacheOpts.cleanupOptions.maxCacheSize = program.maxCacheSize; } +if(program.hasOwnProperty('lowMemory')) { + cacheOpts.cleanupOptions.lowMemory = program.lowMemory; +} + const dryRun = !program.delete; const logLevel = helpers.getLogLevel(); @@ -82,14 +87,13 @@ cache.on('cleanup_delete_finish', data => { } }); -const msg = 'Gathering cache files for expiration'; let spinner = null; if(logLevel < consts.LOG_DBG && logLevel >= consts.LOG_INFO) { spinner = ora({color: 'white'}); cache.on('cleanup_search_progress', data => { - spinner.text = `${msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; + spinner.text = `${data.msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; }); cache.on('cleanup_search_finish', () => { @@ -98,11 +102,12 @@ if(logLevel < consts.LOG_DBG && logLevel >= consts.LOG_INFO) { } else if(logLevel === consts.LOG_DBG) { cache.on('cleanup_search_progress', data => { - const txt = `${msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; + const txt = `${data.msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; helpers.log(consts.LOG_DBG, txt); }); } +const msg = 'Gathering cache files for expiration'; function doCleanup() { if (spinner) spinner.start(msg); cache.cleanup(dryRun) diff --git a/config/default.yml b/config/default.yml index d8d88ba..e254188 100644 --- a/config/default.yml +++ b/config/default.yml @@ -21,6 +21,7 @@ Cache: cleanupOptions: expireTimeSpan: "P30D" maxCacheSize: 0 + lowMemory: false persistence: true persistenceOptions: autosave: true diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 7be2145..53d4d09 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -110,15 +110,23 @@ class CacheFS extends CacheBase { } cleanup(dryRun = true) { - const self = this; - const expireDuration = moment.duration(this._options.cleanupOptions.expireTimeSpan); + const minFileAccessTime = moment().subtract(expireDuration).toDate(); + const maxCacheSize = this._options.cleanupOptions.maxCacheSize; + if(!expireDuration.isValid() || expireDuration.asMilliseconds() === 0) { return Promise.reject(new Error("Invalid expireTimeSpan option")); } - const minFileAccessTime = moment().subtract(expireDuration).toDate(); - const maxCacheSize = this._options.cleanupOptions.maxCacheSize; + if(this._options.cleanupOptions.lowMemory) { + return this.cleanup_low_mem(dryRun, expireDuration, minFileAccessTime, maxCacheSize); + } + + return this.cleanup_fast(dryRun, expireDuration, minFileAccessTime, maxCacheSize); + } + + cleanup_fast(dryRun = true, expireDuration, minFileAccessTime, maxCacheSize) { + const self = this; const allItems = []; const deleteItems = []; @@ -130,7 +138,8 @@ class CacheFS extends CacheBase { cacheCount: allItems.length, cacheSize: cacheSize, deleteCount: deleteItems.length, - deleteSize: deleteSize + deleteSize: deleteSize, + msg: 'Gathering cache files for expiration', }; }; @@ -182,6 +191,108 @@ class CacheFS extends CacheBase { self.emit('cleanup_delete_finish', progressData()); }); } + + cleanup_low_mem(dryRun = true, expireDuration, minFileAccessTime, maxCacheSize) { + + const self = this; + + let cacheCount = 0; + let cacheSize = 0; + let deleteSize = 0; + let 
deletedItemCount = 0; + let deleteItems = []; + let verb = dryRun ? 'Gathering' : 'Removing'; + let spinnerMessage = verb + ' expired files'; + + const progressData = () => { + return { + cacheCount: cacheCount, + cacheSize: cacheSize, + deleteCount: deleteItems.length + deletedItemCount, + deleteSize: deleteSize, + msg: spinnerMessage, + }; + }; + + const progressEvent = () => self.emit('cleanup_search_progress', progressData()); + + progressEvent(); + const progressTimer = setInterval(progressEvent, 250); + + return helpers.readDir(self._cachePath, async (item) => { + + if(item.stats.isDirectory()) return next(); + + cacheSize += item.stats.size; + cacheCount ++; + + if(item.stats.atime < minFileAccessTime) { + deleteSize += item.stats.size; + deletedItemCount++; + self.emit('cleanup_delete_item', item.path); + if(!dryRun) { + await fs.unlink(item.path); + } + } + }).then(async () => { + if (maxCacheSize <= 0 || cacheSize - deleteSize < maxCacheSize) { + return; + } + + let needsSorted = false; + cacheCount = 0; + spinnerMessage = 'Gathering files to delete to satisfy Max cache size'; + + await helpers.readDir(self._cachePath, (item) => { + if(item.stats.isDirectory()) return next(); + + if(item.stats.atime < minFileAccessTime) { + // already expired items are handled in the previous pass + return next(); + } + + item = {path: item.path, stats: pick(item.stats, ['atime', 'size'])}; + cacheCount++; + + if (cacheSize - deleteSize > maxCacheSize) { + deleteSize += item.stats.size; + deleteItems.push(item); + needsSorted = true; + } + else { + if(needsSorted) { + deleteItems.sort((a, b) => { return a.stats.atime > b.stats.atime }); + needsSorted = false; + } + + let i = deleteItems[deleteItems.length - 1]; // i is the MRU out of the current delete list + + if (item.stats.atime > i.stats.atime) { + helpers.insertSorted(item, deleteItems, (a, b) => { return a.stats.atime > b.stats.atime }); + deleteSize += item.stats.size; + + if (cacheSize - (deleteSize - i.stats.size) < maxCacheSize) { + deleteItems.pop(); + deleteSize -= i.stats.size; + } + } + } + }); + }) + .then(async () => { + clearTimeout(progressTimer); + self.emit('cleanup_search_finish', progressData()); + + for(const d of deleteItems) { + self.emit('cleanup_delete_item', d.path); + if(!dryRun) { + await fs.unlink(d.path); + } + } + + self.emit('cleanup_delete_finish', progressData()); + }); + } } class PutTransactionFS extends PutTransaction { diff --git a/lib/helpers.js b/lib/helpers.js index ef1c3aa..ceb06d9 100644 --- a/lib/helpers.js +++ b/lib/helpers.js @@ -199,3 +199,27 @@ exports.resolveCacheModule = (module, rootPath) => { modulePath = path.resolve(rootPath, 'lib/cache', module); return require(modulePath); }; + +exports.insertSorted = (item, arr, compare) => { + arr.splice(locationOf(item, arr, compare) + 1, 0, item); + return arr; +}; + +function locationOf(item, array, compare, start, end) { + if (array.length === 0) + return -1; + + start = start || 0; + end = end || array.length; + + let pivot = (start + end) >> 1; + + let c = compare(item, array[pivot]); + if (end - start <= 1) return c == -1 ? 
pivot - 1 : pivot; + + switch (c) { + case -1: return locationOf(item, array, compare, start, pivot); + case 0: return pivot; + case 1: return locationOf(item, array, compare, pivot, end); + } +} \ No newline at end of file From ab302506a09e570b346382ac00c1de880cdffc2c Mon Sep 17 00:00:00 2001 From: Brett Kercher Date: Tue, 12 Jun 2018 10:42:20 -0500 Subject: [PATCH 2/5] Integrate reliability manager changes, and fix redundancy issues --- lib/cache/cache_fs.js | 74 ++++++++++++++++++++----------------------- 1 file changed, 35 insertions(+), 39 deletions(-) diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 53d4d09..37f4dcd 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -43,7 +43,7 @@ class CacheFS extends CacheBase { const fileName = path.basename(filePath).toLowerCase(); const matches = /^([0-9a-f]{32})-([0-9a-f]{32})\./.exec(fileName); const result = { guidStr: "", hashStr: ""}; - if(matches.length === 3) { + if(matches && matches.length === 3) { result.guidStr = matches[1]; result.hashStr = matches[2]; } @@ -126,8 +126,6 @@ class CacheFS extends CacheBase { } cleanup_fast(dryRun = true, expireDuration, minFileAccessTime, maxCacheSize) { - const self = this; - const allItems = []; const deleteItems = []; let cacheSize = 0; @@ -143,12 +141,12 @@ class CacheFS extends CacheBase { }; }; - const progressEvent = () => self.emit('cleanup_search_progress', progressData()); + const progressEvent = () => this.emit('cleanup_search_progress', progressData()); progressEvent(); const progressTimer = setInterval(progressEvent, 250); - return helpers.readDir(self._cachePath, (item) => { + return helpers.readDir(this._cachePath, (item) => { item = {path: item.path, stats: pick(item.stats, ['atime', 'size'])}; allItems.push(item); cacheSize += item.stats.size; @@ -167,35 +165,20 @@ class CacheFS extends CacheBase { } clearTimeout(progressTimer); - self.emit('cleanup_search_finish', progressData()); + this.emit('cleanup_search_finish', progressData()); await Promise.all( deleteItems.map(async (d) => { - const guidHash = CacheFS._extractGuidAndHashFromFilepath(d.path); - - // Make sure we're only deleting valid cached files - if(guidHash.guidStr.length === 0 || guidHash.hashStr.length === 0) - return; - - if(!dryRun) { - await fs.unlink(d.path); - if(this.reliabilityManager !== null) { - this.reliabilityManager.removeEntry(guidHash.guidStr, guidHash.hashStr); - } - } - - self.emit('cleanup_delete_item', d.path); + await this.delete_cache_item(dryRun, d); }) ); - self.emit('cleanup_delete_finish', progressData()); + this.emit('cleanup_delete_finish', progressData()); }); } cleanup_low_mem(dryRun = true, expireDuration, minFileAccessTime, maxCacheSize) { - const self = this; - let cacheCount = 0; let cacheSize = 0; let deleteSize = 0; @@ -214,12 +197,12 @@ class CacheFS extends CacheBase { }; }; - const progressEvent = () => self.emit('cleanup_search_progress', progressData()); + const progressEvent = () => this.emit('cleanup_search_progress', progressData()); progressEvent(); const progressTimer = setInterval(progressEvent, 250); - return helpers.readDir(self._cachePath, async (item) => { + return helpers.readDir(this._cachePath, async (item) => { if(item.stats.isDirectory()) return next(); @@ -229,13 +212,10 @@ class CacheFS extends CacheBase { if(item.stats.atime < minFileAccessTime) { deleteSize += item.stats.size; deletedItemCount++; - self.emit('cleanup_delete_item', item.path); - if(!dryRun) { - await fs.unlink(item.path); - } + await this.delete_cache_item(dryRun, 
item); } }).then(async () => { - if (maxCacheSize <= 0 || cacheSize - deleteSize < maxCacheSize) { + if (maxCacheSize <= 0 || cacheSize - deleteSize <= maxCacheSize) { return; } @@ -243,7 +223,7 @@ class CacheFS extends CacheBase { cacheCount = 0; spinnerMessage = 'Gathering files to delete to satisfy Max cache size'; - await helpers.readDir(self._cachePath, (item) => { + await helpers.readDir(this._cachePath, (item) => { if(item.stats.isDirectory()) return next(); if(item.stats.atime < minFileAccessTime) { @@ -281,18 +261,34 @@ class CacheFS extends CacheBase { }) .then(async () => { clearTimeout(progressTimer); - self.emit('cleanup_search_finish', progressData()); + this.emit('cleanup_search_finish', progressData()); - for(const d of deleteItems) { - self.emit('cleanup_delete_item', d.path); - if(!dryRun) { - await fs.unlink(d.path); - } - } + await Promise.all( + deleteItems.map(async (d) => { + await this.delete_cache_item(dryRun, d); + }) + ); - self.emit('cleanup_delete_finish', progressData()); + this.emit('cleanup_delete_finish', progressData()); }); } + + async delete_cache_item(dryRun = true, item) { + const guidHash = CacheFS._extractGuidAndHashFromFilepath(item.path); + + // Make sure we're only deleting valid cached files + if(guidHash.guidStr.length === 0 || guidHash.hashStr.length === 0) + return; + + if(!dryRun) { + await fs.unlink(item.path); + if(this.reliabilityManager !== null) { + this.reliabilityManager.removeEntry(guidHash.guidStr, guidHash.hashStr); + } + } + + this.emit('cleanup_delete_item', item.path); + } } class PutTransactionFS extends PutTransaction { From e2af20cebfb44c25fdf32c977c6b90a1df80d80f Mon Sep 17 00:00:00 2001 From: Brett Kercher Date: Tue, 12 Jun 2018 14:45:35 -0500 Subject: [PATCH 3/5] Add unit tests and fix some backwards logic --- lib/cache/cache_fs.js | 9 ++- test/cache_fs.js | 139 ++++++++++++++++++++++++++++++++++++++++++ test/helpers.js | 23 +++++++ 3 files changed, 168 insertions(+), 3 deletions(-) diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 37f4dcd..60c8764 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -234,7 +234,7 @@ class CacheFS extends CacheBase { item = {path: item.path, stats: pick(item.stats, ['atime', 'size'])}; cacheCount++; - if (cacheSize - deleteSize > maxCacheSize) { + if (cacheSize - deleteSize >= maxCacheSize) { deleteSize += item.stats.size; deleteItems.push(item); needsSorted = true; @@ -247,8 +247,11 @@ class CacheFS extends CacheBase { let i = deleteItems[deleteItems.length - 1]; // i is the MRU out of the current delete list - if (item.stats.atime > i.stats.atime) { - helpers.insertSorted(item, deleteItems, (a, b) => { return a.stats.atime > b.stats.atime }); + if (item.stats.atime < i.stats.atime) { + deleteItems = helpers.insertSorted(item, deleteItems, (a, b) => { + if (a.stats.atime === b.stats.atime) return 0; + return a.stats.atime < b.stats.atime ? 
-1 : 1 + }); deleteSize += item.stats.size; if (cacheSize - (deleteSize - i.stats.size) < maxCacheSize) { diff --git a/test/cache_fs.js b/test/cache_fs.js index fc4ef49..e7e1e16 100644 --- a/test/cache_fs.js +++ b/test/cache_fs.js @@ -196,6 +196,145 @@ describe("Cache: FS", () => { assert(!rmEntry); }); }); + + describe("cleanup_low_memory", () => { + it("should remove files that have not been accessed within a given timespan (ASP.NET style)", async () => { + const opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "P1D", + maxCacheSize: 0, + lowMemory: true, + }; + + await cache.init(opts); + const file1 = await addFileToCache(moment().subtract(2, 'days').toDate()); + const file2 = await addFileToCache(moment().subtract(2, 'days').toDate()); + const file3 = await addFileToCache(moment().toDate()); + + await cache.cleanup(false); + + assert(!await fs.pathExists(file1.path)); + assert(!await fs.pathExists(file2.path)); + assert(await fs.pathExists(file3.path)); + }); + + it("should remove files that have not been accessed within a given timespan (ISO 8601 style)", async () => { + const opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "1.00:00:00", + maxCacheSize: 0, + lowMemory: true, + }; + + await cache.init(opts); + const file1 = await addFileToCache(moment().subtract(2, 'days').toDate()); + const file2 = await addFileToCache(moment().subtract(2, 'days').toDate()); + const file3 = await addFileToCache(moment().toDate()); + + assert(await fs.pathExists(file1.path)); + assert(await fs.pathExists(file2.path)); + assert(await fs.pathExists(file3.path)); + + await cache.cleanup(false); + + assert(!await fs.pathExists(file1.path)); + assert(!await fs.pathExists(file2.path)); + assert(await fs.pathExists(file3.path)); + }); + + it("should remove files in least-recently-used order until the overall cache size is lower than a given threshold", async () => { + const opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "P30D", + maxCacheSize: MIN_FILE_SIZE * 2 + 1, + lowMemory: true, + }; + + await cache.init(opts); + const file1 = await addFileToCache(moment().toDate()); + const file2 = await addFileToCache(moment().subtract(1, 'days').toDate()); + const file3 = await addFileToCache(moment().subtract(5, 'days').toDate()); + + assert(await fs.pathExists(file1.path)); + assert(await fs.pathExists(file2.path)); + assert(await fs.pathExists(file3.path)); + + await cache.cleanup(false); + + assert(await fs.pathExists(file1.path)); + assert(await fs.pathExists(file2.path)); + assert(!await fs.pathExists(file3.path)); + + opts.cleanupOptions.maxCacheSize = MIN_FILE_SIZE + 1; + cache._options = opts; + + await cache.cleanup(false); + + assert(await fs.pathExists(file1.path)); + assert(!await fs.pathExists(file2.path)); + }); + + it("should emit events while processing files", async () => { + const opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "P30D", + maxCacheSize: 1, + lowMemory: true, + }; + + await cache.init(opts); + await addFileToCache(moment().toDate()); + + let cleanup_search_progress = false; + let cleanup_search_finish = false; + let cleanup_delete_item = false; + let cleanup_delete_finish = false; + + cache.on('cleanup_search_progress', () => cleanup_search_progress = true) + .on('cleanup_search_finish', () => cleanup_search_finish = true) + .on('cleanup_delete_item', () => cleanup_delete_item = true) + .on('cleanup_delete_finish', () => cleanup_delete_finish = true); + 
+ return cache.cleanup(false).then(() => { + assert(cleanup_search_progress); + assert(cleanup_search_finish); + assert(cleanup_delete_item); + assert(cleanup_delete_finish); + }); + }); + + it("should not delete any files if the dryRun option is true", async () => { + const opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "P30D", + maxCacheSize: 1, + lowMemory: true, + }; + + await cache.init(opts); + const file = await addFileToCache(moment().toDate()); + cache.cleanup(true); + assert(await fs.pathExists(file.path)); + }); + + it("should remove versions from the reliability manager, when in high reliability mode", async () => { + const opts = Object.assign({}, cacheOpts); + opts.cleanupOptions = { + expireTimeSpan: "P30D", + maxCacheSize: 1, + lowMemory: true, + }; + + await cache.init(opts); + const file = await addFileToCache(moment().toDate()); + let rmEntry = cache.reliabilityManager.getEntry(file.guidStr, file.hashStr); + assert(rmEntry); + + await cache.cleanup(false); + rmEntry = cache.reliabilityManager.getEntry(file.guidStr, file.hashStr); + assert(!rmEntry); + }); + }); }); describe("PutTransaction API", () => { diff --git a/test/helpers.js b/test/helpers.js index 6f5d154..e431c42 100644 --- a/test/helpers.js +++ b/test/helpers.js @@ -181,4 +181,27 @@ describe("Helper functions", () => { }); }); }); + + describe("insertSorted", () => { + it("should insert an element into the correct position in an array", async () => { + let arr = [1, 2, 4, 5]; + arr = helpers.insertSorted(3, arr, (a, b) => { + if (a === b) return 0; + return a < b ? -1 : 1 + }); + + assert.equal(arr[2], 3); + + }); + it("should insert an element into the correct position in an empty array", async () => { + let arr = []; + arr = helpers.insertSorted(3, arr, (a, b) => { + if (a === b) return 0; + return a < b ? -1 : 1 + }); + + assert.equal(arr[0], 3); + + }); + }); }); \ No newline at end of file From 9b9ac9016997a7751a0a441f86ec09f69d7abaf6 Mon Sep 17 00:00:00 2001 From: Brett Kercher Date: Tue, 12 Jun 2018 16:11:23 -0500 Subject: [PATCH 4/5] remove option for low memory, make low memory algorithm default --- README.md | 1 - cleanup.js | 13 ++-- config/default.yml | 1 - lib/cache/cache_fs.js | 64 +------------------- test/cache_fs.js | 138 ------------------------------------------ 5 files changed, 5 insertions(+), 212 deletions(-) diff --git a/README.md b/README.md index 3ad4489..4f401ff 100644 --- a/README.md +++ b/README.md @@ -169,7 +169,6 @@ Command | Description -s, --max-cache-size | Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. A value of 0 disables this check. -d, --delete | Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only. -D, --daemon | Daemon mode. Execute the cleanup script at the given interval in seconds as a foreground process. --m, --low-memory | Run the cleanup algorithm optimized for low memory usage. -h, --help | Show usage information. ### Notes diff --git a/cleanup.js b/cleanup.js index 160f1e4..210e74c 100644 --- a/cleanup.js +++ b/cleanup.js @@ -37,8 +37,7 @@ program.description("Unity Cache Server - Cache Cleanup\n\n Removes old files f .option('-s, --max-cache-size ', 'Override the configured maximum cache size. Files will be removed from the cache until the max cache size is satisfied, using a Least Recently Used search. 
A value of 0 disables this check.', myParseInt) .option('-d, --delete', 'Delete cached files that match the configured criteria. Without this, the default behavior is to dry-run which will print diagnostic information only.') .option('-D, --daemon ', 'Daemon mode: execute the cleanup script at the given interval in seconds as a foreground process.', myParseInt) - .option('--NODE_CONFIG_DIR=', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used.') - .option('-m, --low-memory', 'Run the cleanup algorithm optimized for low memory usage.'); + .option('--NODE_CONFIG_DIR=', 'Specify the directory to search for config files. This is equivalent to setting the NODE_CONFIG_DIR environment variable. Without this option, the built-in configuration is used.'); program.parse(process.argv); @@ -67,10 +66,6 @@ if(program.hasOwnProperty('maxCacheSize')) { cacheOpts.cleanupOptions.maxCacheSize = program.maxCacheSize; } -if(program.hasOwnProperty('lowMemory')) { - cacheOpts.cleanupOptions.lowMemory = program.lowMemory; -} - const dryRun = !program.delete; const logLevel = helpers.getLogLevel(); @@ -87,13 +82,14 @@ cache.on('cleanup_delete_finish', data => { } }); +const msg = 'Gathering cache files for expiration'; let spinner = null; if(logLevel < consts.LOG_DBG && logLevel >= consts.LOG_INFO) { spinner = ora({color: 'white'}); cache.on('cleanup_search_progress', data => { - spinner.text = `${data.msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; + spinner.text = `${msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; }); cache.on('cleanup_search_finish', () => { @@ -102,12 +98,11 @@ if(logLevel < consts.LOG_DBG && logLevel >= consts.LOG_INFO) { } else if(logLevel === consts.LOG_DBG) { cache.on('cleanup_search_progress', data => { - const txt = `${data.msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; + const txt = `${msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; helpers.log(consts.LOG_DBG, txt); }); } -const msg = 'Gathering cache files for expiration'; function doCleanup() { if (spinner) spinner.start(msg); cache.cleanup(dryRun) diff --git a/config/default.yml b/config/default.yml index e254188..d8d88ba 100644 --- a/config/default.yml +++ b/config/default.yml @@ -21,7 +21,6 @@ Cache: cleanupOptions: expireTimeSpan: "P30D" maxCacheSize: 0 - lowMemory: false persistence: true persistenceOptions: autosave: true diff --git a/lib/cache/cache_fs.js b/lib/cache/cache_fs.js index 60c8764..e7c0a3f 100644 --- a/lib/cache/cache_fs.js +++ b/lib/cache/cache_fs.js @@ -118,67 +118,6 @@ class CacheFS extends CacheBase { return Promise.reject(new Error("Invalid expireTimeSpan option")); } - if(this._options.cleanupOptions.lowMemory) { - return this.cleanup_low_mem(dryRun, expireDuration, minFileAccessTime, maxCacheSize); - } - - return this.cleanup_fast(dryRun, expireDuration, minFileAccessTime, maxCacheSize); - } - - cleanup_fast(dryRun = true, expireDuration, minFileAccessTime, maxCacheSize) { - const allItems = []; - const deleteItems = []; - let cacheSize = 0; - let deleteSize = 0; - - const progressData = () => { - return { - cacheCount: allItems.length, - cacheSize: cacheSize, - deleteCount: deleteItems.length, - deleteSize: deleteSize, - msg: 'Gathering cache files for expiration', - }; - }; - - const progressEvent = () => 
this.emit('cleanup_search_progress', progressData()); - - progressEvent(); - const progressTimer = setInterval(progressEvent, 250); - - return helpers.readDir(this._cachePath, (item) => { - item = {path: item.path, stats: pick(item.stats, ['atime', 'size'])}; - allItems.push(item); - cacheSize += item.stats.size; - if(item.stats.atime < minFileAccessTime) { - deleteSize += item.stats.size; - deleteItems.push(item); - } - }).then(async () => { - if(maxCacheSize > 0 && cacheSize - deleteSize > maxCacheSize) { - allItems.sort((a, b) => { return a.stats.atime > b.stats.atime }); - for(const item of allItems) { - deleteSize += item.stats.size; - deleteItems.push(item); - if(cacheSize - deleteSize <= maxCacheSize) break; - } - } - - clearTimeout(progressTimer); - this.emit('cleanup_search_finish', progressData()); - - await Promise.all( - deleteItems.map(async (d) => { - await this.delete_cache_item(dryRun, d); - }) - ); - - this.emit('cleanup_delete_finish', progressData()); - }); - } - - cleanup_low_mem(dryRun = true, expireDuration, minFileAccessTime, maxCacheSize) { - let cacheCount = 0; let cacheSize = 0; let deleteSize = 0; @@ -261,8 +200,7 @@ class CacheFS extends CacheBase { } } }); - }) - .then(async () => { + }).then(async () => { clearTimeout(progressTimer); this.emit('cleanup_search_finish', progressData()); diff --git a/test/cache_fs.js b/test/cache_fs.js index e7e1e16..f996c62 100644 --- a/test/cache_fs.js +++ b/test/cache_fs.js @@ -197,144 +197,6 @@ describe("Cache: FS", () => { }); }); - describe("cleanup_low_memory", () => { - it("should remove files that have not been accessed within a given timespan (ASP.NET style)", async () => { - const opts = Object.assign({}, cacheOpts); - opts.cleanupOptions = { - expireTimeSpan: "P1D", - maxCacheSize: 0, - lowMemory: true, - }; - - await cache.init(opts); - const file1 = await addFileToCache(moment().subtract(2, 'days').toDate()); - const file2 = await addFileToCache(moment().subtract(2, 'days').toDate()); - const file3 = await addFileToCache(moment().toDate()); - - await cache.cleanup(false); - - assert(!await fs.pathExists(file1.path)); - assert(!await fs.pathExists(file2.path)); - assert(await fs.pathExists(file3.path)); - }); - - it("should remove files that have not been accessed within a given timespan (ISO 8601 style)", async () => { - const opts = Object.assign({}, cacheOpts); - opts.cleanupOptions = { - expireTimeSpan: "1.00:00:00", - maxCacheSize: 0, - lowMemory: true, - }; - - await cache.init(opts); - const file1 = await addFileToCache(moment().subtract(2, 'days').toDate()); - const file2 = await addFileToCache(moment().subtract(2, 'days').toDate()); - const file3 = await addFileToCache(moment().toDate()); - - assert(await fs.pathExists(file1.path)); - assert(await fs.pathExists(file2.path)); - assert(await fs.pathExists(file3.path)); - - await cache.cleanup(false); - - assert(!await fs.pathExists(file1.path)); - assert(!await fs.pathExists(file2.path)); - assert(await fs.pathExists(file3.path)); - }); - - it("should remove files in least-recently-used order until the overall cache size is lower than a given threshold", async () => { - const opts = Object.assign({}, cacheOpts); - opts.cleanupOptions = { - expireTimeSpan: "P30D", - maxCacheSize: MIN_FILE_SIZE * 2 + 1, - lowMemory: true, - }; - - await cache.init(opts); - const file1 = await addFileToCache(moment().toDate()); - const file2 = await addFileToCache(moment().subtract(1, 'days').toDate()); - const file3 = await addFileToCache(moment().subtract(5, 'days').toDate()); 
- - assert(await fs.pathExists(file1.path)); - assert(await fs.pathExists(file2.path)); - assert(await fs.pathExists(file3.path)); - - await cache.cleanup(false); - - assert(await fs.pathExists(file1.path)); - assert(await fs.pathExists(file2.path)); - assert(!await fs.pathExists(file3.path)); - - opts.cleanupOptions.maxCacheSize = MIN_FILE_SIZE + 1; - cache._options = opts; - - await cache.cleanup(false); - - assert(await fs.pathExists(file1.path)); - assert(!await fs.pathExists(file2.path)); - }); - - it("should emit events while processing files", async () => { - const opts = Object.assign({}, cacheOpts); - opts.cleanupOptions = { - expireTimeSpan: "P30D", - maxCacheSize: 1, - lowMemory: true, - }; - - await cache.init(opts); - await addFileToCache(moment().toDate()); - - let cleanup_search_progress = false; - let cleanup_search_finish = false; - let cleanup_delete_item = false; - let cleanup_delete_finish = false; - - cache.on('cleanup_search_progress', () => cleanup_search_progress = true) - .on('cleanup_search_finish', () => cleanup_search_finish = true) - .on('cleanup_delete_item', () => cleanup_delete_item = true) - .on('cleanup_delete_finish', () => cleanup_delete_finish = true); - - return cache.cleanup(false).then(() => { - assert(cleanup_search_progress); - assert(cleanup_search_finish); - assert(cleanup_delete_item); - assert(cleanup_delete_finish); - }); - }); - - it("should not delete any files if the dryRun option is true", async () => { - const opts = Object.assign({}, cacheOpts); - opts.cleanupOptions = { - expireTimeSpan: "P30D", - maxCacheSize: 1, - lowMemory: true, - }; - - await cache.init(opts); - const file = await addFileToCache(moment().toDate()); - cache.cleanup(true); - assert(await fs.pathExists(file.path)); - }); - - it("should remove versions from the reliability manager, when in high reliability mode", async () => { - const opts = Object.assign({}, cacheOpts); - opts.cleanupOptions = { - expireTimeSpan: "P30D", - maxCacheSize: 1, - lowMemory: true, - }; - - await cache.init(opts); - const file = await addFileToCache(moment().toDate()); - let rmEntry = cache.reliabilityManager.getEntry(file.guidStr, file.hashStr); - assert(rmEntry); - - await cache.cleanup(false); - rmEntry = cache.reliabilityManager.getEntry(file.guidStr, file.hashStr); - assert(!rmEntry); - }); - }); }); describe("PutTransaction API", () => { From 8973523dbedad2127145b11bf65c315ac187b006 Mon Sep 17 00:00:00 2001 From: Brett Kercher Date: Tue, 12 Jun 2018 16:14:56 -0500 Subject: [PATCH 5/5] re-add dynamic message for spinner --- cleanup.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cleanup.js b/cleanup.js index 210e74c..3f1d0f3 100644 --- a/cleanup.js +++ b/cleanup.js @@ -82,14 +82,13 @@ cache.on('cleanup_delete_finish', data => { } }); -const msg = 'Gathering cache files for expiration'; let spinner = null; if(logLevel < consts.LOG_DBG && logLevel >= consts.LOG_INFO) { spinner = ora({color: 'white'}); cache.on('cleanup_search_progress', data => { - spinner.text = `${msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; + spinner.text = `${data.msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; }); cache.on('cleanup_search_finish', () => { @@ -98,11 +97,12 @@ if(logLevel < consts.LOG_DBG && logLevel >= consts.LOG_INFO) { } else if(logLevel === consts.LOG_DBG) { cache.on('cleanup_search_progress', data => { - const txt = `${msg} (${data.deleteCount} of ${data.cacheCount} files, 
${filesize(data.deleteSize)})`; + const txt = `${data.msg} (${data.deleteCount} of ${data.cacheCount} files, ${filesize(data.deleteSize)})`; helpers.log(consts.LOG_DBG, txt); }); } +const msg = 'Gathering cache files for expiration'; function doCleanup() { if (spinner) spinner.start(msg); cache.cleanup(dryRun)
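
Note on the low-memory candidate selection (introduced behind -m/--low-memory in patch 1/5 and made the default path in patch 4/5): instead of loading every cache entry into allItems, the cleanup streams the cache directory twice. The first pass deletes files whose atime is older than the expiry cutoff as it encounters them; the second pass, run only if the cache is still over maxCacheSize, keeps a small atime-sorted list of deletion candidates rather than the full file listing. The sketch below is illustrative only, not the patched code: it assumes each entry is shaped like { path, stats: { atime, size } } with atime as a millisecond timestamp, and the function name selectDeleteCandidates is hypothetical.

    // Illustrative sketch only (not the patch itself). Maintains a bounded,
    // atime-sorted list of LRU delete candidates, in the spirit of the second
    // pass of the low-memory cleanup. Plain Node.js, no external dependencies.

    // Binary-search insertion into an array kept ordered by compare()
    // (compare returns -1/0/1; oldest atime sorts first).
    function insertSorted(item, arr, compare) {
        let lo = 0, hi = arr.length;
        while (lo < hi) {
            const mid = (lo + hi) >> 1;
            if (compare(item, arr[mid]) < 0) hi = mid;
            else lo = mid + 1;
        }
        arr.splice(lo, 0, item);
        return arr;
    }

    const byAtime = (a, b) => {
        if (a.stats.atime === b.stats.atime) return 0;
        return a.stats.atime < b.stats.atime ? -1 : 1;
    };

    // files: iterable of cache entries that survived the expiry pass.
    // cacheSize: total bytes currently on disk; maxCacheSize: configured ceiling.
    // Returns the entries to delete, aiming for cacheSize - deleteSize <= maxCacheSize,
    // while holding only about as many entries in memory as the delete set requires.
    function selectDeleteCandidates(files, cacheSize, maxCacheSize) {
        const deleteItems = [];
        let deleteSize = 0;
        let needsSort = false;

        for (const item of files) {
            if (cacheSize - deleteSize > maxCacheSize) {
                // Still over the ceiling: every file seen so far is a candidate.
                deleteItems.push(item);
                deleteSize += item.stats.size;
                needsSort = true;
                continue;
            }

            if (needsSort) {
                deleteItems.sort(byAtime); // one deferred sort, oldest atime first
                needsSort = false;
            }

            // Ceiling already met: this file only displaces the most-recently-used
            // candidate if it was itself accessed less recently.
            const mru = deleteItems[deleteItems.length - 1];
            if (mru && item.stats.atime < mru.stats.atime) {
                insertSorted(item, deleteItems, byAtime);
                deleteSize += item.stats.size;
                // If the ceiling still holds without the old MRU candidate,
                // drop it so the list does not grow.
                if (cacheSize - (deleteSize - mru.stats.size) <= maxCacheSize) {
                    deleteItems.pop();
                    deleteSize -= mru.stats.size;
                }
            }
        }

        return { deleteItems, deleteSize };
    }

The point of the bounded list is that once the running deleteSize satisfies the size target, each newly seen file can only displace the most-recently-used candidate, so memory use stays roughly proportional to the delete set rather than to the whole cache, which is the saving over the original approach that retained every entry in allItems.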