feat(backups): update VM backups cache
Update the cache in place when backups are created or deleted, instead of invalidating it completely and regenerating it from scratch.
julien-f committed Sep 7, 2022
1 parent 31aaa96 commit 0027978
Showing 4 changed files with 84 additions and 36 deletions.
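
Before the per-file diffs, here is a minimal standalone sketch of the read-modify-write pattern this commit introduces for the per-VM cache.json.gz, as opposed to the previous approach of unlinking the cache and rebuilding it on the next listing. The fs-based helper names are illustrative only; the real code goes through the remote handler abstraction shown in the diff below.

// Sketch (not part of the commit): in-place update of a gzipped JSON cache
const fs = require('node:fs/promises')
const zlib = require('node:zlib')
const { promisify } = require('node:util')

const gzip = promisify(zlib.gzip)
const gunzip = promisify(zlib.gunzip)

// read the cache, returning undefined when it is missing or unreadable
async function readCache(path) {
  try {
    return JSON.parse(await gunzip(await fs.readFile(path)))
  } catch (error) {
    if (error.code !== 'ENOENT') {
      console.warn('readCache', { error, path })
    }
  }
}

// mutate the existing cache content and write it back, instead of deleting the file
async function updateCache(path, fn) {
  const cache = await readCache(path)
  if (cache !== undefined) {
    fn(cache)
    await fs.writeFile(path, await gzip(JSON.stringify(cache)))
  }
}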
108 changes: 82 additions & 26 deletions @xen-orchestra/backups/RemoteAdapter.js
@@ -22,6 +22,7 @@ const zlib = require('zlib')

const { BACKUP_DIR } = require('./_getVmBackupDir.js')
const { cleanVm } = require('./_cleanVm.js')
const { formatFilenameDate } = require('./_filenameDate.js')
const { getTmpDir } = require('./_getTmpDir.js')
const { isMetadataFile } = require('./_backupType.js')
const { isValidXva } = require('./_isValidXva.js')
@@ -224,11 +225,31 @@ class RemoteAdapter {
return promise
}

#removeVmBackupsFromCache(backups) {
// will not throw
asyncMap(
Object.entries(
groupBy(
backups.map(_ => _._filename),
dirname
)
),
([dir, filenames]) =>
this.#updateCache(dir + '/cache.json.gz', backups => {
for (const filename of filenames) {
delete backups[filename]
}
})
)
}

async deleteDeltaVmBackups(backups) {
const handler = this._handler

// this will delete the json, unused VHDs will be detected by `cleanVm`
await asyncMapSettled(backups, ({ _filename }) => handler.unlink(_filename))

this.#removeVmBackupsFromCache(backups)
}

async deleteMetadataBackup(backupId) {
@@ -256,6 +277,8 @@
await asyncMapSettled(backups, ({ _filename, xva }) =>
Promise.all([handler.unlink(_filename), handler.unlink(resolveRelativeFromFile(_filename, xva))])
)

this.#removeVmBackupsFromCache(backups)
}

deleteVmBackup(file) {
@@ -281,9 +304,6 @@
// don't merge in main process, unused VHDs will be merged in the next backup run
await this.cleanVm(dir, { remove: true, logWarn: warn })
}

const dedupedVmUuid = new Set(metadatas.map(_ => _.vm.uuid))
await asyncMap(dedupedVmUuid, vmUuid => this.invalidateVmBackupListCache(vmUuid))
}

#getCompressionType() {
@@ -458,8 +478,39 @@
return backupsByPool
}

#getVmBackupsCache(vmUuid) {
return `${BACKUP_DIR}/${vmUuid}/cache.json.gz`
}

async #readCache(path) {
try {
return JSON.parse(await fromCallback(zlib.gunzip, await this.handler.readFile(path)))
} catch (error) {
if (error.code !== 'ENOENT') {
warn('#readCache', { error, path })
}
}
}

async #updateCache(path, fn) {
const cache = await this.#readCache(path)
if (cache !== undefined) {
fn(cache)

await this.#writeCache(path, cache)
}
}

async #writeCache(path, data) {
try {
await this.handler.writeFile(path, await fromCallback(zlib.gzip, JSON.stringify(data)), { flags: 'w' })
} catch (error) {
warn('#writeCache', { error, path })
}
}

async invalidateVmBackupListCache(vmUuid) {
await this.handler.unlink(`${BACKUP_DIR}/${vmUuid}/cache.json.gz`)
await this.handler.unlink(this.#getVmBackupsCache(vmUuid))
}

async #getCachabledDataListVmBackups(dir) {
@@ -498,41 +549,25 @@
// if cache is missing or broken => regenerate it and return

async _readCacheListVmBackups(vmUuid) {
const dir = `${BACKUP_DIR}/${vmUuid}`
const path = `${dir}/cache.json.gz`
const path = this.#getVmBackupsCache(vmUuid)

try {
const gzipped = await this.handler.readFile(path)
const text = await fromCallback(zlib.gunzip, gzipped)
return JSON.parse(text)
} catch (error) {
if (error.code !== 'ENOENT') {
warn('Cache file was unreadable', { vmUuid, error })
}
const cache = await this.#readCache(path)
if (cache !== undefined) {
return cache
}

// nothing cached, or cache unreadable => regenerate it
const backups = await this.#getCachabledDataListVmBackups(dir)
const backups = await this.#getCachabledDataListVmBackups(`${BACKUP_DIR}/${vmUuid}`)
if (backups === undefined) {
return
}

// detached async action, will not reject
this.#writeVmBackupsCache(path, backups)
this.#writeCache(path, backups)

return backups
}

async #writeVmBackupsCache(cacheFile, backups) {
try {
const text = JSON.stringify(backups)
const zipped = await fromCallback(zlib.gzip, text)
await this.handler.writeFile(cacheFile, zipped, { flags: 'w' })
} catch (error) {
warn('writeVmBackupsCache', { cacheFile, error })
}
}

async listVmBackups(vmUuid, predicate) {
const backups = []
const cached = await this._readCacheListVmBackups(vmUuid)
@@ -571,6 +606,27 @@
return backups.sort(compareTimestamp)
}

async writeVmBackupMetadata(vmUuid, metadata) {
const path = `/${BACKUP_DIR}/${vmUuid}/${formatFilenameDate(metadata.timestamp)}.json`

await this.handler.outputFile(path, JSON.stringify(metadata), {
dirMode: this._dirMode,
})

// will not throw
this.#updateCache(this.#getVmBackupsCache(vmUuid), backups => {
backups[path] = {
...metadata,

// these values are required in the cache
_filename: path,
id: path,
}
})

return path
}

async writeVhd(path, input, { checksum = true, validator = noop } = {}) {
const handler = this._handler

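On the deletion side, #removeVmBackupsFromCache batches the removed metadata files per directory so each cache.json.gz is rewritten at most once. A rough illustration of that grouping step (assuming a lodash-style groupBy and Node's path.dirname; the filenames are placeholders):

// Sketch (not part of the commit): one cache update per backup directory
const { dirname } = require('node:path')
const groupBy = require('lodash/groupBy')

const deleted = [
  { _filename: '/backups/vm-1/20220906T120000Z.json' },
  { _filename: '/backups/vm-1/20220907T120000Z.json' },
]

const filenamesByDir = groupBy(deleted.map(_ => _._filename), dirname)
// { '/backups/vm-1': ['/backups/vm-1/20220906T120000Z.json', '/backups/vm-1/20220907T120000Z.json'] }
// each entry then triggers a single updateCache(dir + '/cache.json.gz', ...) call
// that deletes those keys from the cached object
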
5 changes: 1 addition & 4 deletions @xen-orchestra/backups/writers/DeltaBackupWriter.js
@@ -189,7 +189,6 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
}/${adapter.getVhdFileName(basename)}`
)

const metadataFilename = (this._metadataFileName = `${backupDir}/${basename}.json`)
const metadataContent = {
jobId,
mode: job.mode,
@@ -254,9 +253,7 @@
}
})
metadataContent.size = size
await handler.outputFile(metadataFilename, JSON.stringify(metadataContent), {
dirMode: backup.config.dirMode,
})
this._metadataFileName = await adapter.writeVmBackupMetadata(vm.uuid, metadataContent)

// TODO: run cleanup?
}
6 changes: 1 addition & 5 deletions @xen-orchestra/backups/writers/FullBackupWriter.js
@@ -34,7 +34,6 @@ exports.FullBackupWriter = class FullBackupWriter extends MixinBackupWriter(Abst
const { job, scheduleId, vm } = backup

const adapter = this._adapter
const handler = adapter.handler
const backupDir = getVmBackupDir(vm.uuid)

// TODO: clean VM backup directory
@@ -50,7 +49,6 @@ exports.FullBackupWriter = class FullBackupWriter extends MixinBackupWriter(Abst
const dataBasename = basename + '.xva'
const dataFilename = backupDir + '/' + dataBasename

const metadataFilename = `${backupDir}/${basename}.json`
const metadata = {
jobId: job.id,
mode: job.mode,
@@ -74,9 +72,7 @@ exports.FullBackupWriter = class FullBackupWriter extends MixinBackupWriter(Abst
return { size: sizeContainer.size }
})
metadata.size = sizeContainer.size
await handler.outputFile(metadataFilename, JSON.stringify(metadata), {
dirMode: backup.config.dirMode,
})
await adapter.writeVmBackupMetadata(vm.uuid, metadata)

if (!deleteFirst) {
await deleteOldBackups()
1 change: 0 additions & 1 deletion @xen-orchestra/backups/writers/_MixinBackupWriter.js
@@ -71,6 +71,5 @@ exports.MixinBackupWriter = (BaseClass = Object) =>
const remotePath = handler._getRealPath()
await MergeWorker.run(remotePath)
}
await this._adapter.invalidateVmBackupListCache(this._backup.vm.uuid)
}
}
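
With the writers delegating to adapter.writeVmBackupMetadata (which inserts the new entry into the cache) and the delete paths pruning entries via the private cache helpers, the end-of-run invalidateVmBackupListCache call above is no longer needed. A rough usage sketch of the resulting flow, using the method names from this commit with hypothetical surrounding values:

// Sketch (not part of the commit): the cache now stays warm across operations
const path = await adapter.writeVmBackupMetadata(vmUuid, metadata)
// writes <timestamp>.json and adds the entry to cache.json.gz in place

const backups = await adapter.listVmBackups(vmUuid)
// served from cache.json.gz; the full directory scan only runs when the cache is absent

await adapter.deleteDeltaVmBackups(backups.filter(_ => _.mode === 'delta'))
// unlinks the metadata files and drops their entries from cache.json.gz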
