diff --git a/@xen-orchestra/backups/_cleanVm.js b/@xen-orchestra/backups/_cleanVm.js
index 6afbfdd082b..e27468af226 100644
--- a/@xen-orchestra/backups/_cleanVm.js
+++ b/@xen-orchestra/backups/_cleanVm.js
@@ -119,7 +119,7 @@ const listVhds = async (handler, vmDir) => {
          const list = await handler.list(vdiDir, {
            filter: file => isVhdFile(file) || INTERRUPTED_VHDS_REG.test(file),
          })
-          aliases[vdiDir] = list.filter(vhd => isVhdAlias(vhd))
+          aliases[vdiDir] = list.filter(vhd => isVhdAlias(vhd)).map(file => `${vdiDir}/${file}`)
          list.forEach(file => {
            const res = INTERRUPTED_VHDS_REG.exec(file)
            if (res === null) {
@@ -249,15 +249,11 @@ exports.cleanVm = async function cleanVm(
    }
  }
 
-  // 2022-01-17 - FBP & JFT - Temporary disable aliases checking as it appears problematic
-  //
  // check if alias are correct
  // check if all vhd in data subfolder have a corresponding alias
-  // await asyncMap(Object.keys(aliases), async dir => {
-  //   await checkAliases(aliases[dir], `${dir}/data`, { handler, onLog, remove })
-  // })
-  // Avoid a ESLint unused variable
-  noop(aliases)
+  await asyncMap(Object.keys(aliases), async dir => {
+    await checkAliases(aliases[dir], `${dir}/data`, { handler, onLog, remove })
+  })
 
  // remove VHDs with missing ancestors
  {
diff --git a/@xen-orchestra/backups/writers/_MixinBackupWriter.js b/@xen-orchestra/backups/writers/_MixinBackupWriter.js
index 62d3cb72ec8..5fc18516e85 100644
--- a/@xen-orchestra/backups/writers/_MixinBackupWriter.js
+++ b/@xen-orchestra/backups/writers/_MixinBackupWriter.js
@@ -44,13 +44,14 @@ exports.MixinBackupWriter = (BaseClass = Object) =>
 
    async afterBackup() {
      const { disableMergeWorker } = this._backup.config
+      // merge worker only compatible with local remotes
+      const { handler } = this._adapter
+      const willMergeInWorker = !disableMergeWorker && typeof handler._getRealPath === 'function'
 
-      const { merge } = await this._cleanVm({ remove: true, merge: disableMergeWorker })
+      const { merge } = await this._cleanVm({ remove: true, merge: !willMergeInWorker })
      await this.#lock.dispose()
 
-      // merge worker only compatible with local remotes
-      const { handler } = this._adapter
-      if (merge && !disableMergeWorker && typeof handler._getRealPath === 'function') {
+      if (merge && willMergeInWorker) {
        const taskFile =
          join(MergeWorker.CLEAN_VM_QUEUE, formatFilenameDate(new Date())) +
          '-' +
diff --git a/@xen-orchestra/fs/src/s3.js b/@xen-orchestra/fs/src/s3.js
index ea1b70f0342..d8d584b194e 100644
--- a/@xen-orchestra/fs/src/s3.js
+++ b/@xen-orchestra/fs/src/s3.js
@@ -19,8 +19,25 @@ const MAX_OBJECT_SIZE = 1024 * 1024 * 1024 * 1024 * 5 // 5TB
 const IDEAL_FRAGMENT_SIZE = Math.ceil(MAX_OBJECT_SIZE / MAX_PARTS_COUNT) // the smallest fragment size that still allows a 5TB upload in 10000 fragments, about 524MB
 
 const { warn } = createLogger('xo:fs:s3')
-
+// some objectstorage provider like backblaze, can answer a 500/503 routinely
+// in this case we should retry, and let their load balancing do its magic
+// https://www.backblaze.com/b2/docs/calling.html#error_handling
+const retryOptions = {
+  delays: [100, 200, 500, 1000, 2000],
+  when: e => e.code === 'InternalError',
+  onRetry(error) {
+    warn('retrying writing file', {
+      attemptNumber: this.attemptNumber,
+      delay: this.delay,
+      error,
+      file: this.arguments[0],
+    })
+  },
+}
 export default class S3Handler extends RemoteHandlerAbstract {
+
+
+
  constructor(remote, _opts) {
    super(remote)
    const { allowUnauthorized, host, path, username, password, protocol, region } = parse(remote.url)
@@ -123,21 +140,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
    }
  }
 
-  // some objectstorage provider like backblaze, can answer a 500/503 routinely
-  // in this case we should retry, and let their load balancing do its magic
-  // https://www.backblaze.com/b2/docs/calling.html#error_handling
-  @decorateWith(pRetry.wrap, {
-    delays: [100, 200, 500, 1000, 2000],
-    when: e => e.code === 'InternalError',
-    onRetry(error) {
-      warn('retrying writing file', {
-        attemptNumber: this.attemptNumber,
-        delay: this.delay,
-        error,
-        file: this.arguments[0],
-      })
-    },
-  })
+  @decorateWith(pRetry.wrap, retryOptions)
  async _writeFile(file, data, options) {
    return this._s3.putObject({ ...this._createParams(file), Body: data })
  }
diff --git a/packages/vhd-lib/index.js b/packages/vhd-lib/index.js
index 65b0376c551..e659e8863af 100644
--- a/packages/vhd-lib/index.js
+++ b/packages/vhd-lib/index.js
@@ -1,3 +1,4 @@
+
 exports.chainVhd = require('./chain')
 exports.checkFooter = require('./checkFooter')
 exports.checkVhdChain = require('./checkChain')
@@ -12,3 +13,6 @@ exports.VhdDirectory = require('./Vhd/VhdDirectory').VhdDirectory
 exports.VhdFile = require('./Vhd/VhdFile').VhdFile
 exports.VhdSynthetic = require('./Vhd/VhdSynthetic').VhdSynthetic
 exports.Constants = require('./_constants')
+const { isVhdAlias, resolveAlias } = require('./_resolveAlias')
+exports.isVhdAlias = isVhdAlias
+exports.resolveAlias = resolveAlias
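
Note (not part of the patch): the vhd-lib hunk re-exports the alias helpers that the re-enabled check in _cleanVm.js relies on. As a rough illustration only, here is a minimal sketch of how a consumer might use them, assuming resolveAlias(handler, path) returns the path of the data VHD the alias file points to; listAliasTargets is a hypothetical helper, not code from this patch.

const { isVhdAlias, resolveAlias } = require('vhd-lib')

// list the data VHDs referenced by the aliases found directly in `dir`
async function listAliasTargets(handler, dir) {
  const entries = await handler.list(dir)
  const targets = []
  for (const entry of entries) {
    if (isVhdAlias(entry)) {
      // assumption: resolveAlias reads the alias file and returns the target path
      targets.push(await resolveAlias(handler, `${dir}/${entry}`))
    }
  }
  return targets
}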