diff --git a/.eslintrc.js b/.eslintrc.js index 108081979fd..0a20b80eef1 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -28,9 +28,10 @@ module.exports = { }, }, { - files: ['*.spec.{,c,m}js'], - excludedFiles: ['@vates/nbd-client', '@vates/otp'], + files: ['*.{spec,test}.{,c,m}js'], rules: { + 'n/no-unpublished-require': 'off', + 'n/no-unpublished-import': 'off', 'n/no-unsupported-features/node-builtins': [ 'error', { diff --git a/@vates/async-each/index.spec.js b/@vates/async-each/index.test.js similarity index 59% rename from @vates/async-each/index.spec.js rename to @vates/async-each/index.test.js index 20e85e3eecb..a46af2edf4d 100644 --- a/@vates/async-each/index.spec.js +++ b/@vates/async-each/index.test.js @@ -1,6 +1,8 @@ 'use strict' -/* eslint-env jest */ +const { describe, it, beforeEach } = require('test') +const assert = require('assert').strict +const { spy } = require('sinon') const { asyncEach } = require('./') @@ -34,12 +36,18 @@ describe('asyncEach', () => { }) it('works', async () => { - const iteratee = jest.fn(async () => {}) + const iteratee = spy(async () => {}) await asyncEach.call(thisArg, iterable, iteratee, { concurrency: 1 }) - expect(iteratee.mock.instances).toEqual(Array.from(values, () => thisArg)) - expect(iteratee.mock.calls).toEqual(Array.from(values, (value, index) => [value, index, iterable])) + assert.deepStrictEqual( + iteratee.thisValues, + Array.from(values, () => thisArg) + ) + assert.deepStrictEqual( + iteratee.args, + Array.from(values, (value, index) => [value, index, iterable]) + ) }) ;[1, 2, 4].forEach(concurrency => { it('respects a concurrency of ' + concurrency, async () => { @@ -49,7 +57,7 @@ describe('asyncEach', () => { values, async () => { ++running - expect(running).toBeLessThanOrEqual(concurrency) + assert.deepStrictEqual(running <= concurrency, true) await randomDelay() --running }, @@ -59,42 +67,52 @@ describe('asyncEach', () => { }) it('stops on first error when stopOnError is true', async () => { + const tracker = new assert.CallTracker() + const error = new Error() - const iteratee = jest.fn((_, i) => { + const iteratee = tracker.calls((_, i) => { if (i === 1) { throw error } - }) + }, 2) + assert.deepStrictEqual( + await rejectionOf(asyncEach(iterable, iteratee, { concurrency: 1, stopOnError: true })), + error + ) - expect(await rejectionOf(asyncEach(iterable, iteratee, { concurrency: 1, stopOnError: true }))).toBe(error) - expect(iteratee).toHaveBeenCalledTimes(2) + tracker.verify() }) it('rejects AggregateError when stopOnError is false', async () => { const errors = [] - const iteratee = jest.fn(() => { + const iteratee = spy(() => { const error = new Error() errors.push(error) throw error }) const error = await rejectionOf(asyncEach(iterable, iteratee, { stopOnError: false })) - expect(error.errors).toEqual(errors) - expect(iteratee.mock.calls).toEqual(Array.from(values, (value, index) => [value, index, iterable])) + assert.deepStrictEqual(error.errors, errors) + assert.deepStrictEqual( + iteratee.args, + Array.from(values, (value, index) => [value, index, iterable]) + ) }) it('can be interrupted with an AbortSignal', async () => { + const tracker = new assert.CallTracker() + const ac = new AbortController() - const iteratee = jest.fn((_, i) => { + const iteratee = tracker.calls((_, i) => { if (i === 1) { ac.abort() } + }, 2) + await assert.rejects(asyncEach(iterable, iteratee, { concurrency: 1, signal: ac.signal }), { + message: 'asyncEach aborted', }) - await expect(asyncEach(iterable, iteratee, { concurrency: 1, signal: 
ac.signal })).rejects.toThrow( - 'asyncEach aborted' - ) - expect(iteratee).toHaveBeenCalledTimes(2) + tracker.verify() }) }) ) diff --git a/@vates/async-each/package.json b/@vates/async-each/package.json index 2079d92f39b..28da0a44274 100644 --- a/@vates/async-each/package.json +++ b/@vates/async-each/package.json @@ -29,6 +29,12 @@ "node": ">=8.10" }, "scripts": { - "postversion": "npm publish --access public" + "postversion": "npm publish --access public", + "test": "node--test" + }, + "devDependencies": { + "sinon": "^14.0.1", + "tap": "^16.3.0", + "test": "^3.2.1" } } diff --git a/@vates/coalesce-calls/index.spec.js b/@vates/coalesce-calls/index.test.js similarity index 74% rename from @vates/coalesce-calls/index.spec.js rename to @vates/coalesce-calls/index.test.js index 1fe505d7d29..396f33f6a4a 100644 --- a/@vates/coalesce-calls/index.spec.js +++ b/@vates/coalesce-calls/index.test.js @@ -1,6 +1,7 @@ 'use strict' -/* eslint-env jest */ +const { describe, it } = require('test') +const assert = require('assert') const { coalesceCalls } = require('./') @@ -23,13 +24,13 @@ describe('coalesceCalls', () => { const promise2 = fn(defer2.promise) defer1.resolve('foo') - expect(await promise1).toBe('foo') - expect(await promise2).toBe('foo') + assert.strictEqual(await promise1, 'foo') + assert.strictEqual(await promise2, 'foo') const defer3 = pDefer() const promise3 = fn(defer3.promise) defer3.resolve('bar') - expect(await promise3).toBe('bar') + assert.strictEqual(await promise3, 'bar') }) }) diff --git a/@vates/coalesce-calls/package.json b/@vates/coalesce-calls/package.json index 000e79341c5..4833aa104ad 100644 --- a/@vates/coalesce-calls/package.json +++ b/@vates/coalesce-calls/package.json @@ -30,6 +30,10 @@ "node": ">=8.10" }, "scripts": { - "postversion": "npm publish --access public" + "postversion": "npm publish --access public", + "test": "node--test" + }, + "devDependencies": { + "test": "^3.2.1" } } diff --git a/@vates/compose/index.spec.js b/@vates/compose/index.test.js similarity index 62% rename from @vates/compose/index.spec.js rename to @vates/compose/index.test.js index 8425c5f32ba..fdfe0dba73e 100644 --- a/@vates/compose/index.spec.js +++ b/@vates/compose/index.test.js @@ -1,6 +1,7 @@ 'use strict' -/* eslint-env jest */ +const { describe, it } = require('test') +const assert = require('node:assert').strict const { compose } = require('./') @@ -9,43 +10,42 @@ const mul3 = x => x * 3 describe('compose()', () => { it('throws when no functions is passed', () => { - expect(() => compose()).toThrow(TypeError) - expect(() => compose([])).toThrow(TypeError) + assert.throws(() => compose(), TypeError) + assert.throws(() => compose([]), TypeError) }) it('applies from left to right', () => { - expect(compose(add2, mul3)(5)).toBe(21) + assert.strictEqual(compose(add2, mul3)(5), 21) }) it('accepts functions in an array', () => { - expect(compose([add2, mul3])(5)).toBe(21) + assert.strictEqual(compose([add2, mul3])(5), 21) }) it('can apply from right to left', () => { - expect(compose({ right: true }, add2, mul3)(5)).toBe(17) + assert.strictEqual(compose({ right: true }, add2, mul3)(5), 17) }) it('accepts options with functions in an array', () => { - expect(compose({ right: true }, [add2, mul3])(5)).toBe(17) + assert.strictEqual(compose({ right: true }, [add2, mul3])(5), 17) }) it('can compose async functions', async () => { - expect( + assert.strictEqual( await compose( { async: true }, async x => x + 2, async x => x * 3 - )(5) - ).toBe(21) + )(5), + 21 + ) }) it('forwards all args to 
first function', () => { - expect.assertions(1) - const expectedArgs = [Math.random(), Math.random()] compose( (...args) => { - expect(args).toEqual(expectedArgs) + assert.deepEqual(args, expectedArgs) }, // add a second function to avoid the one function special case Function.prototype @@ -53,15 +53,13 @@ describe('compose()', () => { }) it('forwards context to all functions', () => { - expect.assertions(2) - const expectedThis = {} compose( function () { - expect(this).toBe(expectedThis) + assert.strictEqual(this, expectedThis) }, function () { - expect(this).toBe(expectedThis) + assert.strictEqual(this, expectedThis) } ).call(expectedThis) }) diff --git a/@vates/compose/package.json b/@vates/compose/package.json index 25c793b56ce..80451e19c9b 100644 --- a/@vates/compose/package.json +++ b/@vates/compose/package.json @@ -19,6 +19,10 @@ "node": ">=7.6" }, "scripts": { - "postversion": "npm publish --access public" + "postversion": "npm publish --access public", + "test": "node--test" + }, + "devDependencies": { + "test": "^3.2.1" } } diff --git a/@vates/decorate-with/index.spec.js b/@vates/decorate-with/index.test.js similarity index 98% rename from @vates/decorate-with/index.spec.js rename to @vates/decorate-with/index.test.js index 3ab1968b2ab..9b75bef39ce 100644 --- a/@vates/decorate-with/index.spec.js +++ b/@vates/decorate-with/index.test.js @@ -1,7 +1,7 @@ 'use strict' const assert = require('assert') -const { describe, it } = require('tap').mocha +const { describe, it } = require('test') const { decorateClass, decorateWith, decorateMethodsWith, perInstance } = require('./') diff --git a/@vates/decorate-with/package.json b/@vates/decorate-with/package.json index 22b1f0a39e0..299bf30845f 100644 --- a/@vates/decorate-with/package.json +++ b/@vates/decorate-with/package.json @@ -26,9 +26,9 @@ }, "scripts": { "postversion": "npm publish --access public", - "test": "tap" + "test": "node--test" }, "devDependencies": { - "tap": "^16.0.1" + "test": "^3.2.1" } } diff --git a/@vates/disposable/debounceResource.spec.js b/@vates/disposable/debounceResource.test.js similarity index 65% rename from @vates/disposable/debounceResource.spec.js rename to @vates/disposable/debounceResource.test.js index c342979f580..475f276a4d2 100644 --- a/@vates/disposable/debounceResource.spec.js +++ b/@vates/disposable/debounceResource.test.js @@ -1,16 +1,17 @@ 'use strict' -/* eslint-env jest */ +const { describe, it } = require('test') +const { useFakeTimers, spy, assert } = require('sinon') const { createDebounceResource } = require('./debounceResource') -jest.useFakeTimers() +const clock = useFakeTimers() describe('debounceResource()', () => { it('calls the resource disposer after 10 seconds', async () => { const debounceResource = createDebounceResource() const delay = 10e3 - const dispose = jest.fn() + const dispose = spy() const resource = await debounceResource( Promise.resolve({ @@ -22,10 +23,10 @@ describe('debounceResource()', () => { resource.dispose() - expect(dispose).not.toBeCalled() + assert.notCalled(dispose) - jest.advanceTimersByTime(delay) + clock.tick(delay) - expect(dispose).toBeCalled() + assert.called(dispose) }) }) diff --git a/@vates/disposable/deduped.spec.js b/@vates/disposable/deduped.test.js similarity index 69% rename from @vates/disposable/deduped.spec.js rename to @vates/disposable/deduped.test.js index 38f43946df9..d01e2132425 100644 --- a/@vates/disposable/deduped.spec.js +++ b/@vates/disposable/deduped.test.js @@ -1,13 +1,14 @@ 'use strict' -/* eslint-env jest */ +const { 
describe, it } = require('test') +const { spy, assert } = require('sinon') const { deduped } = require('./deduped') describe('deduped()', () => { it('calls the resource function only once', async () => { const value = {} - const getResource = jest.fn(async () => ({ + const getResource = spy(async () => ({ value, dispose: Function.prototype, })) @@ -17,13 +18,13 @@ describe('deduped()', () => { const { value: v1 } = await dedupedGetResource() const { value: v2 } = await dedupedGetResource() - expect(getResource).toHaveBeenCalledTimes(1) - expect(v1).toBe(value) - expect(v2).toBe(value) + assert.calledOnce(getResource) + assert.match(v1, value) + assert.match(v2, value) }) it('only disposes the source disposable when its all copies dispose', async () => { - const dispose = jest.fn() + const dispose = spy() const getResource = async () => ({ value: '', dispose, @@ -36,35 +37,35 @@ describe('deduped()', () => { d1() - expect(dispose).not.toHaveBeenCalled() + assert.notCalled(dispose) d2() - expect(dispose).toHaveBeenCalledTimes(1) + assert.calledOnce(dispose) }) it('works with sync factory', () => { const value = {} - const dispose = jest.fn() + const dispose = spy() const dedupedGetResource = deduped(() => ({ value, dispose })) const d1 = dedupedGetResource() - expect(d1.value).toBe(value) + assert.match(d1.value, value) const d2 = dedupedGetResource() - expect(d2.value).toBe(value) + assert.match(d2.value, value) d1.dispose() - expect(dispose).not.toHaveBeenCalled() + assert.notCalled(dispose) d2.dispose() - expect(dispose).toHaveBeenCalledTimes(1) + assert.calledOnce(dispose) }) it('no race condition on dispose before async acquisition', async () => { - const dispose = jest.fn() + const dispose = spy() const dedupedGetResource = deduped(async () => ({ value: 42, dispose })) const d1 = await dedupedGetResource() @@ -73,6 +74,6 @@ describe('deduped()', () => { d1.dispose() - expect(dispose).not.toHaveBeenCalled() + assert.notCalled(dispose) }) }) diff --git a/@vates/disposable/package.json b/@vates/disposable/package.json index 2e898a45d83..df2dbce7d88 100644 --- a/@vates/disposable/package.json +++ b/@vates/disposable/package.json @@ -19,12 +19,17 @@ "node": ">=8.10" }, "scripts": { - "postversion": "npm publish --access public" + "postversion": "npm publish --access public", + "test": "node--test" }, "dependencies": { "@vates/multi-key-map": "^0.1.0", "@xen-orchestra/async-map": "^0.1.2", "@xen-orchestra/log": "^0.3.0", "ensure-array": "^1.0.0" + }, + "devDependencies": { + "sinon": "^14.0.1", + "test": "^3.2.1" } } diff --git a/@vates/multi-key-map/index.spec.js b/@vates/multi-key-map/index.test.js similarity index 77% rename from @vates/multi-key-map/index.spec.js rename to @vates/multi-key-map/index.test.js index cfc94f28136..2d8a593d4ee 100644 --- a/@vates/multi-key-map/index.spec.js +++ b/@vates/multi-key-map/index.test.js @@ -1,6 +1,7 @@ 'use strict' -/* eslint-env jest */ +const { describe, it } = require('test') +const assert = require('node:assert') const { MultiKeyMap } = require('./') @@ -28,9 +29,9 @@ describe('MultiKeyMap', () => { keys.forEach((key, i) => { // copy the key to make sure the array itself is not the key - expect(map.get(key.slice())).toBe(values[i]) + assert.strictEqual(map.get(key.slice()), values[i]) map.delete(key.slice()) - expect(map.get(key.slice())).toBe(undefined) + assert.strictEqual(map.get(key.slice()), undefined) }) }) }) diff --git a/@vates/multi-key-map/package.json b/@vates/multi-key-map/package.json index 05a5d471dbd..9e8185b7443 100644 --- 
a/@vates/multi-key-map/package.json
+++ b/@vates/multi-key-map/package.json
@@ -23,6 +23,10 @@
     "node": ">=8.10"
   },
   "scripts": {
-    "postversion": "npm publish --access public"
+    "postversion": "npm publish --access public",
+    "test": "node--test"
+  },
+  "devDependencies": {
+    "test": "^3.2.1"
   }
 }
diff --git a/@vates/otp/index.spec.mjs b/@vates/otp/index.spec.mjs
index b17f681b6ee..8b52180bcbb 100644
--- a/@vates/otp/index.spec.mjs
+++ b/@vates/otp/index.spec.mjs
@@ -1,5 +1,4 @@
 import { strict as assert } from 'node:assert'
-// eslint-disable-next-line n/no-unpublished-import
 import { describe, it } from 'tap/mocha'
 
 import {
diff --git a/@vates/predicates/.USAGE.md b/@vates/predicates/.USAGE.md
index be38f3ceb21..f0cee9ef05d 100644
--- a/@vates/predicates/.USAGE.md
+++ b/@vates/predicates/.USAGE.md
@@ -1,7 +1,7 @@
 `undefined` predicates are ignored and `undefined` is returned if all predicates are `undefined`, this permits the most efficient composition:
 
 ```js
-const compositePredicate = every(undefined, some(predicate2, undefined))
+const compositePredicate = not(every(undefined, some(not(predicate2), undefined)))
 
 // ends up as
 
@@ -36,6 +36,21 @@ isBetween3And10(10)
 // → false
 ```
 
+### `not(predicate)`
+
+> Returns a predicate that returns the negation of the given predicate.
+
+```js
+const isEven = n => n % 2 === 0
+const isOdd = not(isEven)
+
+isOdd(1)
+// → true
+
+isOdd(2)
+// → false
+```
+
 ### `some(predicates)`
 
 > Returns a predicate that returns `true` iff some predicate returns `true`.
diff --git a/@vates/predicates/README.md b/@vates/predicates/README.md
index b83161a3b4f..09824a9a043 100644
--- a/@vates/predicates/README.md
+++ b/@vates/predicates/README.md
@@ -19,7 +19,7 @@ Installation of the [npm package](https://npmjs.org/package/@vates/predicates):
 
 `undefined` predicates are ignored and `undefined` is returned if all predicates are `undefined`, this permits the most efficient composition:
 
 ```js
-const compositePredicate = every(undefined, some(predicate2, undefined))
+const compositePredicate = not(every(undefined, some(not(predicate2), undefined)))
 
 // ends up as
 
@@ -54,6 +54,21 @@ isBetween3And10(10)
 // → false
 ```
 
+### `not(predicate)`
+
+> Returns a predicate that returns the negation of the given predicate.
+
+```js
+const isEven = n => n % 2 === 0
+const isOdd = not(isEven)
+
+isOdd(1)
+// → true
+
+isOdd(2)
+// → false
+```
+
 ### `some(predicates)`
 
 > Returns a predicate that returns `true` iff some predicate returns `true`.
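The composition rules documented above (`undefined` predicates are ignored, a single remaining predicate is returned as-is, and double negation unwraps, per the `not(not(T)) === T` test below) mean the `compositePredicate` example collapses back to the original predicate; a minimal sketch, assuming the published `@vates/predicates` entry point:

```js
const { every, not, some } = require('@vates/predicates')

const isEven = n => n % 2 === 0

// some(not(isEven), undefined)  → not(isEven): undefined is ignored
// every(undefined, not(isEven)) → not(isEven): a single predicate is returned as-is
// not(not(isEven))              → isEven: double negation is unwrapped
const compositePredicate = not(every(undefined, some(not(isEven), undefined)))

console.log(compositePredicate === isEven) // → true
console.log(compositePredicate(2)) // → true
```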
diff --git a/@vates/predicates/index.js b/@vates/predicates/index.js index d6132eb3d37..12d3bc4f5b9 100644 --- a/@vates/predicates/index.js +++ b/@vates/predicates/index.js @@ -51,6 +51,22 @@ exports.every = function every() { } } +const notPredicateTag = {} +exports.not = function not(predicate) { + if (isDefinedPredicate(predicate)) { + if (predicate.tag === notPredicateTag) { + return predicate.predicate + } + + function notPredicate() { + return !predicate.apply(this, arguments) + } + notPredicate.predicate = predicate + notPredicate.tag = notPredicateTag + return notPredicate + } +} + exports.some = function some() { const predicates = handleArgs.apply(this, arguments) const n = predicates.length diff --git a/@vates/predicates/index.spec.js b/@vates/predicates/index.spec.js index 02d354e6020..a8e9a58d70a 100644 --- a/@vates/predicates/index.spec.js +++ b/@vates/predicates/index.spec.js @@ -3,20 +3,14 @@ const assert = require('assert/strict') const { describe, it } = require('tap').mocha -const { every, some } = require('./') +const { every, not, some } = require('./') const T = () => true const F = () => false -const testArgsHandling = fn => { - it('returns undefined if all predicates are undefined', () => { +const testArgHandling = fn => { + it('returns undefined if predicate is undefined', () => { assert.equal(fn(undefined), undefined) - assert.equal(fn([undefined]), undefined) - }) - - it('returns the predicate if only a single one is passed', () => { - assert.equal(fn(undefined, T), T) - assert.equal(fn([undefined, T]), T) }) it('throws if it receives a non-predicate', () => { @@ -24,6 +18,15 @@ const testArgsHandling = fn => { error.value = 3 assert.throws(() => fn(3), error) }) +} + +const testArgsHandling = fn => { + testArgHandling(fn) + + it('returns the predicate if only a single one is passed', () => { + assert.equal(fn(undefined, T), T) + assert.equal(fn([undefined, T]), T) + }) it('forwards this and arguments to predicates', () => { const thisArg = 'qux' @@ -36,17 +39,21 @@ const testArgsHandling = fn => { }) } -const runTests = (fn, truthTable) => +const runTests = (fn, acceptMultiple, truthTable) => it('works', () => { truthTable.forEach(([result, ...predicates]) => { + if (acceptMultiple) { + assert.equal(fn(predicates)(), result) + } else { + assert.equal(predicates.length, 1) + } assert.equal(fn(...predicates)(), result) - assert.equal(fn(predicates)(), result) }) }) describe('every', () => { testArgsHandling(every) - runTests(every, [ + runTests(every, true, [ [true, T, T], [false, T, F], [false, F, T], @@ -54,9 +61,22 @@ describe('every', () => { ]) }) +describe('not', () => { + testArgHandling(not) + + it('returns the original predicate if negated twice', () => { + assert.equal(not(not(T)), T) + }) + + runTests(not, false, [ + [true, F], + [false, T], + ]) +}) + describe('some', () => { testArgsHandling(some) - runTests(some, [ + runTests(some, true, [ [true, T, T], [true, T, F], [true, F, T], diff --git a/@vates/read-chunk/index.spec.js b/@vates/read-chunk/index.test.js similarity index 58% rename from @vates/read-chunk/index.spec.js rename to @vates/read-chunk/index.test.js index a16c552d926..ebd242b90c7 100644 --- a/@vates/read-chunk/index.spec.js +++ b/@vates/read-chunk/index.test.js @@ -1,6 +1,7 @@ 'use strict' -/* eslint-env jest */ +const { describe, it } = require('test') +const assert = require('node:assert').strict const { Readable } = require('stream') @@ -11,42 +12,42 @@ makeStream.obj = Readable.from describe('readChunk', () => { it('returns null if 
stream is empty', async () => { - expect(await readChunk(makeStream([]))).toBe(null) + assert.strictEqual(await readChunk(makeStream([])), null) }) it('returns null if the stream is already ended', async () => { const stream = await makeStream([]) await readChunk(stream) - expect(await readChunk(stream)).toBe(null) + assert.strictEqual(await readChunk(stream), null) }) describe('with binary stream', () => { it('returns the first chunk of data', async () => { - expect(await readChunk(makeStream(['foo', 'bar']))).toEqual(Buffer.from('foo')) + assert.deepEqual(await readChunk(makeStream(['foo', 'bar'])), Buffer.from('foo')) }) it('returns a chunk of the specified size (smaller than first)', async () => { - expect(await readChunk(makeStream(['foo', 'bar']), 2)).toEqual(Buffer.from('fo')) + assert.deepEqual(await readChunk(makeStream(['foo', 'bar']), 2), Buffer.from('fo')) }) it('returns a chunk of the specified size (larger than first)', async () => { - expect(await readChunk(makeStream(['foo', 'bar']), 4)).toEqual(Buffer.from('foob')) + assert.deepEqual(await readChunk(makeStream(['foo', 'bar']), 4), Buffer.from('foob')) }) it('returns less data if stream ends', async () => { - expect(await readChunk(makeStream(['foo', 'bar']), 10)).toEqual(Buffer.from('foobar')) + assert.deepEqual(await readChunk(makeStream(['foo', 'bar']), 10), Buffer.from('foobar')) }) it('returns an empty buffer if the specified size is 0', async () => { - expect(await readChunk(makeStream(['foo', 'bar']), 0)).toEqual(Buffer.alloc(0)) + assert.deepEqual(await readChunk(makeStream(['foo', 'bar']), 0), Buffer.alloc(0)) }) }) describe('with object stream', () => { it('returns the first chunk of data verbatim', async () => { const chunks = [{}, {}] - expect(await readChunk(makeStream.obj(chunks))).toBe(chunks[0]) + assert.strictEqual(await readChunk(makeStream.obj(chunks)), chunks[0]) }) }) }) @@ -62,15 +63,15 @@ const rejectionOf = promise => describe('readChunkStrict', function () { it('throws if stream is empty', async () => { const error = await rejectionOf(readChunkStrict(makeStream([]))) - expect(error).toBeInstanceOf(Error) - expect(error.message).toBe('stream has ended without data') - expect(error.chunk).toEqual(undefined) + assert(error instanceof Error) + assert.strictEqual(error.message, 'stream has ended without data') + assert.strictEqual(error.chunk, undefined) }) it('throws if stream ends with not enough data', async () => { const error = await rejectionOf(readChunkStrict(makeStream(['foo', 'bar']), 10)) - expect(error).toBeInstanceOf(Error) - expect(error.message).toBe('stream has ended with not enough data') - expect(error.chunk).toEqual(Buffer.from('foobar')) + assert(error instanceof Error) + assert.strictEqual(error.message, 'stream has ended with not enough data') + assert.deepEqual(error.chunk, Buffer.from('foobar')) }) }) diff --git a/@vates/read-chunk/package.json b/@vates/read-chunk/package.json index 7a499f662de..7778118d348 100644 --- a/@vates/read-chunk/package.json +++ b/@vates/read-chunk/package.json @@ -24,10 +24,14 @@ "node": ">=8.10" }, "scripts": { - "postversion": "npm publish --access public" + "postversion": "npm publish --access public", + "test": "node--test" }, "author": { "name": "Vates SAS", "url": "https://vates.fr" + }, + "devDependencies": { + "test": "^3.2.1" } } diff --git a/@xen-orchestra/async-map/index.spec.js b/@xen-orchestra/async-map/index.test.js similarity index 68% rename from @xen-orchestra/async-map/index.spec.js rename to @xen-orchestra/async-map/index.test.js index 
3392d5f50b0..705b247204a 100644 --- a/@xen-orchestra/async-map/index.spec.js +++ b/@xen-orchestra/async-map/index.test.js @@ -1,6 +1,8 @@ 'use strict' -/* eslint-env jest */ +const { describe, it } = require('test') +const assert = require('assert').strict +const sinon = require('sinon') const { asyncMapSettled } = require('./') @@ -9,26 +11,29 @@ const noop = Function.prototype describe('asyncMapSettled', () => { it('works', async () => { const values = [Math.random(), Math.random()] - const spy = jest.fn(async v => v * 2) + const spy = sinon.spy(async v => v * 2) const iterable = new Set(values) // returns an array containing the result of each calls - expect(await asyncMapSettled(iterable, spy)).toEqual(values.map(value => value * 2)) + assert.deepStrictEqual( + await asyncMapSettled(iterable, spy), + values.map(value => value * 2) + ) for (let i = 0, n = values.length; i < n; ++i) { // each call receive the current item as sole argument - expect(spy.mock.calls[i]).toEqual([values[i]]) + assert.deepStrictEqual(spy.args[i], [values[i]]) // each call as this bind to the iterable - expect(spy.mock.instances[i]).toBe(iterable) + assert.deepStrictEqual(spy.thisValues[i], iterable) } }) it('can use a specified thisArg', () => { const thisArg = {} - const spy = jest.fn() + const spy = sinon.spy() asyncMapSettled(['foo'], spy, thisArg) - expect(spy.mock.instances[0]).toBe(thisArg) + assert.deepStrictEqual(spy.thisValues[0], thisArg) }) it('rejects only when all calls as resolved', async () => { @@ -55,19 +60,22 @@ describe('asyncMapSettled', () => { // wait for all microtasks to settle await new Promise(resolve => setImmediate(resolve)) - expect(hasSettled).toBe(false) + assert.strictEqual(hasSettled, false) defers[1].resolve() // wait for all microtasks to settle await new Promise(resolve => setImmediate(resolve)) - expect(hasSettled).toBe(true) - await expect(promise).rejects.toBe(error) + assert.strictEqual(hasSettled, true) + await assert.rejects(promise, error) }) it('issues when latest promise rejects', async () => { const error = new Error() - await expect(asyncMapSettled([1], () => Promise.reject(error))).rejects.toBe(error) + await assert.rejects( + asyncMapSettled([1], () => Promise.reject(error)), + error + ) }) }) diff --git a/@xen-orchestra/async-map/package.json b/@xen-orchestra/async-map/package.json index d3a8fbc491a..2a2782af41c 100644 --- a/@xen-orchestra/async-map/package.json +++ b/@xen-orchestra/async-map/package.json @@ -31,6 +31,11 @@ "lodash": "^4.17.4" }, "scripts": { - "postversion": "npm publish" + "postversion": "npm publish", + "test": "node--test" + }, + "devDependencies": { + "sinon": "^14.0.1", + "test": "^3.2.1" } } diff --git a/@xen-orchestra/backups/_cleanVm.integ.spec.js b/@xen-orchestra/backups/_cleanVm.integ.spec.js index 5d25442192b..5492d17aaef 100644 --- a/@xen-orchestra/backups/_cleanVm.integ.spec.js +++ b/@xen-orchestra/backups/_cleanVm.integ.spec.js @@ -14,7 +14,8 @@ const { VhdFile, Constants, VhdDirectory, VhdAbstract } = require('vhd-lib') const { checkAliases } = require('./_cleanVm') const { dirname, basename } = require('path') -let tempDir, adapter, handler, jobId, vdiId, basePath +let tempDir, adapter, handler, jobId, vdiId, basePath, relativePath +const rootPath = 'xo-vm-backups/VMUUID/' jest.setTimeout(60000) @@ -25,7 +26,8 @@ beforeEach(async () => { adapter = new RemoteAdapter(handler) jobId = uniqueId() vdiId = uniqueId() - basePath = `vdis/${jobId}/${vdiId}` + relativePath = `vdis/${jobId}/${vdiId}` + basePath = 
`${rootPath}/${relativePath}` await fs.mkdirp(`${tempDir}/${basePath}`) }) @@ -81,13 +83,13 @@ test('It remove broken vhd', async () => { const logInfo = message => { loggued += message } - await adapter.cleanVm('/', { remove: false, logInfo, logWarn: logInfo, lock: false }) + await adapter.cleanVm(rootPath, { remove: false, logInfo, logWarn: logInfo, lock: false }) expect(loggued).toEqual(`VHD check error`) // not removed - expect((await handler.list(basePath)).length).toEqual(1) + expect(await handler.list(basePath)).toEqual(['notReallyAVhd.vhd']) // really remove it - await adapter.cleanVm('/', { remove: true, logInfo, logWarn: () => {}, lock: false }) - expect((await handler.list(basePath)).length).toEqual(0) + await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: () => {}, lock: false }) + expect(await handler.list(basePath)).toEqual([]) }) test('it remove vhd with missing or multiple ancestors', async () => { @@ -121,7 +123,7 @@ test('it remove vhd with missing or multiple ancestors', async () => { const logInfo = message => { loggued += message + '\n' } - await adapter.cleanVm('/', { remove: true, logInfo, logWarn: logInfo, lock: false }) + await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: logInfo, lock: false }) const deletedOrphanVhd = loggued.match(/deleting orphan VHD/g) || [] expect(deletedOrphanVhd.length).toEqual(1) // only one vhd should have been deleted @@ -132,12 +134,12 @@ test('it remove vhd with missing or multiple ancestors', async () => { test('it remove backup meta data referencing a missing vhd in delta backup', async () => { // create a metadata file marking child and orphan as ok await handler.writeFile( - `metadata.json`, + `${rootPath}/metadata.json`, JSON.stringify({ mode: 'delta', vhds: [ - `${basePath}/orphan.vhd`, - `${basePath}/child.vhd`, + `${relativePath}/orphan.vhd`, + `${relativePath}/child.vhd`, // abandonned.json is not here ], }) @@ -160,39 +162,39 @@ test('it remove backup meta data referencing a missing vhd in delta backup', asy const logInfo = message => { loggued += message + '\n' } - await adapter.cleanVm('/', { remove: true, logInfo, logWarn: logInfo, lock: false }) + await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: logInfo, lock: false }) let matched = loggued.match(/deleting unused VHD/g) || [] expect(matched.length).toEqual(1) // only one vhd should have been deleted // a missing vhd cause clean to remove all vhds await handler.writeFile( - `metadata.json`, + `${rootPath}/metadata.json`, JSON.stringify({ mode: 'delta', vhds: [ - `${basePath}/deleted.vhd`, // in metadata but not in vhds - `${basePath}/orphan.vhd`, - `${basePath}/child.vhd`, + `deleted.vhd`, // in metadata but not in vhds + `orphan.vhd`, + `child.vhd`, // abandonned.vhd is not here anymore ], }), { flags: 'w' } ) loggued = '' - await adapter.cleanVm('/', { remove: true, logInfo, logWarn: () => {}, lock: false }) + await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: () => {}, lock: false }) matched = loggued.match(/deleting unused VHD/g) || [] expect(matched.length).toEqual(2) // all vhds (orphan and child ) should have been deleted }) test('it merges delta of non destroyed chain', async () => { await handler.writeFile( - `metadata.json`, + `${rootPath}/metadata.json`, JSON.stringify({ mode: 'delta', size: 12000, // a size too small vhds: [ - `${basePath}/grandchild.vhd`, // grand child should not be merged - `${basePath}/child.vhd`, + `${relativePath}/grandchild.vhd`, // grand child should not be merged + 
`${relativePath}/child.vhd`, // orphan is not here, he should be merged in child ], }) @@ -219,15 +221,15 @@ test('it merges delta of non destroyed chain', async () => { const logInfo = message => { loggued.push(message) } - await adapter.cleanVm('/', { remove: true, logInfo, logWarn: logInfo, lock: false }) + await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: logInfo, lock: false }) expect(loggued[0]).toEqual(`incorrect backup size in metadata`) loggued = [] - await adapter.cleanVm('/', { remove: true, merge: true, logInfo, logWarn: () => {}, lock: false }) + await adapter.cleanVm(rootPath, { remove: true, merge: true, logInfo, logWarn: () => {}, lock: false }) const [merging] = loggued expect(merging).toEqual(`merging VHD chain`) - const metadata = JSON.parse(await handler.readFile(`metadata.json`)) + const metadata = JSON.parse(await handler.readFile(`${rootPath}/metadata.json`)) // size should be the size of children + grand children after the merge expect(metadata.size).toEqual(209920) @@ -241,11 +243,11 @@ test('it merges delta of non destroyed chain', async () => { test('it finish unterminated merge ', async () => { await handler.writeFile( - `metadata.json`, + `${rootPath}/metadata.json`, JSON.stringify({ mode: 'delta', size: 209920, - vhds: [`${basePath}/orphan.vhd`, `${basePath}/child.vhd`], + vhds: [`${relativePath}/orphan.vhd`, `${relativePath}/child.vhd`], }) ) @@ -271,7 +273,7 @@ test('it finish unterminated merge ', async () => { }) ) - await adapter.cleanVm('/', { remove: true, merge: true, logWarn: () => {}, lock: false }) + await adapter.cleanVm(rootPath, { remove: true, merge: true, logWarn: () => {}, lock: false }) // merging is already tested in vhd-lib, don't retest it here (and theses vhd are as empty as my stomach at 12h12) // only check deletion @@ -367,20 +369,20 @@ describe('tests multiple combination ', () => { // the metadata file await handler.writeFile( - `metadata.json`, + `${rootPath}/metadata.json`, JSON.stringify({ mode: 'delta', vhds: [ - `${basePath}/grandchild.vhd` + (useAlias ? '.alias.vhd' : ''), // grand child should not be merged - `${basePath}/child.vhd` + (useAlias ? '.alias.vhd' : ''), - `${basePath}/clean.vhd` + (useAlias ? '.alias.vhd' : ''), + `${relativePath}/grandchild.vhd` + (useAlias ? '.alias.vhd' : ''), // grand child should not be merged + `${relativePath}/child.vhd` + (useAlias ? '.alias.vhd' : ''), + `${relativePath}/clean.vhd` + (useAlias ? '.alias.vhd' : ''), ], }) ) - await adapter.cleanVm('/', { remove: true, merge: true, logWarn: () => {}, lock: false }) + await adapter.cleanVm(rootPath, { remove: true, merge: true, logWarn: () => {}, lock: false }) - const metadata = JSON.parse(await handler.readFile(`metadata.json`)) + const metadata = JSON.parse(await handler.readFile(`${rootPath}/metadata.json`)) // size should be the size of children + grand children + clean after the merge expect(metadata.size).toEqual(vhdMode === 'file' ? 
314880 : undefined) @@ -414,7 +416,7 @@ describe('tests multiple combination ', () => { test('it cleans orphan merge states ', async () => { await handler.writeFile(`${basePath}/.orphan.vhd.merge.json`, '') - await adapter.cleanVm('/', { remove: true, logWarn: () => {}, lock: false }) + await adapter.cleanVm(rootPath, { remove: true, logWarn: () => {}, lock: false }) expect(await handler.list(basePath)).toEqual([]) }) diff --git a/@xen-orchestra/cron/index.spec.js b/@xen-orchestra/cron/index.test.js similarity index 62% rename from @xen-orchestra/cron/index.spec.js rename to @xen-orchestra/cron/index.test.js index b62184bf999..d5b54a92340 100644 --- a/@xen-orchestra/cron/index.spec.js +++ b/@xen-orchestra/cron/index.test.js @@ -1,24 +1,20 @@ -/* eslint-env jest */ - 'use strict' +const test = require('test') +const assert = require('assert').strict +const sinon = require('sinon') + const { createSchedule } = require('./') -jest.useFakeTimers() +const clock = sinon.useFakeTimers() const wrap = value => () => value -describe('issues', () => { +test('issues', async t => { let originalDateNow - beforeAll(() => { - originalDateNow = Date.now - }) - afterAll(() => { - Date.now = originalDateNow - originalDateNow = undefined - }) + originalDateNow = Date.now - test('stop during async execution', async () => { + await t.test('stop during async execution', async () => { let nCalls = 0 let resolve, promise @@ -35,20 +31,20 @@ describe('issues', () => { job.start() Date.now = wrap(+schedule.next(1)[0]) - jest.runAllTimers() + clock.runAll() - expect(nCalls).toBe(1) + assert.strictEqual(nCalls, 1) job.stop() resolve() await promise - jest.runAllTimers() - expect(nCalls).toBe(1) + clock.runAll() + assert.strictEqual(nCalls, 1) }) - test('stop then start during async job execution', async () => { + await t.test('stop then start during async job execution', async () => { let nCalls = 0 let resolve, promise @@ -65,9 +61,9 @@ describe('issues', () => { job.start() Date.now = wrap(+schedule.next(1)[0]) - jest.runAllTimers() + clock.runAll() - expect(nCalls).toBe(1) + assert.strictEqual(nCalls, 1) job.stop() job.start() @@ -76,7 +72,10 @@ describe('issues', () => { await promise Date.now = wrap(+schedule.next(1)[0]) - jest.runAllTimers() - expect(nCalls).toBe(2) + clock.runAll() + assert.strictEqual(nCalls, 2) }) + + Date.now = originalDateNow + originalDateNow = undefined }) diff --git a/@xen-orchestra/cron/next.spec.js b/@xen-orchestra/cron/next.test.js similarity index 67% rename from @xen-orchestra/cron/next.spec.js rename to @xen-orchestra/cron/next.test.js index b95807b6517..3876141a71a 100644 --- a/@xen-orchestra/cron/next.spec.js +++ b/@xen-orchestra/cron/next.test.js @@ -1,7 +1,8 @@ -/* eslint-env jest */ - 'use strict' +const { describe, it } = require('test') +const assert = require('assert').strict + const mapValues = require('lodash/mapValues') const moment = require('moment-timezone') @@ -25,24 +26,24 @@ describe('next()', () => { }, ([pattern, result], title) => it(title, () => { - expect(N(pattern)).toBe(result) + assert.strictEqual(N(pattern), result) }) ) it('select first between month-day and week-day', () => { - expect(N('* * 10 * wen')).toBe('2018-04-10T00:00') - expect(N('* * 12 * wen')).toBe('2018-04-11T00:00') + assert.strictEqual(N('* * 10 * wen'), '2018-04-10T00:00') + assert.strictEqual(N('* * 12 * wen'), '2018-04-11T00:00') }) it('select the last available day of a month', () => { - expect(N('* * 29 feb *')).toBe('2020-02-29T00:00') + assert.strictEqual(N('* * 29 feb *'), 
'2020-02-29T00:00') }) it('fails when no solutions has been found', () => { - expect(() => N('0 0 30 feb *')).toThrow('no solutions found for this schedule') + assert.throws(() => N('0 0 30 feb *'), { message: 'no solutions found for this schedule' }) }) it('select the first sunday of the month', () => { - expect(N('* * * * 0', '2018-03-31T00:00')).toBe('2018-04-01T00:00') + assert.strictEqual(N('* * * * 0', '2018-03-31T00:00'), '2018-04-01T00:00') }) }) diff --git a/@xen-orchestra/cron/package.json b/@xen-orchestra/cron/package.json index 18913139c06..3642be62cae 100644 --- a/@xen-orchestra/cron/package.json +++ b/@xen-orchestra/cron/package.json @@ -38,6 +38,11 @@ "moment-timezone": "^0.5.14" }, "scripts": { - "postversion": "npm publish" + "postversion": "npm publish", + "test": "node--test" + }, + "devDependencies": { + "sinon": "^14.0.1", + "test": "^3.2.1" } } diff --git a/@xen-orchestra/cron/parse.spec.js b/@xen-orchestra/cron/parse.spec.js deleted file mode 100644 index 92e5694e09e..00000000000 --- a/@xen-orchestra/cron/parse.spec.js +++ /dev/null @@ -1,49 +0,0 @@ -/* eslint-env jest */ - -'use strict' - -const parse = require('./parse') - -describe('parse()', () => { - it('works', () => { - expect(parse('0 0-10 */10 jan,2,4-11/3 *')).toEqual({ - minute: [0], - hour: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - dayOfMonth: [1, 11, 21, 31], - month: [0, 2, 4, 7, 10], - }) - }) - - it('correctly parse months', () => { - expect(parse('* * * 0,11 *')).toEqual({ - month: [0, 11], - }) - expect(parse('* * * jan,dec *')).toEqual({ - month: [0, 11], - }) - }) - - it('correctly parse days', () => { - expect(parse('* * * * mon,sun')).toEqual({ - dayOfWeek: [0, 1], - }) - }) - - it('reports missing integer', () => { - expect(() => parse('*/a')).toThrow('minute: missing integer at character 2') - expect(() => parse('*')).toThrow('hour: missing integer at character 1') - }) - - it('reports invalid aliases', () => { - expect(() => parse('* * * jan-foo *')).toThrow('month: missing alias or integer at character 10') - }) - - it('dayOfWeek: 0 and 7 bind to sunday', () => { - expect(parse('* * * * 0')).toEqual({ - dayOfWeek: [0], - }) - expect(parse('* * * * 7')).toEqual({ - dayOfWeek: [0], - }) - }) -}) diff --git a/@xen-orchestra/cron/parse.test.js b/@xen-orchestra/cron/parse.test.js new file mode 100644 index 00000000000..916a4c82835 --- /dev/null +++ b/@xen-orchestra/cron/parse.test.js @@ -0,0 +1,50 @@ +'use strict' + +const { describe, it } = require('test') +const assert = require('assert').strict + +const parse = require('./parse') + +describe('parse()', () => { + it('works', () => { + assert.deepStrictEqual(parse('0 0-10 */10 jan,2,4-11/3 *'), { + minute: [0], + hour: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + dayOfMonth: [1, 11, 21, 31], + month: [0, 2, 4, 7, 10], + }) + }) + + it('correctly parse months', () => { + assert.deepStrictEqual(parse('* * * 0,11 *'), { + month: [0, 11], + }) + assert.deepStrictEqual(parse('* * * jan,dec *'), { + month: [0, 11], + }) + }) + + it('correctly parse days', () => { + assert.deepStrictEqual(parse('* * * * mon,sun'), { + dayOfWeek: [0, 1], + }) + }) + + it('reports missing integer', () => { + assert.throws(() => parse('*/a'), { message: 'minute: missing integer at character 2' }) + assert.throws(() => parse('*'), { message: 'hour: missing integer at character 1' }) + }) + + it('reports invalid aliases', () => { + assert.throws(() => parse('* * * jan-foo *'), { message: 'month: missing alias or integer at character 10' }) + }) + + it('dayOfWeek: 0 and 7 bind to 
sunday', () => { + assert.deepStrictEqual(parse('* * * * 0'), { + dayOfWeek: [0], + }) + assert.deepStrictEqual(parse('* * * * 7'), { + dayOfWeek: [0], + }) + }) +}) diff --git a/@xen-orchestra/fs/src/_encryptor.js b/@xen-orchestra/fs/src/_encryptor.js index 2125bcb600b..2f841b24b35 100644 --- a/@xen-orchestra/fs/src/_encryptor.js +++ b/@xen-orchestra/fs/src/_encryptor.js @@ -3,6 +3,11 @@ const { readChunk } = require('@vates/read-chunk') const crypto = require('crypto') export const DEFAULT_ENCRYPTION_ALGORITHM = 'aes-256-gcm' +export const UNENCRYPTED_ALGORITHM = 'none' + +export function isLegacyEncryptionAlgorithm(algorithm) { + return algorithm !== UNENCRYPTED_ALGORITHM && algorithm !== DEFAULT_ENCRYPTION_ALGORITHM +} function getEncryptor(algorithm = DEFAULT_ENCRYPTION_ALGORITHM, key) { if (key === undefined) { diff --git a/@xen-orchestra/fs/src/abstract.js b/@xen-orchestra/fs/src/abstract.js index 8b4d1446e8d..00e49dceda9 100644 --- a/@xen-orchestra/fs/src/abstract.js +++ b/@xen-orchestra/fs/src/abstract.js @@ -370,19 +370,21 @@ export default class RemoteHandlerAbstract { JSON.parse(data) } catch (error) { // can be enoent, bad algorithm, or broeken json ( bad key or algorithm) - if ( - error.code === 'ENOENT' || // no encryption on non empty remote - (await this._canWriteMetadata()) // any other error , but on empty remote - ) { - info('will update metadata of this remote') - return this._createMetadata() + if (encryptionAlgorithm !== 'none') { + if (await this._canWriteMetadata()) { + // any other error , but on empty remote => update with remote settings + + info('will update metadata of this remote') + return this._createMetadata() + } else { + warn( + `The encryptionKey settings of this remote does not match the key used to create it. You won't be able to read any data from this remote`, + { error } + ) + // will probably send a ERR_OSSL_EVP_BAD_DECRYPT if key is incorrect + throw error + } } - warn( - `The encryptionKey settings of this remote does not match the key used to create it. You won't be able to read any data from this remote`, - { error } - ) - // will probably send a ERR_OSSL_EVP_BAD_DECRYPT if key is incorrect - throw error } } diff --git a/@xen-orchestra/fs/src/abstract.spec.js b/@xen-orchestra/fs/src/abstract.spec.js index 21f9984b14a..0c0b5a08af0 100644 --- a/@xen-orchestra/fs/src/abstract.spec.js +++ b/@xen-orchestra/fs/src/abstract.spec.js @@ -1,8 +1,8 @@ /* eslint-env jest */ import { DEFAULT_ENCRYPTION_ALGORITHM, _getEncryptor } from './_encryptor' -import { getHandler } from '.' -import { pFromCallback, TimeoutError } from 'promise-toolbox' +import { Disposable, pFromCallback, TimeoutError } from 'promise-toolbox' +import { getSyncedHandler } from '.' 
import AbstractHandler from './abstract' import fs from 'fs-extra' import rimraf from 'rimraf' @@ -22,6 +22,8 @@ class TestHandler extends AbstractHandler { } } +const noop = Function.prototype + jest.useFakeTimers() describe('closeFile()', () => { @@ -109,31 +111,25 @@ describe('rmdir()', () => { }) describe('encryption', () => { - let handler, dir - + let dir beforeEach(async () => { dir = await pFromCallback(cb => tmp.dir(cb)) }) afterAll(async () => { - await handler?.forget() - handler = undefined - await pFromCallback(cb => rimraf(dir, cb)) }) - it('sync should create metadata if missing (not encrypted)', async () => { - handler = getHandler({ url: `file://${dir}` }) - await handler._checkMetadata() - expect(await fs.readdir(dir)).toEqual(['encryption.json', 'metadata.json']) + it('sync should NOT create metadata if missing (not encrypted)', async () => { + await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop) - const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8')) - expect(encryption.algorithm).toEqual('none') - expect(async () => JSON.parse(await fs.readFile(`${dir}/metadata.json`))).not.toThrowError() + expect(await fs.readdir(dir)).toEqual([]) }) it('sync should create metadata if missing (encrypted)', async () => { - handler = getHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"` }) - await handler._checkMetadata() + await Disposable.use( + getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"` }), + noop + ) expect(await fs.readdir(dir)).toEqual(['encryption.json', 'metadata.json']) @@ -144,11 +140,10 @@ describe('encryption', () => { }) it('sync should not modify existing metadata', async () => { - handler = getHandler({ url: `file://${dir}` }) await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "none"}`) await fs.writeFile(`${dir}/metadata.json`, `{"random": "NOTSORANDOM"}`) - await handler._checkMetadata() + await Disposable.use(await getSyncedHandler({ url: `file://${dir}` }), noop) const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8')) expect(encryption.algorithm).toEqual('none') @@ -157,34 +152,37 @@ describe('encryption', () => { }) it('should modify metadata if empty', async () => { - handler = getHandler({ url: `file://${dir}` }) - await handler._checkMetadata() - await handler.forget() - handler = getHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"` }) - await handler._checkMetadata() + await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop) + // nothing created without encryption + + await Disposable.use( + getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"` }), + noop + ) let encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8')) expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM) - await handler.forget() - handler = getHandler({ url: `file://${dir}` }) - await handler._checkMetadata() + + await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop) encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8')) expect(encryption.algorithm).toEqual('none') }) - it('sync should work with encrypted', async () => { - const encryptor = _getEncryptor(DEFAULT_ENCRYPTION_ALGORITHM, '73c1838d7d8a6088ca2317fb5f29cd91') + it( + 'sync should work with encrypted', + Disposable.wrap(async function* () { + const encryptor = _getEncryptor(DEFAULT_ENCRYPTION_ALGORITHM, 
'73c1838d7d8a6088ca2317fb5f29cd91') - await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "${DEFAULT_ENCRYPTION_ALGORITHM}"}`) - await fs.writeFile(`${dir}/metadata.json`, encryptor.encryptData(`{"random": "NOTSORANDOM"}`)) + await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "${DEFAULT_ENCRYPTION_ALGORITHM}"}`) + await fs.writeFile(`${dir}/metadata.json`, encryptor.encryptData(`{"random": "NOTSORANDOM"}`)) - handler = getHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` }) - await handler._checkMetadata() + const handler = yield getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` }) - const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8')) - expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM) - const metadata = JSON.parse(await handler.readFile(`./metadata.json`)) - expect(metadata.random).toEqual('NOTSORANDOM') - }) + const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8')) + expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM) + const metadata = JSON.parse(await handler.readFile(`./metadata.json`)) + expect(metadata.random).toEqual('NOTSORANDOM') + }) + ) it('sync should fail when changing key on non empty remote ', async () => { const encryptor = _getEncryptor(DEFAULT_ENCRYPTION_ALGORITHM, '73c1838d7d8a6088ca2317fb5f29cd91') @@ -193,13 +191,16 @@ describe('encryption', () => { await fs.writeFile(`${dir}/metadata.json`, encryptor.encryptData(`{"random": "NOTSORANDOM"}`)) // different key but empty remote => ok - handler = getHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"` }) - await expect(handler._checkMetadata()).resolves.not.toThrowError() + await Disposable.use( + getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"` }), + noop + ) - // rmote is now non empty : can't modify key anymore + // remote is now non empty : can't modify key anymore await fs.writeFile(`${dir}/nonempty.json`, 'content') - handler = getHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd10"` }) - await expect(handler._checkMetadata()).rejects.toThrowError() + await expect( + Disposable.use(getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd10"` }), noop) + ).rejects.toThrowError() }) it('sync should fail when changing algorithm', async () => { @@ -212,7 +213,8 @@ describe('encryption', () => { // remote is now non empty : can't modify key anymore await fs.writeFile(`${dir}/nonempty.json`, 'content') - handler = getHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` }) - await expect(handler._checkMetadata()).rejects.toThrowError() + await expect( + Disposable.use(getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` }), noop) + ).rejects.toThrowError() }) }) diff --git a/@xen-orchestra/fs/src/index.js b/@xen-orchestra/fs/src/index.js index dd0655ee6ad..5599b23e45c 100644 --- a/@xen-orchestra/fs/src/index.js +++ b/@xen-orchestra/fs/src/index.js @@ -5,6 +5,7 @@ import RemoteHandlerLocal from './local' import RemoteHandlerNfs from './nfs' import RemoteHandlerS3 from './s3' import RemoteHandlerSmb from './smb' +export { DEFAULT_ENCRYPTION_ALGORITHM, UNENCRYPTED_ALGORITHM, isLegacyEncryptionAlgorithm } from './_encryptor' const HANDLERS = { file: RemoteHandlerLocal, diff --git a/@xen-orchestra/mixins/Config.mjs b/@xen-orchestra/mixins/Config.mjs index 
970fdd99f39..7012dd3a806 100644 --- a/@xen-orchestra/mixins/Config.mjs +++ b/@xen-orchestra/mixins/Config.mjs @@ -58,7 +58,9 @@ export default class Config { // internal arg const processor = arguments.length > 2 ? arguments[2] : identity - let prev + // unique value to ensure first run even if the value is `undefined` + let prev = {} + const watcher = config => { try { const value = processor(niceGet(config, path)) diff --git a/@xen-orchestra/mixins/_parseBasicAuth.mjs b/@xen-orchestra/mixins/_parseBasicAuth.mjs index 91c0ca8b2c7..a44cee9bf3e 100644 --- a/@xen-orchestra/mixins/_parseBasicAuth.mjs +++ b/@xen-orchestra/mixins/_parseBasicAuth.mjs @@ -15,6 +15,8 @@ export function parseBasicAuth(header) { const i = credentials.indexOf(':') if (i === -1) { credentials = { token: credentials } + } else if (i === credentials.length - 1) { + credentials = { token: credentials.slice(0, i) } } else { // https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.1 credentials = { diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 6ea9b4d14a6..179a63a460b 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -8,12 +8,16 @@ > Users must be able to say: “Nice enhancement, I'm eager to test it” - [Backup/Encryption] Use `aes-256-gcm` instead of `aes-256-ccm` to mitigate [padding oracle attacks](https://en.wikipedia.org/wiki/Padding_oracle_attack) (PR [#6447](https://github.com/vatesfr/xen-orchestra/pull/6447)) +- [Settings/Remote] Display `lock` icon for encrypted remote and a warning if the remote uses a legacy encryption algorithm (PR [#6465](https://github.com/vatesfr/xen-orchestra/pull/6465)) +- `xo-server`'s logs can now be sent to an external Syslog server - [Delta Backup] Use [NBD](https://en.wikipedia.org/wiki/Network_block_device) to download disks (PR [#6461](https://github.com/vatesfr/xen-orchestra/pull/6461)) ### Bug fixes > Users must be able to say: “I had this issue, happy to know it's fixed” +- Really enable by default the embedded HTTP/HTTPS proxy + ### Packages to release > When modifying a package, add it here with its release type. @@ -32,11 +36,16 @@ - @vates/nbd-client major - @vates/otp major +- @vates/predicates minor - @vates/read-chunk patch +- @xen-orchestra/backups patch - @xen-orchestra/fs minor - @xen-orchestra/log minor +- vhd-cli patch +- vhd-lib patch - xo-remote-parser patch +- xo-server minor - xo-server-transport-nagios patch -- xo-web patch +- xo-web minor diff --git a/packages/vhd-cli/commands/check.js b/packages/vhd-cli/commands/check.js index 489a48afc1f..21c85241054 100644 --- a/packages/vhd-cli/commands/check.js +++ b/packages/vhd-cli/commands/check.js @@ -2,8 +2,9 @@ const { VhdFile, checkVhdChain } = require('vhd-lib') const getopts = require('getopts') -const { getHandler } = require('@xen-orchestra/fs') +const { getSyncedHandler } = require('@xen-orchestra/fs') const { resolve } = require('path') +const { Disposable } = require('promise-toolbox') const checkVhd = (handler, path) => new VhdFile(handler, path).readHeaderAndFooter() @@ -16,14 +17,14 @@ module.exports = async function check(rawArgs) { }) const check = chain ? 
checkVhdChain : checkVhd
-
-  const handler = getHandler({ url: 'file:///' })
-  for (const vhd of args) {
-    try {
-      await check(handler, resolve(vhd))
-      console.log('ok:', vhd)
-    } catch (error) {
-      console.error('nok:', vhd, error)
+  await Disposable.use(getSyncedHandler({ url: 'file:///' }), async handler => {
+    for (const vhd of args) {
+      try {
+        await check(handler, resolve(vhd))
+        console.log('ok:', vhd)
+      } catch (error) {
+        console.error('nok:', vhd, error)
+      }
     }
-  }
+  })
 }
diff --git a/packages/vhd-cli/commands/info.js b/packages/vhd-cli/commands/info.js
index e27c3e9b542..73b410b33f1 100644
--- a/packages/vhd-cli/commands/info.js
+++ b/packages/vhd-cli/commands/info.js
@@ -1,7 +1,7 @@
 'use strict'
 
 const { Constants, VhdFile } = require('vhd-lib')
-const { getHandler } = require('@xen-orchestra/fs')
+const { getSyncedHandler } = require('@xen-orchestra/fs')
 const { openVhd } = require('vhd-lib/openVhd')
 const { resolve } = require('path')
 const Disposable = require('promise-toolbox/Disposable')
@@ -91,11 +91,11 @@ async function showList(handler, paths) {
 }
 
 module.exports = async function info(args) {
-  const handler = getHandler({ url: 'file:///' })
-
-  if (args.length === 1) {
-    return showDetails(handler, args[0])
-  }
+  await Disposable.use(getSyncedHandler({ url: 'file:///' }), async handler => {
+    if (args.length === 1) {
+      return showDetails(handler, args[0])
+    }
 
-  return showList(handler, args)
+    return showList(handler, args)
+  })
 }
diff --git a/packages/vhd-cli/commands/merge.js b/packages/vhd-cli/commands/merge.js
index 8a4adf4d6b6..0a4e28fc0e1 100644
--- a/packages/vhd-cli/commands/merge.js
+++ b/packages/vhd-cli/commands/merge.js
@@ -1,28 +1,30 @@
 'use strict'
 
 const { Bar } = require('cli-progress')
-const { getHandler } = require('@xen-orchestra/fs')
+const { getSyncedHandler } = require('@xen-orchestra/fs')
 const { mergeVhdChain } = require('vhd-lib/merge')
 const { resolve } = require('path')
+const { Disposable } = require('promise-toolbox')
 
 module.exports = async function merge(args) {
   if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
     return `Usage: ${this.command} <child VHD> <parent VHD>`
   }
 
-  const handler = getHandler({ url: 'file:///' })
-  let bar
-  await mergeVhdChain(handler, [resolve(args[1]), resolve(args[0])], {
-    onProgress({ done, total }) {
-      if (bar === undefined) {
-        bar = new Bar({
-          format: 'merging [{bar}] {percentage}% | ETA: {eta}s | {value}/{total}',
-        })
-        bar.start(total, done)
-      } else {
-        bar.update(done)
-      }
-    },
+  await Disposable.use(getSyncedHandler({ url: 'file:///' }), async handler => {
+    let bar
+    await mergeVhdChain(handler, [resolve(args[1]), resolve(args[0])], {
+      onProgress({ done, total }) {
+        if (bar === undefined) {
+          bar = new Bar({
+            format: 'merging [{bar}] {percentage}% | ETA: {eta}s | {value}/{total}',
+          })
+          bar.start(total, done)
+        } else {
+          bar.update(done)
+        }
+      },
+    })
+    bar.stop()
   })
-  bar.stop()
 }
diff --git a/packages/vhd-cli/commands/synthetize.js b/packages/vhd-cli/commands/synthetize.js
index cf26c594b49..bcd902aa586 100644
--- a/packages/vhd-cli/commands/synthetize.js
+++ b/packages/vhd-cli/commands/synthetize.js
@@ -3,16 +3,17 @@
 const path = require('path')
 const { createSyntheticStream } = require('vhd-lib')
 const { createWriteStream } = require('fs')
-const { getHandler } = require('@xen-orchestra/fs')
+const { getSyncedHandler } = require('@xen-orchestra/fs')
+const { Disposable } = require('promise-toolbox')
 
 module.exports = async function synthetize(args) {
   if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
     return `Usage: ${this.command} <input VHD> <output VHD>`
   }
-
-  const handler = getHandler({ url: 'file:///' })
-  const stream = await createSyntheticStream(handler, path.resolve(args[0]))
-  return new Promise((resolve, reject) => {
-    stream.on('error', reject).pipe(createWriteStream(args[1]).on('error', reject).on('finish', resolve))
+  await Disposable.use(getSyncedHandler({ url: 'file:///' }), async handler => {
+    const stream = await createSyntheticStream(handler, path.resolve(args[0]))
+    return new Promise((resolve, reject) => {
+      stream.on('error', reject).pipe(createWriteStream(args[1]).on('error', reject).on('finish', resolve))
+    })
   })
 }
diff --git a/packages/vhd-lib/Vhd/VhdDirectory.integ.spec.js b/packages/vhd-lib/Vhd/VhdDirectory.integ.spec.js
index 27a0b4c9214..2266a1794ef 100644
--- a/packages/vhd-lib/Vhd/VhdDirectory.integ.spec.js
+++ b/packages/vhd-lib/Vhd/VhdDirectory.integ.spec.js
@@ -5,22 +5,28 @@
 const rimraf = require('rimraf')
 const tmp = require('tmp')
 const fs = require('fs-extra')
 
-const { getHandler, getSyncedHandler } = require('@xen-orchestra/fs')
+const { getSyncedHandler } = require('@xen-orchestra/fs')
 const { Disposable, pFromCallback } = require('promise-toolbox')
 const { openVhd, VhdDirectory } = require('../')
 const { createRandomFile, convertFromRawToVhd, convertToVhdDirectory } = require('../tests/utils')
 
 let tempDir = null
+let handler
+let disposeHandler
 
 jest.setTimeout(60000)
 
 beforeEach(async () => {
   tempDir = await pFromCallback(cb => tmp.dir(cb))
+  const d = await getSyncedHandler({ url: `file://${tempDir}` })
+  handler = d.value
+  disposeHandler = d.dispose
 })
 
 afterEach(async () => {
   await pFromCallback(cb => rimraf(tempDir, cb))
+  disposeHandler()
 })
 
 test('Can coalesce block', async () => {
@@ -45,12 +51,11 @@ test('Can coalesce block', async () => {
   await convertToVhdDirectory(childRawDirectoryName, childDirectoryFileName, childDirectoryName)
 
   await Disposable.use(async function* () {
-    const handler = getHandler({ url: 'file://' })
-    const parentVhd = yield openVhd(handler, parentDirectoryName, { flags: 'w' })
+    const parentVhd = yield openVhd(handler, 'parent.dir.vhd', { flags: 'w' })
     await parentVhd.readBlockAllocationTable()
-    const childFileVhd = yield openVhd(handler, childFileName)
+    const childFileVhd = yield openVhd(handler, 'childFile.vhd')
     await childFileVhd.readBlockAllocationTable()
-    const childDirectoryVhd = yield openVhd(handler, childDirectoryName)
+    const childDirectoryVhd = yield openVhd(handler, 'childDir.vhd')
     await childDirectoryVhd.readBlockAllocationTable()
 
     let childBlockData = (await childDirectoryVhd.readBlock(0)).data
@@ -83,7 +88,6 @@ test('compressed blocks and metadata works', async () => {
   await createRandomFile(rawFileName, initalSize)
   await convertFromRawToVhd(rawFileName, vhdName)
   await Disposable.use(async function* () {
-    const handler = yield getSyncedHandler({ url: `file://${tempDir}` })
     const vhd = yield openVhd(handler, 'parent.vhd')
     await vhd.readBlockAllocationTable()
     const compressedVhd = yield VhdDirectory.create(handler, 'compressed.vhd', { compression: 'gzip' })
diff --git a/packages/vhd-lib/Vhd/VhdFile.integ.spec.js b/packages/vhd-lib/Vhd/VhdFile.integ.spec.js
index ceb12cf7f22..763f9414fb9 100644
--- a/packages/vhd-lib/Vhd/VhdFile.integ.spec.js
+++ b/packages/vhd-lib/Vhd/VhdFile.integ.spec.js
@@ -7,7 +7,7 @@ const fs = require('fs-extra')
 const getStream = require('get-stream')
 const rimraf = require('rimraf')
 const tmp = require('tmp')
-const { getHandler } = require('@xen-orchestra/fs')
+const { 
getSyncedHandler } = require('@xen-orchestra/fs') const { Disposable, pFromCallback } = require('promise-toolbox') const { randomBytes } = require('crypto') @@ -24,15 +24,22 @@ const { } = require('../tests/utils') let tempDir = null +let handler +let disposeHandler jest.setTimeout(60000) beforeEach(async () => { tempDir = await pFromCallback(cb => tmp.dir(cb)) + + const d = await getSyncedHandler({ url: `file://${tempDir}` }) + handler = d.value + disposeHandler = d.dispose }) afterEach(async () => { await pFromCallback(cb => rimraf(tempDir, cb)) + disposeHandler() }) test('respect the checkSecondFooter flag', async () => { @@ -42,8 +49,6 @@ test('respect the checkSecondFooter flag', async () => { const vhdFileName = `${tempDir}/randomfile.vhd` await convertFromRawToVhd(rawFileName, vhdFileName) - const handler = getHandler({ url: `file://${tempDir}` }) - const size = await handler.getSize('randomfile.vhd') const fd = await handler.openFile('randomfile.vhd', 'r+') const buffer = Buffer.alloc(512, 0) @@ -64,9 +69,8 @@ test('blocks can be moved', async () => { await createRandomFile(rawFileName, initalSize) const vhdFileName = `${tempDir}/randomfile.vhd` await convertFromRawToVhd(rawFileName, vhdFileName) - const handler = getHandler({ url: 'file://' }) - const originalSize = await handler.getSize(rawFileName) - const newVhd = new VhdFile(handler, vhdFileName) + const originalSize = await handler.getSize('randomfile') + const newVhd = new VhdFile(handler, 'randomfile.vhd') await newVhd.readHeaderAndFooter() await newVhd.readBlockAllocationTable() await newVhd._freeFirstBlockSpace(8000000) @@ -79,8 +83,7 @@ test('the BAT MSB is not used for sign', async () => { const randomBuffer = await pFromCallback(cb => randomBytes(SECTOR_SIZE, cb)) const emptyFileName = `${tempDir}/empty.vhd` await execa('qemu-img', ['create', '-fvpc', emptyFileName, '1.8T']) - const handler = getHandler({ url: 'file://' }) - const vhd = new VhdFile(handler, emptyFileName) + const vhd = new VhdFile(handler, 'empty.vhd') await vhd.readHeaderAndFooter() await vhd.readBlockAllocationTable() // we want the bit 31 to be on, to prove it's not been used for sign @@ -98,7 +101,7 @@ test('the BAT MSB is not used for sign', async () => { const recoveredFileName = `${tempDir}/recovered` const recoveredFile = await fs.open(recoveredFileName, 'w') try { - const vhd2 = new VhdFile(handler, emptyFileName) + const vhd2 = new VhdFile(handler, 'empty.vhd') await vhd2.readHeaderAndFooter() await vhd2.readBlockAllocationTable() for (let i = 0; i < vhd.header.maxTableEntries; i++) { @@ -126,9 +129,8 @@ test('writeData on empty file', async () => { await createRandomFile(rawFileName, mbOfRandom) await execa('qemu-img', ['create', '-fvpc', emptyFileName, mbOfRandom + 'M']) const randomData = await fs.readFile(rawFileName) - const handler = getHandler({ url: 'file://' }) - const originalSize = await handler.getSize(rawFileName) - const newVhd = new VhdFile(handler, emptyFileName) + const originalSize = await handler.getSize('randomfile') + const newVhd = new VhdFile(handler, 'empty.vhd') await newVhd.readHeaderAndFooter() await newVhd.readBlockAllocationTable() await newVhd.writeData(0, randomData) @@ -145,9 +147,8 @@ test('writeData in 2 non-overlaping operations', async () => { await createRandomFile(rawFileName, mbOfRandom) await execa('qemu-img', ['create', '-fvpc', emptyFileName, mbOfRandom + 'M']) const randomData = await fs.readFile(rawFileName) - const handler = getHandler({ url: 'file://' }) - const originalSize = await 
handler.getSize(rawFileName) - const newVhd = new VhdFile(handler, emptyFileName) + const originalSize = await handler.getSize('randomfile') + const newVhd = new VhdFile(handler, 'empty.vhd') await newVhd.readHeaderAndFooter() await newVhd.readBlockAllocationTable() const splitPointSectors = 2 @@ -165,9 +166,8 @@ test('writeData in 2 overlaping operations', async () => { await createRandomFile(rawFileName, mbOfRandom) await execa('qemu-img', ['create', '-fvpc', emptyFileName, mbOfRandom + 'M']) const randomData = await fs.readFile(rawFileName) - const handler = getHandler({ url: 'file://' }) - const originalSize = await handler.getSize(rawFileName) - const newVhd = new VhdFile(handler, emptyFileName) + const originalSize = await handler.getSize('randomfile') + const newVhd = new VhdFile(handler, 'empty.vhd') await newVhd.readHeaderAndFooter() await newVhd.readBlockAllocationTable() const endFirstWrite = 3 @@ -185,9 +185,8 @@ test('BAT can be extended and blocks moved', async () => { const vhdFileName = `${tempDir}/randomfile.vhd` await createRandomFile(rawFileName, initalSize) await convertFromRawToVhd(rawFileName, vhdFileName) - const handler = getHandler({ url: 'file://' }) - const originalSize = await handler.getSize(rawFileName) - const newVhd = new VhdFile(handler, vhdFileName) + const originalSize = await handler.getSize('randomfile') + const newVhd = new VhdFile(handler, 'randomfile.vhd') await newVhd.readHeaderAndFooter() await newVhd.readBlockAllocationTable() await newVhd.ensureBatSize(2000) @@ -214,12 +213,11 @@ test('Can coalesce block', async () => { await convertToVhdDirectory(childRawDirectoryName, childDirectoryFileName, childDirectoryName) await Disposable.use(async function* () { - const handler = getHandler({ url: 'file://' }) - const parentVhd = yield openVhd(handler, parentFileName, { flags: 'r+' }) + const parentVhd = yield openVhd(handler, 'parent.vhd', { flags: 'r+' }) await parentVhd.readBlockAllocationTable() - const childFileVhd = yield openVhd(handler, childFileName) + const childFileVhd = yield openVhd(handler, 'childFile.vhd') await childFileVhd.readBlockAllocationTable() - const childDirectoryVhd = yield openVhd(handler, childDirectoryName) + const childDirectoryVhd = yield openVhd(handler, 'childDir.vhd') await childDirectoryVhd.readBlockAllocationTable() await parentVhd.mergeBlock(childFileVhd, 0) diff --git a/packages/vhd-lib/Vhd/VhdFile.js b/packages/vhd-lib/Vhd/VhdFile.js index d32471d4e15..37641918110 100644 --- a/packages/vhd-lib/Vhd/VhdFile.js +++ b/packages/vhd-lib/Vhd/VhdFile.js @@ -83,7 +83,9 @@ exports.VhdFile = class VhdFile extends VhdAbstract { } static async open(handler, path, { flags, checkSecondFooter = true } = {}) { - assert(!handler.isEncrypted, `VHDFile implementation is not compatible with encrypted remote`) + if (handler.isEncrypted) { + throw new Error(`VHDFile implementation is not compatible with encrypted remote`) + } const fd = await handler.openFile(path, flags ?? 
'r+') const vhd = new VhdFile(handler, fd) // opening a file for reading does not trigger EISDIR as long as we don't really read from it: diff --git a/packages/vhd-lib/merge.integ.spec.js b/packages/vhd-lib/merge.integ.spec.js index f4c34fbf577..147d9ed96ef 100644 --- a/packages/vhd-lib/merge.integ.spec.js +++ b/packages/vhd-lib/merge.integ.spec.js @@ -5,7 +5,7 @@ const fs = require('fs-extra') const rimraf = require('rimraf') const tmp = require('tmp') -const { getHandler } = require('@xen-orchestra/fs') +const { getSyncedHandler } = require('@xen-orchestra/fs') const { pFromCallback } = require('promise-toolbox') const { VhdFile, chainVhd } = require('./index') @@ -14,15 +14,21 @@ const { _cleanupVhds: cleanupVhds, mergeVhdChain } = require('./merge') const { checkFile, createRandomFile, convertFromRawToVhd } = require('./tests/utils') let tempDir = null - +let handler +let disposeHandler jest.setTimeout(60000) beforeEach(async () => { tempDir = await pFromCallback(cb => tmp.dir(cb)) + + const d = await getSyncedHandler({ url: `file://${tempDir}` }) + handler = d.value + disposeHandler = d.dispose }) afterEach(async () => { await pFromCallback(cb => rimraf(tempDir, cb)) + disposeHandler() }) test('merge works in normal cases', async () => { @@ -32,7 +38,6 @@ test('merge works in normal cases', async () => { const childRandomFileName = `small_randomfile` const parentFileName = `parent.vhd` const child1FileName = `child1.vhd` - const handler = getHandler({ url: `file://${tempDir}` }) await createRandomFile(`${tempDir}/${parentRandomFileName}`, mbOfFather) await convertFromRawToVhd(`${tempDir}/${parentRandomFileName}`, `${tempDir}/${parentFileName}`) @@ -70,7 +75,6 @@ test('it can resume a simple merge ', async () => { const mbOfChildren = 4 const parentRandomFileName = `${tempDir}/randomfile` const childRandomFileName = `${tempDir}/small_randomfile` - const handler = getHandler({ url: `file://${tempDir}` }) await createRandomFile(`${tempDir}/randomfile`, mbOfFather) await convertFromRawToVhd(`${tempDir}/randomfile`, `${tempDir}/parent.vhd`) @@ -169,29 +173,28 @@ test('it can resume a multiple merge ', async () => { const parentFileName = `${tempDir}/parent.vhd` const childFileName = `${tempDir}/child.vhd` const grandChildFileName = `${tempDir}/grandchild.vhd` - const handler = getHandler({ url: 'file://' }) await createRandomFile(parentRandomFileName, mbOfFather) await convertFromRawToVhd(parentRandomFileName, parentFileName) await createRandomFile(childRandomFileName, mbOfChildren) await convertFromRawToVhd(childRandomFileName, childFileName) - await chainVhd(handler, parentFileName, handler, childFileName, true) + await chainVhd(handler, 'parent.vhd', handler, 'child.vhd', true) await createRandomFile(grandChildRandomFileName, mbOfGrandChildren) await convertFromRawToVhd(grandChildRandomFileName, grandChildFileName) - await chainVhd(handler, childFileName, handler, grandChildFileName, true) + await chainVhd(handler, 'child.vhd', handler, 'grandchild.vhd', true) - const parentVhd = new VhdFile(handler, parentFileName) + const parentVhd = new VhdFile(handler, 'parent.vhd') await parentVhd.readHeaderAndFooter() - const childVhd = new VhdFile(handler, childFileName) + const childVhd = new VhdFile(handler, 'child.vhd') await childVhd.readHeaderAndFooter() - const grandChildVhd = new VhdFile(handler, grandChildFileName) + const grandChildVhd = new VhdFile(handler, 'grandchild.vhd') await grandChildVhd.readHeaderAndFooter() await handler.writeFile( - `${tempDir}/.parent.vhd.merge.json`, +
`.parent.vhd.merge.json`, JSON.stringify({ parent: { header: parentVhd.header.checksum, @@ -205,12 +208,12 @@ test('it can resume a multiple merge ', async () => { // should fail since the merge state file has only data of parent and child await expect( - async () => await mergeVhdChain(handler, [parentFileName, childFileName, grandChildFileName]) + async () => await mergeVhdChain(handler, ['parent.vhd', 'child.vhd', 'grandchild.vhd']) ).rejects.toThrow() // merge - await handler.unlink(`${tempDir}/.parent.vhd.merge.json`) + await handler.unlink(`.parent.vhd.merge.json`) await handler.writeFile( - `${tempDir}/.parent.vhd.merge.json`, + `.parent.vhd.merge.json`, JSON.stringify({ parent: { header: parentVhd.header.checksum, @@ -219,11 +222,11 @@ test('it can resume a multiple merge ', async () => { header: grandChildVhd.header.checksum, }, currentBlock: 1, - childPath: [childVhd, grandChildVhd], + childPath: ['child.vhd', 'grandchild.vhd'], }) ) // it should succeed - await mergeVhdChain(handler, [parentFileName, childFileName, grandChildFileName]) + await mergeVhdChain(handler, ['parent.vhd', 'child.vhd', 'grandchild.vhd']) }) test('it merge multiple child in one pass ', async () => { @@ -236,25 +239,25 @@ test('it merge multiple child in one pass ', async () => { const parentFileName = `${tempDir}/parent.vhd` const childFileName = `${tempDir}/child.vhd` const grandChildFileName = `${tempDir}/grandchild.vhd` - const handler = getHandler({ url: 'file://' }) + await createRandomFile(parentRandomFileName, mbOfFather) await convertFromRawToVhd(parentRandomFileName, parentFileName) await createRandomFile(childRandomFileName, mbOfChildren) await convertFromRawToVhd(childRandomFileName, childFileName) - await chainVhd(handler, parentFileName, handler, childFileName, true) + await chainVhd(handler, 'parent.vhd', handler, 'child.vhd', true) await createRandomFile(grandChildRandomFileName, mbOfGrandChildren) await convertFromRawToVhd(grandChildRandomFileName, grandChildFileName) - await chainVhd(handler, childFileName, handler, grandChildFileName, true) + await chainVhd(handler, 'child.vhd', handler, 'grandchild.vhd', true) // merge - await mergeVhdChain(handler, [parentFileName, childFileName, grandChildFileName]) + await mergeVhdChain(handler, ['parent.vhd', 'child.vhd', 'grandchild.vhd']) // check that vhd is still valid await checkFile(grandChildFileName) - const parentVhd = new VhdFile(handler, grandChildFileName) + const parentVhd = new VhdFile(handler, 'grandchild.vhd') await parentVhd.readHeaderAndFooter() await parentVhd.readBlockAllocationTable() @@ -277,8 +280,6 @@ test('it merge multiple child in one pass ', async () => { }) test('it cleans vhd mergedfiles', async () => { - const handler = getHandler({ url: `file://${tempDir}` }) - await handler.writeFile('parent', 'parentData') await handler.writeFile('child1', 'child1Data') await handler.writeFile('child2', 'child2Data') diff --git a/packages/vhd-lib/openVhd.integ.spec.js b/packages/vhd-lib/openVhd.integ.spec.js index f6b84102cbe..2b6ba4646d9 100644 --- a/packages/vhd-lib/openVhd.integ.spec.js +++ b/packages/vhd-lib/openVhd.integ.spec.js @@ -4,6 +4,7 @@ const rimraf = require('rimraf') const tmp = require('tmp') +const fs = require('node:fs/promises') const { getSyncedHandler } = require('@xen-orchestra/fs') const { Disposable, pFromCallback } = require('promise-toolbox') @@ -31,13 +32,13 @@ test('It opens a vhd file ( alias or not)', async () => { const vhdFileName = `${tempDir}/randomfile.vhd` await convertFromRawToVhd(rawFileName, 
vhdFileName) await Disposable.use(async function* () { - const handler = yield getSyncedHandler({ url: 'file://' }) - const vhd = yield openVhd(handler, vhdFileName) + const handler = yield getSyncedHandler({ url: `file://${tempDir}` }) + const vhd = yield openVhd(handler, 'randomfile.vhd') expect(vhd.header.cookie).toEqual('cxsparse') expect(vhd.footer.cookie).toEqual('conectix') - const aliasFileName = `${tempDir}/out.alias.vhd` - await VhdAbstract.createAlias(handler, aliasFileName, vhdFileName) + const aliasFileName = `out.alias.vhd` + await VhdAbstract.createAlias(handler, aliasFileName, 'randomfile.vhd') const alias = yield openVhd(handler, aliasFileName) expect(alias.header.cookie).toEqual('cxsparse') expect(alias.footer.cookie).toEqual('conectix') @@ -50,15 +51,77 @@ test('It opens a vhd directory', async () => { await createRandomVhdDirectory(vhdDirectory, initalSize) await Disposable.use(async function* () { - const handler = yield getSyncedHandler({ url: 'file://' }) - const vhd = yield openVhd(handler, vhdDirectory) + const handler = yield getSyncedHandler({ url: `file://${tempDir}` }) + const vhd = yield openVhd(handler, 'randomfile.dir') expect(vhd.header.cookie).toEqual('cxsparse') expect(vhd.footer.cookie).toEqual('conectix') - const aliasFileName = `${tempDir}/out.alias.vhd` - await VhdAbstract.createAlias(handler, aliasFileName, vhdDirectory) + const aliasFileName = `out.alias.vhd` + await VhdAbstract.createAlias(handler, aliasFileName, 'randomfile.dir') const alias = yield openVhd(handler, aliasFileName) expect(alias.header.cookie).toEqual('cxsparse') expect(alias.footer.cookie).toEqual('conectix') }) }) + +test('It fails correctly when opening a broken vhd', async () => { + const initalSize = 4 + + // empty file + await expect( + Disposable.use(async function* () { + const handler = yield getSyncedHandler({ url: `file://${tempDir}` }) + yield openVhd(handler, 'randomfile.vhd') + }) + ).rejects.toThrow() + + const rawFileName = `${tempDir}/randomfile.vhd` + await createRandomFile(rawFileName, initalSize) + // broken file + await expect( + Disposable.use(async function* () { + const handler = yield getSyncedHandler({ url: `file://${tempDir}` }) + yield openVhd(handler, 'randomfile.vhd') + }) + ).rejects.toThrow() + + // empty dir + await fs.mkdir(`${tempDir}/dir.vhd`) + await expect( + Disposable.use(async function* () { + const handler = yield getSyncedHandler({ url: `file://${tempDir}` }) + const vhd = yield openVhd(handler, 'dir.vhd') + await vhd.readBlockAllocationTable() + }) + ).rejects.toThrow() + // dir with missing parts + await createRandomVhdDirectory(`${tempDir}/dir.vhd`, initalSize) + + const targets = ['header', 'footer', 'bat'] + for (const target of targets) { + await fs.rename(`${tempDir}/dir.vhd/${target}`, `${tempDir}/dir.vhd/moved`) + await expect( + Disposable.use(async function* () { + const handler = yield getSyncedHandler({ url: `file://${tempDir}` }) + const vhd = yield openVhd(handler, 'dir.vhd') + await vhd.readBlockAllocationTable() + }) + ).rejects.toThrow() + await fs.rename(`${tempDir}/dir.vhd/moved`, `${tempDir}/dir.vhd/${target}`) + } +}) + +test('It fails correctly when opening a vhdfile on an encrypted remote', async () => { + const initalSize = 4 + const rawFileName = `${tempDir}/randomfile.vhd` + await expect( + Disposable.use(async function* () { + const handler = yield getSyncedHandler({ + url: `file://${tempDir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"`, + }) + + await createRandomFile(rawFileName, initalSize) + yield
openVhd(handler, 'randomfile.vhd') + }) + ).rejects.toThrow() +}) diff --git a/packages/vhd-lib/openVhd.js b/packages/vhd-lib/openVhd.js index ce7632b9b2c..8852cbafe46 100644 --- a/packages/vhd-lib/openVhd.js +++ b/packages/vhd-lib/openVhd.js @@ -9,8 +9,7 @@ exports.openVhd = async function openVhd(handler, path, opts) { try { return await VhdFile.open(handler, resolved, opts) } catch (e) { - // if the remote is encrypted, trying to open a VhdFile will throw an assertion error before checking if the path is a directory, therefore we should try to open a VhdDirectory anyway. - if (e.code !== 'EISDIR' && e.code !== 'ERR_ASSERTION') { + if (e.code !== 'EISDIR') { throw e } return await VhdDirectory.open(handler, resolved, opts) diff --git a/packages/xo-server/config.toml b/packages/xo-server/config.toml index 6546df2af2c..69e2f0b8fd9 100644 --- a/packages/xo-server/config.toml +++ b/packages/xo-server/config.toml @@ -140,6 +140,8 @@ port = 80 # Display all logs with level >=, regardless of their namespace level = 'info' +[logs.transport.console] + [plugins] [remoteOptions] diff --git a/packages/xo-server/package.json b/packages/xo-server/package.json index 5c4a4dfa663..158cfe9b314 100644 --- a/packages/xo-server/package.json +++ b/packages/xo-server/package.json @@ -117,9 +117,11 @@ "serve-static": "^1.13.1", "set-cookie-parser": "^2.3.5", "source-map-support": "^0.5.16", + "split-log": "^1.0.12", "split2": "^4.1.0", "stoppable": "^1.0.5", "subleveldown": "^6.0.1", + "syslog-client": "^1.1.1", "tar-stream": "^2.0.1", "tmp": "^0.2.1", "unzipper": "^0.10.5", diff --git a/packages/xo-server/sample.config.toml b/packages/xo-server/sample.config.toml index e37a5ca26a2..d404301e368 100644 --- a/packages/xo-server/sample.config.toml +++ b/packages/xo-server/sample.config.toml @@ -145,6 +145,12 @@ port = 80 #===================================================================== +# Uncomment this section to export the logs to an external syslog server +#[logs.transport.syslog] +#target = 'tcp://syslog.company.lan:514' + +#===================================================================== + # Connection to the Redis server.
[redis] # Unix sockets can be used diff --git a/packages/xo-server/src/xo-mixins/logs/index.mjs b/packages/xo-server/src/xo-mixins/logs/index.mjs index 3a1cbd7a941..cd665cc3655 100644 --- a/packages/xo-server/src/xo-mixins/logs/index.mjs +++ b/packages/xo-server/src/xo-mixins/logs/index.mjs @@ -1,4 +1,3 @@ -import transportConsole from '@xen-orchestra/log/transports/console.js' import { configure } from '@xen-orchestra/log/configure.js' import { defer, fromEvent } from 'promise-toolbox' @@ -10,15 +9,20 @@ export default class Logs { app.hooks.on('clean', () => this._gc()) - const defaultTransport = transportConsole() - app.config.watch('logs', ({ filter, level, transport = defaultTransport }) => { - configure([ - { - filter: [process.env.DEBUG, filter], - level, - transport, - }, - ]) + app.config.watch('logs', ({ filter, level, transport: transportsObject }) => { + const transports = [] + for (const id of Object.keys(transportsObject)) { + const { disabled = false, ...transport } = transportsObject[id] + if (!disabled) { + transports.push({ type: id, ...transport }) + } + } + + configure({ + filter: [process.env.DEBUG, filter], + level, + transport: transports, + }) }) } diff --git a/packages/xo-server/src/xo-mixins/remotes.mjs b/packages/xo-server/src/xo-mixins/remotes.mjs index 2c666afdde1..37df84cb9b7 100644 --- a/packages/xo-server/src/xo-mixins/remotes.mjs +++ b/packages/xo-server/src/xo-mixins/remotes.mjs @@ -1,6 +1,11 @@ import asyncMapSettled from '@xen-orchestra/async-map/legacy.js' import { format, parse } from 'xo-remote-parser' -import { getHandler } from '@xen-orchestra/fs' +import { + DEFAULT_ENCRYPTION_ALGORITHM, + getHandler, + isLegacyEncryptionAlgorithm, + UNENCRYPTED_ALGORITHM, +} from '@xen-orchestra/fs' import { ignoreErrors, timeout } from 'promise-toolbox' import { noSuchObject } from 'xo-common/api-errors.js' import { synchronized } from 'decorator-synchronized' @@ -124,6 +129,17 @@ export default class { return } + let encryption + + if (this._handlers[remote.id] !== undefined) { + const algorithm = this._handlers[remote.id]._encryptor?.algorithm ?? UNENCRYPTED_ALGORITHM + encryption = { + algorithm, + isLegacy: isLegacyEncryptionAlgorithm(algorithm), + recommendedAlgorithm: DEFAULT_ENCRYPTION_ALGORITHM, + } + } + const promise = remote.proxy !== undefined ? this._app.callProxyMethod(remote.proxy, 'remote.getInfo', { @@ -134,7 +150,10 @@ try { await timeout.call( promise.then(info => { - remotesInfo[remote.id] = info + remotesInfo[remote.id] = { + ...info, + encryption, + } }), 5e3 ) diff --git a/packages/xo-web/src/common/intl/messages.js b/packages/xo-web/src/common/intl/messages.js index 21af75bbedf..2807b7c25a9 100644 --- a/packages/xo-web/src/common/intl/messages.js +++ b/packages/xo-web/src/common/intl/messages.js @@ -611,6 +611,10 @@ const messages = { remoteEncryptionKey: 'Encrypt all new data sent to this remote', remoteEncryptionKeyStorageLocation: "You won't be able to get your data back if you lose the encryption key. The encryption key is saved in the XO config backups, which should be secured correctly.
Be careful, if you saved it on an encrypted remote, then you won't be able to access it without the remote encryption key.", + encryption: 'Encryption', + remoteEncryptionLegacy: + 'A legacy encryption algorithm is used ({algorithm}), please create a new remote with the recommended algorithm {recommendedAlgorithm}', + // ------ New Storage ----- newSr: 'New SR', diff --git a/packages/xo-web/src/xo-app/settings/remotes/index.js b/packages/xo-web/src/xo-app/settings/remotes/index.js index d3bc93383ee..d575c039a3b 100644 --- a/packages/xo-web/src/xo-app/settings/remotes/index.js +++ b/packages/xo-web/src/xo-app/settings/remotes/index.js @@ -124,6 +124,36 @@ const COLUMN_PROXY = { name: _('proxy'), } +const COLUMN_ENCRYPTION = { + itemRenderer: remote => { + // remote.info?.encryption undefined means that the remote is not enabled and synced + // we don't have the algorithm used at this step + if (remote.info?.encryption === undefined) { + return remote.encryptionKey !== undefined ? : null + } else { + // remote enabled and not encrypted + if (remote.info.encryption.algorithm === 'none') { + return null + } + const { algorithm, isLegacy, recommendedAlgorithm } = remote.info.encryption + return ( + + + + + + {isLegacy && ( + + + + )} + + ) + } + }, + name: _('encryption'), +} + const fixRemoteUrl = remote => editRemote(remote, { url: format(remote) }) const COLUMNS_LOCAL_REMOTE = [ COLUMN_NAME, @@ -141,6 +171,7 @@ const COLUMNS_LOCAL_REMOTE = [ }, COLUMN_STATE, COLUMN_DISK, + COLUMN_ENCRYPTION, COLUMN_SPEED, COLUMN_PROXY, ] @@ -198,6 +229,7 @@ const COLUMNS_NFS_REMOTE = [ }, COLUMN_STATE, COLUMN_DISK, + COLUMN_ENCRYPTION, COLUMN_SPEED, COLUMN_PROXY, ] @@ -245,6 +277,7 @@ const COLUMNS_SMB_REMOTE = [ ), name: _('remoteAuth'), }, + COLUMN_ENCRYPTION, COLUMN_SPEED, COLUMN_PROXY, ] @@ -300,6 +333,7 @@ const COLUMNS_S3_REMOTE = [ ), name: 'Key', }, + COLUMN_ENCRYPTION, COLUMN_SPEED, COLUMN_PROXY, ] diff --git a/scripts/npmignore b/scripts/npmignore index adf2af52e34..f942830a4b3 100644 --- a/scripts/npmignore +++ b/scripts/npmignore @@ -2,28 +2,30 @@ /benchmark/ /benchmarks/ -*.bench.js -*.bench.js.map +*.bench.{,c,m}js +*.bench.{,c,m}js.map /coverage/ /examples/ -example.js -example.js.map -*.example.js -*.example.js.map +example.{,c,m}js +example.{,c,m}js.map +*.example.{,c,m}js +*.example.{,c,m}js.map /fixture/ /fixtures/ -*.fixture.js -*.fixture.js.map -*.fixtures.js -*.fixtures.js.map +*.fixture.{,c,m}js +*.fixture.{,c,m}js.map +*.fixtures.{,c,m}js +*.fixtures.{,c,m}js.map /test/ /tests/ -*.spec.js -*.spec.js.map +*.spec.{,c,m}js +*.spec.{,c,m}js.map +*.test.{,c,m}js +*.test.{,c,m}js.map __snapshots__/ diff --git a/yarn.lock b/yarn.lock index c55cdf4cf5d..1ea117bd42a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2492,20 +2492,34 @@ resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== -"@sinonjs/commons@^1.7.0": +"@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.3": version "1.8.3" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== dependencies: type-detect "4.0.8" -"@sinonjs/fake-timers@^9.1.2": +"@sinonjs/fake-timers@>=5", "@sinonjs/fake-timers@^9.1.2": version "9.1.2" resolved
"https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.2.tgz#4eaab737fab77332ab132d396a3c0d364bd0ea8c" integrity sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw== dependencies: "@sinonjs/commons" "^1.7.0" +"@sinonjs/samsam@^6.1.1": + version "6.1.1" + resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-6.1.1.tgz#627f7f4cbdb56e6419fa2c1a3e4751ce4f6a00b1" + integrity sha512-cZ7rKJTLiE7u7Wi/v9Hc2fs3Ucc3jrWeMgPHbbTCeVAB2S0wOBbYlkJVeNSL04i7fdhT8wIbDq1zhC/PXTD2SA== + dependencies: + "@sinonjs/commons" "^1.6.0" + lodash.get "^4.4.2" + type-detect "^4.0.8" + +"@sinonjs/text-encoding@^0.7.1": + version "0.7.2" + resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz#5981a8db18b56ba38ef0efb7d995b12aa7b51918" + integrity sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ== + "@szmarczak/http-timer@^1.1.2": version "1.1.2" resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" @@ -6934,6 +6948,11 @@ diff@^4.0.1, diff@^4.0.2: resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== +diff@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-5.1.0.tgz#bc52d298c5ea8df9194800224445ed43ffc87e40" + integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== + diffie-hellman@^5.0.0: version "5.0.3" resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" @@ -11613,6 +11632,11 @@ just-debounce@^1.0.0: resolved "https://registry.yarnpkg.com/just-debounce/-/just-debounce-1.1.0.tgz#2f81a3ad4121a76bc7cb45dbf704c0d76a8e5ddf" integrity sha512-qpcRocdkUmf+UTNBYx5w6dexX5J31AKK1OmPwH630a83DdVVUIngk55RSAiIGpQyoH0dlr872VHfPjnQnK1qDQ== +just-extend@^4.0.2: + version "4.2.1" + resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" + integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== + just-reduce-object@^1.0.3: version "1.2.1" resolved "https://registry.yarnpkg.com/just-reduce-object/-/just-reduce-object-1.2.1.tgz#92845dedc4c5da34df5e5ad6a4bf62f21fdc37f5" @@ -13095,6 +13119,17 @@ nice-try@^1.0.4: resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== +nise@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.1.tgz#ac4237e0d785ecfcb83e20f389185975da5c31f3" + integrity sha512-yr5kW2THW1AkxVmCnKEh4nbYkJdB3I7LUkiUgOvEkOp414mc2UMaHMA7pjq1nYowhdoJZGwEKGaQVbxfpWj10A== + dependencies: + "@sinonjs/commons" "^1.8.3" + "@sinonjs/fake-timers" ">=5" + "@sinonjs/text-encoding" "^0.7.1" + just-extend "^4.0.2" + path-to-regexp "^1.7.0" + no-case@^2.2.0: version "2.3.2" resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" @@ -14197,6 +14232,13 @@ path-to-regexp@0.1.7: resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" integrity 
sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== +path-to-regexp@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" + integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== + dependencies: + isarray "0.0.1" + path-to-regexp@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.1.tgz#d54934d6798eb9e5ef14e7af7962c945906918e5" @@ -16668,6 +16710,18 @@ sink-transform@^2.0.0: concat-stream "^1.4.8" readable-stream "^2.0.0" +sinon@^14.0.1: + version "14.0.1" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-14.0.1.tgz#9f02e13ad86b695c0c554525e3bf7f8245b31a9c" + integrity sha512-JhJ0jCiyBWVAHDS+YSjgEbDn7Wgz9iIjA1/RK+eseJN0vAAWIWiXBdrnb92ELPyjsfreCYntD1ORtLSfIrlvSQ== + dependencies: + "@sinonjs/commons" "^1.8.3" + "@sinonjs/fake-timers" "^9.1.2" + "@sinonjs/samsam" "^6.1.1" + diff "^5.0.0" + nise "^5.1.1" + supports-color "^7.2.0" + sisteransi@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" @@ -16948,6 +17002,14 @@ speedometer@~1.0.0: resolved "https://registry.yarnpkg.com/speedometer/-/speedometer-1.0.0.tgz#cd671cb06752c22bca3370e2f334440be4fc62e2" integrity sha512-lgxErLl/7A5+vgIIXsh9MbeukOaCb2axgQ+bKCdIE+ibNT4XNYGNCR1qFEGq6F+YDASXK3Fh/c5FgtZchFolxw== +split-log@^1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/split-log/-/split-log-1.0.12.tgz#f53216d86039fccc0b69241dc1929b51539be6f2" + integrity sha512-goseewD3PJ2ilS8Wg6Fse/I+c3bZ1tRvlU2iXYi6PYg/6mDbd41FlqNSr0E1pD9Enem3wRm/XanJrTzmiMnMuQ== + dependencies: + lodash "^4.17.4" + strftime "^0.10.0" + split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" @@ -17134,6 +17196,11 @@ stream-splicer@^2.0.0: inherits "^2.0.1" readable-stream "^2.0.2" +strftime@^0.10.0: + version "0.10.1" + resolved "https://registry.yarnpkg.com/strftime/-/strftime-0.10.1.tgz#108af1176a7d5252cfbddbdb2af044dfae538389" + integrity sha512-nVvH6JG8KlXFPC0f8lojLgEsPA18lRpLZ+RrJh/NkQV2tqOgZfbas8gcU8SFgnnqR3rWzZPYu6N2A3xzs/8rQg== + strict-event-emitter-types@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strict-event-emitter-types/-/strict-event-emitter-types-2.0.0.tgz#05e15549cb4da1694478a53543e4e2f4abcf277f" @@ -17207,6 +17274,18 @@ string.prototype.matchall@^4.0.7: regexp.prototype.flags "^1.4.1" side-channel "^1.0.4" +string.prototype.replaceall@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/string.prototype.replaceall/-/string.prototype.replaceall-1.0.6.tgz#566cba7c413713d0b1a85c5dba98b31f8db38196" + integrity sha512-OA8VDhE7ssNFlyoDXUHxw6V5cjgPrtosyJKqJX5i1P5tV9eUynsbhx1yz0g+Ye4fjFwAxhKLxt8GSRx2Aqc+Sw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + has-symbols "^1.0.2" + is-regex "^1.1.4" + string.prototype.trim@^1.2.1: version "1.2.6" resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.6.tgz#824960787db37a9e24711802ed0c1d1c0254f83e" @@ -17467,7 +17546,7 @@ supports-color@^6.1.0: dependencies: has-flag "^3.0.0" -supports-color@^7.0.0, supports-color@^7.1.0: +supports-color@^7.0.0, supports-color@^7.1.0, supports-color@^7.2.0: version "7.2.0" resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== @@ -17538,6 +17617,11 @@ syntax-error@^1.1.1: dependencies: acorn-node "^1.2.0" +syslog-client@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/syslog-client/-/syslog-client-1.1.1.tgz#bdb28de3b5b7eb28a11352ec3eb78e55aed2ab6b" + integrity sha512-c3qKw8JzCuHt0mwrzKQr8eqOc3RB28HgOpFuwGMO3GLscVpfR+0ECevWLZq/yIJTbx3WTb3QXBFVpTFtKAPDrw== + tap-mocha-reporter@^5.0.3: version "5.0.3" resolved "https://registry.yarnpkg.com/tap-mocha-reporter/-/tap-mocha-reporter-5.0.3.tgz#3e261b2a43092ba8bc0cb67a89b33e283decee05" @@ -17691,6 +17775,14 @@ test-exclude@^6.0.0: glob "^7.1.4" minimatch "^3.0.4" +test@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/test/-/test-3.2.1.tgz#8876adae35e86c4bccbcfdf5b018493b1c9501be" + integrity sha512-D9eN4OxdhyYS3xHSsAh5A0e+UhaOPxeREwBHTknZHoVFd4TqnPtkVrQ7lIUATPgpO9vvGg1D+TyMckVmUyaEig== + dependencies: + minimist "^1.2.6" + string.prototype.replaceall "^1.0.6" + tether@^1.3.7: version "1.4.7" resolved "https://registry.yarnpkg.com/tether/-/tether-1.4.7.tgz#d56a818590d8fe72e387f77a67f93ab96d8e1fb2" @@ -18020,7 +18112,7 @@ type-check@~0.3.2: dependencies: prelude-ls "~1.1.2" -type-detect@4.0.8: +type-detect@4.0.8, type-detect@^4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==