diff --git a/src/json-crdt-patch/constants.ts b/src/json-crdt-patch/constants.ts index a5096df459..39762ab764 100644 --- a/src/json-crdt-patch/constants.ts +++ b/src/json-crdt-patch/constants.ts @@ -12,6 +12,14 @@ export const enum SESSION { */ SERVER = 1, + /** + * Use this session ID when you want to apply a patch globally, without + * attributing it to any specific user. For example, when the initial document + * is created, the same default patch can be applied on every client so that + * all clients start from an identical initial state. + */ + GLOBAL = 2, + /** Max allowed session ID, they are capped at 53-bits. */ MAX = 9007199254740991, } diff --git a/src/json-crdt/file/File.ts b/src/json-crdt/file/File.ts deleted file mode 100644 index 15dd480931..0000000000 --- a/src/json-crdt/file/File.ts +++ /dev/null @@ -1,220 +0,0 @@ -import {Model} from '../model'; -import {PatchLog} from '../history/PatchLog'; -import {printTree} from '../../util/print/printTree'; -import {decodeModel, decodeNdjsonComponents, decodePatch, decodeSeqCborComponents} from './util'; -import {Patch} from '../../json-crdt-patch'; -import {FileModelEncoding} from './constants'; -import type {encode as encodeCompact} from '../../json-crdt-patch/codec/compact/encode'; -import type {encode as encodeVerbose} from '../../json-crdt-patch/codec/verbose/encode'; -import type {CborEncoder} from '../../json-pack/cbor/CborEncoder'; -import type {JsonEncoder} from '../../json-pack/json/JsonEncoder'; -import type {Encoder as StructuralEncoderCompact} from '../codec/structural/compact/Encoder'; -import type {Encoder as StructuralEncoderVerbose} from '../codec/structural/verbose/Encoder'; -import type {Decoder as SidecarDecoder} from '../codec/sidecar/binary/Decoder'; -import type {Encoder as SidecarEncoder} from '../codec/sidecar/binary/Encoder'; -import type * as types from './types'; -import type {Printable} from '../../util/print/types'; - -export interface FileOptions { - jsonEncoder?: JsonEncoder; - cborEncoder?: CborEncoder; - structuralCompactEncoder?: StructuralEncoderCompact; - structuralVerboseEncoder?: StructuralEncoderVerbose; - sidecarEncoder?: SidecarEncoder; - sidecarDecoder?: SidecarDecoder; - patchCompactEncoder?: typeof encodeCompact; - patchVerboseEncoder?: typeof encodeVerbose; -} - -export class File implements Printable { - public static unserialize(components: types.FileReadSequence, options: FileOptions = {}): File { - const [view, metadata, model, history, ...frontier] = components; - const modelFormat = metadata[1]; - let decodedModel: Model | null = null; - if (model) { - const isSidecar = modelFormat === FileModelEncoding.SidecarBinary; - if (isSidecar) { - const decoder = options.sidecarDecoder; - if (!decoder) throw new Error('NO_SIDECAR_DECODER'); - if (!(model instanceof Uint8Array)) throw new Error('NOT_BLOB'); - decodedModel = decoder.decode(view, model); - } else { - decodedModel = decodeModel(model); - } - } - let log: PatchLog | null = null; - if (history) { - const [start, patches] = history; - if (start) { - log = new PatchLog(() => decodeModel(start)); - for (const patch of patches) log.end.applyPatch(decodePatch(patch)); - } - } - if (!log) throw new Error('NO_HISTORY'); - if (!decodedModel) decodedModel = log.replayToEnd(); - if (frontier.length) { - for (const patch of frontier) { - const patchDecoded = decodePatch(patch); - decodedModel.applyPatch(patchDecoded); - log.end.applyPatch(patchDecoded); - } - } - const file = new File(decodedModel, log); - return file; - } - - public static 
fromNdjson(blob: Uint8Array, options: FileOptions = {}): File { - const components = decodeNdjsonComponents(blob); - return File.unserialize(components as types.FileReadSequence, options); - } - - public static fromSeqCbor(blob: Uint8Array, options: FileOptions = {}): File { - const components = decodeSeqCborComponents(blob); - return File.unserialize(components as types.FileReadSequence, options); - } - - public static fromModel(model: Model, options: FileOptions = {}): File { - return new File(model, PatchLog.fromNewModel(model), options); - } - - constructor( - public readonly model: Model, - public readonly log: PatchLog, - protected readonly options: FileOptions = {}, - ) {} - - public apply(patch: Patch): void { - const id = patch.getId(); - if (!id) return; - this.model.applyPatch(patch); - this.log.end.applyPatch(patch); - } - - /** - * @todo Remove synchronization from here. Make `File` just responsible for - * serialization and deserialization. - */ - public sync(): () => void { - const {model, log} = this; - const api = model.api; - const autoflushUnsubscribe = api.autoFlush(); - const onPatchUnsubscribe = api.onPatch.listen((patch) => { - log.end.applyPatch(patch); - }); - const onFlushUnsubscribe = api.onFlush.listen((patch) => { - log.end.applyPatch(patch); - }); - return () => { - autoflushUnsubscribe(); - onPatchUnsubscribe(); - onFlushUnsubscribe(); - }; - } - - public serialize(params: types.FileSerializeParams = {}): types.FileWriteSequence { - if (params.noView && params.model === 'sidecar') throw new Error('SIDECAR_MODEL_WITHOUT_VIEW'); - const metadata: types.FileMetadata = [{}, FileModelEncoding.Auto]; - let model: Uint8Array | unknown | null = null; - const modelFormat = params.model ?? 'sidecar'; - switch (modelFormat) { - case 'sidecar': { - metadata[1] = FileModelEncoding.SidecarBinary; - const encoder = this.options.sidecarEncoder; - if (!encoder) throw new Error('NO_SIDECAR_ENCODER'); - const [, uint8] = encoder.encode(this.model); - model = uint8; - break; - } - case 'binary': { - model = this.model.toBinary(); - break; - } - case 'compact': { - const encoder = this.options.structuralCompactEncoder; - if (!encoder) throw new Error('NO_COMPACT_ENCODER'); - model = encoder.encode(this.model); - break; - } - case 'verbose': { - const encoder = this.options.structuralVerboseEncoder; - if (!encoder) throw new Error('NO_VERBOSE_ENCODER'); - model = encoder.encode(this.model); - break; - } - case 'none': { - model = null; - break; - } - default: - throw new Error(`Invalid model format: ${modelFormat}`); - } - const history: types.FileWriteSequenceHistory = [null, []]; - const patchFormat = params.history ?? 
'binary'; - switch (patchFormat) { - case 'binary': { - history[0] = this.log.start().toBinary(); - this.log.patches.forEach(({v}) => { - history[1].push(v.toBinary()); - }); - break; - } - case 'compact': { - const encoder = this.options.structuralCompactEncoder; - if (!encoder) throw new Error('NO_COMPACT_ENCODER'); - history[0] = encoder.encode(this.log.start()); - const encodeCompact = this.options.patchCompactEncoder; - if (!encodeCompact) throw new Error('NO_COMPACT_PATCH_ENCODER'); - const list = history[1]; - this.log.patches.forEach(({v}) => { - list.push(encodeCompact(v)); - }); - break; - } - case 'verbose': { - const encoder = this.options.structuralVerboseEncoder; - if (!encoder) throw new Error('NO_VERBOSE_ENCODER'); - history[0] = encoder.encode(this.log.start()); - const encodeVerbose = this.options.patchVerboseEncoder; - if (!encodeVerbose) throw new Error('NO_VERBOSE_PATCH_ENCODER'); - const list = history[1]; - this.log.patches.forEach(({v}) => { - list.push(encodeVerbose(v)); - }); - break; - } - case 'none': { - break; - } - default: - throw new Error(`Invalid history format: ${patchFormat}`); - } - return [params.noView ? null : this.model.view(), metadata, model, history]; - } - - public toBinary(params: types.FileEncodingParams): Uint8Array { - const sequence = this.serialize(params); - switch (params.format) { - case 'ndjson': { - const json = this.options.jsonEncoder; - if (!json) throw new Error('NO_JSON_ENCODER'); - for (const component of sequence) { - json.writeAny(component); - json.writer.u8('\n'.charCodeAt(0)); - } - return json.writer.flush(); - } - case 'seq.cbor': { - const cbor = this.options.cborEncoder; - if (!cbor) throw new Error('NO_CBOR_ENCODER'); - for (const component of sequence) cbor.writeAny(component); - return cbor.writer.flush(); - } - } - } - - // ---------------------------------------------------------------- Printable - - public toString(tab?: string) { - return `file` + printTree(tab, [(tab) => this.model.toString(tab), () => '', (tab) => this.log.toString(tab)]); - } -} diff --git a/src/json-crdt/file/__tests__/File.spec.ts b/src/json-crdt/file/__tests__/File.spec.ts deleted file mode 100644 index 7dd2015809..0000000000 --- a/src/json-crdt/file/__tests__/File.spec.ts +++ /dev/null @@ -1,159 +0,0 @@ -import {s} from '../../../json-crdt-patch'; -import {Model} from '../../model'; -import {File} from '../File'; -import {JsonDecoder} from '../../../json-pack/json/JsonDecoder'; -import {CborDecoder} from '../../../json-pack/cbor/CborDecoder'; -import {FileEncodingParams} from '../types'; -import {fileEncoders} from '../fileEncoders'; - -const setup = (view: unknown) => { - const model = Model.withServerClock(); - model.api.root(view); - const file = File.fromModel(model, fileEncoders); - return {model, file}; -}; - -test('can create File from new model', () => { - const model = Model.withServerClock().setSchema( - s.obj({ - foo: s.str('bar'), - }), - ); - const file = File.fromModel(model); - expect(file.log.start().view()).toBe(undefined); - expect(file.model.view()).toEqual({ - foo: 'bar', - }); - expect(file.log.start().clock.sid).toBe(file.model.clock.sid); -}); - -test.todo('patches are flushed and stored in memory'); -test.todo('can replay history'); - -describe('.toBinary()', () => { - describe('can read first value as view', () => { - test('.ndjson', () => { - const {file} = setup({foo: 'bar'}); - const blob = file.toBinary({format: 'ndjson', model: 'compact', history: 'compact'}); - const decoder = new JsonDecoder(); - const view 
= decoder.read(blob); - expect(view).toEqual({foo: 'bar'}); - }); - - test('.seq.cbor', () => { - const {file} = setup({foo: 'bar'}); - const blob = file.toBinary({format: 'seq.cbor'}); - const decoder = new CborDecoder(); - const view = decoder.read(blob); - expect(view).toEqual({foo: 'bar'}); - }); - }); - - describe('can decode from blob', () => { - test('.ndjson', () => { - const {file} = setup({foo: 'bar'}); - const blob = file.toBinary({format: 'ndjson', model: 'compact', history: 'compact'}); - const file2 = File.fromNdjson(blob); - expect(file2.model.view()).toEqual({foo: 'bar'}); - expect(file2.model !== file.model).toBe(true); - expect(file.log.start().view()).toEqual(undefined); - expect(file.log.replayToEnd().view()).toEqual({foo: 'bar'}); - }); - - test('.seq.cbor', () => { - const {file} = setup({foo: 'bar'}); - const blob = file.toBinary({format: 'seq.cbor', model: 'binary', history: 'binary'}); - const file2 = File.fromSeqCbor(blob); - expect(file2.model.view()).toEqual({foo: 'bar'}); - expect(file2.model !== file.model).toBe(true); - expect(file.log.start().view()).toEqual(undefined); - expect(file.log.replayToEnd().view()).toEqual({foo: 'bar'}); - }); - }); - - const assertEncoding = (file: File, params: FileEncodingParams) => { - const blob = file.toBinary(params); - // if (params.format === 'ndjson') console.log(Buffer.from(blob).toString('utf8')) - const file2 = - params.format === 'seq.cbor' ? File.fromSeqCbor(blob, fileEncoders) : File.fromNdjson(blob, fileEncoders); - expect(file2.model.view()).toEqual(file.model.view()); - expect(file2.model !== file.model).toBe(true); - expect(file2.log.start().view()).toEqual(undefined); - expect(file2.log.replayToEnd().view()).toEqual(file.model.view()); - expect(file2.log.patches.size()).toBe(file.log.patches.size()); - }; - - describe('can encode/decode all format combinations', () => { - const formats: FileEncodingParams['format'][] = ['ndjson', 'seq.cbor']; - const modelFormats: FileEncodingParams['model'][] = ['sidecar', 'binary', 'compact', 'verbose']; - const historyFormats: FileEncodingParams['history'][] = ['binary', 'compact', 'verbose']; - const noViews = [true, false]; - for (const format of formats) { - for (const model of modelFormats) { - for (const history of historyFormats) { - for (const noView of noViews) { - if (noView && model === 'sidecar') continue; - const params = {format, model, history, noView}; - test(JSON.stringify(params), () => { - const {file} = setup({foo: 'bar'}); - assertEncoding(file, params); - }); - } - } - } - } - }); -}); - -describe('.unserialize()', () => { - test('applies frontier', () => { - const {file, model} = setup({foo: 'bar'}); - const clone = model.clone(); - clone.api.obj([]).set({ - xyz: 123, - }); - const serialized = file.serialize({ - history: 'binary', - }); - serialized.push(clone.api.flush().toBinary()); - expect(file.model.view()).toEqual({foo: 'bar'}); - const file2 = File.unserialize(serialized, fileEncoders); - expect(file2.model.view()).toEqual({foo: 'bar', xyz: 123}); - }); -}); - -describe('.sync()', () => { - test('keeps track of local changes', async () => { - const {file, model} = setup({foo: 'bar'}); - file.sync(); - model.api.obj([]).set({x: 1}); - await Promise.resolve(); - expect(file.model.view()).toEqual({foo: 'bar', x: 1}); - expect(file.log.replayToEnd().view()).toEqual({foo: 'bar', x: 1}); - }); - - test('processes local transactions', async () => { - const {file, model} = setup({foo: 'bar'}); - file.sync(); - const logLength = file.log.patches.size(); 
- model.api.transaction(() => { - model.api.obj([]).set({x: 1}); - model.api.obj([]).set({y: 2}); - }); - expect(file.log.patches.size()).toBe(logLength + 1); - }); - - test('keeps track of remote changes', async () => { - const {file, model} = setup({foo: 'bar'}); - const clone = model.clone(); - file.sync(); - clone.api.obj([]).set({x: 1}); - expect(clone.view()).toEqual({foo: 'bar', x: 1}); - expect(file.model.view()).toEqual({foo: 'bar'}); - const patch = clone.api.flush(); - file.model.applyPatch(patch); - await Promise.resolve(); - expect(file.model.view()).toEqual({foo: 'bar', x: 1}); - expect(file.log.replayToEnd().view()).toEqual({foo: 'bar', x: 1}); - }); -}); diff --git a/src/json-crdt/file/fileEncoders.ts b/src/json-crdt/file/fileEncoders.ts deleted file mode 100644 index 7520b51999..0000000000 --- a/src/json-crdt/file/fileEncoders.ts +++ /dev/null @@ -1,23 +0,0 @@ -import {Writer} from '../../util/buffers/Writer'; -import {Encoder as SidecarEncoder} from '../codec/sidecar/binary/Encoder'; -import {Decoder as SidecarDecoder} from '../codec/sidecar/binary/Decoder'; -import {Encoder as StructuralEncoderCompact} from '../codec/structural/compact/Encoder'; -import {Encoder as StructuralEncoderVerbose} from '../codec/structural/verbose/Encoder'; -import {encode as encodeCompact} from '../../json-crdt-patch/codec/compact/encode'; -import {encode as encodeVerbose} from '../../json-crdt-patch/codec/verbose/encode'; -import {CborEncoder} from '../../json-pack/cbor/CborEncoder'; -import {JsonEncoder} from '../../json-pack/json/JsonEncoder'; -import type {FileOptions} from './File'; - -const writer = new Writer(4096); - -export const fileEncoders: FileOptions = { - jsonEncoder: new JsonEncoder(writer), - cborEncoder: new CborEncoder(writer), - structuralCompactEncoder: new StructuralEncoderCompact(), - structuralVerboseEncoder: new StructuralEncoderVerbose(), - sidecarEncoder: new SidecarEncoder(), - sidecarDecoder: new SidecarDecoder(), - patchCompactEncoder: encodeCompact, - patchVerboseEncoder: encodeVerbose, -}; diff --git a/src/json-crdt/file/types.ts b/src/json-crdt/file/types.ts deleted file mode 100644 index 5d2f6cc8e3..0000000000 --- a/src/json-crdt/file/types.ts +++ /dev/null @@ -1,24 +0,0 @@ -import type {FileModelEncoding} from './constants'; - -export type FileMetadata = [map: {}, modelFormat: FileModelEncoding]; - -export type FileWriteSequence = [ - view: unknown | null, - metadata: FileMetadata, - model: Uint8Array | unknown | null, - history: FileWriteSequenceHistory, -]; - -export type FileWriteSequenceHistory = [model: Uint8Array | unknown | null, patches: Array]; - -export type FileReadSequence = [...FileWriteSequence, ...frontier: Array]; - -export interface FileSerializeParams { - noView?: boolean; - model?: 'sidecar' | 'binary' | 'compact' | 'verbose' | 'none'; - history?: 'binary' | 'compact' | 'verbose' | 'none'; -} - -export interface FileEncodingParams extends FileSerializeParams { - format: 'ndjson' | 'seq.cbor'; -} diff --git a/src/json-crdt/file/util.ts b/src/json-crdt/file/util.ts deleted file mode 100644 index c268f98a97..0000000000 --- a/src/json-crdt/file/util.ts +++ /dev/null @@ -1,50 +0,0 @@ -import {JsonDecoder} from '../../json-pack/json/JsonDecoder'; -import {CborDecoder} from '../../json-pack/cbor/CborDecoder'; -import {Model} from '../model'; -import {Decoder as StructuralDecoderCompact} from '../codec/structural/compact/Decoder'; -import {Decoder as StructuralDecoderVerbose} from '../codec/structural/verbose/Decoder'; -import {decode as 
decodeCompact} from '../../json-crdt-patch/codec/compact/decode'; -import {decode as decodeVerbose} from '../../json-crdt-patch/codec/verbose/decode'; -import {Patch} from '../../json-crdt-patch'; -import type {JsonCrdtCompactDocument} from '../codec/structural/compact'; -import type {JsonCrdtVerboseDocument} from '../codec/structural/verbose'; -import type {CompactCodecPatch} from '../../json-crdt-patch/codec/compact'; -import type {JsonCodecPatch} from '../../json-crdt-patch/codec/verbose'; - -export const decodeNdjsonComponents = (blob: Uint8Array): unknown[] => { - const decoder = new JsonDecoder(); - const reader = decoder.reader; - reader.reset(blob); - const components: unknown[] = []; - while (reader.x < blob.length) { - components.push(decoder.readAny()); - const nl = reader.u8(); - if (nl !== '\n'.charCodeAt(0)) throw new Error('NDJSON_UNEXPECTED_NEWLINE'); - } - return components; -}; - -export const decodeSeqCborComponents = (blob: Uint8Array): unknown[] => { - const decoder = new CborDecoder(); - const reader = decoder.reader; - reader.reset(blob); - const components: unknown[] = []; - while (reader.x < blob.length) components.push(decoder.val()); - return components; -}; - -export const decodeModel = (serialized: unknown): Model => { - if (!serialized) throw new Error('NO_MODEL'); - if (serialized instanceof Uint8Array) return Model.fromBinary(serialized); - if (Array.isArray(serialized)) return new StructuralDecoderCompact().decode(serialized); - if (typeof serialized === 'object') return new StructuralDecoderVerbose().decode(serialized); - throw new Error('UNKNOWN_MODEL'); -}; - -export const decodePatch = (serialized: unknown): Patch => { - if (!serialized) throw new Error('NO_MODEL'); - if (serialized instanceof Uint8Array) return Patch.fromBinary(serialized); - if (Array.isArray(serialized)) return decodeCompact(serialized); - if (typeof serialized === 'object') return decodeVerbose(serialized); - throw new Error('UNKNOWN_MODEL'); -}; diff --git a/src/json-crdt/history/LocalHistoryCrud.ts b/src/json-crdt/history/LocalHistoryCrud.ts index 2a8d0fa79b..b19ba39140 100644 --- a/src/json-crdt/history/LocalHistoryCrud.ts +++ b/src/json-crdt/history/LocalHistoryCrud.ts @@ -1,9 +1,11 @@ -import {File, FileOptions} from '../file/File'; import {CborEncoder} from '../../json-pack/cbor/CborEncoder'; +import {CborDecoder} from '../../json-pack/cbor/CborDecoder'; +import {LogEncoder} from '../log/codec/LogEncoder'; +import {LogDecoder} from '../log/codec/LogDecoder'; import type {CrudApi} from 'memfs/lib/crud/types'; import type {Locks} from 'thingies/es2020/Locks'; import type {Patch} from '../../json-crdt-patch'; -import type {PatchLog} from './PatchLog'; +import type {Log} from '../log/Log'; import type {LocalHistory} from './types'; export const genId = (octets: number = 8): string => { @@ -16,22 +18,20 @@ export const genId = (octets: number = 8): string => { const STATE_FILE_NAME = 'state.seq.cbor'; export class LocalHistoryCrud implements LocalHistory { - protected fileOpts: FileOptions = { + protected encoder: LogEncoder = new LogEncoder({ cborEncoder: new CborEncoder(), - }; + }); + protected decoder: LogDecoder = new LogDecoder({ + cborDecoder: new CborDecoder(), + }); constructor( protected readonly crud: CrudApi, protected readonly locks: Locks, ) {} - public async create(collection: string[], log: PatchLog): Promise<{id: string}> { - // TODO: Remove `log.end`, just `log` should be enough. 
- const file = new File(log.end, log, this.fileOpts); - const blob = file.toBinary({ - format: 'seq.cbor', - model: 'binary', - }); + public async create(collection: string[], log: Log): Promise<{id: string}> { + const blob = this.encode(log); const id = genId(); await this.lock(collection, id, async () => { await this.crud.put([...collection, id], STATE_FILE_NAME, blob, {throwIf: 'exists'}); @@ -39,29 +39,36 @@ export class LocalHistoryCrud implements LocalHistory { return {id}; } - public async read(collection: string[], id: string): Promise<{log: PatchLog; cursor: string}> { + protected encode(log: Log): Uint8Array { + // TODO: Add browser-native compression. Wrap the blob into `[]` TLV tuple. + return this.encoder.encode(log, { + format: 'seq.cbor', + model: 'binary', + history: 'binary', + noView: true, + }); + } + + public async read(collection: string[], id: string): Promise<{log: Log; cursor: string}> { const blob = await this.crud.get([...collection, id], STATE_FILE_NAME); - const {log} = File.fromSeqCbor(blob); + const {frontier} = this.decoder.decode(blob, {format: 'seq.cbor', frontier: true}); return { - log, + log: frontier!, cursor: '', }; } - public readHistory(collection: string[], id: string, cursor: string): Promise<{log: PatchLog; cursor: string}> { + public readHistory(collection: string[], id: string, cursor: string): Promise<{log: Log; cursor: string}> { throw new Error('Method not implemented.'); } public async update(collection: string[], id: string, patches: Patch[]): Promise { await this.lock(collection, id, async () => { const blob = await this.crud.get([...collection, id], STATE_FILE_NAME); - const {log} = File.fromSeqCbor(blob); + const decoded = this.decoder.decode(blob, {format: 'seq.cbor', history: true}); + const log = decoded.history!; log.end.applyBatch(patches); - const file = new File(log.end, log, this.fileOpts); - const blob2 = file.toBinary({ - format: 'seq.cbor', - model: 'binary', - }); + const blob2 = this.encode(log); await this.crud.put([...collection, id], STATE_FILE_NAME, blob2, {throwIf: 'missing'}); }); } diff --git a/src/json-crdt/history/SessionHistory.ts b/src/json-crdt/history/SessionHistory.ts index 526d8b2f99..16622973ba 100644 --- a/src/json-crdt/history/SessionHistory.ts +++ b/src/json-crdt/history/SessionHistory.ts @@ -3,7 +3,7 @@ import {FanOutUnsubscribe} from 'thingies/es2020/fanout'; import {InsValOp, Patch} from '../../json-crdt-patch'; import {ValNode} from '../nodes'; import {toSchema} from '../schema/toSchema'; -import {PatchLog} from './PatchLog'; +import {Log} from '../log/Log'; import {RedoItem, UndoItem, UndoRedoStack} from './UndoRedoStack'; class Undo implements UndoItem { @@ -15,7 +15,7 @@ class Redo implements RedoItem { } export class SessionHistory { - constructor(public readonly log: PatchLog) {} + constructor(public readonly log: Log) {} private readonly __onPatchRace = createRace(); diff --git a/src/json-crdt/history/__tests__/LocalHistoryCrud.spec.ts b/src/json-crdt/history/__tests__/LocalHistoryCrud.spec.ts index aed5d686ef..cf6443657d 100644 --- a/src/json-crdt/history/__tests__/LocalHistoryCrud.spec.ts +++ b/src/json-crdt/history/__tests__/LocalHistoryCrud.spec.ts @@ -2,7 +2,7 @@ import {memfs} from 'memfs'; import {NodeCrud} from 'memfs/lib/node-to-crud'; import {Locks} from 'thingies/es2020/Locks'; import {LocalHistoryCrud} from '../LocalHistoryCrud'; -import {PatchLog} from '../PatchLog'; +import {Log} from '../../log/Log'; import {Model} from '../../model'; const setup = async () => { @@ -25,7 +25,7 @@ 
test('can create a new document', async () => { model.api.root({ foo: 'spam', }); - const log = PatchLog.fromNewModel(model); + const log = Log.fromNewModel(model); const {id} = await local.create(['test'], log); expect(typeof id).toBe('string'); expect(id.length > 6).toBe(true); @@ -49,7 +49,7 @@ test('can delete a document', async () => { model.api.root({ foo: 'spam', }); - const log = PatchLog.fromNewModel(model); + const log = Log.fromNewModel(model); const {id} = await local.create(['test'], log); await local.read(['test'], id); await local.delete(['test'], id); diff --git a/src/json-crdt/history/types.ts b/src/json-crdt/history/types.ts index 91feef7eec..59fcce0054 100644 --- a/src/json-crdt/history/types.ts +++ b/src/json-crdt/history/types.ts @@ -1,5 +1,5 @@ import type {Patch} from '../../json-crdt-patch'; -import type {PatchLog} from '../history/PatchLog'; +import type {Log} from '../log/Log'; import type {Model} from '../model'; /** @@ -37,16 +37,16 @@ export interface RemoteHistory { } export interface LocalHistory { - create(collection: string[], log: PatchLog): Promise<{id: string}>; - read(collection: string[], id: string): Promise<{log: PatchLog; cursor: string}>; - readHistory(collection: string[], id: string, cursor: string): Promise<{log: PatchLog; cursor: string}>; + create(collection: string[], log: Log): Promise<{id: string}>; + read(collection: string[], id: string): Promise<{log: Log; cursor: string}>; + readHistory(collection: string[], id: string, cursor: string): Promise<{log: Log; cursor: string}>; update(collection: string[], id: string, patches: Patch[]): Promise; delete(collection: string[], id: string): Promise; } export interface EditingSessionHistory { load(id: string): Promise; - loadHistory(id: string): Promise; + loadHistory(id: string): Promise; undo(id: string): Promise; redo(id: string): Promise; } diff --git a/src/json-crdt/history/PatchLog.ts b/src/json-crdt/log/Log.ts similarity index 96% rename from src/json-crdt/history/PatchLog.ts rename to src/json-crdt/log/Log.ts index 50ee57af00..e39c44f4b1 100644 --- a/src/json-crdt/history/PatchLog.ts +++ b/src/json-crdt/log/Log.ts @@ -6,7 +6,7 @@ import {Model} from '../model'; import {first, next} from '../../util/trees/util'; import type {Printable} from '../../util/print/types'; -export class PatchLog implements Printable { +export class Log implements Printable { /** * Creates a `PatchLog` instance from a newly JSON CRDT model. Checks if * the model API buffer has any initial operations applied, if yes, it @@ -16,9 +16,9 @@ export class PatchLog implements Printable { * `Model.withLogicalClock()` or `Model.withServerClock()`. * @returns A new `PatchLog` instance. 
*/ - public static fromNewModel(model: Model): PatchLog { + public static fromNewModel(model: Model): Log { const clock = model.clock.clone(); - const log = new PatchLog(() => new Model(clock)); + const log = new Log(() => new Model(clock)); const api = model.api; if (api.builder.patch.ops.length) log.end.applyPatch(api.flush()); return log; diff --git a/src/json-crdt/history/__tests__/PatchLog.spec.ts b/src/json-crdt/log/__tests__/Log.spec.ts similarity index 96% rename from src/json-crdt/history/__tests__/PatchLog.spec.ts rename to src/json-crdt/log/__tests__/Log.spec.ts index 4eb694dce4..a59578cc6d 100644 --- a/src/json-crdt/history/__tests__/PatchLog.spec.ts +++ b/src/json-crdt/log/__tests__/Log.spec.ts @@ -1,10 +1,10 @@ import {Model} from '../../model'; -import {PatchLog} from '../PatchLog'; +import {Log} from '../Log'; const setup = (view: unknown) => { const model = Model.withServerClock(); model.api.root(view); - const log = PatchLog.fromNewModel(model); + const log = Log.fromNewModel(model); return {log}; }; diff --git a/src/json-crdt/log/codec/LogDecoder.ts b/src/json-crdt/log/codec/LogDecoder.ts new file mode 100644 index 0000000000..d47d651947 --- /dev/null +++ b/src/json-crdt/log/codec/LogDecoder.ts @@ -0,0 +1,201 @@ +import {Model} from '../../model'; +import {Log} from '../Log'; +import {Patch} from '../../../json-crdt-patch'; +import {FileModelEncoding} from './constants'; +import {SESSION} from '../../../json-crdt-patch/constants'; +import type * as types from './types'; +import type {CborDecoder} from '../../../json-pack/cbor/CborDecoder'; +import type {JsonDecoder} from '../../../json-pack/json/JsonDecoder'; +import type {Decoder as SidecarDecoder} from '../../codec/sidecar/binary/Decoder'; +import type {Decoder as StructuralDecoderCompact} from '../../codec/structural/compact/Decoder'; +import type {Decoder as StructuralDecoderVerbose} from '../../codec/structural/verbose/Decoder'; +import type {decode as decodeCompact} from '../../../json-crdt-patch/codec/compact/decode'; +import type {decode as decodeVerbose} from '../../../json-crdt-patch/codec/verbose/decode'; +import type {CompactCodecPatch} from '../../../json-crdt-patch/codec/compact'; +import type {JsonCodecPatch} from '../../../json-crdt-patch/codec/verbose'; +import type {JsonCrdtCompactDocument} from '../../codec/structural/compact'; +import type {JsonCrdtVerboseDocument} from '../../codec/structural/verbose'; + +export interface LogDecoderOpts { + jsonDecoder?: JsonDecoder; + cborDecoder?: CborDecoder; + structuralCompactDecoder?: StructuralDecoderCompact; + structuralVerboseDecoder?: StructuralDecoderVerbose; + sidecarDecoder?: SidecarDecoder; + patchCompactDecoder?: typeof decodeCompact; + patchVerboseDecoder?: typeof decodeVerbose; +} + +export class LogDecoder { + constructor(protected readonly opts: LogDecoderOpts = {}) {} + + public decode(blob: Uint8Array, params: DecodeParams = {}): DecodeResult { + switch (params.format) { + case 'ndjson': { + const components = this.decodeNdjsonComponents(blob); + const result = this.deserialize(components, params); + return result; + } + default: { + // 'seq.cbor' + const components = this.decodeSeqCborComponents(blob); + const result = this.deserialize(components, params); + return result; + } + } + } + + public decodeNdjsonComponents(blob: Uint8Array): types.LogComponentsWithFrontier { + const decoder = this.opts.jsonDecoder; + if (!decoder) throw new Error('NO_JSON_DECODER'); + const reader = decoder.reader; + reader.reset(blob); + const components: unknown[] 
= []; + while (reader.x < blob.length) { + components.push(decoder.readAny()); + const nl = reader.u8(); + if (nl !== '\n'.charCodeAt(0)) throw new Error('NDJSON_UNEXPECTED_NEWLINE'); + } + return components as types.LogComponentsWithFrontier; + } + + public decodeSeqCborComponents(blob: Uint8Array): types.LogComponentsWithFrontier { + const decoder = this.opts.cborDecoder; + if (!decoder) throw new Error('NO_CBOR_DECODER'); + const reader = decoder.reader; + reader.reset(blob); + const components: unknown[] = []; + while (reader.x < blob.length) components.push(decoder.val()); + return components as types.LogComponentsWithFrontier; + } + + public deserialize(components: types.LogComponentsWithFrontier, params: DeserializeParams = {}): DecodeResult { + const [view, metadata, model, , ...frontier] = components; + const result: DecodeResult = {}; + if (params.view) result.view = view; + if (params.history) result.history = this.deserializeHistory(components); + if (params.frontier) { + if (!model) result.history = this.deserializeHistory(components); + if (result.history) { + result.frontier = result.history; + } else if (model) { + const modelFormat = metadata[1]; + const start = (): Model => { + const isSidecar = modelFormat === FileModelEncoding.SidecarBinary; + if (isSidecar) { + const decoder = this.opts.sidecarDecoder; + if (!decoder) throw new Error('NO_SIDECAR_DECODER'); + if (!(model instanceof Uint8Array)) throw new Error('NOT_BLOB'); + return decoder.decode(view, model); + } + return this.deserializeModel(model); + }; + const log = new Log(start); + const end = log.end; + if (frontier && frontier.length) for (const patch of frontier) end.applyPatch(this.deserializePatch(patch)); + result.frontier = log; + } else { + throw new Error('NO_MODEL'); + } + } + return result; + } + + public deserializeHistory(components: types.LogComponentsWithFrontier): Log { + const [, , , history, ...frontier] = components; + const [startSerialized] = history; + const start = (): Model => { + if (!history || !startSerialized) { + // TODO: Handle case where new model should be started with server clock: `return Model.withServerClock()`. 
+ return Model.withLogicalClock(SESSION.GLOBAL); + } + return this.deserializeModel(startSerialized); + }; + const log = new Log(start); + const end = log.end; + if (history) { + const [, patches] = history; + if (patches) for (const patch of patches) end.applyPatch(this.deserializePatch(patch)); + } + if (frontier.length) for (const patch of frontier) end.applyPatch(this.deserializePatch(patch)); + return log; + } + + public deserializeModel(serialized: unknown): Model { + if (!serialized) throw new Error('NO_MODEL'); + if (serialized instanceof Uint8Array) return Model.fromBinary(serialized); + if (Array.isArray(serialized)) { + const decoder = this.opts.structuralCompactDecoder; + if (!decoder) throw new Error('NO_STRUCTURAL_COMPACT_DECODER'); + return decoder.decode(serialized); + } + if (typeof serialized === 'object') { + const decoder = this.opts.structuralVerboseDecoder; + if (!decoder) throw new Error('NO_STRUCTURAL_VERBOSE_DECODER'); + return decoder.decode(serialized); + } + throw new Error('UNKNOWN_MODEL'); + } + + public deserializePatch(serialized: unknown): Patch { + if (!serialized) throw new Error('NO_PATCH'); + if (serialized instanceof Uint8Array) return Patch.fromBinary(serialized); + if (Array.isArray(serialized)) { + const decodeCompact = this.opts.patchCompactDecoder; + if (!decodeCompact) throw new Error('NO_PATCH_COMPACT_DECODER'); + return decodeCompact(serialized); + } + if (typeof serialized === 'object') { + const decodeVerbose = this.opts.patchVerboseDecoder; + if (!decodeVerbose) throw new Error('NO_PATCH_VERBOSE_DECODER'); + return decodeVerbose(serialized); + } + throw new Error('UNKNOWN_PATCH'); + } +} + +export interface DeserializeParams { + /** + * Whether to return the decoded `view` of the end state of the log as a POJO + * in the {@link DecodeResult}. + */ + view?: boolean; + + /** + * Whether to return the decoded frontier of the log in the {@link DecodeResult}. + */ + frontier?: boolean; + + /** + * Whether to return the full history of the log in the {@link DecodeResult}. + */ + history?: boolean; +} + +export interface DecodeParams extends DeserializeParams { + /** + * The format of the input binary blob, either NDJSON or CBOR-Sequence + * format. + */ + format?: 'ndjson' | 'seq.cbor'; +} + +/** + * Decoding result of a log binary blob. + */ +export interface DecodeResult { + /** + * Plain POJO view of the end state of the log. + */ + view?: unknown; + + /** + * The final state of the log, i.e. the end state of the document. + */ + frontier?: Log; + + /** + * The full history of the log, from the start to the end state. 
+ */ + history?: Log; +} diff --git a/src/json-crdt/log/codec/LogEncoder.ts b/src/json-crdt/log/codec/LogEncoder.ts new file mode 100644 index 0000000000..03330daef9 --- /dev/null +++ b/src/json-crdt/log/codec/LogEncoder.ts @@ -0,0 +1,173 @@ +import {Log} from '../Log'; +import {FileModelEncoding} from './constants'; +import type * as types from './types'; +import type {CborEncoder} from '../../../json-pack/cbor/CborEncoder'; +import type {JsonEncoder} from '../../../json-pack/json/JsonEncoder'; +import type {Encoder as StructuralEncoderCompact} from '../../codec/structural/compact/Encoder'; +import type {Encoder as StructuralEncoderVerbose} from '../../codec/structural/verbose/Encoder'; +import type {Encoder as SidecarEncoder} from '../../codec/sidecar/binary/Encoder'; +import type {encode as encodeCompact} from '../../../json-crdt-patch/codec/compact/encode'; +import type {encode as encodeVerbose} from '../../../json-crdt-patch/codec/verbose/encode'; + +export interface LogEncoderOpts { + jsonEncoder?: JsonEncoder; + cborEncoder?: CborEncoder; + structuralCompactEncoder?: StructuralEncoderCompact; + structuralVerboseEncoder?: StructuralEncoderVerbose; + sidecarEncoder?: SidecarEncoder; + patchCompactEncoder?: typeof encodeCompact; + patchVerboseEncoder?: typeof encodeVerbose; +} + +export class LogEncoder { + constructor(protected readonly options: LogEncoderOpts = {}) {} + + public serialize(log: Log, params: SerializeParams = {}): types.LogComponents { + if (params.noView && params.model === 'sidecar') throw new Error('SIDECAR_MODEL_WITHOUT_VIEW'); + const metadata: types.LogMetadata = [{}, FileModelEncoding.Auto]; + let model: Uint8Array | unknown | null = null; + const modelFormat = params.model ?? 'sidecar'; + switch (modelFormat) { + case 'sidecar': { + metadata[1] = FileModelEncoding.SidecarBinary; + const encoder = this.options.sidecarEncoder; + if (!encoder) throw new Error('NO_SIDECAR_ENCODER'); + const [, uint8] = encoder.encode(log.end); + model = uint8; + break; + } + case 'binary': { + model = log.end.toBinary(); + break; + } + case 'compact': { + const encoder = this.options.structuralCompactEncoder; + if (!encoder) throw new Error('NO_COMPACT_ENCODER'); + model = encoder.encode(log.end); + break; + } + case 'verbose': { + const encoder = this.options.structuralVerboseEncoder; + if (!encoder) throw new Error('NO_VERBOSE_ENCODER'); + model = encoder.encode(log.end); + break; + } + case 'none': { + model = null; + break; + } + default: + throw new Error(`Invalid model format: ${modelFormat}`); + } + const history: types.LogHistory = [null, []]; + const patchFormat = params.history ?? 
'binary'; + switch (patchFormat) { + case 'binary': { + history[0] = log.start().toBinary(); + log.patches.forEach(({v}) => { + history[1].push(v.toBinary()); + }); + break; + } + case 'compact': { + const encoder = this.options.structuralCompactEncoder; + if (!encoder) throw new Error('NO_COMPACT_ENCODER'); + history[0] = encoder.encode(log.start()); + const encodeCompact = this.options.patchCompactEncoder; + if (!encodeCompact) throw new Error('NO_COMPACT_PATCH_ENCODER'); + const list = history[1]; + log.patches.forEach(({v}) => { + list.push(encodeCompact(v)); + }); + break; + } + case 'verbose': { + const encoder = this.options.structuralVerboseEncoder; + if (!encoder) throw new Error('NO_VERBOSE_ENCODER'); + history[0] = encoder.encode(log.start()); + const encodeVerbose = this.options.patchVerboseEncoder; + if (!encodeVerbose) throw new Error('NO_VERBOSE_PATCH_ENCODER'); + const list = history[1]; + log.patches.forEach(({v}) => { + list.push(encodeVerbose(v)); + }); + break; + } + case 'none': { + break; + } + default: + throw new Error(`Invalid history format: ${patchFormat}`); + } + return [params.noView ? null : log.end.view(), metadata, model, history]; + } + + public encode(log: Log, params: EncodingParams): Uint8Array { + const sequence = this.serialize(log, params); + switch (params.format) { + case 'ndjson': { + const json = this.options.jsonEncoder; + if (!json) throw new Error('NO_JSON_ENCODER'); + for (const component of sequence) { + json.writeAny(component); + json.writer.u8('\n'.charCodeAt(0)); + } + return json.writer.flush(); + } + case 'seq.cbor': { + const cbor = this.options.cborEncoder; + if (!cbor) throw new Error('NO_CBOR_ENCODER'); + for (const component of sequence) cbor.writeAny(component); + return cbor.writer.flush(); + } + } + } +} + +/** + * High-level serialization parameters for encoding a {@link Log} instance into + * a sequence of components. + */ +export interface SerializeParams { + /** + * If set to `true`, the view of the model will not be encoded as the very + * first component. Encoding the view of the latest known state as the first + * component of NDJSON or CBOR-Sequence is useful for allowing the decoders, + * which do not know the details of JSON CRDTs, to just read the view and + * ignore the rest of the components. + */ + noView?: boolean; + + /** + * Specifies the model encoding format for the latest state `.end` of + * the {@link Log}. The default is `'sidecar'`. The `'sidecar'` model format + * is a binary format which encodes only the metadata; it is very compact + * when the view is encoded separately, as the metadata can later be combined + * with the view to decode the model back. + */ + model?: 'sidecar' | 'binary' | 'compact' | 'verbose' | 'none'; + + /** + * Specifies the encoding format of the patch log `log.patches` and the + * start model `log.start()`, i.e. the "history" part of the document. The + * default is `'binary'`. + */ + history?: 'binary' | 'compact' | 'verbose' | 'none'; +} + +/** + * High-level encoding parameters for encoding a {@link Log} instance into a + * binary blob. + */ +export interface EncodingParams extends SerializeParams { + /** + * Specifies the encoding format of the whole log document. The document is + * encoded as a sequence of JSON/CBOR-like components. Those can be encoded + * as JSON (for human-readable text) or CBOR (for compact binary data). + * + * - `ndjson` - encodes the log document as a sequence of new-line delimited + * JSON values. 
+ * - `seq.cbor` - encodes the log document as CBOR-sequence binary data. + */ + format: 'ndjson' | 'seq.cbor'; +} diff --git a/src/json-crdt/log/codec/__tests__/LogDecoder.spec.ts b/src/json-crdt/log/codec/__tests__/LogDecoder.spec.ts new file mode 100644 index 0000000000..623f70afc7 --- /dev/null +++ b/src/json-crdt/log/codec/__tests__/LogDecoder.spec.ts @@ -0,0 +1,95 @@ +import {Log} from '../../Log'; +import {Model} from '../../../model'; +import {logEncoderOpts} from '../logEncoderOpts'; +import {EncodingParams, LogEncoder} from '../LogEncoder'; +import {LogDecoder} from '../LogDecoder'; +import {logDecoderOpts} from '../logDecoderOpts'; + +const setup = (view: unknown) => { + const model = Model.withServerClock(); + model.api.root(view); + const log = Log.fromNewModel(model); + const encoder = new LogEncoder(logEncoderOpts); + const decoder = new LogDecoder(logDecoderOpts); + return {model, log, encoder, decoder}; +}; + +describe('can decode from blob', () => { + test('.ndjson', () => { + const {log, encoder, decoder} = setup({foo: 'bar'}); + const blob = encoder.encode(log, {format: 'ndjson', model: 'compact', history: 'compact'}); + const decoded = decoder.decode(blob, {format: 'ndjson', frontier: true, history: true}); + const {frontier, history} = decoded; + expect(frontier!.end.view()).toEqual({foo: 'bar'}); + expect(frontier!.end !== log.end).toBe(true); + expect(history!.start().view()).toEqual(undefined); + expect(history!.end.view()).toEqual({foo: 'bar'}); + }); + + test('.seq.cbor', () => { + const {log, encoder, decoder} = setup({foo: 'bar'}); + const blob = encoder.encode(log, {format: 'seq.cbor', model: 'binary', history: 'binary'}); + const decoded = decoder.decode(blob, {format: 'seq.cbor', frontier: true, history: true}); + const {frontier, history} = decoded; + expect(frontier!.end.view()).toEqual({foo: 'bar'}); + expect(frontier!.end !== log.end).toBe(true); + expect(history!.start().view()).toEqual(undefined); + expect(history!.end.view()).toEqual({foo: 'bar'}); + }); +}); + +const assertEncoding = (log: Log, params: EncodingParams) => { + const encoder = new LogEncoder(logEncoderOpts); + const decoder = new LogDecoder(logDecoderOpts); + const encoded = encoder.encode(log, params); + const decoded = decoder.decode(encoded, { + format: params.format, + frontier: true, + history: true, + }); + expect(decoded.frontier!.end.view()).toEqual(log.end.view()); + expect(decoded.frontier!.end !== log.end).toBe(true); + expect(decoded.history!.start().view()).toEqual(undefined); + expect(decoded.history!.replayToEnd().view()).toEqual(log.end.view()); + expect(decoded.history!.patches.size()).toBe(log.patches.size()); +}; + +describe('can encode/decode all format combinations', () => { + const formats: EncodingParams['format'][] = ['ndjson', 'seq.cbor']; + const modelFormats: EncodingParams['model'][] = ['sidecar', 'binary', 'compact', 'verbose']; + const historyFormats: EncodingParams['history'][] = ['binary', 'compact', 'verbose']; + const noViews = [true, false]; + for (const format of formats) { + for (const model of modelFormats) { + for (const history of historyFormats) { + for (const noView of noViews) { + if (noView && model === 'sidecar') continue; + const params = {format, model, history, noView}; + test(JSON.stringify(params), () => { + const {log} = setup({foo: 'bar'}); + assertEncoding(log, params); + }); + } + } + } + } +}); + +describe('.deserialize()', () => { + test('applies frontier', () => { + const {log, model, encoder, decoder} = setup({foo: 'bar'}); + 
const clone = model.clone(); + clone.api.obj([]).set({ + xyz: 123, + }); + const serialized = encoder.serialize(log, { + history: 'binary', + }); + serialized.push(clone.api.flush().toBinary()); + expect(log.end.view()).toEqual({foo: 'bar'}); + const deserialized1 = decoder.deserialize(serialized, {frontier: true}); + const deserialized2 = decoder.deserialize(serialized, {history: true}); + expect(deserialized1.frontier!.end.view()).toEqual({foo: 'bar', xyz: 123}); + expect(deserialized2.history!.end.view()).toEqual({foo: 'bar', xyz: 123}); + }); +}); diff --git a/src/json-crdt/log/codec/__tests__/LogEncoder.spec.ts b/src/json-crdt/log/codec/__tests__/LogEncoder.spec.ts new file mode 100644 index 0000000000..dd090288c9 --- /dev/null +++ b/src/json-crdt/log/codec/__tests__/LogEncoder.spec.ts @@ -0,0 +1,35 @@ +// import {s} from '../../../json-crdt-patch'; +import {Model} from '../../../model'; +import {JsonDecoder} from '../../../../json-pack/json/JsonDecoder'; +import {logEncoderOpts} from '../logEncoderOpts'; +import {LogEncoder} from '../LogEncoder'; +import {Log} from '../../Log'; +import {CborDecoder} from '../../../../json-pack/cbor/CborDecoder'; + +const setup = (view: unknown) => { + const model = Model.withServerClock(); + model.api.root(view); + const log = Log.fromNewModel(model); + const encoder = new LogEncoder(logEncoderOpts); + return {model, log, encoder}; +}; + +describe('.toBinary()', () => { + describe('can read first value as view', () => { + test('.ndjson', () => { + const {encoder, log} = setup({foo: 'bar'}); + const blob = encoder.encode(log, {format: 'ndjson', model: 'compact', history: 'compact'}); + const decoder = new JsonDecoder(); + const view = decoder.read(blob); + expect(view).toEqual({foo: 'bar'}); + }); + + test('.seq.cbor', () => { + const {encoder, log} = setup({foo: 'bar'}); + const blob = encoder.encode(log, {format: 'seq.cbor'}); + const decoder = new CborDecoder(); + const view = decoder.read(blob); + expect(view).toEqual({foo: 'bar'}); + }); + }); +}); diff --git a/src/json-crdt/file/constants.ts b/src/json-crdt/log/codec/constants.ts similarity index 100% rename from src/json-crdt/file/constants.ts rename to src/json-crdt/log/codec/constants.ts diff --git a/src/json-crdt/log/codec/logDecoderOpts.ts b/src/json-crdt/log/codec/logDecoderOpts.ts new file mode 100644 index 0000000000..9fe19a5089 --- /dev/null +++ b/src/json-crdt/log/codec/logDecoderOpts.ts @@ -0,0 +1,22 @@ +import {Decoder as SidecarDecoder} from '../../codec/sidecar/binary/Decoder'; +import {Decoder as StructuralDecoderCompact} from '../../codec/structural/compact/Decoder'; +import {Decoder as StructuralDecoderVerbose} from '../../codec/structural/verbose/Decoder'; +import {JsonDecoder} from '../../../json-pack/json/JsonDecoder'; +import {CborDecoder} from '../../../json-pack/cbor/CborDecoder'; +import {decode as decodeCompact} from '../../../json-crdt-patch/codec/compact/decode'; +import {decode as decodeVerbose} from '../../../json-crdt-patch/codec/verbose/decode'; +import type {LogDecoderOpts} from './LogDecoder'; + +/** + * Default {@link LogDecoderOpts} for {@link LogDecoder}. Instantiates all + * possible decoders. 
+ */ +export const logDecoderOpts: LogDecoderOpts = { + jsonDecoder: new JsonDecoder(), + cborDecoder: new CborDecoder(), + structuralCompactDecoder: new StructuralDecoderCompact(), + structuralVerboseDecoder: new StructuralDecoderVerbose(), + sidecarDecoder: new SidecarDecoder(), + patchCompactDecoder: decodeCompact, + patchVerboseDecoder: decodeVerbose, +}; diff --git a/src/json-crdt/log/codec/logEncoderOpts.ts b/src/json-crdt/log/codec/logEncoderOpts.ts new file mode 100644 index 0000000000..76f50d66d5 --- /dev/null +++ b/src/json-crdt/log/codec/logEncoderOpts.ts @@ -0,0 +1,25 @@ +import {Writer} from '../../../util/buffers/Writer'; +import {Encoder as SidecarEncoder} from '../../codec/sidecar/binary/Encoder'; +import {Encoder as StructuralEncoderCompact} from '../../codec/structural/compact/Encoder'; +import {Encoder as StructuralEncoderVerbose} from '../../codec/structural/verbose/Encoder'; +import {encode as encodeCompact} from '../../../json-crdt-patch/codec/compact/encode'; +import {encode as encodeVerbose} from '../../../json-crdt-patch/codec/verbose/encode'; +import {CborEncoder} from '../../../json-pack/cbor/CborEncoder'; +import {JsonEncoder} from '../../../json-pack/json/JsonEncoder'; +import type {LogEncoderOpts} from './LogEncoder'; + +const writer = new Writer(4096); + +/** + * Default {@link LogEncoderOpts} for {@link LogEncoder}. Instantiates all + * possible encoders. + */ +export const logEncoderOpts: LogEncoderOpts = { + jsonEncoder: new JsonEncoder(writer), + cborEncoder: new CborEncoder(writer), + structuralCompactEncoder: new StructuralEncoderCompact(), + structuralVerboseEncoder: new StructuralEncoderVerbose(), + sidecarEncoder: new SidecarEncoder(), + patchCompactEncoder: encodeCompact, + patchVerboseEncoder: encodeVerbose, +}; diff --git a/src/json-crdt/log/codec/types.ts b/src/json-crdt/log/codec/types.ts new file mode 100644 index 0000000000..2b278ad8ca --- /dev/null +++ b/src/json-crdt/log/codec/types.ts @@ -0,0 +1,14 @@ +import type {FileModelEncoding} from './constants'; + +export type LogMetadata = [map: {}, modelFormat: FileModelEncoding]; + +export type LogComponents = [ + view: unknown | null, + metadata: LogMetadata, + model: Uint8Array | unknown | null, + history: LogHistory, +]; + +export type LogHistory = [model: Uint8Array | unknown | null, patches: Array]; + +export type LogComponentsWithFrontier = [...LogComponents, ...frontier: Array];
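
Usage note (not part of the diff itself): the following is a minimal sketch of the new `LogEncoder`/`LogDecoder` API, mirroring the spec files above. The import paths are an assumption based on the `json-joy` source layout shown in this diff, and the default `logEncoderOpts`/`logDecoderOpts` option bundles are used so that every codec is available.

import {Model} from './src/json-crdt/model';
import {Log} from './src/json-crdt/log/Log';
import {LogEncoder} from './src/json-crdt/log/codec/LogEncoder';
import {LogDecoder} from './src/json-crdt/log/codec/LogDecoder';
import {logEncoderOpts} from './src/json-crdt/log/codec/logEncoderOpts';
import {logDecoderOpts} from './src/json-crdt/log/codec/logDecoderOpts';

// Create a model and wrap it in a Log, which records every subsequent patch.
const model = Model.withServerClock();
model.api.root({foo: 'bar'});
const log = Log.fromNewModel(model);

// Encode the log as a CBOR sequence of [view, metadata, model, history] components.
const encoder = new LogEncoder(logEncoderOpts);
const blob = encoder.encode(log, {format: 'seq.cbor', model: 'binary', history: 'binary'});

// Decode it back, requesting both the frontier (end state) and the full history.
const decoder = new LogDecoder(logDecoderOpts);
const {frontier, history} = decoder.decode(blob, {format: 'seq.cbor', frontier: true, history: true});
frontier!.end.view(); // { foo: 'bar' }
history!.patches.size(); // number of patches recorded after the start model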