diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml new file mode 100644 index 00000000..15d078f7 --- /dev/null +++ b/.github/workflows/deploy-docs.yml @@ -0,0 +1,51 @@ +name: Deploy API Docs + +on: + push: + branches: [main] + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: pages + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: pnpm/action-setup@v4 + with: + version: 9 + + - uses: actions/setup-node@v4 + with: + node-version: 20 + cache: pnpm + + - run: pnpm install --frozen-lockfile + + - run: pnpm run build + working-directory: packages/data + + - run: cp -r packages/data/dist packages/data/docs/dist + + - uses: actions/upload-pages-artifact@v3 + with: + path: packages/data/docs + + deploy: + needs: build + runs-on: ubuntu-latest + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - id: deployment + uses: actions/deploy-pages@v4 diff --git a/README.md b/README.md index 339138a3..20367f2e 100644 --- a/README.md +++ b/README.md @@ -3,9 +3,9 @@ Adobe Data Oriented Programming Library ## Documentation -[Main Page](https://git.corp.adobe.com/pages/neuralfiltersplatform/firefly-data/docs/api/) +[API Reference](https://adobe.github.io/data/) -[ECS Performance Test](https://git.corp.adobe.com/pages/neuralfiltersplatform/firefly-data/docs/perftest.html) +[ECS Performance Test](https://adobe.github.io/data/perftest.html) ## Breaking API Changes @@ -76,7 +76,7 @@ An `Observable` is a subscription function that you can pass a callback funct Your callback function *may* be called back synchronously (before the initial call returns) zero or one times and asynchronously later any number of times. 
-For more information see the [Observable API documentation](./docs/api/modules/observe.html) +For more information see the [Observable API documentation](https://adobe.github.io/data/) ### Observable Types @@ -181,116 +181,33 @@ Contains some standard data type schemas in JSON Schema format for convenience. ## Entity Component System (ECS) -This ECS database is a high performance, strongly typed typescript implementation inspired by the Sanders Mertens C++ based [Flecs](https://www.flecs.dev/flecs/md_docs_2Docs.html). +A high-performance, strongly typed ECS database for TypeScript, inspired by [Flecs](https://www.flecs.dev/flecs/md_docs_2Docs.html). All application state is modeled as composable plugins, and all mutations flow through observable, undoable transactions. -This library provides two main interfaces for ECS operations: **Store** and **Database**. They share the same read API but differ significantly in their approach to writing and observability. - -### Store Interface - -The **Store** is the foundational, low-level interface for direct ECS data operations. - -**Key Characteristics:** -- **Direct Access**: Provides immediate, synchronous read/write access to entities, components, and resources -- **No Transaction Control**: Changes are applied directly without transaction boundaries -- **No Observability**: Changes are not automatically observable or trackable -- **High Performance**: Minimal overhead for direct operations using Structure of Arrays (SoA) with linear memory layout of numeric types for optimal cache performance -- **Core ECS Operations**: Includes entity creation, component updates, archetype querying, and resource management - -**Usage**: Ideal for scenarios requiring fast, direct ECS manipulation where you don't need change tracking or transactional safety. +For a complete guide covering plugins, transactions, observability, composition, and transient/ephemeral semantics, see the **[ECS README](./packages/data/src/ecs/README.md)**. 
```typescript -// Create a store with components, resources, and archetypes -const store = createStore( - { - position: Vec3.schema, - health: { type: "number" }, - player: { const: true } - }, - { - gravity: { default: 9.8 as number } - }, - { - Player: ["position", "health", "player"], - Particle: ["position"] - } -); - -// Direct operations -const playerId = store.archetypes.Player.insert({ - position: [0, 0, 0], - health: 100, - player: true -}); -store.update(playerId, { position: [1, 1, 1] }); -store.resources.gravity = 10.0; -``` - -### Database Interface - -The **Database** wraps a Store to provide **transaction-based operations** with **full observability**. - -**Key Characteristics:** -- **Transaction-Based**: All changes must occur within predefined atomic transactions that can be undone. -- **Full Observability**: Every change is observable through the `observe` API -- **Predefined Operations**: Uses predefined transaction functions rather than direct mutations -- **Undo/Redo Support**: Transactions generate undo/redo operations automatically -- **Change Tracking**: Tracks which entities, components, and archetypes changed -- **Event Notifications**: Automatically notifies observers of changes +import { Database } from "@adobe/data/ecs"; -**Usage**: Ideal for applications requiring change history, multiplayer synchronization, undo/redo functionality, or reactive UI updates. - -**Important Note**: Even when using a Database, transaction functions are written as direct modifications to the underlying Store interface. The Database wraps these operations to provide transactional guarantees and observability. 
- -```typescript -// Create a database with predefined transactions -const database = createDatabase(store, { - createPlayer(t, args: { position: Vector3, health: number }) { - // Transaction function receives Store interface for direct operations - return t.archetypes.Player.insert({ - ...args, - player: true - }); +const myPlugin = Database.Plugin.create({ + resources: { + score: { default: 0 as number }, }, - movePlayer(t, args: { entity: Entity, position: Vector3 }) { - // Direct Store operations within transaction context - t.update(args.entity, { position: args.position }); + transactions: { + addPoints: (t, points: number) => { + t.resources.score += points; + }, }, - setGravity(t, gravity: number) { - // Direct resource modification within transaction - t.resources.gravity = gravity; - } }); -// Execute transactions (these provide observability and undo/redo) -const playerId = database.transactions.createPlayer({ - position: [10, 20, 0], - health: 100 -}); -database.transactions.movePlayer({ entity: playerId, position: [15, 25, 5] }); - -// Observe all changes -database.observe.transactions((result) => { - console.log('Transaction applied:', result); - console.log('Changed entities:', result.changedEntities); - console.log('Undo operations:', result.undo); -}); - -// Observe specific entities -database.observe.entity(playerId)((entityData) => { - if (entityData) { - console.log('Player moved to:', entityData.position); - } -}); +const db = Database.create(myPlugin); +db.observe.resources.score((score) => console.log("Score:", score)); +db.transactions.addPoints(10); ``` ### What is an ECS? 
-Sanders Mertens also covers this thoroughly in his ECS FAQ: +Sanders Mertens covers this thoroughly in his ECS FAQ: [https://github.com/SanderMertens/ecs-faq?tab=readme-ov-file#what-is-ecs](https://github.com/SanderMertens/ecs-faq?tab=readme-ov-file#what-is-ecs) -In addition to the Entity, Component and System definitions which are standard, we also use the term Resource. A Resource is just a value which is defined globally on the ECS itself and not attached to any specific Entity. You can think of them as a singleton Component. - -## Performance Test - -[Performance Test](https://git.corp.adobe.com/pages/neuralfiltersplatform/firefly-data/docs/perftest.html) +In addition to the standard Entity, Component, and System definitions, we also use the term **Resource** — a global singleton value defined on the ECS itself, not attached to any specific entity. diff --git a/package.json b/package.json index b32c0644..f544e7ca 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "data-monorepo", - "version": "0.9.47", + "version": "0.9.49", "private": true, "scripts": { "build": "pnpm -r run build", @@ -9,7 +9,7 @@ "dev:data": "pnpm --filter @adobe/data run dev", "link": "pnpm -r --filter @adobe/data* run link", "publish": "sh -c 'for x in \"$@\"; do OTP=\"$x\"; done; export NPM_CONFIG_OTP=\"$OTP\"; pnpm -r --filter @adobe/data --filter @adobe/data-react --filter @adobe/data-lit run publish-public' sh", - "bump": "pnpm version patch --no-git-tag-version && pnpm -r exec -- pnpm version patch --no-git-tag-version", + "bump": "pnpm version patch --no-git-tag-version && V=$(node -p \"require('$PWD/package.json').version\") && pnpm -r exec pnpm version $V --no-git-tag-version --allow-same-version", "release": "pnpm bump && pnpm publish", "bp": "pnpm bump && pnpm run publish" }, diff --git a/packages/data-lit-tictactoe/package.json b/packages/data-lit-tictactoe/package.json index 10d5c10a..5837db8f 100644 --- a/packages/data-lit-tictactoe/package.json +++ 
b/packages/data-lit-tictactoe/package.json @@ -1,6 +1,6 @@ { "name": "data-lit-tictactoe", - "version": "0.9.47", + "version": "0.9.49", "description": "Tic-Tac-Toe sample - Lit web components with @adobe/data-lit and AgenticService", "type": "module", "private": true, diff --git a/packages/data-lit-todo/package.json b/packages/data-lit-todo/package.json index 843beaeb..61f5964f 100644 --- a/packages/data-lit-todo/package.json +++ b/packages/data-lit-todo/package.json @@ -1,6 +1,6 @@ { "name": "data-lit-todo", - "version": "0.9.45", + "version": "0.9.49", "description": "Todo sample app demonstrating @adobe/data with Lit", "type": "module", "private": true, diff --git a/packages/data-lit/package.json b/packages/data-lit/package.json index 07c4ef39..2f930512 100644 --- a/packages/data-lit/package.json +++ b/packages/data-lit/package.json @@ -1,6 +1,6 @@ { "name": "@adobe/data-lit", - "version": "0.9.47", + "version": "0.9.49", "description": "Adobe data Lit bindings - hooks, elements, decorators", "type": "module", "private": false, diff --git a/packages/data-react-hello/package.json b/packages/data-react-hello/package.json index f74583cc..eeb753e9 100644 --- a/packages/data-react-hello/package.json +++ b/packages/data-react-hello/package.json @@ -1,6 +1,6 @@ { "name": "data-react-hello", - "version": "0.9.47", + "version": "0.9.49", "description": "Hello World sample - click counter using @adobe/data-react", "type": "module", "private": true, diff --git a/packages/data-react-pixie/package.json b/packages/data-react-pixie/package.json index dc15672c..965cdf9f 100644 --- a/packages/data-react-pixie/package.json +++ b/packages/data-react-pixie/package.json @@ -1,6 +1,6 @@ { "name": "data-react-pixie", - "version": "0.9.47", + "version": "0.9.49", "description": "PixiJS React sample - ECS sprites (bunny, fox) with @adobe/data-react", "type": "module", "private": true, diff --git a/packages/data-react/package.json b/packages/data-react/package.json index 
0e22c0d5..f0c0599e 100644 --- a/packages/data-react/package.json +++ b/packages/data-react/package.json @@ -1,6 +1,6 @@ { "name": "@adobe/data-react", - "version": "0.9.47", + "version": "0.9.49", "description": "Adobe data React bindings — hooks and context for ECS database", "type": "module", "private": false, diff --git a/packages/data/README.md b/packages/data/README.md index 339138a3..290681c9 100644 --- a/packages/data/README.md +++ b/packages/data/README.md @@ -3,9 +3,9 @@ Adobe Data Oriented Programming Library ## Documentation -[Main Page](https://git.corp.adobe.com/pages/neuralfiltersplatform/firefly-data/docs/api/) +[API Reference](https://adobe.github.io/data/) -[ECS Performance Test](https://git.corp.adobe.com/pages/neuralfiltersplatform/firefly-data/docs/perftest.html) +[ECS Performance Test](https://adobe.github.io/data/perftest.html) ## Breaking API Changes @@ -76,7 +76,7 @@ An `Observable` is a subscription function that you can pass a callback funct Your callback function *may* be called back synchronously (before the initial call returns) zero or one times and asynchronously later any number of times. -For more information see the [Observable API documentation](./docs/api/modules/observe.html) +For more information see the [Observable API documentation](https://adobe.github.io/data/) ### Observable Types @@ -181,116 +181,33 @@ Contains some standard data type schemas in JSON Schema format for convenience. ## Entity Component System (ECS) -This ECS database is a high performance, strongly typed typescript implementation inspired by the Sanders Mertens C++ based [Flecs](https://www.flecs.dev/flecs/md_docs_2Docs.html). +A high-performance, strongly typed ECS database for TypeScript, inspired by [Flecs](https://www.flecs.dev/flecs/md_docs_2Docs.html). All application state is modeled as composable plugins, and all mutations flow through observable, undoable transactions. 
-This library provides two main interfaces for ECS operations: **Store** and **Database**. They share the same read API but differ significantly in their approach to writing and observability. - -### Store Interface - -The **Store** is the foundational, low-level interface for direct ECS data operations. - -**Key Characteristics:** -- **Direct Access**: Provides immediate, synchronous read/write access to entities, components, and resources -- **No Transaction Control**: Changes are applied directly without transaction boundaries -- **No Observability**: Changes are not automatically observable or trackable -- **High Performance**: Minimal overhead for direct operations using Structure of Arrays (SoA) with linear memory layout of numeric types for optimal cache performance -- **Core ECS Operations**: Includes entity creation, component updates, archetype querying, and resource management - -**Usage**: Ideal for scenarios requiring fast, direct ECS manipulation where you don't need change tracking or transactional safety. +For a complete guide covering plugins, transactions, observability, composition, and transient/ephemeral semantics, see the **[ECS README](./src/ecs/README.md)**. ```typescript -// Create a store with components, resources, and archetypes -const store = createStore( - { - position: Vec3.schema, - health: { type: "number" }, - player: { const: true } - }, - { - gravity: { default: 9.8 as number } - }, - { - Player: ["position", "health", "player"], - Particle: ["position"] - } -); - -// Direct operations -const playerId = store.archetypes.Player.insert({ - position: [0, 0, 0], - health: 100, - player: true -}); -store.update(playerId, { position: [1, 1, 1] }); -store.resources.gravity = 10.0; -``` - -### Database Interface - -The **Database** wraps a Store to provide **transaction-based operations** with **full observability**. 
- -**Key Characteristics:** -- **Transaction-Based**: All changes must occur within predefined atomic transactions that can be undone. -- **Full Observability**: Every change is observable through the `observe` API -- **Predefined Operations**: Uses predefined transaction functions rather than direct mutations -- **Undo/Redo Support**: Transactions generate undo/redo operations automatically -- **Change Tracking**: Tracks which entities, components, and archetypes changed -- **Event Notifications**: Automatically notifies observers of changes +import { Database } from "@adobe/data/ecs"; -**Usage**: Ideal for applications requiring change history, multiplayer synchronization, undo/redo functionality, or reactive UI updates. - -**Important Note**: Even when using a Database, transaction functions are written as direct modifications to the underlying Store interface. The Database wraps these operations to provide transactional guarantees and observability. - -```typescript -// Create a database with predefined transactions -const database = createDatabase(store, { - createPlayer(t, args: { position: Vector3, health: number }) { - // Transaction function receives Store interface for direct operations - return t.archetypes.Player.insert({ - ...args, - player: true - }); +const myPlugin = Database.Plugin.create({ + resources: { + score: { default: 0 as number }, }, - movePlayer(t, args: { entity: Entity, position: Vector3 }) { - // Direct Store operations within transaction context - t.update(args.entity, { position: args.position }); + transactions: { + addPoints: (t, points: number) => { + t.resources.score += points; + }, }, - setGravity(t, gravity: number) { - // Direct resource modification within transaction - t.resources.gravity = gravity; - } }); -// Execute transactions (these provide observability and undo/redo) -const playerId = database.transactions.createPlayer({ - position: [10, 20, 0], - health: 100 -}); -database.transactions.movePlayer({ entity: 
playerId, position: [15, 25, 5] }); - -// Observe all changes -database.observe.transactions((result) => { - console.log('Transaction applied:', result); - console.log('Changed entities:', result.changedEntities); - console.log('Undo operations:', result.undo); -}); - -// Observe specific entities -database.observe.entity(playerId)((entityData) => { - if (entityData) { - console.log('Player moved to:', entityData.position); - } -}); +const db = Database.create(myPlugin); +db.observe.resources.score((score) => console.log("Score:", score)); +db.transactions.addPoints(10); ``` ### What is an ECS? -Sanders Mertens also covers this thoroughly in his ECS FAQ: +Sanders Mertens covers this thoroughly in his ECS FAQ: [https://github.com/SanderMertens/ecs-faq?tab=readme-ov-file#what-is-ecs](https://github.com/SanderMertens/ecs-faq?tab=readme-ov-file#what-is-ecs) -In addition to the Entity, Component and System definitions which are standard, we also use the term Resource. A Resource is just a value which is defined globally on the ECS itself and not attached to any specific Entity. You can think of them as a singleton Component. - -## Performance Test - -[Performance Test](https://git.corp.adobe.com/pages/neuralfiltersplatform/firefly-data/docs/perftest.html) +In addition to the standard Entity, Component, and System definitions, we also use the term **Resource** — a global singleton value defined on the ECS itself, not attached to any specific entity. diff --git a/packages/data/docs/perftest.html b/packages/data/docs/perftest.html index 2c4e90e7..6dabdc49 100644 --- a/packages/data/docs/perftest.html +++ b/packages/data/docs/perftest.html @@ -3,13 +3,10 @@ ECS Performance Test - +

ECS Performance Test

- This is the documentation for the Firefly Data API. The Firefly Data API is a RESTful API that allows you to interact with the Firefly Data service. -

-

- Performance tests running. View them in the developer console. + Performance tests running. View results in the developer console.

\ No newline at end of file diff --git a/packages/data/package.json b/packages/data/package.json index 23fb7b77..894b4970 100644 --- a/packages/data/package.json +++ b/packages/data/package.json @@ -1,6 +1,6 @@ { "name": "@adobe/data", - "version": "0.9.47", + "version": "0.9.49", "description": "Adobe data oriented programming library", "type": "module", "sideEffects": false, @@ -23,7 +23,6 @@ "build-assembly": "run-p asbuild:release", "clean": "rm -rf dist build node_modules", "clean-cache": "rm -rf node_modules/.cache", - "deploy-docs": "pnpm build && ./scripts/deploy-docs.sh", "dev": "pnpm clean-cache && run-p dev:* dev:serve", "dev:serve": "vite", "dev:build": "tsc -b -w --preserveWatchOutput", diff --git a/packages/data/scripts/deploy-docs.sh b/packages/data/scripts/deploy-docs.sh deleted file mode 100755 index 729231c9..00000000 --- a/packages/data/scripts/deploy-docs.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash - -# Exit if any command fails -set -e - -# Step 1: Ensure we are on the main branch -current_branch=$(git branch --show-current) -if [ "$current_branch" != "main" ]; then - echo "Error: You are on branch '$current_branch'. Please switch to 'main' before deploying." - exit 1 -fi - -# Step 2: Check for uncommitted changes -if [[ -n $(git status --porcelain) ]]; then - echo "Error: You have uncommitted changes. Please commit or stash your changes before deploying." - exit 1 -fi - -# Step 3: Build the project -echo "Building the project..." -pnpm install -pnpm run build - -# Step 4: Checkout gh-pages branch -echo "Checking out the gh-pages branch..." -git fetch origin -git checkout gh-pages -git pull origin gh-pages - -# Step 5: Commit and push the changes to gh-pages -echo "Committing and pushing changes to gh-pages..." 
-git checkout -f main -- README.md docs -git add dist/ docs/ README.md -git commit -m "Deploy updated docs and dist to GitHub Pages" -git push origin gh-pages - -# Step 6: Switch back to the main branch -echo "Switching back to the main branch..." -git checkout main - -echo "Deployment to gh-pages complete!" diff --git a/packages/data/src/ecs/README.md b/packages/data/src/ecs/README.md index bf6dcffd..0097ec62 100644 --- a/packages/data/src/ecs/README.md +++ b/packages/data/src/ecs/README.md @@ -1,21 +1,399 @@ -## New ECS Folder - -### New Features Needed - -ECS Core Database -- [x] components can have null value, deleting components is distinct from setting to null. -- [x] simplify and improve type definitions. -- [x] allow to define new components with separate schema and type declarations. -- [x] fix deserialization with constant columns. - -ECS Transactional Database -- [x] .components -- [x] .resources -- [x] .archetypes -- [ ] selectEntityValues with optional filter -- [x] observations and actions on updates - -ECS Action Database -- [-] applied actions are stored directly within the database. -- [x] logic to prune old applied actions after a certain point. -- [x] action sequences +# ECS — Entity Component System + +A high-performance, strongly typed ECS database for TypeScript. All application state is modeled as plugins, and all mutations flow through observable transactions. + +## Quick Start + +```ts +import { Database } from "@adobe/data/ecs"; + +const myPlugin = Database.Plugin.create({ + resources: { + score: { default: 0 as number }, + }, + transactions: { + addPoints: (t, points: number) => { + t.resources.score += points; + }, + }, +}); + +const db = Database.create(myPlugin); + +db.observe.resources.score((score) => console.log("Score:", score)); +db.transactions.addPoints(10); +``` + +## Core Concepts + +**Entity** — a unique integer ID. Persistent entities have positive IDs; ephemeral entities have negative IDs. 
+ +**Component** — a named data column. Each component has a schema that describes its type. Numeric schemas (F32, Vec3, etc.) are stored in tightly packed typed arrays for cache-friendly performance. + +**Resource** — a global singleton value, not tied to any specific entity. Think of it as a single-row component. + +**Archetype** — a named grouping of components that defines what data an entity carries. Entities are stored in archetype tables with Structure-of-Arrays (SoA) layout. + +**Transaction** — a synchronous, atomic mutation of the database. Transactions produce undo/redo operations and notify observers. + +**Action** — a function that can be async and may call at most one transaction. Actions are the bridge between async side effects and the synchronous transaction model. + +**System** — a function created at database initialization, optionally returning a 60fps tick function. Systems can be ordered relative to each other. + +**Plugin** — a self-contained bundle of components, resources, archetypes, transactions, actions, computed observables, services, and systems. Plugins compose via `extends` (single parent) or `Database.Plugin.combine` (multiple peers). + +## Plugin Structure + +Create plugins with `Database.Plugin.create`. Properties must appear in this order (all optional): + +```ts +Database.Plugin.create({ + extends: basePlugin, // 1. single parent plugin + services: { ... }, // 2. singleton service factories + components: { ... }, // 3. component schemas + resources: { ... }, // 4. global state with defaults + archetypes: { ... }, // 5. named component groupings + computed: { ... }, // 6. derived observables + transactions: { ... }, // 7. synchronous mutations + actions: { ... }, // 8. async operations + systems: { ... }, // 9. tick functions +}); +``` + +Property order is enforced at runtime — misordering throws an error. + +## Components + +Components are schema-described data columns. 
Use numeric schemas from `@adobe/data/math` for high-performance linear-memory storage, or plain type-cast defaults for general data. + +```ts +import { Vec3, F32 } from "@adobe/data/math"; + +const physicsPlugin = Database.Plugin.create({ + components: { + position: Vec3.schema, + velocity: Vec3.schema, + mass: F32.schema, + name: { type: "string", maxLength: 50 }, + }, + archetypes: { + Particle: ["position", "velocity", "mass"], + NamedParticle: ["position", "velocity", "mass", "name"], + }, + // ... +}); +``` + +## Resources + +Resources hold global state. Define them with a default value and a type assertion: + +```ts +const gamePlugin = Database.Plugin.create({ + resources: { + score: { default: 0 as number }, + paused: { default: false as boolean }, + config: { default: { difficulty: "normal" } as GameConfig }, + }, + // ... +}); +``` + +For resources that are initialized later (e.g. by a system or service), use `null as unknown as Type`: + +```ts +resources: { + controller: { default: null as unknown as GameController }, +}, +``` + +## Archetypes + +Archetypes define the component shape of entity kinds. They determine how entities are stored and queried. + +```ts +const worldPlugin = Database.Plugin.create({ + components: { + position: Vec3.schema, + health: F32.schema, + player: { const: true }, + npc: { const: true }, + }, + archetypes: { + Player: ["position", "health", "player"], + NPC: ["position", "health", "npc"], + }, + transactions: { + spawnPlayer: (t, pos: { x: number; y: number; z: number }) => { + return t.archetypes.Player.insert({ + position: [pos.x, pos.y, pos.z], + health: 100, + player: true, + }); + }, + }, +}); +``` + +## Transactions + +Transactions are synchronous, deterministic functions that mutate the store. They automatically produce undo/redo operations and notify observers. 
+ +```ts +transactions: { + moveEntity: (t, args: { entity: Entity; x: number; y: number; z: number }) => { + t.update(args.entity, { position: [args.x, args.y, args.z] }); + }, + setScore: (t, score: number) => { + t.resources.score = score; + }, + removeEntity: (t, entity: Entity) => { + t.delete(entity); + }, +}, +``` + +### Async Transactions + +When a transaction is called with a function argument, it supports async workflows. An async generator yields intermediate (transient) values before returning the final committed value: + +```ts +// Async generator — each yield applies transiently, only the return commits +db.transactions.updatePosition(async function* () { + yield { entity: id, x: 1, y: 0, z: 0 }; // transient + await someAsyncWork(); + yield { entity: id, x: 2, y: 0, z: 0 }; // transient + await moreAsyncWork(); + return { entity: id, x: 3, y: 0, z: 0 }; // committed +}); +``` + +Each `yield` rolls back the previous intermediate state and applies the new one. Only the final `return` value persists. If the generator throws, all intermediate state is rolled back. + +## Actions + +Actions receive the full database and can be async. They should call at most one transaction to keep undo/redo correct. UI should never consume action return values — data flows down via observables. + +```ts +actions: { + loadAndApply: async (db, url: string) => { + const data = await fetch(url).then(r => r.json()); + db.transactions.applyData(data); + }, +}, +``` + +## Computed Observables + +Computed values are derived observables created from database state. Each factory receives the database and returns an `Observe`. 
+ +```ts +import { Observe } from "@adobe/data/observe"; + +const plugin = Database.Plugin.create({ + resources: { + board: { default: createInitialBoard() }, + firstPlayer: { default: "X" as PlayerMark }, + }, + computed: { + currentPlayer: (db) => + Observe.withFilter(db.observe.resources.board, (board) => + getCurrentPlayer(board, db.resources.firstPlayer), + ), + isGameOver: (db) => + Observe.withFilter(db.observe.resources.board, checkGameOver), + }, + // ... +}); +``` + +## Services + +Services are singleton objects created at database initialization. Extended plugin services initialize first, guaranteeing dependency order. + +```ts +const appPlugin = Database.Plugin.create({ + extends: basePlugin, + services: { + analytics: (db) => createAnalyticsService(db), + logger: (db) => createLogger(db.services.analytics), + }, + // ... +}); +``` + +Service factories can be overridden at database creation for testing: + +```ts +const db = Database.create(appPlugin, { + services: { analytics: mockAnalyticsService }, +}); +``` + +## Systems + +Systems run initialization logic and optionally return a tick function for 60fps frame processing. Scheduling constraints control execution order. + +```ts +systems: { + physics_update: { + create: (db) => { + // initialization code runs once + return () => { + // tick function runs every frame + const dt = db.resources.time.delta; + // update physics... + }; + }, + schedule: { before: ["render_update"] }, + }, + render_update: { + create: (db) => () => { + // runs after physics_update + }, + }, +}, +``` + +Systems that only need to run once at init can return `void` instead of a tick function. 
+ +## Plugin Composition + +### Single Inheritance + +Use `extends` for single-parent relationships: + +```ts +const uiPlugin = Database.Plugin.create({ + extends: gamePlugin, + resources: { + showMenu: { default: false as boolean }, + }, + // uiPlugin has access to all of gamePlugin's types +}); +``` + +### Combining Peers + +Use `Database.Plugin.combine` to merge multiple independent plugins: + +```ts +const combinedPlugin = Database.Plugin.combine(physicsPlugin, renderPlugin, audioPlugin); +const db = Database.create(combinedPlugin); +``` + +## Observing State + +```ts +const db = Database.create(myPlugin); + +// Observe a resource +db.observe.resources.score((score) => { /* ... */ }); + +// Observe a component column (fires on any change to that component) +db.observe.components.position(() => { /* position data changed */ }); + +// Observe a specific entity +db.observe.entity(entityId)((values) => { + if (values) { /* entity exists with these values */ } + else { /* entity was deleted */ } +}); + +// Observe all transactions +db.observe.transactions((result) => { + // result.changedEntities, result.changedComponents, etc. +}); +``` + +## Serialization + +```ts +// Save +const data = db.toData(); + +// Restore +db.fromData(data); +``` + +Ephemeral entities and components marked `ephemeral: true` in their schema are excluded from serialization. + +## Type Utilities + +```ts +// Derive the full Database type from a plugin +type MyDB = Database.FromPlugin; + +// Derive the Store type (for transaction function signatures) +type MyStore = Database.Plugin.ToStore; +``` + +--- + +## Reference: Transient and Ephemeral Semantics + +The ECS uses the terms "ephemeral" and "transient" with precise, distinct meanings. Ephemeral means **not persisted**. Transient means **intermediate value**. + +### Ephemeral Component + +A built-in optional component that can only be set at entity creation time. It cannot be added to or removed from an existing entity. 
Entities created with this component are allocated negative IDs and stored in a separate entity table. + +### Ephemeral Entities + +Entities created with the `ephemeral` component. They always have negative IDs and are never persisted. Use ephemeral entities for session-only or UI-local state (selections, hover states, panel positions, etc.). + +### Ephemeral Schema + +A component or resource schema with `ephemeral: true`. This marks the data as not persisted, but unlike the ephemeral component, it can live on a persistent entity. Ephemeral schemas are excluded from serialization but their entities still carry positive IDs. + +```ts +resources: { + isHovering: { default: false as boolean, ephemeral: true }, +}, +``` + +### Transient Transaction + +A transaction that is part of an async sequence and is not the final committed step. Each `yield` in an async generator transaction produces a transient transaction. Reconciling database replays also produce transient transactions. Transient transactions notify observers but are not pushed to the undo stack and should not trigger persistence. + +### Ephemeral Transaction + +A transaction whose `TransactionResult.ephemeral` property is `true`. This happens when every entity touched by the transaction is an ephemeral entity (negative ID). If even one persistent entity was modified, the transaction is not ephemeral. + +### How They Interact on `TransactionResult` + +Every `TransactionResult` carries two independent boolean flags: + +| Flag | Source | Meaning | +|---|---|---| +| `transient` | Caller-provided via `execute` options | The transaction is an intermediate step, not the final commit | +| `ephemeral` | Derived from what changed | The transaction only touched ephemeral entities | + +These flags are orthogonal. 
All four combinations are valid: + +| `transient` | `ephemeral` | Example | +|---|---|---| +| `false` | `false` | Normal committed change to persistent data | +| `true` | `false` | Async generator yield that modifies persistent entities | +| `false` | `true` | Committed change to UI-only state (e.g. selection) | +| `true` | `true` | Intermediate step touching only ephemeral data | + +### Consumer Guidelines + +**Persistence observers** should skip both transient and ephemeral transactions — transient results are not final, and ephemeral results have nothing to persist: + +```ts +db.observe.transactions((t) => { + if (t.transient || t.ephemeral) return; + save(); +}); +``` + +**Undo/redo** should skip transient transactions (intermediate steps shouldn't clutter the undo stack) but may still record ephemeral ones when marked undoable, since users may want to undo UI state changes like selection: + +```ts +db.observe.transactions((t) => { + if (t.undoable && !t.transient) { + pushToUndoStack(t); + } +}); +``` diff --git a/packages/data/src/functions/serialization/compression.ts b/packages/data/src/functions/serialization/compression.ts new file mode 100644 index 00000000..a2d92a7d --- /dev/null +++ b/packages/data/src/functions/serialization/compression.ts @@ -0,0 +1,41 @@ +// © 2026 Adobe. MIT License. See /LICENSE for details. 
+ +import { toArrayBufferBacked } from "../../internal/array-buffer-like/index.js"; + +const collectStream = async (stream: ReadableStream<Uint8Array>): Promise<Uint8Array<ArrayBuffer>> => { + const chunks: Uint8Array[] = []; + const reader = stream.getReader(); + + for (;;) { + const { done, value } = await reader.read(); + if (done) break; + chunks.push(toArrayBufferBacked(value)); + } + + const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0); + const result = new Uint8Array(totalLength); + let offset = 0; + + for (const chunk of chunks) { + result.set(chunk, offset); + offset += chunk.length; + } + + return result; +}; + +const pipeThrough = (data: Uint8Array, transform: TransformStream<Uint8Array, Uint8Array>) => { + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(data); + controller.close(); + } + }); + return collectStream(stream.pipeThrough(transform)); +}; + +export const compressDeflate = (data: Uint8Array): Promise<Uint8Array<ArrayBuffer>> => + pipeThrough(data, new CompressionStream('deflate')); + +export const decompressDeflate = (data: Uint8Array): Promise<Uint8Array<ArrayBuffer>> => + pipeThrough(data, new DecompressionStream('deflate')); diff --git a/packages/data/src/functions/serialization/serialization.test.ts b/packages/data/src/functions/serialization/serialization.test.ts index edb54612..9cd6c372 100644 --- a/packages/data/src/functions/serialization/serialization.test.ts +++ b/packages/data/src/functions/serialization/serialization.test.ts @@ -7,6 +7,7 @@ import { equals } from '../../equals.js'; import { createTable } from '../../table/create-table.js'; import { addRow } from '../../table/add-row.js'; import { createStructBuffer } from '../../typed-buffer/create-struct-buffer.js'; +import type { TypedBuffer } from '../../typed-buffer/typed-buffer.js'; describe('serialize/deserialize', () => { @@ -158,4 +159,81 @@ describe('serialize/deserialize', () => { expect(roundTrip.structBuffer.get(0)).not.toEqual({ x: 999, y: 888 }); expect(roundTrip.structBuffer.get(1)).not.toEqual({ x: 777, y: 666 }); }); + 
+ it('round-trips enum typed buffers', () => { + const schema = { enum: ["landscape", "portrait", "square"] } as const; + const buf = createTypedBuffer(schema, 3); + buf.set(0, "landscape"); + buf.set(1, "portrait"); + buf.set(2, "square"); + + const payload = serialize({ buf }); + const roundTrip = deserialize<{ buf: TypedBuffer }>(payload); + + expect(roundTrip.buf.type).toBe("enum"); + expect(roundTrip.buf.capacity).toBe(3); + expect(roundTrip.buf.get(0)).toBe("landscape"); + expect(roundTrip.buf.get(1)).toBe("portrait"); + expect(roundTrip.buf.get(2)).toBe("square"); + }); + + it('should deserialize legacy array-serialized enum buffers as enum buffers', () => { + const schema = { enum: ["landscape", "portrait", "square"] }; + + // Simulate the old serialized format: type was "array" with values in a JSON array + const legacyPayload = { + json: JSON.stringify({ + buf: { + codec: "typed-buffer", + json: { + type: "array", + schema, + capacity: 3, + array: ["landscape", "portrait", "square"], + }, + binaryIndex: 0, + binaryCount: 0, + }, + }), + binary: [], + }; + + const roundTrip = deserialize<{ buf: TypedBuffer }>(legacyPayload); + + expect(roundTrip.buf.type).toBe("enum"); + expect(roundTrip.buf.capacity).toBe(3); + expect(roundTrip.buf.get(0)).toBe("landscape"); + expect(roundTrip.buf.get(1)).toBe("portrait"); + expect(roundTrip.buf.get(2)).toBe("square"); + }); + + it('should deserialize enum type with json array fallback when binary is missing', () => { + const schema = { enum: ["a", "b", "c"] }; + + // Simulate a payload where type is "enum" but only a JSON array is present (no binary) + const fallbackPayload = { + json: JSON.stringify({ + buf: { + codec: "typed-buffer", + json: { + type: "enum", + schema, + capacity: 3, + array: ["b", "c", "a"], + }, + binaryIndex: 0, + binaryCount: 0, + }, + }), + binary: [], + }; + + const roundTrip = deserialize<{ buf: TypedBuffer }>(fallbackPayload); + + expect(roundTrip.buf.type).toBe("enum"); + 
expect(roundTrip.buf.capacity).toBe(3); + expect(roundTrip.buf.get(0)).toBe("b"); + expect(roundTrip.buf.get(1)).toBe("c"); + expect(roundTrip.buf.get(2)).toBe("a"); + }); }); \ No newline at end of file diff --git a/packages/data/src/functions/serialization/serialize-to-blobs.test.ts b/packages/data/src/functions/serialization/serialize-to-blobs.test.ts index 22e2c3cc..bed70c74 100644 --- a/packages/data/src/functions/serialization/serialize-to-blobs.test.ts +++ b/packages/data/src/functions/serialization/serialize-to-blobs.test.ts @@ -1,8 +1,41 @@ // © 2026 Adobe. MIT License. See /LICENSE for details. import { describe, it, expect } from "vitest"; import { serializeToBlobs, deserializeFromBlobs } from "./serialize-to-blobs.js"; +import { serialize } from "./serialize.js"; describe("serializeToBlobs", () => { + it("should include version 2 in json metadata", async () => { + const data = { value: new Uint8Array([1, 2, 3]) }; + const blobs = await serializeToBlobs(data); + const meta = JSON.parse(await blobs.json.text()); + + expect(meta.version).toBe(2); + }); + + it("should deserialize a legacy v1 payload (no version field, uncompressed binary)", async () => { + const data = { + scores: new Uint16Array([10, 20, 30]), + name: "legacy" + }; + + const serialized = serialize(data); + const binarySizes = serialized.binary.map((a) => a.byteLength); + const binaryParts = serialized.binary.map((a) => + new Uint8Array(a.buffer, a.byteOffset, a.byteLength) + ); + + const jsonBlob = new Blob( + [JSON.stringify({ json: serialized.json, binarySizes })], + { type: "application/json" } + ); + const binaryBlob = new Blob(binaryParts, { type: "application/octet-stream" }); + + const result = await deserializeFromBlobs({ json: jsonBlob, binary: binaryBlob }) as typeof data; + + expect(result.name).toBe("legacy"); + expect(result.scores).toEqual(data.scores); + }); + it("should serialize and deserialize primitive data", async () => { const data = { string: "hello world", diff --git 
a/packages/data/src/functions/serialization/serialize-to-blobs.ts b/packages/data/src/functions/serialization/serialize-to-blobs.ts index adb270ff..34167fea 100644 --- a/packages/data/src/functions/serialization/serialize-to-blobs.ts +++ b/packages/data/src/functions/serialization/serialize-to-blobs.ts @@ -1,21 +1,45 @@ // © 2026 Adobe. MIT License. See /LICENSE for details. import { toArrayBufferBacked } from "../../internal/array-buffer-like/index.js"; +import { compressDeflate, decompressDeflate } from "./compression.js"; import { serialize, deserialize } from "./serialize.js"; +const ENCODING_VERSION = 2; + +const concatenateBuffers = (parts: Uint8Array[]): Uint8Array => { + const totalSize = parts.reduce((sum, p) => sum + p.byteLength, 0); + const combined = new Uint8Array(totalSize); + let offset = 0; + for (const part of parts) { + combined.set(part, offset); + offset += part.byteLength; + } + return combined; +}; + export const serializeToBlobs = async (data: T): Promise<{ json: Blob, binary: Blob }> => { const serialized = serialize(data); const binarySizes = serialized.binary.map((array) => array.byteLength); const binaryParts = serialized.binary.map(toArrayBufferBacked); - const json = new Blob([JSON.stringify({ json: serialized.json, binarySizes })], { type: "application/json" }); - const binary = new Blob(binaryParts, { type: "application/octet-stream" }); + + const compressed = await compressDeflate(concatenateBuffers(binaryParts)); + + const json = new Blob( + [JSON.stringify({ version: ENCODING_VERSION, json: serialized.json, binarySizes })], + { type: "application/json" } + ); + const binary = new Blob([compressed], { type: "application/octet-stream" }); return { json, binary }; -} +}; export const deserializeFromBlobs = async ({ json, binary }: { json: Blob, binary: Blob }): Promise => { const jsonText = await json.text(); - const { json: serializedJson, binarySizes } = JSON.parse(jsonText); + const { version, json: serializedJson, binarySizes } = 
JSON.parse(jsonText); + + const raw = new Uint8Array(await binary.arrayBuffer()); + const binaryArray = version >= 2 + ? await decompressDeflate(raw) + : toArrayBufferBacked(raw); - const binaryArray = new Uint8Array(await binary.arrayBuffer()); const binaryChunks: Uint8Array[] = []; let offset = 0; @@ -25,4 +49,4 @@ export const deserializeFromBlobs = async ({ json, binary }: { json: Blob, bi } return deserialize({ json: serializedJson, binary: binaryChunks }); -}; \ No newline at end of file +}; diff --git a/packages/data/src/functions/serialization/serialize-to-json.ts b/packages/data/src/functions/serialization/serialize-to-json.ts index 6987ea19..432741aa 100644 --- a/packages/data/src/functions/serialization/serialize-to-json.ts +++ b/packages/data/src/functions/serialization/serialize-to-json.ts @@ -1,6 +1,7 @@ // © 2026 Adobe. MIT License. See /LICENSE for details. import { toArrayBufferBacked } from "../../internal/array-buffer-like/index.js"; +import { compressDeflate, decompressDeflate } from "./compression.js"; import { serialize, deserialize } from "./serialize.js"; /** @@ -48,80 +49,6 @@ const base64ToUint8Array = (base64: string): Uint8Array => { return bytes; }; -/** - * Compresses a Uint8Array using the deflate algorithm - */ -const compressData = async (data: Uint8Array): Promise> => { - const stream = new ReadableStream({ - start(controller) { - controller.enqueue(data); - controller.close(); - } - }); - - const compressedStream = stream.pipeThrough( - new CompressionStream('deflate') - ); - - const chunks: Uint8Array[] = []; - const reader = compressedStream.getReader(); - - for (;;) { - const { done, value } = await reader.read(); - if (done) break; - chunks.push(toArrayBufferBacked(value)); - } - - // Concatenate all chunks - const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0); - const result = new Uint8Array(totalLength); - let offset = 0; - - for (const chunk of chunks) { - result.set(chunk, offset); - offset += 
chunk.length; - } - - return result; -}; - -/** - * Decompresses a Uint8Array using the deflate algorithm - */ -const decompressData = async (data: Uint8Array): Promise> => { - const stream = new ReadableStream({ - start(controller) { - controller.enqueue(data); - controller.close(); - } - }); - - const decompressedStream = stream.pipeThrough( - new DecompressionStream('deflate') - ); - - const chunks: Uint8Array[] = []; - const reader = decompressedStream.getReader(); - - for (;;) { - const { done, value } = await reader.read(); - if (done) break; - chunks.push(toArrayBufferBacked(value)); - } - - // Concatenate all chunks - const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0); - const result = new Uint8Array(totalLength); - let offset = 0; - - for (const chunk of chunks) { - result.set(chunk, offset); - offset += chunk.length; - } - - return result; -}; - /** * Serializes data to a single JSON string with base64-encoded and compressed binary data. * The binary arrays are concatenated, compressed using deflate, and their original lengths are stored to allow reconstruction. 
@@ -129,10 +56,8 @@ const decompressData = async (data: Uint8Array): Promise export const serializeToJSON = async (data: T): Promise => { const serialized = serialize(data); - // Store the length of each binary chunk const lengths = serialized.binary.map(chunk => chunk.byteLength); - // Calculate total size and concatenate all binary arrays into a single Uint8Array const totalSize = lengths.reduce((sum, len) => sum + len, 0); const combinedBinary = new Uint8Array(totalSize); let offset = 0; @@ -142,14 +67,11 @@ export const serializeToJSON = async (data: T): Promise => { offset += binaryChunk.byteLength; } - // Compress the binary data - const compressedBinary = await compressData(combinedBinary); - - // Convert to base64 + const compressedBinary = await compressDeflate(combinedBinary); const base64Binary = uint8ArrayToBase64(compressedBinary); const result: SerializedJSON = { - json: JSON.parse(serialized.json), // Parse to avoid double-encoding + json: JSON.parse(serialized.json), lengths, binary: base64Binary }; @@ -163,13 +85,9 @@ export const serializeToJSON = async (data: T): Promise => { export const deserializeFromJSON = async (jsonString: string): Promise => { const parsed: SerializedJSON = JSON.parse(jsonString); - // Convert base64 back to Uint8Array const compressedBinary = base64ToUint8Array(parsed.binary); + const combinedBinary = await decompressDeflate(compressedBinary); - // Decompress the binary data - const combinedBinary = await decompressData(compressedBinary); - - // Split the combined binary back into chunks based on lengths const binaryChunks: Uint8Array[] = []; let offset = 0; @@ -179,10 +97,8 @@ export const deserializeFromJSON = async (jsonString: string): Promise => offset += length; } - // Deserialize using the original deserialize function return deserialize({ - json: JSON.stringify(parsed.json), // Convert back to string for deserialize() + json: JSON.stringify(parsed.json), binary: binaryChunks }); }; - diff --git 
a/packages/data/src/old-ecs/index.ts b/packages/data/src/old-ecs/index.ts index 43024e42..c27a63ce 100644 --- a/packages/data/src/old-ecs/index.ts +++ b/packages/data/src/old-ecs/index.ts @@ -1,4 +1,10 @@ // © 2026 Adobe. MIT License. See /LICENSE for details. + +/** + * @deprecated Use `@adobe/data/ecs` with `Database.Plugin.create` instead. + * This module will be removed in a future release. + */ + export * from "./core-ecs/index.js"; export * from "./ecs/index.js"; export * from "./transaction-ecs/index.js"; diff --git a/packages/data/src/typed-buffer/create-enum-buffer.test.ts b/packages/data/src/typed-buffer/create-enum-buffer.test.ts new file mode 100644 index 00000000..89237eae --- /dev/null +++ b/packages/data/src/typed-buffer/create-enum-buffer.test.ts @@ -0,0 +1,313 @@ +// © 2026 Adobe. MIT License. See /LICENSE for details. +import { describe, it, expect } from "vitest"; +import { createEnumBuffer } from "./create-enum-buffer.js"; +import { createTypedBuffer } from "./create-typed-buffer.js"; +import type { Schema } from "../schema/index.js"; + +describe("createEnumBuffer", () => { + describe("get and set", () => { + it("should store and retrieve string enum values", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "a" }; + const buf = createEnumBuffer(schema, 3); + buf.set(0, "a"); + buf.set(1, "b"); + buf.set(2, "c"); + + expect(buf.get(0)).toBe("a"); + expect(buf.get(1)).toBe("b"); + expect(buf.get(2)).toBe("c"); + }); + + it("should store and retrieve number enum values", () => { + const schema: Schema = { enum: [10, 20, 30], default: 10 }; + const buf = createEnumBuffer(schema, 2); + buf.set(0, 20); + buf.set(1, 30); + + expect(buf.get(0)).toBe(20); + expect(buf.get(1)).toBe(30); + }); + + it("should store and retrieve boolean enum values", () => { + const schema: Schema = { enum: [true, false], default: true }; + const buf = createEnumBuffer(schema, 2); + buf.set(0, true); + buf.set(1, false); + + expect(buf.get(0)).toBe(true); + 
expect(buf.get(1)).toBe(false); + }); + + it("should store and retrieve mixed-type enum values", () => { + const schema: Schema = { enum: ["a", 12, false, null], default: "a" }; + const buf = createEnumBuffer(schema, 4); + buf.set(0, "a"); + buf.set(1, 12); + buf.set(2, false); + buf.set(3, null); + + expect(buf.get(0)).toBe("a"); + expect(buf.get(1)).toBe(12); + expect(buf.get(2)).toBe(false); + expect(buf.get(3)).toBe(null); + }); + + it("should throw on set with a value not in the enum", () => { + const schema: Schema = { enum: ["x", "y"], default: "x" }; + const buf = createEnumBuffer(schema, 1); + + expect(() => buf.set(0, "z")).toThrow( + /Value "z" is not a valid enum value/ + ); + }); + }); + + describe("construction errors", () => { + it("should throw if enum has more than 256 values", () => { + const values = Array.from({ length: 257 }, (_, i) => `val${i}`); + const schema: Schema = { enum: values, default: values[0] }; + + expect(() => createEnumBuffer(schema, 1)).toThrow( + /257 values.*maximum is 256/ + ); + }); + + it("should accept exactly 256 values", () => { + const values = Array.from({ length: 256 }, (_, i) => i); + const schema: Schema = { enum: values, default: 0 }; + + const buf = createEnumBuffer(schema, 2); + buf.set(0, 0); + buf.set(1, 255); + expect(buf.get(0)).toBe(0); + expect(buf.get(1)).toBe(255); + }); + }); + + describe("isDefault", () => { + it("should return true for unset slots (default is first enum value)", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "a" }; + const buf = createEnumBuffer(schema, 3); + + expect(buf.isDefault(0)).toBe(true); + expect(buf.isDefault(1)).toBe(true); + expect(buf.isDefault(2)).toBe(true); + }); + + it("should return false after setting a non-default value", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "a" }; + const buf = createEnumBuffer(schema, 2); + buf.set(0, "b"); + + expect(buf.isDefault(0)).toBe(false); + expect(buf.isDefault(1)).toBe(true); + }); + 
+ it("should use schema.default when it is not the first enum value", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "b" }; + const buf = createEnumBuffer(schema, 2); + + expect(buf.isDefault(0)).toBe(true); + expect(buf.get(0)).toBe("b"); + + buf.set(0, "a"); + expect(buf.isDefault(0)).toBe(false); + + buf.set(1, "b"); + expect(buf.isDefault(1)).toBe(true); + }); + + it("should default to index 0 when schema.default is not provided", () => { + const schema: Schema = { enum: ["x", "y"] }; + const buf = createEnumBuffer(schema, 1); + + expect(buf.isDefault(0)).toBe(true); + expect(buf.get(0)).toBe("x"); + }); + }); + + describe("copyWithin", () => { + it("should copy elements within the buffer", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "a" }; + const buf = createEnumBuffer(schema, 4); + buf.set(0, "a"); + buf.set(1, "b"); + buf.set(2, "c"); + buf.set(3, "a"); + + buf.copyWithin(2, 0, 2); + + expect(buf.get(0)).toBe("a"); + expect(buf.get(1)).toBe("b"); + expect(buf.get(2)).toBe("a"); + expect(buf.get(3)).toBe("b"); + }); + }); + + describe("slice", () => { + it("should return all values when called without arguments", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "a" }; + const buf = createEnumBuffer(schema, 3); + buf.set(0, "a"); + buf.set(1, "b"); + buf.set(2, "c"); + + expect(Array.from(buf.slice())).toEqual(["a", "b", "c"]); + }); + + it("should return a partial slice", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "a" }; + const buf = createEnumBuffer(schema, 4); + buf.set(0, "a"); + buf.set(1, "b"); + buf.set(2, "c"); + buf.set(3, "a"); + + expect(Array.from(buf.slice(1, 3))).toEqual(["b", "c"]); + }); + }); + + describe("copy", () => { + it("should produce an independent clone", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "a" }; + const buf = createEnumBuffer(schema, 3); + buf.set(0, "a"); + buf.set(1, "b"); + buf.set(2, "c"); + + const clone = 
buf.copy(); + + expect(clone.get(0)).toBe("a"); + expect(clone.get(1)).toBe("b"); + expect(clone.get(2)).toBe("c"); + expect(clone.capacity).toBe(3); + + buf.set(0, "c"); + expect(clone.get(0)).toBe("a"); + }); + + it("should have an independent typed array backing", () => { + const schema: Schema = { enum: [1, 2, 3], default: 1 }; + const buf = createEnumBuffer(schema, 2); + buf.set(0, 2); + + const clone = buf.copy(); + const srcTA = buf.getTypedArray(); + const dstTA = clone.getTypedArray(); + + expect(dstTA).not.toBe(srcTA); + expect(dstTA.buffer).not.toBe(srcTA.buffer); + }); + }); + + describe("getTypedArray", () => { + it("should return a Uint8Array", () => { + const schema: Schema = { enum: ["a", "b"], default: "a" }; + const buf = createEnumBuffer(schema, 4); + + const ta = buf.getTypedArray(); + expect(ta).toBeInstanceOf(Uint8Array); + expect(ta.length).toBe(4); + }); + }); + + describe("capacity resize", () => { + it("should preserve data when growing", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "a" }; + const buf = createEnumBuffer(schema, 2); + buf.set(0, "b"); + buf.set(1, "c"); + + buf.capacity = 4; + + expect(buf.capacity).toBe(4); + expect(buf.get(0)).toBe("b"); + expect(buf.get(1)).toBe("c"); + }); + + it("should truncate data when shrinking", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "a" }; + const buf = createEnumBuffer(schema, 4); + buf.set(0, "a"); + buf.set(1, "b"); + buf.set(2, "c"); + buf.set(3, "a"); + + buf.capacity = 2; + + expect(buf.capacity).toBe(2); + expect(buf.get(0)).toBe("a"); + expect(buf.get(1)).toBe("b"); + }); + + it("should fill new slots with default when growing and default is not index 0", () => { + const schema: Schema = { enum: ["a", "b", "c"], default: "b" }; + const buf = createEnumBuffer(schema, 2); + buf.set(0, "c"); + + buf.capacity = 4; + + expect(buf.get(0)).toBe("c"); + expect(buf.get(1)).toBe("b"); + expect(buf.get(2)).toBe("b"); + expect(buf.get(3)).toBe("b"); + 
}); + }); + + describe("type and metadata", () => { + it('should have type "enum"', () => { + const schema: Schema = { enum: ["a", "b"], default: "a" }; + const buf = createEnumBuffer(schema, 1); + expect(buf.type).toBe("enum"); + }); + + it("should have typedArrayElementSizeInBytes of 1", () => { + const schema: Schema = { enum: ["a", "b"], default: "a" }; + const buf = createEnumBuffer(schema, 1); + expect(buf.typedArrayElementSizeInBytes).toBe(1); + }); + }); +}); + +describe("createTypedBuffer enum integration", () => { + it("should auto-select enum buffer when schema has enum property", () => { + const schema = { enum: ["landscape", "portrait"] } as const; + const buf = createTypedBuffer(schema, 2); + + expect(buf.type).toBe("enum"); + buf.set(0, "landscape"); + buf.set(1, "portrait"); + expect(buf.get(0)).toBe("landscape"); + expect(buf.get(1)).toBe("portrait"); + }); + + it("should prefer enum over number for integer enum schemas", () => { + const schema = { type: "integer", enum: [0, 1, 2] } as const; + const buf = createTypedBuffer(schema, 3); + + expect(buf.type).toBe("enum"); + }); + + it("should prefer enum over array for string enum schemas", () => { + const schema = { type: "string", enum: ["a", "b", "c"] } as const; + const buf = createTypedBuffer(schema, 2); + + expect(buf.type).toBe("enum"); + }); + + it("should prefer const over enum when both are present", () => { + const schema = { const: "a", enum: ["a", "b"] } as const; + const buf = createTypedBuffer(schema, 1); + + expect(buf.type).toBe("const"); + }); + + it("should support initial values array", () => { + const schema = { enum: ["x", "y", "z"] } as const; + const buf = createTypedBuffer(schema, ["x", "z", "y"]); + + expect(buf.get(0)).toBe("x"); + expect(buf.get(1)).toBe("z"); + expect(buf.get(2)).toBe("y"); + }); +}); diff --git a/packages/data/src/typed-buffer/create-enum-buffer.ts b/packages/data/src/typed-buffer/create-enum-buffer.ts new file mode 100644 index 00000000..55d0906a --- 
/dev/null +++ b/packages/data/src/typed-buffer/create-enum-buffer.ts @@ -0,0 +1,116 @@ +// © 2026 Adobe. MIT License. See /LICENSE for details. +import { resize } from "../internal/array-buffer-like/resize.js"; +import { Schema } from "../schema/index.js"; +import { TypedArray } from "../internal/typed-array/index.js"; +import { TypedBuffer, TypedBufferType } from "./typed-buffer.js"; +import { createSharedArrayBuffer } from "../internal/shared-array-buffer/create-shared-array-buffer.js"; + +export const enumBufferType = "enum"; + +const MAX_ENUM_VALUES = 256; + +class EnumTypedBuffer<T> extends TypedBuffer<T> { + public readonly type: TypedBufferType = enumBufferType; + public readonly typedArrayElementSizeInBytes = 1; + + private arrayBuffer: ArrayBuffer | SharedArrayBuffer; + private array: Uint8Array; + private _capacity: number; + private readonly indexToValue: readonly T[]; + private readonly valueToIndex: Map<T, number>; + private readonly defaultIndex: number; + + constructor(schema: Schema, initialCapacity: number) { + super(schema); + + const enumValues = schema.enum as readonly T[]; + if (enumValues.length > MAX_ENUM_VALUES) { + throw new Error( + `Enum schema has ${enumValues.length} values, but the maximum is ${MAX_ENUM_VALUES}. ` + + `Enum buffers use a Uint8Array and cannot represent more than ${MAX_ENUM_VALUES} distinct values.` + ); + } + + this.indexToValue = enumValues; + this.valueToIndex = new Map(); + for (let i = 0; i < enumValues.length; i++) { + this.valueToIndex.set(enumValues[i], i); + } + + this.defaultIndex = schema.default !== undefined + ? this.valueToIndex.get(schema.default as T) ??
0 + : 0; + + this._capacity = initialCapacity; + this.arrayBuffer = createSharedArrayBuffer(initialCapacity); + this.array = new Uint8Array(this.arrayBuffer); + + if (this.defaultIndex !== 0) { + this.array.fill(this.defaultIndex); + } + } + + get capacity(): number { + return this._capacity; + } + + set capacity(value: number) { + if (value !== this._capacity) { + const oldCapacity = this._capacity; + this._capacity = value; + this.arrayBuffer = resize(this.arrayBuffer, value); + this.array = new Uint8Array(this.arrayBuffer); + if (this.defaultIndex !== 0 && value > oldCapacity) { + this.array.fill(this.defaultIndex, oldCapacity, value); + } + } + } + + getTypedArray(): TypedArray { + return this.array; + } + + get(index: number): T { + return this.indexToValue[this.array[index]]; + } + + set(index: number, value: T): void { + const enumIndex = this.valueToIndex.get(value); + if (enumIndex === undefined) { + throw new Error( + `Value ${JSON.stringify(value)} is not a valid enum value. ` + + `Expected one of: ${this.indexToValue.map(v => JSON.stringify(v)).join(", ")}` + ); + } + this.array[index] = enumIndex; + } + + isDefault(index: number): boolean { + return this.array[index] === this.defaultIndex; + } + + copyWithin(target: number, start: number, end: number): void { + this.array.copyWithin(target, start, end); + } + + slice(start = 0, end = this._capacity): ArrayLike<T> & Iterable<T> { + const result: T[] = []; + for (let i = start; i < end; i++) { + result.push(this.indexToValue[this.array[i]]); + } + return result; + } + + copy(): TypedBuffer<T> { + const copy = new EnumTypedBuffer<T>(this.schema, this._capacity); + copy.array.set(this.array); + return copy; + } +} + +export const createEnumBuffer = <T>( + schema: Schema, + initialCapacity: number, +): TypedBuffer<T> => { + return new EnumTypedBuffer<T>(schema, initialCapacity); +}; diff --git a/packages/data/src/typed-buffer/create-typed-buffer.ts b/packages/data/src/typed-buffer/create-typed-buffer.ts index 267d5f63..66d59890
100644 --- a/packages/data/src/typed-buffer/create-typed-buffer.ts +++ b/packages/data/src/typed-buffer/create-typed-buffer.ts @@ -6,6 +6,7 @@ import { TypedBuffer } from "./typed-buffer.js"; import { createNumberBuffer } from "./create-number-buffer.js"; import { createArrayBuffer } from "./create-array-buffer.js"; import { createConstBuffer } from "./create-const-buffer.js"; +import { createEnumBuffer } from "./create-enum-buffer.js"; export function createTypedBuffer ( schema: S, @@ -38,6 +39,10 @@ function createTypedBufferInternal ( return createConstBuffer(schema, initialCapacity) as TypedBuffer>; } + if (schema.enum !== undefined && schema.enum.length > 0) { + return createEnumBuffer(schema, initialCapacity) as TypedBuffer>; + } + if (schema.type === 'number' || schema.type === 'integer') { return createNumberBuffer(schema, initialCapacity) as TypedBuffer>; } diff --git a/packages/data/src/typed-buffer/register-typed-buffer-codecs.ts b/packages/data/src/typed-buffer/register-typed-buffer-codecs.ts index f3880798..93605ed0 100644 --- a/packages/data/src/typed-buffer/register-typed-buffer-codecs.ts +++ b/packages/data/src/typed-buffer/register-typed-buffer-codecs.ts @@ -7,6 +7,7 @@ import { Schema } from "../schema/index.js"; import { createArrayBuffer } from "./create-array-buffer.js"; import { createConstBuffer } from "./create-const-buffer.js"; import { createNumberBuffer } from "./create-number-buffer.js"; +import { createEnumBuffer } from "./create-enum-buffer.js"; import { createStructBuffer } from "./create-struct-buffer.js"; import { isTypedBuffer } from "./is-typed-buffer.js"; import { TypedBuffer, TypedBufferType } from "./typed-buffer.js"; @@ -24,7 +25,7 @@ export function registerTypedBufferCodecs() { else if (type === "array") { return { json: { type, schema, capacity, array: data.slice() as unknown as any[] } }; } - else if (type === "number" || type === "struct") { + else if (type === "enum" || type === "number" || type === "struct") { const 
typedArray = data.getTypedArray(); const view = new Uint8Array(typedArray.buffer, typedArray.byteOffset, typedArray.byteLength); return { json: { type, schema, capacity }, binary: [toArrayBufferBacked(view)] }; @@ -45,7 +46,10 @@ export function registerTypedBufferCodecs() { return createConstBuffer(schema, capacity); } else if (type === "array") { - const buffer = createArrayBuffer(schema, capacity); + const isEnum = schema.enum !== undefined && schema.enum.length > 0; + const buffer = isEnum + ? createEnumBuffer(schema, capacity) + : createArrayBuffer(schema, capacity); if (schema.ephemeral) { if (schema.default !== undefined && schema.default !== 0) { for (let i = 0; i < capacity; i++) { @@ -60,6 +64,19 @@ export function registerTypedBufferCodecs() { } return buffer; } + else if (type === "enum") { + const buffer = createEnumBuffer(schema, capacity); + if (!schema.ephemeral) { + if (binary[0]) { + copyViewBytes(binary[0], buffer.getTypedArray()); + } else if (array) { + for (let i = 0; i < capacity; i++) { + buffer.set(i, array[i]); + } + } + } + return buffer; + } else if (type === "number" || type === "struct") { const buffer = type === "number" ? 
createNumberBuffer(schema, capacity) : createStructBuffer(schema, capacity); if (schema.ephemeral) { diff --git a/packages/data/src/typed-buffer/typed-buffer.ts b/packages/data/src/typed-buffer/typed-buffer.ts index f09e463e..769e4342 100644 --- a/packages/data/src/typed-buffer/typed-buffer.ts +++ b/packages/data/src/typed-buffer/typed-buffer.ts @@ -3,7 +3,7 @@ import { TypedArray } from "../internal/typed-array/index.js"; import { Schema } from "../schema/index.js"; import { typedBufferEquals } from "./typed-buffer-equals.js"; -export type TypedBufferType = "array" | "const" | "number" | "struct"; +export type TypedBufferType = "array" | "const" | "enum" | "number" | "struct"; export interface ReadonlyTypedBuffer<T> { readonly type: TypedBufferType; diff --git a/packages/data/typedoc.json b/packages/data/typedoc.json index 87fdbf8a..6211014f 100644 --- a/packages/data/typedoc.json +++ b/packages/data/typedoc.json @@ -14,7 +14,8 @@ "exclude": [ "**/__tests__/**", "**/*.spec.ts", - "**/references/**" + "**/references/**", + "src/old-ecs/**" ], "excludePrivate": true, "excludeProtected": true,