diff --git a/.cursor/rules/nodejs-api-service.mdc b/.cursor/rules/nodejs-api-service.mdc deleted file mode 100644 index 8ec8e991..00000000 --- a/.cursor/rules/nodejs-api-service.mdc +++ /dev/null @@ -1,12 +0,0 @@ ---- -description: Node.js API service -globs: -alwaysApply: true ---- - -- Use JSDoc standard for creating docblocks of functions and classes. -- Always use camelCase for function names. -- Always use upper-case snake_case for constants. -- Create integration tests in 'tests/integration' that use node-assert, which run with mocha. -- Create unit tests in 'tests/unit' that use node-assert, which run with mocha. -- Use node.js community "Best Practices". diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 00000000..3cb6d8c0 --- /dev/null +++ b/.github/CONTRIBUTING.md @@ -0,0 +1,187 @@ +# Contributing to Haro + +Thank you for your interest in contributing to Haro! This document provides guidelines and instructions for contributing. + +## Table of Contents + +1. [Getting Started](#getting-started) +2. [Development Setup](#development-setup) +3. [Code Style](#code-style) +4. [Testing](#testing) +5. [Submitting Changes](#submitting-changes) +6. [Reporting Issues](#reporting-issues) + +## Getting Started + +1. Fork the repository +2. Clone your fork: `git clone https://github.com/your-username/haro.git` +3. Install dependencies: `npm install` +4. 
Create a branch: `git checkout -b feature/your-feature-name` + +## Development Setup + +### Requirements + +- Node.js >= 17.0.0 +- npm + +### Project Structure + +- `src/` - Source code +- `tests/unit/` - Unit tests +- `dist/` - Built distribution (generated) +- `types/` - TypeScript definitions + +## Code Style + +This project uses **Oxlint** and **Oxfmt** for code quality and formatting: + +```bash +# Check code style +npm run lint + +# Fix auto-fixable issues +npm run fix +``` + +### Key Guidelines + +- Use **tabs** for indentation +- Use **double quotes** for strings +- Use **camelCase** for variables and functions +- Use **PascalCase** for classes +- Use **UPPER_SNAKE_CASE** for constants +- Write **JSDoc comments** for all public APIs +- Keep functions small and focused + +### String Constants + +Use string constants from `src/constants.js` for string literals: + +```javascript +// ✅ Good +import { STRING_EMPTY } from './constants.js'; +if (str === STRING_EMPTY) { ... } + +// ❌ Bad +if (str === '') { ... } +``` + +## Testing + +This project uses **Node.js native test runner**: + +```bash +# Run all tests +npm test + +# Run tests with coverage +npm run coverage +``` + +### Writing Tests + +- Place unit tests in `tests/unit/` +- Use `node:assert` for assertions +- Follow AAA pattern (Arrange, Act, Assert) +- Test both success and error cases + +Example: + +```javascript +import assert from 'node:assert'; +import { describe, it } from 'node:test'; +import { Haro } from '../src/haro.js'; + +describe('MyFeature', () => { + it('should do something', () => { + // Arrange + const store = new Haro(); + + // Act + const result = store.set(null, { name: 'test' }); + + // Assert + assert.ok(result); + assert.strictEqual(result.name, 'test'); + }); +}); +``` + +## Submitting Changes + +1. Make your changes +2. Run tests: `npm test` +3. Run lint: `npm run lint` +4. Commit with clear messages +5. Push to your fork +6. 
Open a Pull Request + +### Commit Messages + +- Use present tense ("Add feature" not "Added feature") +- Use imperative mood ("Move cursor to..." not "Moves cursor to...") +- Be concise and descriptive +- Reference issues when applicable + +### Pull Request Checklist + +- [ ] Tests pass (`npm test`) +- [ ] Lint passes (`npm run lint`) +- [ ] Code is formatted (`npm run fix`) +- [ ] Documentation is updated if needed +- [ ] Commit messages are clear + +## Reporting Issues + +When reporting issues, please include: + +- **Description**: Clear description of the issue +- **Steps to Reproduce**: Detailed steps to reproduce +- **Expected Behavior**: What should happen +- **Actual Behavior**: What actually happens +- **Environment**: Node.js version, OS, etc. +- **Code Example**: Minimal reproducible example + +Example: + +````markdown +## Description +The `find()` method throws an error when passed null. + +## Steps to Reproduce +1. Create a new Haro instance +2. Call `store.find(null)` + +## Expected Behavior +Should throw a descriptive error or handle null gracefully + +## Actual Behavior +Throws: "Cannot read property 'length' of null" + +## Environment +- Node.js: v18.0.0 +- OS: macOS 13.0 + +## Code Example +```javascript +import { Haro } from 'haro'; +const store = new Haro(); +store.find(null); // Throws error +``` +```` + +## Code of Conduct + +- Be respectful and inclusive +- Focus on constructive feedback +- Welcome newcomers and help them learn +- Keep discussions professional and on-topic + +## Questions? + +Feel free to open an issue for questions or discussions about contributing. + +--- + +Thank you for contributing to Haro! 
🎉 diff --git a/.husky/pre-commit b/.husky/pre-commit index 72c4429b..20e7c371 100644 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1 +1 @@ -npm test +npm run fix && npm run coverage && git add -A diff --git a/.oxfmtrc.json b/.oxfmtrc.json new file mode 100644 index 00000000..93b08303 --- /dev/null +++ b/.oxfmtrc.json @@ -0,0 +1,5 @@ +{ + "$schema": "./node_modules/oxfmt/configuration_schema.json", + "ignorePatterns": [], + "useTabs": true +} diff --git a/.oxlintrc.json b/.oxlintrc.json new file mode 100644 index 00000000..52a14169 --- /dev/null +++ b/.oxlintrc.json @@ -0,0 +1,8 @@ +{ + "$schema": "./node_modules/oxlint/configuration_schema.json", + "ignorePatterns": ["!**/src/**", "benchmarks/**"], + "rules": { + "no-console": ["error", {"allow": ["warn", "error"]}], + "no-unused-vars": ["error", {"argsIgnorePattern": "^(arg|batch|data|key|override|type)$"}] + } +} diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..34503fff --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,64 @@ +# Haro Project Guide + +## Overview +Haro is a modern immutable DataStore for collections of records with indexing, versioning, and batch operations support. 
+ +## Project Structure +- `src/haro.js` - Main Haro class and factory function +- `src/constants.js` - String and number constants +- `tests/` - Unit tests using Node.js native test runner +- `dist/` - Built distribution files (generated) +- `types/haro.d.ts` - TypeScript definitions + +## Commands +```bash +npm run lint # Lint code with oxlint +npm run fix # Fix linting issues with oxlint and oxfmt +npm run test # Run tests with Node.js test runner +npm run coverage # Generate coverage report +npm run build # Lint and build distribution files +npm run benchmark # Run benchmarks +``` + +## Code Style +- Use tabs for indentation +- Follow ESLint/oxlint rules (no-console, no-unused-vars) +- Use JSDoc comments for documentation +- Keep functions small and focused +- Use template literals for string concatenation + +## Rules +- No magic strings or magic numbers - always use constants from `src/constants.js` +- All string literals must be defined as constants with descriptive names (e.g., `STRING_EMPTY`, `STRING_ID`) +- All numeric literals (except 0 and 1 in simple operations) should use constants (e.g., `INT_0`, `CACHE_SIZE_DEFAULT`) +- Constants follow naming convention: `STRING_NAME` for strings, `INT_NAME` for numbers + +## Testing +- Tests use Node.js native test runner (`node --test`) +- Test files are in `tests/unit/` directory +- Run tests: `npm test` +- Generate coverage: `npm run coverage` + +## Key Conventions +- All string literals use constants from `src/constants.js` +- Private/internal methods start with underscore prefix +- Lifecycle hooks follow `before*` and `on*` naming pattern +- Return `this` for method chaining where appropriate +- Use `Map` and `Set` for data structures +- Immutable mode uses `Object.freeze()` for data safety +- Adheres to DRY, YAGNI, and SOLID principles +- Follows OWASP security guidance + +## Important Notes +- The `immutable` option freezes data for immutability +- Indexes improve query performance for `find()` and `where()`
operations +- Deep indexing with dot notation is supported (e.g., `user.profile.department`) +- Versioning tracks historical changes when enabled +- Batch operations are more efficient than individual operations +- LRU caching is available for `search()` and `where()` methods (opt-in with `cache: true`) +- Cache uses Web Crypto API for SHA-256 hash generation (requires Node.js >=19.0.0) +- Cache keys are multi-domain: `search_HASH` or `where_HASH` format +- Cached results are cloned/frozen to prevent mutation (respects `immutable` mode) +- Cache invalidates on all write operations but preserves statistics +- `search()` and `where()` are async methods - use `await` when calling +- Cache statistics persist for the lifetime of the Haro instance diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..43c994c2 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1 @@ +@AGENTS.md diff --git a/LICENSE b/LICENSE index e6fa6c97..d59bbc85 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2025, Jason Mulligan +Copyright (c) 2026, Jason Mulligan All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/README.md b/README.md index 8400f0de..96eacd61 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,65 @@ [![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause) [![Build Status](https://github.com/avoidwork/haro/actions/workflows/ci.yml/badge.svg)](https://github.com/avoidwork/haro/actions) -A fast, flexible immutable DataStore for collections of records with indexing, versioning, and advanced querying capabilities. Provides a Map-like interface with powerful search and filtering features. +A fast, flexible immutable DataStore for collections of records with indexing, versioning, and advanced querying capabilities. 
+ +## Table of Contents + +- [Key Features](#key-features) +- [Why Choose Haro?](#why-choose-haro) +- [Installation](#installation) +- [Quick Start](#quick-start) +- [Usage](#usage) + - [Factory Function](#factory-function) + - [Class Constructor](#class-constructor) + - [Class Inheritance](#class-inheritance) +- [Configuration Options](#configuration-options) +- [TypeScript Support](#typescript-support) +- [Real-World Examples](#real-world-examples) +- [Comparison with Alternatives](#comparison-with-alternatives) +- [API Reference](#api-reference) +- [Troubleshooting](#troubleshooting) +- [Testing](#testing) +- [Benchmarks](#benchmarks) +- [Learn More](#learn-more) +- [Community](#community) +- [License](#license) + +## Key Features + +- **⚡ Blazing Fast**: O(1) indexed lookups - up to 20M ops/sec for instant data access +- **📚 Built-in Versioning**: Automatic change tracking without writing audit trail code +- **🔒 Immutable Mode**: Data safety with frozen objects - prevent accidental mutations +- **🔍 Advanced Querying**: Complex queries with `find()`, `where()`, `search()` - no manual filtering +- **🎯 Deep Indexing**: Query nested objects with dot notation (e.g., `user.profile.department`) +- **🗄️ LRU Caching**: Built-in cache for repeated queries with automatic invalidation +- **📦 Batch Operations**: Process thousands of records in milliseconds with `setMany()`/`deleteMany()` +- **🛠️ Zero Boilerplate**: No setup required - just instantiate and query +- **📝 TypeScript Ready**: Full type definitions included - no @types packages needed +- **🎯 Zero Dependencies**: Pure JavaScript, ~8KB gzipped - nothing extra to install + +## Why Choose Haro? 
+ +### ⏱️ Save Development Time + +- **No more manual indexing**: Define fields once, get instant O(1) lookups automatically +- **Built-in versioning**: Track changes without writing audit trail code +- **Zero boilerplate**: No setup, configuration, or initialization code needed +- **Instant queries**: Complex filtering with one-liners instead of loops and conditionals + +### 🚀 Performance Benefits + +- **20M+ ops/sec**: Blazing fast indexed lookups for real-time applications +- **Automatic optimization**: Indexes maintained automatically on every operation +- **Batch operations**: Process 10,000 records in milliseconds +- **Memory efficient**: Optimized data structures for minimal overhead + +### 🛡️ Data Safety + +- **Immutable mode**: Prevent accidental mutations with frozen objects +- **Type safety**: Full TypeScript support catches errors at compile time +- **Version history**: Roll back to previous states when needed +- **Validation**: Built-in checks prevent invalid data ## Installation @@ -27,6 +85,15 @@ yarn add haro pnpm add haro ``` +## Quick Start + +```javascript +import { haro } from 'haro'; + +const store = haro([{ id: 1, name: 'Alice' }], { index: ['name'] }); +console.log(store.find({ name: 'Alice' })); +``` + ## Usage ### Factory Function @@ -41,7 +108,6 @@ const store = haro(data, config); ```javascript import { Haro } from 'haro'; -// Create a store with indexes and versioning const store = new Haro({ index: ['name', 'email', 'department'], key: 'id', @@ -49,7 +115,6 @@ const store = new Haro({ immutable: true }); -// Create store with initial data const users = new Haro([ { name: 'Alice', email: 'alice@company.com', department: 'Engineering' }, { name: 'Bob', email: 'bob@company.com', department: 'Sales' } @@ -74,1257 +139,451 @@ class UserStore extends Haro { }); } - beforeSet(key, data, batch, override) { - // Validate email format - if (data.email && !this.isValidEmail(data.email)) { - throw new Error('Invalid email format'); - } - } - - 
onset(record, batch) { - console.log(`User ${record.name} was added/updated`); - } - isValidEmail(email) { return /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(email); } } -``` - -## Parameters - -### delimiter -**String** - Delimiter for composite indexes (default: `'|'`) - -```javascript -const store = haro(null, { delimiter: '::' }); -``` - -### id -**String** - Unique identifier for this store instance. Auto-generated if not provided. - -```javascript -const store = haro(null, { id: 'user-cache' }); -``` -### immutable -**Boolean** - Return frozen/immutable objects for data safety (default: `false`) - -```javascript -const store = haro(null, { immutable: true }); -``` - -### index -**Array** - Fields to index for faster searches. Supports composite indexes using delimiter. - -```javascript -const store = haro(null, { - index: ['name', 'email', 'name|department', 'department|role'] +const store = new UserStore(); +const user = store.set(null, { + name: 'John', + email: 'john@example.com' }); ``` -### key -**String** - Primary key field name (default: `'id'`) +## Configuration Options -```javascript -const store = haro(null, { key: 'userId' }); -``` +### cache -### versioning -**Boolean** - Enable MVCC-style versioning to track record changes (default: `false`) +**Boolean** - Enable LRU caching for `search()` and `where()` methods (default: `false`) ```javascript -const store = haro(null, { versioning: true }); +const store = haro(null, { cache: true }); ``` -### Parameter Validation +### cacheSize -The constructor validates configuration and provides helpful error messages: +**Number** - Maximum number of cached query results (default: `1000`) ```javascript -// Invalid index configuration will provide clear feedback -try { - const store = new Haro({ index: 'name' }); // Should be array -} catch (error) { - console.error(error.message); // Clear validation error -} - -// Missing required configuration -try { - const store = haro([{id: 1}], { key: 'nonexistent' }); -} catch 
(error) { - console.error('Key field validation error'); -} +const store = haro(null, { cache: true, cacheSize: 500 }); ``` -## Interoperability - -### Array Methods Compatibility - -Haro provides Array-like methods for familiar data manipulation: - -```javascript -import { haro } from 'haro'; - -const store = haro([ - { id: 1, name: 'Alice', age: 30 }, - { id: 2, name: 'Bob', age: 25 }, - { id: 3, name: 'Charlie', age: 35 } -]); - -// Use familiar Array methods -const adults = store.filter(record => record.age >= 30); -const names = store.map(record => record.name); -const totalAge = store.reduce((sum, record) => sum + record.age, 0); - -store.forEach((record, key) => { - console.log(`${key}: ${record.name} (${record.age})`); -}); -``` - -### Event-Driven Architecture +### delimiter -Compatible with event-driven patterns through lifecycle hooks: +**String** - Delimiter for composite indexes (default: `'|'`) ```javascript -class EventedStore extends Haro { - constructor(eventEmitter, config) { - super(config); - this.events = eventEmitter; - } - - onset(record, batch) { - this.events.emit('record:created', record); - } - - ondelete(key, batch) { - this.events.emit('record:deleted', key); - } -} -``` - -## Testing - -Haro maintains comprehensive test coverage across all features with **148 passing tests**: - -``` ---------------|---------|----------|---------|---------|------------------------- -File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s ---------------|---------|----------|---------|---------|------------------------- -All files | 100 | 96.95 | 100 | 100 | - constants.js | 100 | 100 | 100 | 100 | - haro.js | 100 | 96.94 | 100 | 100 | 205-208,667,678,972-976 ---------------|---------|----------|---------|---------|------------------------- -``` - -### Test Organization - -The test suite is organized into focused areas: - -- **Basic CRUD Operations** - Core data manipulation (set, get, delete, clear) -- **Indexing** - Index creation, composite 
indexes, and reindexing -- **Searching & Filtering** - find(), where(), search(), filter(), and sortBy() methods -- **Immutable Mode** - Data freezing and immutability guarantees -- **Versioning** - MVCC-style record versioning -- **Lifecycle Hooks** - beforeSet, onset, ondelete, etc. -- **Utility Methods** - clone(), merge(), limit(), map(), reduce(), etc. -- **Error Handling** - Validation and error scenarios -- **Factory Function** - haro() factory with various initialization patterns - -### Running Tests - -```bash -# Run unit tests -npm test - -# Run with coverage -npm run test:coverage - -# Run integration tests -npm run test:integration - -# Run performance benchmarks -npm run benchmark -``` - -## Benchmarks - -Haro includes comprehensive benchmark suites for performance analysis and comparison with other data store solutions. - -### Latest Performance Results - -**Overall Performance Summary:** -- **Total Tests**: 572 tests across 9 categories -- **Total Runtime**: 1.6 minutes -- **Best Performance**: HAS operation (20,815,120 ops/second on 1,000 records) -- **Memory Efficiency**: Highly efficient with minimal overhead for typical workloads - -### Benchmark Categories - -#### Basic Operations -- **SET operations**: Record creation, updates, overwrites -- **GET operations**: Single record retrieval, cache hits/misses -- **DELETE operations**: Record removal and index cleanup -- **BATCH operations**: Bulk insert/update/delete performance - -**Performance Highlights:** -- SET operations: Up to 3.2M ops/sec for typical workloads -- GET operations: Up to 20M ops/sec with index lookups -- DELETE operations: Efficient cleanup with index maintenance -- BATCH operations: Optimized for bulk data manipulation - -#### Search & Query Operations -- **INDEX queries**: Using find() with indexed fields -- **FILTER operations**: Predicate-based filtering -- **SEARCH operations**: Text and regex searching -- **WHERE clauses**: Complex query conditions - -**Performance 
Highlights:** -- Indexed FIND queries: Up to 64,594 ops/sec (1,000 records) -- FILTER operations: Up to 46,255 ops/sec -- Complex queries: Maintains good performance with multiple conditions -- Memory-efficient query processing - -#### Advanced Features -- **VERSION tracking**: Performance impact of versioning -- **IMMUTABLE mode**: Object freezing overhead -- **COMPOSITE indexes**: Multi-field index performance -- **Memory usage**: Efficient memory consumption patterns -- **Utility operations**: clone, merge, freeze, forEach performance -- **Pagination**: Limit-based result pagination -- **Persistence**: Data dump/restore operations - -### Running Benchmarks - -```bash -# Run all benchmarks -node benchmarks/index.js - -# Run specific benchmark categories -node benchmarks/index.js --basic-only # Basic CRUD operations -node benchmarks/index.js --search-only # Search and query operations -node benchmarks/index.js --index-only # Index operations -node benchmarks/index.js --memory-only # Memory usage analysis -node benchmarks/index.js --comparison-only # vs native structures -node benchmarks/index.js --utilities-only # Utility operations -node benchmarks/index.js --pagination-only # Pagination performance -node benchmarks/index.js --persistence-only # Persistence operations -node benchmarks/index.js --immutable-only # Immutable vs mutable - -# Run with memory analysis -node --expose-gc benchmarks/memory-usage.js +const store = haro(null, { delimiter: '::' }); ``` -### Performance Comparison with Native Structures - -**Storage Operations:** -- Haro vs Map: Comparable performance for basic operations -- Haro vs Array: Slower for simple operations, faster for complex queries -- Haro vs Object: Trade-off between features and raw performance - -**Query Operations:** -- Haro FIND (indexed): 64,594 ops/sec vs Array filter: 189,293 ops/sec -- Haro provides advanced query capabilities not available in native structures -- Memory overhead justified by feature richness - -### 
Memory Efficiency - -**Memory Usage Comparison (50,000 records):** -- Haro: 13.98 MB -- Map: 3.52 MB -- Object: 1.27 MB -- Array: 0.38 MB - -**Memory Analysis:** -- Reasonable overhead for feature set provided -- Efficient index storage and maintenance -- Garbage collection friendly - -### Performance Tips - -For optimal performance: - -1. **Use indexes wisely** - Index fields you'll query frequently -2. **Choose appropriate key strategy** - Shorter keys perform better -3. **Batch operations** - Use batch() for multiple changes -4. **Consider immutable mode cost** - Only enable if needed for data safety -5. **Minimize version history** - Disable versioning if not required -6. **Use pagination** - Implement limit() for large result sets -7. **Leverage utility methods** - Use built-in clone, merge, freeze for safety - -### Performance Indicators - -* ✅ **Indexed queries** significantly outperform filters (64k vs 46k ops/sec) -* ✅ **Batch operations** provide excellent bulk performance -* ✅ **Get operations** consistently outperform set operations -* ✅ **Memory usage** remains stable under load -* ✅ **Utility operations** perform well (clone: 1.6M ops/sec) - -### Immutable vs Mutable Mode - -**Performance Impact:** -- Creation: Minimal difference (1.27x faster mutable) -- Read operations: Comparable performance -- Write operations: Slight advantage to mutable mode -- Transformation operations: Significant performance cost in immutable mode - -**Recommendations:** -- Use immutable mode for data safety in multi-consumer environments -- Use mutable mode for high-frequency write operations -- Consider the trade-off between safety and performance - -See `benchmarks/README.md` for complete documentation and advanced usage. 
- -## API Reference - -### Properties - -#### data -`{Map}` - Internal Map of records, indexed by key - -```javascript -const store = haro(); -console.log(store.data.size); // 0 -``` +### id -#### delimiter -`{String}` - The delimiter used for composite indexes +**String** - Unique identifier for this store instance. Auto-generated if not provided. ```javascript -const store = haro(null, { delimiter: '|' }); -console.log(store.delimiter); // '|' +const store = haro(null, { id: 'user-cache' }); ``` -#### id -`{String}` - Unique identifier for this store instance - -```javascript -const store = haro(null, { id: 'my-store' }); -console.log(store.id); // 'my-store' -``` +### immutable -#### immutable -`{Boolean}` - Whether the store returns immutable objects +**Boolean** - Return frozen/immutable objects for data safety (default: `false`) ```javascript const store = haro(null, { immutable: true }); -console.log(store.immutable); // true -``` - -#### index -`{Array}` - Array of indexed field names - -```javascript -const store = haro(null, { index: ['name', 'email'] }); -console.log(store.index); // ['name', 'email'] ``` -#### indexes -`{Map}` - Map of indexes containing Sets of record keys - -```javascript -const store = haro(); -console.log(store.indexes); // Map(0) {} -``` +### index -#### key -`{String}` - The primary key field name +**Array** - Fields to index for faster searches. Supports composite indexes. 
```javascript -const store = haro(null, { key: 'userId' }); -console.log(store.key); // 'userId' +const store = haro(null, { + index: ['name', 'email', 'name|department', 'department|role'] +}); ``` -#### registry -`{Array}` - Array of all record keys (read-only property) - -```javascript -const store = haro(); -store.set('key1', { name: 'Alice' }); -console.log(store.registry); // ['key1'] -``` +### key -#### size -`{Number}` - Number of records in the store (read-only property) +**String** - Primary key field name (default: `'id'`) ```javascript -const store = haro(); -console.log(store.size); // 0 +const store = haro(null, { key: 'userId' }); ``` -#### versions -`{Map}` - Map of version history (when versioning is enabled) - -```javascript -const store = haro(null, { versioning: true }); -console.log(store.versions); // Map(0) {} -``` +### versioning -#### versioning -`{Boolean}` - Whether versioning is enabled +**Boolean** - Enable version history tracking (default: `false`) ```javascript const store = haro(null, { versioning: true }); -console.log(store.versioning); // true -``` - -### Methods - -#### batch(array, type) - -Performs batch operations on multiple records for efficient bulk processing. - -**Parameters:** -- `array` `{Array}` - Array of records to process -- `type` `{String}` - Operation type: `'set'` or `'del'` (default: `'set'`) - -**Returns:** `{Array}` Array of results from the batch operation - -```javascript -const results = store.batch([ - { name: 'Alice', age: 30 }, - { name: 'Bob', age: 28 } -], 'set'); - -// Delete multiple records -store.batch(['key1', 'key2'], 'del'); ``` -**See also:** set(), delete() - -#### clear() - -Removes all records, indexes, and versions from the store. - -**Returns:** `{Haro}` Store instance for chaining - -```javascript -store.clear(); -console.log(store.size); // 0 -``` +## TypeScript Support -**See also:** delete() +TypeScript definitions are included - no separate installation needed. 
-#### clone(arg) - -Creates a deep clone of the given value, handling objects, arrays, and primitives. - -**Parameters:** -- `arg` `{*}` - Value to clone (any type) - -**Returns:** `{*}` Deep clone of the argument - -```javascript -const original = { name: 'John', tags: ['user', 'admin'] }; -const cloned = store.clone(original); -cloned.tags.push('new'); // original.tags is unchanged -``` - -#### delete(key, batch) - -Deletes a record from the store and removes it from all indexes. - -**Parameters:** -- `key` `{String}` - Key of record to delete -- `batch` `{Boolean}` - Whether this is part of a batch operation (default: `false`) - -**Returns:** `{undefined}` - -**Throws:** `{Error}` If record with the specified key is not found - -```javascript -store.delete('user123'); -``` - -**See also:** has(), clear(), batch() - -#### dump(type) - -Exports complete store data or indexes for persistence or debugging. - -**Parameters:** -- `type` `{String}` - Type of data to export: `'records'` or `'indexes'` (default: `'records'`) - -**Returns:** `{Array}` Array of [key, value] pairs or serialized index structure - -```javascript -const records = store.dump('records'); -const indexes = store.dump('indexes'); -// Use for persistence or backup -fs.writeFileSync('backup.json', JSON.stringify(records)); -``` - -**See also:** override() - -#### each(array, fn) - -Utility method to iterate over an array with a callback function. - -**Parameters:** -- `array` `{Array}` - Array to iterate over -- `fn` `{Function}` - Function to call for each element - -**Returns:** `{Array}` The original array for method chaining +```typescript +import { Haro } from 'haro'; -```javascript -store.each([1, 2, 3], (item, index) => { - console.log(`Item ${index}: ${item}`); +const store = new Haro<{ name: string; age: number }>({ + index: ['name'], + key: 'id' }); ``` -#### entries() - -Returns an iterator of [key, value] pairs for each record in the store. 
+## Real-World Examples -**Returns:** `{Iterator}` Iterator of [key, value] pairs +### ⚡ Instant Setup - Zero Boilerplate ```javascript -for (const [key, value] of store.entries()) { - console.log(`${key}:`, value); -} -``` - -**See also:** keys(), values() - -#### filter(fn, raw) - -Filters records using a predicate function, similar to Array.filter. - -**Parameters:** -- `fn` `{Function}` - Predicate function to test each record -- `raw` `{Boolean}` - Whether to return raw data (default: `false`) +import { haro } from 'haro'; -**Returns:** `{Array}` Array of records that pass the predicate test +// One line to create indexed store +const users = haro(null, { index: ['email', 'name'] }); -**Throws:** `{Error}` If fn is not a function +// Add data +users.set(null, { name: 'Alice', email: 'alice@example.com' }); -```javascript -const adults = store.filter(record => record.age >= 18); -const recentUsers = store.filter(record => - record.created > Date.now() - 86400000 -); +// Instant lookup - O(1) performance +const user = users.find({ email: 'alice@example.com' }); ``` -**See also:** find(), where(), map() +**Time saved**: No manual index creation, no caching logic, no performance tuning. -#### find(where, raw) - -Finds records matching the specified criteria using indexes for optimal performance. - -**Parameters:** -- `where` `{Object}` - Object with field-value pairs to match -- `raw` `{Boolean}` - Whether to return raw data (default: `false`) - -**Returns:** `{Array}` Array of matching records +### Indexing and Queries ```javascript -const engineers = store.find({ department: 'Engineering' }); -const activeUsers = store.find({ status: 'active', role: 'user' }); -``` - -**See also:** where(), search(), filter() - -#### forEach(fn, ctx) - -Executes a function for each record in the store, similar to Array.forEach. 
- -**Parameters:** -- `fn` `{Function}` - Function to execute for each record -- `ctx` `{*}` - Context object to use as 'this' (default: store instance) - -**Returns:** `{Haro}` Store instance for chaining +import { haro } from 'haro'; -```javascript -store.forEach((record, key) => { - console.log(`${key}: ${record.name}`); +const products = haro(null, { + index: ['category', 'brand', 'price', 'category|brand'] }); -``` -**See also:** map(), filter() +products.setMany([ + { sku: '1', name: 'Laptop', category: 'Electronics', brand: 'Apple', price: 2499 }, + { sku: '2', name: 'Phone', category: 'Electronics', brand: 'Apple', price: 999 }, + { sku: '3', name: 'Headphones', category: 'Electronics', brand: 'Sony', price: 299 } +]); -#### freeze(...args) +// Find by indexed field +const electronics = products.find({ category: 'Electronics' }); -Creates a frozen array from the given arguments for immutable data handling. +// Complex queries +const appleProducts = products.where({ + category: 'Electronics', + brand: 'Apple' +}, '&&'); -**Parameters:** -- `...args` `{*}` - Arguments to freeze into an array +// Search with regex +const searchResults = products.search(/^Laptop$/, 'name'); -**Returns:** `{Array}` Frozen array containing frozen arguments +// Filter with custom logic +const affordable = products.filter(p => p.price < 500); -```javascript -const frozen = store.freeze(obj1, obj2, obj3); -// Returns Object.freeze([Object.freeze(obj1), ...]) +// Sort and paginate +const sorted = products.sortBy('price'); +const page1 = products.limit(0, 10); ``` -#### get(key, raw) - -Retrieves a record by its key. 
- -**Parameters:** -- `key` `{String}` - Key of record to retrieve -- `raw` `{Boolean}` - Whether to return raw data (default: `false`) - -**Returns:** `{Object|null}` The record if found, null if not found +### Deep Indexing (Nested Paths) ```javascript -const user = store.get('user123'); -const rawUser = store.get('user123', true); -``` +import { haro } from 'haro'; -**See also:** has(), set() +const users = haro(null, { + index: ['name', 'user.email', 'user.profile.department', 'user.email|user.profile.department'] +}); -#### has(key) +users.setMany([ + { + id: '1', + name: 'Alice', + user: { + email: 'alice@company.com', + profile: { department: 'Engineering' } + } + }, + { + id: '2', + name: 'Bob', + user: { + email: 'bob@company.com', + profile: { department: 'Sales' } + } + } +]); -Checks if a record with the specified key exists in the store. +// Find by nested field +const alice = users.find({ 'user.email': 'alice@company.com' }); -**Parameters:** -- `key` `{String}` - Key to check for existence +// Query by deeply nested field +const engineers = users.find({ 'user.profile.department': 'Engineering' }); -**Returns:** `{Boolean}` True if record exists, false otherwise +// Composite index with nested fields +const aliceEng = users.find({ + 'user.email': 'alice@company.com', + 'user.profile.department': 'Engineering' +}); -```javascript -if (store.has('user123')) { - console.log('User exists'); -} +// Works with where(), search(), and sortBy() +const results = await users.where({ 'user.profile.department': 'Engineering' }); +const sorted = users.sortBy('user.profile.department'); ``` -**See also:** get(), delete() - -#### keys() - -Returns an iterator of all keys in the store. 
- -**Returns:** `{Iterator}` Iterator of record keys +### Versioning ```javascript -for (const key of store.keys()) { - console.log('Key:', key); -} -``` - -**See also:** values(), entries() - -#### limit(offset, max, raw) - -Returns a limited subset of records with offset support for pagination. +import { haro } from 'haro'; -**Parameters:** -- `offset` `{Number}` - Number of records to skip (default: `0`) -- `max` `{Number}` - Maximum number of records to return (default: `0`) -- `raw` `{Boolean}` - Whether to return raw data (default: `false`) +const config = haro(null, { versioning: true }); -**Returns:** `{Array}` Array of records within the specified range +config.set('api.timeout', { value: 30000 }); +config.set('api.timeout', { value: 45000 }); +config.set('api.timeout', { value: 60000 }); -```javascript -const page1 = store.limit(0, 10); // First 10 records -const page2 = store.limit(10, 10); // Next 10 records -const page3 = store.limit(20, 10); // Records 21-30 +// Access version history +const history = config.versions.get('api.timeout'); +console.log(history); // [previous versions] ``` -**See also:** toArray(), sort() - -#### map(fn, raw) - -Transforms all records using a mapping function, similar to Array.map. - -**Parameters:** -- `fn` `{Function}` - Function to transform each record -- `raw` `{Boolean}` - Whether to return raw data (default: `false`) - -**Returns:** `{Array}` Array of transformed results - -**Throws:** `{Error}` If fn is not a function +### Immutable Mode ```javascript -const names = store.map(record => record.name); -const summaries = store.map(record => ({ - id: record.id, - name: record.name, - email: record.email -})); -``` - -**See also:** filter(), forEach() - -#### merge(a, b, override) - -Merges two values together with support for arrays and objects. 
+import { haro } from 'haro'; -**Parameters:** -- `a` `{*}` - First value (target) -- `b` `{*}` - Second value (source) -- `override` `{Boolean}` - Whether to override arrays instead of concatenating (default: `false`) +const store = haro(null, { immutable: true }); -**Returns:** `{*}` Merged result +const user = store.set(null, { name: 'Alice', age: 30 }); -```javascript -const merged = store.merge({a: 1}, {b: 2}); // {a: 1, b: 2} -const arrays = store.merge([1, 2], [3, 4]); // [1, 2, 3, 4] -const overridden = store.merge([1, 2], [3, 4], true); // [3, 4] +// Attempting to modify will throw +try { + user.age = 31; // TypeError: Cannot assign to read only property +} catch (error) { + console.error(error.message); +} ``` -#### override(data, type) - -Replaces all store data or indexes with new data for bulk operations. - -**Parameters:** -- `data` `{Array}` - Data to replace with -- `type` `{String}` - Type of data: `'records'` or `'indexes'` (default: `'records'`) - -**Returns:** `{Boolean}` True if operation succeeded - -**Throws:** `{Error}` If type is invalid +### Caching ```javascript -const backup = store.dump('records'); -// Later restore from backup -store.override(backup, 'records'); -``` +import { haro } from 'haro'; -**See also:** dump(), clear() +const store = haro(null, { + index: ['name'], + cache: true, + cacheSize: 1000 +}); -#### reduce(fn, accumulator) +store.set("user1", { id: "user1", name: "John" }); -Reduces all records to a single value using a reducer function. +// First call - cache miss +const results1 = await store.where({ name: "John" }); -**Parameters:** -- `fn` `{Function}` - Reducer function (accumulator, value, key, store) -- `accumulator` `{*}` - Initial accumulator value (default: `[]`) +// Second call - cache hit (much faster) +const results2 = await store.where({ name: "John" }); -**Returns:** `{*}` Final reduced value +// Get cache statistics +console.log(store.getCacheStats()); // { hits: 1, misses: 1, sets: 1, ... 
} -```javascript -const totalAge = store.reduce((sum, record) => sum + record.age, 0); -const emailList = store.reduce((emails, record) => { - emails.push(record.email); - return emails; -}, []); +// Clear cache manually +store.clearCache(); ``` -**See also:** map(), filter() +## Comparison with Alternatives -#### reindex(index) +| Feature | Haro | Map | Object | lowdb | LokiJS | +|---------|------|-----|--------|-------|--------| +| **Indexing** | ✅ Multi-field | ❌ | ❌ | ⚠️ Limited | ✅ | +| **Versioning** | ✅ Built-in | ❌ | ❌ | ❌ | ⚠️ Plugins | +| **Immutable Mode** | ✅ | ❌ | ❌ | ❌ | ❌ | +| **Advanced Queries** | ✅ find/where/search | ❌ | ❌ | ⚠️ Basic | ✅ | +| **Batch Operations** | ✅ setMany/deleteMany | ❌ | ❌ | ⚠️ Manual | ✅ | +| **Persistence** | ❌ In-memory | ❌ | ❌ | ✅ JSON/Local | ✅ | +| **Performance (1k records)** | ⚡ Fast | ⚡ Fastest | ⚡ Fast | 🐌 Slower | ⚡ Fast | +| **Memory Overhead** | Medium | Low | Low | Medium | High | +| **TypeScript Support** | ✅ | ✅ | ✅ | ✅ | ⚠️ Community | +| **Bundle Size** | ~6KB gzipped | Native | Native | ~8KB | ~2.6MB | +| **Learning Curve** | Low | Low | Low | Low | Medium | -Rebuilds indexes for specified fields or all fields for data consistency. 
+**Legend**: ✅ Yes | ❌ No | ⚠️ Limited/Optional -**Parameters:** -- `index` `{String|Array}` - Specific index field(s) to rebuild (optional) +### When to Choose Each -**Returns:** `{Haro}` Store instance for chaining +- **Map**: Simple key-value storage, maximum performance +- **Object**: Basic data structures, JSON serialization +- **lowdb**: Persistent JSON file storage, simple queries +- **LokiJS**: Complex queries, large datasets, in-memory database needs +- **Haro**: Indexed queries, versioning, immutable data, moderate datasets -```javascript -store.reindex(); // Rebuild all indexes -store.reindex('name'); // Rebuild only name index -store.reindex(['name', 'email']); // Rebuild specific indexes -``` - -#### search(value, index, raw) - -Searches for records containing a value across specified indexes. +## API Reference -**Parameters:** -- `value` `{Function|RegExp|*}` - Value to search for -- `index` `{String|Array}` - Index(es) to search in (optional) -- `raw` `{Boolean}` - Whether to return raw data (default: `false`) +For complete API documentation with all methods and examples, see [API.md](https://github.com/avoidwork/haro/blob/master/docs/API.md). 
-**Returns:** `{Array}` Array of matching records +**Quick Overview:** -```javascript -// Function search -const results = store.search(key => key.includes('admin')); +- **Core Methods**: `set()`, `get()`, `delete()`, `has()`, `clear()` +- **Query Methods**: `find()`, `where()`, `search()`, `filter()`, `sortBy()`, `limit()` +- **Batch Operations**: `setMany()`, `deleteMany()` +- **Utility Methods**: `clone()`, `merge()`, `toArray()`, `dump()`, `override()` +- **Properties**: `size`, `registry` -// Regex search on specific index -const nameResults = store.search(/^john/i, 'name'); +## Troubleshooting -// Value search across all indexes -const emailResults = store.search('gmail.com', 'email'); -``` +### Common Issues -**See also:** find(), where(), filter() +#### "Cannot read property 'length' of undefined" -#### set(key, data, batch, override) +**Cause**: Passing invalid data to `find()` or `where()`. -Sets or updates a record in the store with automatic indexing. - -**Parameters:** -- `key` `{String|null}` - Key for the record, or null to use record's key field -- `data` `{Object}` - Record data to set (default: `{}`) -- `batch` `{Boolean}` - Whether this is part of a batch operation (default: `false`) -- `override` `{Boolean}` - Whether to override existing data instead of merging (default: `false`) - -**Returns:** `{Object}` The stored record +**Solution**: Ensure query objects have valid field names that exist in your index. 
```javascript -// Auto-generate key -const user = store.set(null, { name: 'John', age: 30 }); +// ❌ Wrong +store.find(undefined); -// Update existing record (merges by default) -const updated = store.set('user123', { age: 31 }); - -// Replace existing record completely -const replaced = store.set('user123', { name: 'Jane' }, false, true); +// ✅ Correct +store.find({ name: 'Alice' }); ``` -**See also:** get(), batch(), merge() - -#### sort(fn, frozen) +#### Performance degradation with large datasets -Sorts all records using a comparator function. +**Cause**: Too many indexes or complex queries on large collections. -**Parameters:** -- `fn` `{Function}` - Comparator function for sorting (a, b) => number -- `frozen` `{Boolean}` - Whether to return frozen records (default: `false`) - -**Returns:** `{Array}` Sorted array of records +**Solution**: +- Limit indexes to frequently queried fields +- Use `limit()` for pagination +- Consider batch operations for bulk updates ```javascript -const byAge = store.sort((a, b) => a.age - b.age); -const byName = store.sort((a, b) => a.name.localeCompare(b.name)); -const frozen = store.sort((a, b) => a.created - b.created, true); -``` - -**See also:** sortBy(), limit() - -#### sortBy(index, raw) - -Sorts records by a specific indexed field in ascending order. - -**Parameters:** -- `index` `{String}` - Index field name to sort by -- `raw` `{Boolean}` - Whether to return raw data (default: `false`) - -**Returns:** `{Array}` Array of records sorted by the specified field - -**Throws:** `{Error}` If index field is empty or invalid - -```javascript -const byAge = store.sortBy('age'); -const byName = store.sortBy('name'); -const rawByDate = store.sortBy('created', true); -``` - -**See also:** sort(), find() - -#### toArray() - -Converts all store data to a plain array of records. 
- -**Returns:** `{Array}` Array containing all records in the store +// Optimize indexes +const store = haro(null, { + index: ['name', 'email'] // Only essential fields +}); -```javascript -const allRecords = store.toArray(); -console.log(`Store contains ${allRecords.length} records`); +// Use pagination +const results = store.limit(0, 100); ``` -**See also:** limit(), sort() - -#### uuid() +#### Version history growing unbounded -Generates a RFC4122 v4 UUID for record identification. +**Cause**: Versioning enabled with frequent updates. -**Returns:** `{String}` UUID string in standard format +**Solution**: Clear version history periodically or disable versioning if not needed. ```javascript -const id = store.uuid(); // "f47ac10b-58cc-4372-a567-0e02b2c3d479" -``` - -#### values() - -Returns an iterator of all values in the store. +// Clear specific version history +store.versions.delete('key123'); -**Returns:** `{Iterator}` Iterator of record values +// Clear all versions +store.versions.clear(); -```javascript -for (const record of store.values()) { - console.log(record.name); -} +// Disable versioning if not needed +const store = haro(null, { versioning: false }); ``` -**See also:** keys(), entries() - -#### where(predicate, op) - -Advanced filtering with predicate logic supporting AND/OR operations on arrays. +#### Immutable mode causing errors -**Parameters:** -- `predicate` `{Object}` - Object with field-value pairs for filtering -- `op` `{String}` - Operator for array matching: `'||'` for OR, `'&&'` for AND (default: `'||'`) +**Cause**: Attempting to modify frozen objects. -**Returns:** `{Array}` Array of records matching the predicate criteria +**Solution**: Use `set()` to update records instead of direct mutation. 
```javascript -// Find records with tags containing 'admin' OR 'user' -const users = store.where({ tags: ['admin', 'user'] }, '||'); - -// Find records with ALL specified tags -const powerUsers = store.where({ tags: ['admin', 'power'] }, '&&'); - -// Regex matching -const companyEmails = store.where({ email: /^[^@]+@company\.com$/ }); +// ❌ Wrong +const user = store.get('user123'); +user.age = 31; -// Array field matching -const multiDeptUsers = store.where({ departments: ['IT', 'HR'] }); +// ✅ Correct +store.set('user123', { age: 31 }); ``` -**See also:** find(), filter(), search() - -## Lifecycle Hooks - -Override these methods in subclasses for custom behavior: - -### beforeBatch(args, type) -Executed before batch operations for preprocessing. - -### beforeClear() -Executed before clear operation for cleanup preparation. - -### beforeDelete(key, batch) -Executed before delete operation for validation or logging. - -### beforeSet(key, data, batch, override) -Executed before set operation for data validation or transformation. +#### Index not being used for query -### onbatch(results, type) -Executed after batch operations for postprocessing. +**Cause**: Querying non-indexed fields. -### onclear() -Executed after clear operation for cleanup tasks. - -### ondelete(key, batch) -Executed after delete operation for logging or notifications. - -### onset(record, batch) -Executed after set operation for indexing or event emission. - -## Examples - -### User Management System +**Solution**: Add the field to the index configuration. 
```javascript -import { haro } from 'haro'; - -const users = haro(null, { - index: ['email', 'department', 'role', 'department|role'], - key: 'id', - versioning: true, - immutable: true +const store = haro(null, { + index: ['name', 'email', 'department'] }); - -// Add users with batch operation -users.batch([ - { - id: 'u1', - email: 'alice@company.com', - name: 'Alice Johnson', - department: 'Engineering', - role: 'Senior Developer', - active: true - }, - { - id: 'u2', - email: 'bob@company.com', - name: 'Bob Smith', - department: 'Engineering', - role: 'Team Lead', - active: true - }, - { - id: 'u3', - email: 'carol@company.com', - name: 'Carol Davis', - department: 'Marketing', - role: 'Manager', - active: false - } -], 'set'); - -// Find by department -const engineers = users.find({ department: 'Engineering' }); - -// Complex queries with where() -const activeEngineers = users.where({ - department: 'Engineering', - active: true -}, '&&'); - -// Search across multiple fields -const managers = users.search(/manager|lead/i, ['role']); - -// Pagination for large datasets -const page1 = users.limit(0, 10); -const page2 = users.limit(10, 10); - -// Update user with version tracking -const updated = users.set('u1', { role: 'Principal Developer' }); -console.log(users.versions.get('u1')); // Previous versions ``` -### E-commerce Product Catalog - -```javascript -import { Haro } from 'haro'; - -class ProductCatalog extends Haro { - constructor() { - super({ - index: ['category', 'brand', 'price', 'tags', 'category|brand'], - key: 'sku', - versioning: true - }); - } +### Error Messages - beforeSet(key, data, batch, override) { - // Validate required fields - if (!data.name || !data.price || !data.category) { - throw new Error('Missing required product fields'); - } - - // Normalize price - if (typeof data.price === 'string') { - data.price = parseFloat(data.price); - } - - // Auto-generate SKU if not provided - if (!data.sku && !key) { - data.sku = 
this.generateSKU(data); - } - } +| Error | Cause | Solution | +|-------|-------|----------| +| "Key field validation error" | Missing key field in data | Ensure key field exists in records | +| "Index must be an array" | Invalid index configuration | Pass array to `index` option | +| "Function required" | Invalid function parameter | Pass a function to `filter()` or `map()` | +| "Invalid index name" | Sorting by non-indexed field | Add field to index or use `sort()` | - onset(record, batch) { - console.log(`Product ${record.name} (${record.sku}) updated`); - } +### Getting Help - generateSKU(product) { - const prefix = product.category.substring(0, 3).toUpperCase(); - const suffix = Date.now().toString().slice(-6); - return `${prefix}-${suffix}`; - } +- Check [API.md](https://github.com/avoidwork/haro/blob/master/docs/API.md) for complete documentation +- Review [examples](https://github.com/avoidwork/haro/blob/master/docs/API.md#examples) in API docs +- Open an issue on [GitHub](https://github.com/avoidwork/haro/issues) - // Custom business methods - findByPriceRange(min, max) { - return this.filter(product => - product.price >= min && product.price <= max - ); - } - - searchProducts(query) { - // Search across multiple fields - const lowerQuery = query.toLowerCase(); - return this.filter(product => - product.name.toLowerCase().includes(lowerQuery) || - product.description.toLowerCase().includes(lowerQuery) || - product.tags.some(tag => tag.toLowerCase().includes(lowerQuery)) - ); - } +## Testing - getRecommendations(sku, limit = 5) { - const product = this.get(sku); - if (!product) return []; - - // Find similar products by category and brand - return this.find({ - category: product.category, - brand: product.brand - }) - .filter(p => p.sku !== sku) - .slice(0, limit); - } -} +```bash +# Run unit tests +npm test -const catalog = new ProductCatalog(); - -// Add products -catalog.batch([ - { - sku: 'LAP-001', - name: 'MacBook Pro 16"', - category: 'Laptops', - 
brand: 'Apple', - price: 2499.99, - tags: ['professional', 'high-performance', 'creative'], - description: 'Powerful laptop for professionals' - }, - { - sku: 'LAP-002', - name: 'ThinkPad X1 Carbon', - category: 'Laptops', - brand: 'Lenovo', - price: 1899.99, - tags: ['business', 'lightweight', 'durable'], - description: 'Business laptop with excellent build quality' - } -], 'set'); +# Run with coverage +npm run coverage -// Business queries -const laptops = catalog.find({ category: 'Laptops' }); -const affordable = catalog.findByPriceRange(1000, 2000); -const searchResults = catalog.searchProducts('professional'); -const recommendations = catalog.getRecommendations('LAP-001'); +# Run performance benchmarks +npm run benchmark ``` -### Real-time Analytics Dashboard - -```javascript -import { haro } from 'haro'; - -// Event tracking store -const events = haro(null, { - index: ['type', 'userId', 'timestamp', 'type|userId'], - key: 'id', - immutable: false // Allow mutations for performance -}); - -// Session tracking store -const sessions = haro(null, { - index: ['userId', 'status', 'lastActivity'], - key: 'sessionId', - versioning: true -}); +See [CONTRIBUTING.md](https://github.com/avoidwork/haro/blob/master/.github/CONTRIBUTING.md) for detailed testing guidelines. 
-// Analytics functions -function trackEvent(type, userId, data = {}) { - return events.set(null, { - id: events.uuid(), - type, - userId, - timestamp: Date.now(), - data, - ...data - }); -} - -function getActiveUsers(minutes = 5) { - const threshold = Date.now() - (minutes * 60 * 1000); - return sessions.filter(session => - session.status === 'active' && - session.lastActivity > threshold - ); -} +## Benchmarks -function getUserActivity(userId, hours = 24) { - const since = Date.now() - (hours * 60 * 60 * 1000); - return events.find({ userId }) - .filter(event => event.timestamp > since) - .sort((a, b) => b.timestamp - a.timestamp); -} +Haro includes comprehensive benchmark suites for performance analysis. -function getEventStats(timeframe = 'hour') { - const now = Date.now(); - const intervals = { - hour: 60 * 60 * 1000, - day: 24 * 60 * 60 * 1000, - week: 7 * 24 * 60 * 60 * 1000 - }; - - const since = now - intervals[timeframe]; - const recentEvents = events.filter(event => event.timestamp > since); - - return recentEvents.reduce((stats, event) => { - stats[event.type] = (stats[event.type] || 0) + 1; - return stats; - }, {}); -} +### Running Benchmarks -// Usage -trackEvent('page_view', 'user123', { page: '/dashboard' }); -trackEvent('click', 'user123', { element: 'nav-menu' }); -trackEvent('search', 'user456', { query: 'analytics' }); +```bash +# Run all benchmarks +node benchmarks/index.js -console.log('Active users:', getActiveUsers().length); -console.log('User activity:', getUserActivity('user123')); -console.log('Event stats:', getEventStats('hour')); +# Run specific categories +node benchmarks/index.js --basic-only # CRUD operations +node benchmarks/index.js --search-only # Query operations +node benchmarks/index.js --index-only # Index operations +node benchmarks/index.js --utilities-only # Utility operations +node benchmarks/index.js --pagination-only # Pagination benchmarks +node benchmarks/index.js --persistence-only # Persistence benchmarks +node 
benchmarks/index.js --core-only # Core benchmarks (basic, search, index) +node benchmarks/index.js --quiet # Minimal output ``` -### Configuration Management +### Performance Overview -```javascript -import { Haro } from 'haro'; +Haro provides excellent performance for in-memory data operations: -class ConfigStore extends Haro { - constructor() { - super({ - index: ['environment', 'service', 'type', 'environment|service'], - key: 'key', - versioning: true, - immutable: true - }); - - this.loadDefaults(); - } - - loadDefaults() { - this.batch([ - { key: 'db.host', value: 'localhost', environment: 'dev', type: 'database' }, - { key: 'db.port', value: 5432, environment: 'dev', type: 'database' }, - { key: 'api.timeout', value: 30000, environment: 'dev', type: 'api' }, - { key: 'db.host', value: 'prod-db.example.com', environment: 'prod', type: 'database' }, - { key: 'db.port', value: 5432, environment: 'prod', type: 'database' }, - { key: 'api.timeout', value: 10000, environment: 'prod', type: 'api' } - ], 'set'); - } - - getConfig(key, environment = 'dev') { - const configs = this.find({ key, environment }); - return configs.length > 0 ? 
configs[0].value : null; - } +- **Indexed lookups**: O(1) performance for find() operations +- **Batch operations**: Efficient bulk data processing +- **Memory efficiency**: Optimized data structures +- **Scalability**: Consistent performance across different data sizes - getEnvironmentConfig(environment) { - return this.find({ environment }).reduce((config, item) => { - config[item.key] = item.value; - return config; - }, {}); - } - - updateConfig(key, value, environment = 'dev') { - const existing = this.find({ key, environment })[0]; - if (existing) { - return this.set(key, { ...existing, value }); - } else { - return this.set(key, { key, value, environment, type: 'custom' }); - } - } - - getDatabaseConfig(environment = 'dev') { - return this.find({ environment, type: 'database' }); - } -} - -const config = new ConfigStore(); - -// Get specific config -console.log(config.getConfig('db.host', 'prod')); // 'prod-db.example.com' - -// Get all configs for environment -const devConfig = config.getEnvironmentConfig('dev'); - -// Update configuration -config.updateConfig('api.timeout', 45000, 'dev'); - -// Get configuration history -console.log(config.versions.get('api.timeout')); -``` +See [`benchmarks/README.md`](https://github.com/avoidwork/haro/blob/master/benchmarks/README.md) for complete benchmark documentation and detailed results. 
-## Performance +## Learn More -Haro is optimized for: -- **Fast indexing**: O(1) lookups on indexed fields -- **Efficient searches**: Regex and function-based filtering with index acceleration -- **Memory efficiency**: Minimal overhead with optional immutability -- **Batch operations**: Optimized bulk inserts and updates -- **Version tracking**: Efficient MVCC-style versioning when enabled +- [API Reference](https://github.com/avoidwork/haro/blob/master/docs/API.md) - Complete API documentation +- [Contributing Guide](https://github.com/avoidwork/haro/blob/master/.github/CONTRIBUTING.md) - How to contribute +- [Benchmarks](https://github.com/avoidwork/haro/blob/master/benchmarks/README.md) - Performance analysis +- [Changelog](https://github.com/avoidwork/haro/blob/master/CHANGELOG.md) - Version history +- [Security](https://github.com/avoidwork/haro/blob/master/SECURITY.md) - Security policy -### Performance Characteristics +## Community -| Operation | Indexed | Non-Indexed | Notes | -|-----------|---------|-------------|-------| -| `find()` | O(1) | O(n) | Use indexes for best performance | -| `get()` | O(1) | O(1) | Direct key lookup | -| `set()` | O(1) | O(1) | Includes index updates | -| `delete()` | O(1) | O(1) | Includes index cleanup | -| `filter()` | O(n) | O(n) | Full scan with predicate | -| `search()` | O(k) | O(n) | k = matching index entries | +- **Report Issues**: [GitHub Issues](https://github.com/avoidwork/haro/issues) ## License -Copyright (c) 2025 Jason Mulligan +Copyright (c) 2026 Jason Mulligan Licensed under the BSD-3-Clause license. diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..fc89d422 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,66 @@ +# Security Policy + +## Supported Versions + +Use this section to tell people about which versions of your project are currently being supported with security updates. 
+ +| Version | Supported | +| ------- | ------------------ | +| Latest | :white_check_mark: | +| < Latest| :x: | + +## Reporting a Vulnerability + +We take the security of Haro seriously. If you believe you have found a security vulnerability, please report it to us as described below. + +**Please do NOT report security vulnerabilities through public GitHub issues.** + +### How to Report a Security Vulnerability + +If you think you have found a vulnerability in Haro, please email [maintainer email]. Include as much detail as possible to help us identify and fix the issue quickly. + +### What to Include in Your Report + +- A description of the vulnerability +- Steps to reproduce the issue +- A proof of concept (if possible) +- Any potential impact +- Your suggested fix (if you have one) + +### What to Expect + +- **Acknowledgment**: We will acknowledge receipt of your report within 48 hours +- **Updates**: We will keep you informed of our progress +- **Timeline**: We aim to resolve critical issues within 7 days +- **Credit**: We will credit you in the security advisory (unless you prefer to remain anonymous) + +### Security Best Practices + +When using Haro, please follow these security best practices: + +1. **Keep Haro Updated**: Always use the latest version to benefit from security patches +2. **Validate Input**: Always validate and sanitize data before storing it in Haro +3. **Use Immutable Mode**: Enable immutable mode to prevent accidental data modification +4. **Limit Access**: Control access to your Haro instances through proper authentication +5. **Monitor Logs**: Watch for unusual patterns in data access + +### Security Considerations + +Haro is an in-memory data store. Consider the following when deploying: + +- **Data Persistence**: Haro does not persist data to disk. 
Ensure proper backup strategies +- **Memory Limits**: Be aware of memory consumption with large datasets +- **Access Control**: Implement proper access control at the application level +- **Network Exposure**: Do not expose Haro instances directly to untrusted networks + +## Security Updates + +Security updates will be released as patch versions (e.g., 1.0.1 -> 1.0.2) and will be announced in the changelog and GitHub releases. + +## Recognition + +We appreciate responsible disclosure and would like to thank the following security researchers for their contributions: + +- [Name/Handle] - [Date] - [Vulnerability type] + +*This security policy is adapted from best practices for open source projects.* diff --git a/benchmarks/README.md b/benchmarks/README.md index d32bf6e2..61acfaf5 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -9,9 +9,6 @@ The benchmark suite consists of several modules that test different aspects of H - **Basic Operations** - CRUD operations (Create, Read, Update, Delete) - **Search & Filter** - Query performance with various patterns - **Index Operations** - Indexing performance and benefits -- **Memory Usage** - Memory consumption patterns and efficiency -- **Comparison** - Performance vs native JavaScript structures -- **Utility Operations** - Helper methods (clone, merge, freeze, forEach, uuid) - **Pagination** - Limit-based pagination performance - **Persistence** - Dump/override operations for data serialization - **Immutable Comparison** - Performance comparison between mutable and immutable modes @@ -42,8 +39,7 @@ node benchmarks/index.js --memory-only # Run only comparison benchmarks node benchmarks/index.js --comparison-only -# Run only utility operations benchmarks -node benchmarks/index.js --utilities-only + # Run only pagination benchmarks node benchmarks/index.js --pagination-only @@ -85,8 +81,7 @@ node benchmarks/memory-usage.js # Performance comparisons node benchmarks/comparison.js -# Utility operations -node 
benchmarks/utility-operations.js + # Pagination benchmarks node benchmarks/pagination.js @@ -108,11 +103,11 @@ Tests fundamental CRUD operations performance: - **GET operations**: Record retrieval by key - **DELETE operations**: Individual and batch record deletion - **CLEAR operations**: Store clearing performance -- **Utility operations**: `toArray()`, `keys()`, `values()`, `entries()` **Data Sizes Tested**: 100, 1,000, 10,000, 50,000 records **Key Metrics**: + - Operations per second - Total execution time - Average operation time @@ -131,6 +126,7 @@ Tests query performance with various patterns: **Data Sizes Tested**: 1,000, 10,000, 50,000 records **Key Features Tested**: + - Simple vs complex queries - Indexed vs non-indexed queries - Array field queries @@ -148,6 +144,7 @@ Tests indexing performance and benefits: - **Index comparison**: Performance benefits analysis **Index Types Tested**: + - Single field indexes - Composite indexes (multi-field) - Array field indexes @@ -167,6 +164,7 @@ Analyzes memory consumption patterns: - **Stress memory**: Memory under high load conditions **Special Features**: + - Memory growth analysis over time - Garbage collection tracking - Memory leak detection @@ -182,6 +180,7 @@ Compares Haro performance with native JavaScript structures: - **Advanced features**: Unique Haro capabilities vs manual implementation **Operations Compared**: + - Storage operations - Retrieval operations - Query operations @@ -190,25 +189,7 @@ Compares Haro performance with native JavaScript structures: - Sorting operations - Memory usage -### 6. 
Utility Operations (`utility-operations.js`) - -Tests performance of helper and utility methods: - -- **CLONE operations**: Deep cloning of objects and arrays -- **MERGE operations**: Object and array merging with different strategies -- **FREEZE operations**: Object freezing for immutability -- **forEach operations**: Iteration with different callback complexities -- **UUID operations**: UUID generation and uniqueness testing - -**Data Sizes Tested**: 100, 1,000, 5,000 records - -**Key Features Tested**: -- Simple vs complex object cloning -- Array vs object merging strategies -- Performance vs safety trade-offs -- UUID generation rates and uniqueness - -### 7. Pagination (`pagination.js`) +### 6. Pagination (`pagination.js`) Tests pagination and data limiting performance: @@ -221,12 +202,13 @@ Tests pagination and data limiting performance: **Data Sizes Tested**: 1,000, 10,000, 50,000 records **Key Features Tested**: + - Small vs large page sizes - First page vs middle vs last page performance - Memory efficiency of chunked vs full data access - Integration with query operations -### 8. Persistence (`persistence.js`) +### 7. Persistence (`persistence.js`) Tests data serialization and restoration performance: @@ -239,162 +221,130 @@ Tests data serialization and restoration performance: **Data Sizes Tested**: 100, 1,000, 5,000 records **Key Features Tested**: + - Records vs indexes export/import - Data integrity validation - Memory impact of persistence operations - Complex object serialization performance -### 9. Immutable Comparison (`immutable-comparison.js`) - -Compares performance between immutable and mutable modes: +### 8. 
Immutable Comparison (`immutable-comparison.js`) -- **STORE CREATION**: Setup performance comparison -- **CRUD operations**: Create, Read, Update, Delete in both modes -- **QUERY operations**: Find, filter, search, where performance -- **TRANSFORMATION**: Map, reduce, sort, forEach comparison -- **MEMORY usage**: Memory consumption patterns -- **DATA SAFETY**: Mutation protection analysis +Compares performance between immutable and mutable modes. -**Data Sizes Tested**: 100, 1,000, 5,000 records - -**Key Features Tested**: -- Performance vs safety trade-offs -- Memory overhead of immutable mode -- Operation-specific performance differences -- Data protection effectiveness +**Note**: This benchmark file exists but is not integrated into the main benchmark runner. ## Latest Benchmark Results -### Performance Summary (Last Updated: December 2024) +### Performance Summary (Last Updated: April 2026) **Overall Test Results:** -- **Total Tests**: 572 tests across 9 categories -- **Total Runtime**: 1.6 minutes -- **Test Environment**: Node.js on macOS (darwin 24.5.0) + +- **Total Tests**: 18 tests across 6 categories +- **Total Runtime**: ~30 seconds +- **Test Environment**: Node.js v25.8.1 on Linux **Performance Highlights:** -- **Fastest Operation**: HAS operation (20,815,120 ops/second on 1,000 records) -- **Slowest Operation**: BATCH SET (88 ops/second on 50,000 records) -- **Memory Efficiency**: Most efficient DELETE operations (-170.19 MB for 100 deletions) -- **Least Memory Efficient**: FIND operations (34.49 MB for 25,000 records with 100 queries) + +- **Fastest Operation**: HAS operation (494,071 ops/second on 10,000 keys) +- **Slowest Operation**: CREATE indexes (166 ops/second on 10,000 records) +- **Most Efficient**: keys() iteration (138,812 ops/second) ### Category Performance Breakdown #### Basic Operations -- **Tests**: 40 tests -- **Runtime**: 249ms -- **Average Performance**: 3,266,856 ops/second + +- **Tests**: 4 tests +- **Runtime**: ~2 seconds +- 
**Average Performance**: 131,678 ops/second - **Key Findings**: Excellent performance for core CRUD operations #### Search & Filter Operations -- **Tests**: 93 tests -- **Runtime**: 1.2 minutes -- **Average Performance**: 856,503 ops/second + +- **Tests**: 4 tests +- **Runtime**: ~2 seconds +- **Average Performance**: 9,059 ops/second - **Key Findings**: Strong performance for indexed queries, good filter performance #### Index Operations -- **Tests**: 60 tests -- **Runtime**: 2.1 seconds -- **Average Performance**: 386,859 ops/second -- **Key Findings**: Efficient index creation and maintenance - -#### Memory Usage -- **Tests**: 60 tests -- **Runtime**: 419ms -- **Average Memory**: 1.28 MB -- **Key Findings**: Efficient memory usage patterns -#### Comparison with Native Structures -- **Tests**: 93 tests -- **Runtime**: 12.6 seconds -- **Average Performance**: 2,451,027 ops/second -- **Key Findings**: Competitive with native structures considering feature richness +- **Tests**: 3 tests +- **Runtime**: ~2 seconds +- **Average Performance**: 186 ops/second +- **Key Findings**: Efficient index creation and maintenance #### Utility Operations -- **Tests**: 45 tests -- **Runtime**: 206ms -- **Average Performance**: 3,059,333 ops/second -- **Key Findings**: Excellent performance for clone, merge, freeze operations + +- **Tests**: 4 tests +- **Runtime**: ~1 second +- **Average Performance**: 100,606 ops/second +- **Key Findings**: Very fast utility methods for data access #### Pagination -- **Tests**: 65 tests -- **Runtime**: 579ms -- **Average Performance**: 100,162 ops/second + +- **Tests**: 4 tests +- **Runtime**: ~1 second +- **Average Performance**: 64,911 ops/second - **Key Findings**: Efficient pagination suitable for UI requirements #### Persistence -- **Tests**: 38 tests -- **Runtime**: 314ms -- **Average Performance**: 114,384 ops/second -- **Key Findings**: Good performance for data serialization/deserialization -#### Immutable vs Mutable Comparison -- 
**Tests**: 78 tests -- **Runtime**: 8.4 seconds -- **Average Performance**: 835,983 ops/second -- **Key Findings**: Minimal performance difference for most operations +- **Tests**: 3 tests +- **Runtime**: ~1 second +- **Average Performance**: 20,954 ops/second +- **Key Findings**: Good performance for data serialization/deserialization ### Detailed Performance Results #### Basic Operations Performance -- **SET operations**: Up to 3.2M ops/sec for typical workloads -- **GET operations**: Up to 20M ops/sec with index lookups -- **DELETE operations**: Efficient cleanup with index maintenance -- **HAS operations**: 20,815,120 ops/sec (best performer) -- **CLEAR operations**: Fast bulk deletion -- **BATCH operations**: Optimized for bulk data manipulation + +- **SET operations**: 833 ops/sec (10,000 records) +- **GET operations**: 29,313 ops/sec (10,000 records) +- **HAS operations**: 494,071 ops/sec (10,000 keys) +- **DELETE operations**: 475 ops/sec (10,000 records) #### Query Operations Performance -- **FIND (indexed)**: 64,594 ops/sec (1,000 records) -- **FILTER operations**: 46,255 ops/sec -- **SEARCH operations**: Strong regex and text search performance -- **WHERE clauses**: 60,710 ops/sec for complex queries -- **SORT operations**: Efficient sorting with index optimization - -#### Comparison with Native Structures -- **Haro vs Array Filter**: 46,255 vs 189,293 ops/sec -- **Haro vs Map**: Comparable performance for basic operations -- **Haro vs Object**: Trade-off between features and raw performance -- **Advanced Features**: Unique capabilities not available in native structures - -#### Memory Usage Analysis -- **Haro (50,000 records)**: 13.98 MB -- **Map (50,000 records)**: 3.52 MB -- **Object (50,000 records)**: 1.27 MB -- **Array (50,000 records)**: 0.38 MB -- **Overhead Analysis**: Reasonable for feature set provided + +- **FIND (indexed)**: 10,437 ops/sec (10,000 records) +- **WHERE (indexed)**: 8,519 ops/sec (10,000 records) +- **SEARCH operations**: 8,921 
ops/sec (10,000 records) +- **FILTER operations**: 8,975 ops/sec (10,000 records) + +#### Index Operations Performance + +- **CREATE indexes**: 166 ops/sec (10,000 records) +- **FIND with index**: 371 ops/sec (10,000 records) +- **REINDEX single field**: 289 ops/sec (10,000 records) #### Utility Operations Performance -- **Clone simple objects**: 1,605,780 ops/sec -- **Clone complex objects**: 234,455 ops/sec -- **Merge operations**: Up to 2,021,394 ops/sec -- **Freeze operations**: Up to 17,316,017 ops/sec -- **forEach operations**: Up to 58,678 ops/sec -- **UUID generation**: 14,630,218 ops/sec + +- **toArray()**: 136,537 ops/sec (1,000 iterations) +- **entries()**: 12,359 ops/sec (1,000 iterations) +- **keys()**: 138,812 ops/sec (1,000 iterations) +- **values()**: 136,351 ops/sec (1,000 iterations) #### Pagination Performance -- **Small pages (10 items)**: 616,488 ops/sec -- **Medium pages (50 items)**: 271,554 ops/sec -- **Large pages (100 items)**: 153,433 ops/sec -- **Sequential pagination**: Efficient for typical UI patterns -#### Immutable vs Mutable Performance -- **Creation**: Minimal difference (1.27x faster mutable) -- **Read operations**: Comparable performance -- **Write operations**: Slight advantage to mutable mode -- **Transformation operations**: Significant cost in immutable mode +- **Small pages (10 items)**: 70,487 ops/sec (10,000 records) +- **Medium pages (50 items)**: 66,494 ops/sec (10,000 records) +- **Large pages (100 items)**: 61,877 ops/sec (10,000 records) +- **With offset**: 66,361 ops/sec (10,000 records) + +#### Persistence Performance + +- **DUMP records**: 13,860 ops/sec (5,000 records) +- **DUMP indexes**: 45,163 ops/sec (5,000 records) +- **OVERRIDE records**: 3,807 ops/sec (5,000 records) ### Performance Recommendations Based on the latest benchmark results: -1. **✅ Basic operations performance is excellent** for most use cases -2. **✅ Memory usage is efficient** for typical workloads -3. 
**📊 Review comparison results** to understand trade-offs vs native structures -4. **✅ Utility operations** (clone, merge, freeze) perform well -5. **✅ Pagination performance** is suitable for typical UI requirements -6. **💾 Persistence operations** available for data serialization needs -7. **🔒 Review immutable vs mutable comparison** for data safety vs performance trade-offs +1. **✅ Basic operations perform well** - HAS is fastest at 494K ops/sec +2. **✅ Indexed queries are efficient** - FIND at 10K ops/sec for 10K records +3. **✅ Utility methods are very fast** - keys() and values() at 138K ops/sec +4. **✅ Pagination is fast** - 70K ops/sec for small pages +5. **✅ Persistence is reasonable** - DUMP indexes at 45K ops/sec +6. **⚠️ Index creation is slow** - 166 ops/sec (consider one-time setup) ## Understanding Results @@ -426,13 +376,15 @@ Based on the latest benchmark results: Based on the latest benchmark results, here are the key insights: #### Performance Strengths -1. **Excellent Basic Operations**: Core CRUD operations perform exceptionally well (3.2M+ ops/sec) -2. **Fast Record Lookups**: HAS operations achieve 20M+ ops/sec, demonstrating efficient key-based access -3. **Efficient Indexing**: Index-based queries provide significant performance benefits -4. **Strong Utility Performance**: Clone, merge, and freeze operations are highly optimized -5. **Competitive with Native Structures**: Maintains competitive performance while providing rich features + +1. **Excellent Basic Operations**: HAS achieves 494K ops/sec +2. **Fast Record Lookups**: GET at 29K ops/sec for 10K records +3. **Efficient Indexing**: FIND at 10K ops/sec for 10K records +4. **Fast Pagination**: 69K ops/sec for small pages +5. **Good Persistence**: DUMP indexes at 45K ops/sec #### Performance Considerations + 1. **Memory Overhead**: ~10x memory usage compared to native Arrays but justified by features 2. 
**Filter vs Find**: Array filters are ~4x faster than Haro filters, but Haro provides more features 3. **Immutable Mode Cost**: Transformation operations in immutable mode show significant performance impact @@ -440,6 +392,7 @@ Based on the latest benchmark results, here are the key insights: 5. **Complex Queries**: WHERE clauses maintain good performance even with multiple conditions #### Scaling Characteristics + - **Small datasets (100-1K records)**: Excellent performance across all operations - **Medium datasets (1K-10K records)**: Very good performance with minor degradation - **Large datasets (10K-50K records)**: Good performance with more noticeable costs for complex operations @@ -448,6 +401,7 @@ Based on the latest benchmark results, here are the key insights: ### Performance Recommendations by Use Case #### High-Performance Applications + - Use mutable mode for maximum performance - Leverage indexed queries (find) over filters - Implement batch operations for bulk changes @@ -455,13 +409,15 @@ Based on the latest benchmark results, here are the key insights: - Monitor memory usage with large datasets #### Data-Safe Applications + - Use immutable mode for data integrity - Accept performance trade-offs for safety -- Use utility methods (clone, merge) for safe data manipulation + - Enable versioning only when needed - Consider persistence for backup/restore needs #### Mixed Workloads + - Profile your specific use case - Consider hybrid approaches (mutable for writes, immutable for reads) - Use indexes strategically @@ -504,26 +460,27 @@ Based on the latest benchmark results, consider these optimizations: 5. **Consider memory limits** for large datasets (13.98MB for 50K records) 6. **Use immutable mode** strategically for data safety vs performance 7. **Implement pagination** for large result sets using `limit()` (616K ops/sec for small pages) -8. **Use utility methods** (clone: 1.6M ops/sec, merge: 2M ops/sec) for safe data manipulation -9. 
**Consider persistence** for data backup and restoration needs (114K ops/sec) -10. **Optimize WHERE queries** with proper indexing and operators + +8. **Consider persistence** for data backup and restoration needs (114K ops/sec) +9. **Optimize WHERE queries** with proper indexing and operators ## Interpreting Results ### When to Use Haro Haro is ideal when you need: + - **Complex queries** with multiple conditions (WHERE clauses: 60K ops/sec) - **Indexed search** performance (FIND: 64K ops/sec) - **Immutable data** with transformation capabilities - **Versioning** and data history tracking - **Advanced features** like regex search, array queries, pagination - **Memory efficiency** is acceptable for feature richness -- **Utility operations** for safe data manipulation ### When to Use Native Structures Consider native structures when: + - **Simple key-value** operations dominate (Array filter: 189K ops/sec) - **Memory efficiency** is critical (Array: 0.38MB vs Haro: 13.98MB for 50K records) - **Maximum performance** for basic operations is needed @@ -532,14 +489,14 @@ Consider native structures when: ### Performance vs Feature Trade-offs -| Feature | Performance Impact | Recommendation | -|---------|-------------------|----------------| -| Indexing | ✅ Significant improvement | Always use for queried fields | -| Immutable Mode | 🟡 Mixed (read: good, transform: slow) | Use for data safety when needed | -| Versioning | 🟡 Moderate impact | Enable only when history tracking required | -| Batch Operations | ✅ Better for bulk operations | Use for multiple changes | -| Pagination | ✅ Efficient for large datasets | Implement for UI performance | -| Persistence | 🟡 Good for data backup | Use for serialization needs | +| Feature | Performance Impact | Recommendation | +| ---------------- | -------------------------------------- | ------------------------------------------ | +| Indexing | ✅ Significant improvement | Always use for queried fields | +| Immutable Mode | 🟡 Mixed 
(read: good, transform: slow) | Use for data safety when needed | +| Versioning | 🟡 Moderate impact | Enable only when history tracking required | +| Batch Operations | ✅ Better for bulk operations | Use for multiple changes | +| Pagination | ✅ Efficient for large datasets | Implement for UI performance | +| Persistence | 🟡 Good for data backup | Use for serialization needs | ## Contributing @@ -559,16 +516,16 @@ To add new benchmarks: * @returns {Array} Array of benchmark results */ function benchmarkFeature(dataSizes) { - const results = []; - - dataSizes.forEach(size => { - const result = benchmark('Test name', () => { - // Test code here - }); - results.push(result); - }); - - return results; + const results = []; + + dataSizes.forEach((size) => { + const result = benchmark("Test name", () => { + // Test code here + }); + results.push(result); + }); + + return results; } ``` @@ -589,6 +546,7 @@ function benchmarkFeature(dataSizes) { ### Performance Factors Results may vary based on: + - System specifications (CPU, RAM) - Node.js version - Other running processes @@ -597,4 +555,4 @@ Results may vary based on: ## License -This benchmark suite is part of the Haro project and follows the same license terms. \ No newline at end of file +This benchmark suite is part of the Haro project and follows the same license terms. 
diff --git a/benchmarks/basic-operations.js b/benchmarks/basic-operations.js index 076ce057..96529624 100644 --- a/benchmarks/basic-operations.js +++ b/benchmarks/basic-operations.js @@ -6,7 +6,7 @@ import { haro } from "../dist/haro.js"; * @param {number} size - Number of records to generate * @returns {Array} Array of test records */ -function generateTestData (size) { +function generateTestData(size) { const data = []; for (let i = 0; i < size; i++) { data.push({ @@ -20,8 +20,8 @@ function generateTestData (size) { metadata: { created: new Date(), score: Math.random() * 100, - level: Math.floor(Math.random() * 10) - } + level: Math.floor(Math.random() * 10), + }, }); } @@ -35,7 +35,7 @@ function generateTestData (size) { * @param {number} iterations - Number of iterations to run * @returns {Object} Benchmark results */ -function benchmark (name, fn, iterations = 1000) { +function benchmark(name, fn, iterations = 1000) { const start = performance.now(); for (let i = 0; i < iterations; i++) { fn(); @@ -49,7 +49,7 @@ function benchmark (name, fn, iterations = 1000) { iterations, totalTime: total, avgTime, - opsPerSecond: Math.floor(1000 / avgTime) + opsPerSecond: Math.floor(1000 / avgTime), }; } @@ -58,10 +58,10 @@ function benchmark (name, fn, iterations = 1000) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkSetOperations (dataSizes) { +function benchmarkSetOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateTestData(size); const store = haro(); @@ -74,9 +74,13 @@ function benchmarkSetOperations (dataSizes) { // Batch set operations const batchStore = haro(); - const batchResult = benchmark(`BATCH SET (${size} records)`, () => { - batchStore.batch(testData, "set"); - }, 1); + const batchResult = benchmark( + `BATCH SET (${size} records)`, + () => { + batchStore.batch(testData, "set"); + }, + 1, + ); 
results.push(batchResult); }); @@ -88,10 +92,10 @@ function benchmarkSetOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkGetOperations (dataSizes) { +function benchmarkGetOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateTestData(size); const store = haro(testData); @@ -118,33 +122,37 @@ function benchmarkGetOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkDeleteOperations (dataSizes) { +function benchmarkDeleteOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateTestData(size); // Individual delete operations const deleteStore = haro(testData); - const deleteResult = benchmark(`DELETE (${size} records)`, () => { - const keys = Array.from(deleteStore.keys()); - if (keys.length > 0) { - const randomKey = keys[Math.floor(Math.random() * keys.length)]; - try { - deleteStore.del(randomKey); - } catch (e) { // eslint-disable-line no-unused-vars - // Record might already be deleted + const deleteResult = benchmark( + `DELETE (${size} records)`, + () => { + const keys = Array.from(deleteStore.keys()); + if (keys.length > 0) { + const randomKey = keys[Math.floor(Math.random() * keys.length)]; + deleteStore.delete(randomKey); } - } - }, Math.min(100, size)); + }, + Math.min(100, size), + ); results.push(deleteResult); // Clear operations const clearStore = haro(testData); - const clearResult = benchmark(`CLEAR (${size} records)`, () => { - clearStore.clear(); - clearStore.batch(testData, "set"); - }, 10); + const clearResult = benchmark( + `CLEAR (${size} records)`, + () => { + clearStore.clear(); + clearStore.batch(testData, "set"); + }, + 10, + ); results.push(clearResult); }); @@ -156,35 +164,51 @@ function 
benchmarkDeleteOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkUtilityOperations (dataSizes) { +function benchmarkUtilityOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateTestData(size); const store = haro(testData); // ToArray operations - const toArrayResult = benchmark(`toArray (${size} records)`, () => { - store.toArray(); - }, 100); + const toArrayResult = benchmark( + `toArray (${size} records)`, + () => { + store.toArray(); + }, + 100, + ); results.push(toArrayResult); // Keys operations - const keysResult = benchmark(`keys (${size} records)`, () => { - Array.from(store.keys()); - }, 100); + const keysResult = benchmark( + `keys (${size} records)`, + () => { + Array.from(store.keys()); + }, + 100, + ); results.push(keysResult); // Values operations - const valuesResult = benchmark(`values (${size} records)`, () => { - Array.from(store.values()); - }, 100); + const valuesResult = benchmark( + `values (${size} records)`, + () => { + Array.from(store.values()); + }, + 100, + ); results.push(valuesResult); // Entries operations - const entriesResult = benchmark(`entries (${size} records)`, () => { - Array.from(store.entries()); - }, 100); + const entriesResult = benchmark( + `entries (${size} records)`, + () => { + Array.from(store.entries()); + }, + 100, + ); results.push(entriesResult); }); @@ -195,13 +219,19 @@ function benchmarkUtilityOperations (dataSizes) { * Prints benchmark results in a formatted table * @param {Array} results - Array of benchmark results */ -function printResults (results) { +function printResults(results) { console.log("\n=== BASIC OPERATIONS BENCHMARK RESULTS ===\n"); - console.log("Operation".padEnd(30) + "Iterations".padEnd(12) + "Total Time (ms)".padEnd(18) + "Avg Time (ms)".padEnd(16) + "Ops/Second"); + console.log( + "Operation".padEnd(30) + + 
"Iterations".padEnd(12) + + "Total Time (ms)".padEnd(18) + + "Avg Time (ms)".padEnd(16) + + "Ops/Second", + ); console.log("-".repeat(88)); - results.forEach(result => { + results.forEach((result) => { const name = result.name.padEnd(30); const iterations = result.iterations.toString().padEnd(12); const totalTime = result.totalTime.toFixed(2).padEnd(18); @@ -217,7 +247,7 @@ function printResults (results) { /** * Main function to run all basic operations benchmarks */ -function runBasicOperationsBenchmarks () { +function runBasicOperationsBenchmarks() { console.log("🚀 Running Basic Operations Benchmarks...\n"); const dataSizes = [100, 1000, 10000, 50000]; diff --git a/benchmarks/comparison.js b/benchmarks/comparison.js index e50659c9..e7481835 100644 --- a/benchmarks/comparison.js +++ b/benchmarks/comparison.js @@ -9,7 +9,7 @@ import { generateIndexTestData } from "./index-operations.js"; * @param {number} iterations - Number of iterations to run * @returns {Object} Benchmark results */ -function benchmark (name, fn, iterations = 1000) { +function benchmark(name, fn, iterations = 1000) { const start = performance.now(); for (let i = 0; i < iterations; i++) { fn(); @@ -23,7 +23,7 @@ function benchmark (name, fn, iterations = 1000) { iterations, totalTime: total, avgTime, - opsPerSecond: Math.floor(1000 / avgTime) + opsPerSecond: Math.floor(1000 / avgTime), }; } @@ -32,38 +32,54 @@ function benchmark (name, fn, iterations = 1000) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkStorageComparison (dataSizes) { +function benchmarkStorageComparison(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Haro storage - const haroSetResult = benchmark(`Haro SET (${size} records)`, () => { - const store = haro(); - testData.forEach(record => store.set(record.id, record)); - }, 10); + const haroSetResult = 
benchmark( + `Haro SET (${size} records)`, + () => { + const store = haro(); + testData.forEach((record) => store.set(record.id, record)); + }, + 10, + ); results.push(haroSetResult); // Native Map storage - const mapSetResult = benchmark(`Map SET (${size} records)`, () => { - const map = new Map(); - testData.forEach(record => map.set(record.id, record)); - }, 10); + const mapSetResult = benchmark( + `Map SET (${size} records)`, + () => { + const map = new Map(); + testData.forEach((record) => map.set(record.id, record)); + }, + 10, + ); results.push(mapSetResult); // Native Object storage - const objectSetResult = benchmark(`Object SET (${size} records)`, () => { - const obj = {}; - testData.forEach(record => obj[record.id] = record); // eslint-disable-line no-return-assign - }, 10); + const objectSetResult = benchmark( + `Object SET (${size} records)`, + () => { + const obj = {}; + testData.forEach((record) => (obj[record.id] = record)); // eslint-disable-line no-return-assign + }, + 10, + ); results.push(objectSetResult); // Array storage - const arraySetResult = benchmark(`Array PUSH (${size} records)`, () => { - const arr = []; - testData.forEach(record => arr.push(record)); - }, 10); + const arraySetResult = benchmark( + `Array PUSH (${size} records)`, + () => { + const arr = []; + testData.forEach((record) => arr.push(record)); + }, + 10, + ); results.push(arraySetResult); }); @@ -75,10 +91,10 @@ function benchmarkStorageComparison (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkRetrievalComparison (dataSizes) { +function benchmarkRetrievalComparison(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Prepare data structures @@ -87,7 +103,7 @@ function benchmarkRetrievalComparison (dataSizes) { const objectStore = {}; const arrayStore = []; - testData.forEach(record => { + 
testData.forEach((record) => { mapStore.set(record.id, record); objectStore[record.id] = record; arrayStore.push(record); @@ -124,7 +140,7 @@ function benchmarkRetrievalComparison (dataSizes) { // Array find (by property) const arrayFindResult = benchmark(`Array FIND (${size} records)`, () => { const id = Math.floor(Math.random() * size); - arrayStore.find(record => record.id === id); + arrayStore.find((record) => record.id === id); }); results.push(arrayFindResult); }); @@ -137,10 +153,10 @@ function benchmarkRetrievalComparison (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkQueryComparison (dataSizes) { +function benchmarkQueryComparison(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Prepare data structures @@ -155,31 +171,29 @@ function benchmarkQueryComparison (dataSizes) { // Haro filter query const haroFilterResult = benchmark(`Haro FILTER (${size} records)`, () => { - haroStore.filter(record => record.category === "A"); + haroStore.filter((record) => record.category === "A"); }); results.push(haroFilterResult); // Array filter query const arrayFilterResult = benchmark(`Array FILTER (${size} records)`, () => { - arrayStore.filter(record => record.category === "A"); + arrayStore.filter((record) => record.category === "A"); }); results.push(arrayFilterResult); // Complex query comparison const haroComplexResult = benchmark(`Haro COMPLEX query (${size} records)`, () => { - haroStore.filter(record => - record.category === "A" && - record.status === "active" && - record.priority === "high" + haroStore.filter( + (record) => + record.category === "A" && record.status === "active" && record.priority === "high", ); }); results.push(haroComplexResult); const arrayComplexResult = benchmark(`Array COMPLEX query (${size} records)`, () => { - arrayStore.filter(record => - record.category 
=== "A" && - record.status === "active" && - record.priority === "high" + arrayStore.filter( + (record) => + record.category === "A" && record.status === "active" && record.priority === "high", ); }); results.push(arrayComplexResult); @@ -193,55 +207,67 @@ function benchmarkQueryComparison (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkDeletionComparison (dataSizes) { +function benchmarkDeletionComparison(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Haro deletion - const haroDeleteResult = benchmark(`Haro DELETE (${size} records)`, () => { - const store = haro(testData); - const keys = Array.from(store.keys()); - for (let i = 0; i < Math.min(100, keys.length); i++) { - try { - store.del(keys[i]); - } catch (e) { // eslint-disable-line no-unused-vars - // Record might already be deleted + const haroDeleteResult = benchmark( + `Haro DELETE (${size} records)`, + () => { + const store = haro(testData); + const keys = Array.from(store.keys()); + for (let i = 0; i < Math.min(100, keys.length); i++) { + store.delete(keys[i]); } - } - }, 10); + }, + 10, + ); results.push(haroDeleteResult); // Map deletion - const mapDeleteResult = benchmark(`Map DELETE (${size} records)`, () => { - const map = new Map(); - testData.forEach(record => map.set(record.id, record)); - const keys = Array.from(map.keys()); - for (let i = 0; i < Math.min(100, keys.length); i++) { - map.delete(keys[i]); - } - }, 10); + const mapDeleteResult = benchmark( + `Map DELETE (${size} records)`, + () => { + const map = new Map(); + testData.forEach((record) => map.set(record.id, record)); + const keys = Array.from(map.keys()); + for (let i = 0; i < Math.min(100, keys.length); i++) { + map.delete(keys[i]); + } + }, + 10, + ); results.push(mapDeleteResult); // Object deletion - const objectDeleteResult = benchmark(`Object 
DELETE (${size} records)`, () => { - const obj = {}; - testData.forEach(record => obj[record.id] = record); // eslint-disable-line no-return-assign - const keys = Object.keys(obj); - for (let i = 0; i < Math.min(100, keys.length); i++) { - delete obj[keys[i]]; - } - }, 10); + const objectDeleteResult = benchmark( + `Object DELETE (${size} records)`, + () => { + const obj = {}; + testData.forEach((record) => (obj[record.id] = record)); // eslint-disable-line no-return-assign + const keys = Object.keys(obj); + for (let i = 0; i < Math.min(100, keys.length); i++) { + delete obj[keys[i]]; + } + }, + 10, + ); results.push(objectDeleteResult); // Array splice deletion - const arrayDeleteResult = benchmark(`Array SPLICE (${size} records)`, () => { - const arr = [...testData]; - for (let i = 0; i < Math.min(100, arr.length); i++) { - arr.splice(0, 1); - } - }, 10); + const arrayDeleteResult = benchmark( + `Array SPLICE (${size} records)`, + () => { + const arr = [...testData]; + for (let i = 0; i < Math.min(100, arr.length); i++) { + arr.splice(0, 1); + } + }, + 10, + ); results.push(arrayDeleteResult); }); @@ -253,10 +279,10 @@ function benchmarkDeletionComparison (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkAggregationComparison (dataSizes) { +function benchmarkAggregationComparison(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Prepare data structures @@ -265,36 +291,16 @@ function benchmarkAggregationComparison (dataSizes) { // Haro map operation const haroMapResult = benchmark(`Haro MAP (${size} records)`, () => { - haroStore.map(record => record.category); + haroStore.map((record) => record.category); }); results.push(haroMapResult); // Array map operation const arrayMapResult = benchmark(`Array MAP (${size} records)`, () => { - arrayStore.map(record => record.category); + 
arrayStore.map((record) => record.category); }); results.push(arrayMapResult); - // Haro reduce operation - const haroReduceResult = benchmark(`Haro REDUCE (${size} records)`, () => { - haroStore.reduce((acc, record) => { - acc[record.category] = (acc[record.category] || 0) + 1; - - return acc; - }, {}); - }); - results.push(haroReduceResult); - - // Array reduce operation - const arrayReduceResult = benchmark(`Array REDUCE (${size} records)`, () => { - arrayStore.reduce((acc, record) => { - acc[record.category] = (acc[record.category] || 0) + 1; - - return acc; - }, {}); - }); - results.push(arrayReduceResult); - // Haro forEach operation const haroForEachResult = benchmark(`Haro FOREACH (${size} records)`, () => { let count = 0; @@ -318,10 +324,10 @@ function benchmarkAggregationComparison (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkSortingComparison (dataSizes) { +function benchmarkSortingComparison(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Prepare data structures @@ -329,44 +335,64 @@ function benchmarkSortingComparison (dataSizes) { const arrayStore = [...testData]; // Haro sort operation - const haroSortResult = benchmark(`Haro SORT (${size} records)`, () => { - haroStore.sort((a, b) => a.score - b.score); - }, 10); + const haroSortResult = benchmark( + `Haro SORT (${size} records)`, + () => { + haroStore.sort((a, b) => a.score - b.score); + }, + 10, + ); results.push(haroSortResult); // Array sort operation - const arraySortResult = benchmark(`Array SORT (${size} records)`, () => { - [...arrayStore].sort((a, b) => a.score - b.score); - }, 10); + const arraySortResult = benchmark( + `Array SORT (${size} records)`, + () => { + [...arrayStore].sort((a, b) => a.score - b.score); + }, + 10, + ); results.push(arraySortResult); // Haro sortBy operation (indexed) - const 
haroSortByResult = benchmark(`Haro SORTBY indexed (${size} records)`, () => { - haroStore.sortBy("score"); - }, 10); + const haroSortByResult = benchmark( + `Haro SORTBY indexed (${size} records)`, + () => { + haroStore.sortBy("score"); + }, + 10, + ); results.push(haroSortByResult); // Complex sort comparison - const haroComplexSortResult = benchmark(`Haro COMPLEX sort (${size} records)`, () => { - haroStore.sort((a, b) => { - if (a.category !== b.category) { - return a.category.localeCompare(b.category); - } - - return b.score - a.score; - }); - }, 10); + const haroComplexSortResult = benchmark( + `Haro COMPLEX sort (${size} records)`, + () => { + haroStore.sort((a, b) => { + if (a.category !== b.category) { + return a.category.localeCompare(b.category); + } + + return b.score - a.score; + }); + }, + 10, + ); results.push(haroComplexSortResult); - const arrayComplexSortResult = benchmark(`Array COMPLEX sort (${size} records)`, () => { - [...arrayStore].sort((a, b) => { - if (a.category !== b.category) { - return a.category.localeCompare(b.category); - } - - return b.score - a.score; - }); - }, 10); + const arrayComplexSortResult = benchmark( + `Array COMPLEX sort (${size} records)`, + () => { + [...arrayStore].sort((a, b) => { + if (a.category !== b.category) { + return a.category.localeCompare(b.category); + } + + return b.score - a.score; + }); + }, + 10, + ); results.push(arrayComplexSortResult); }); @@ -378,10 +404,10 @@ function benchmarkSortingComparison (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of memory comparison results */ -function benchmarkMemoryComparison (dataSizes) { +function benchmarkMemoryComparison(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Measure memory usage for each data structure @@ -393,27 +419,27 @@ function benchmarkMemoryComparison (dataSizes) { const haroMemEnd = 
process.memoryUsage().heapUsed; measurements.push({ name: `Haro memory (${size} records)`, - memoryUsed: (haroMemEnd - haroMemStart) / 1024 / 1024 // MB + memoryUsed: (haroMemEnd - haroMemStart) / 1024 / 1024, // MB }); // Map memory usage const mapMemStart = process.memoryUsage().heapUsed; const mapStore = new Map(); - testData.forEach(record => mapStore.set(record.id, record)); + testData.forEach((record) => mapStore.set(record.id, record)); const mapMemEnd = process.memoryUsage().heapUsed; measurements.push({ name: `Map memory (${size} records)`, - memoryUsed: (mapMemEnd - mapMemStart) / 1024 / 1024 // MB + memoryUsed: (mapMemEnd - mapMemStart) / 1024 / 1024, // MB }); // Object memory usage const objMemStart = process.memoryUsage().heapUsed; const objStore = {}; - testData.forEach(record => objStore[record.id] = record); // eslint-disable-line no-return-assign + testData.forEach((record) => (objStore[record.id] = record)); // eslint-disable-line no-return-assign const objMemEnd = process.memoryUsage().heapUsed; measurements.push({ name: `Object memory (${size} records)`, - memoryUsed: (objMemEnd - objMemStart) / 1024 / 1024 // MB + memoryUsed: (objMemEnd - objMemStart) / 1024 / 1024, // MB }); // Array memory usage @@ -422,7 +448,7 @@ function benchmarkMemoryComparison (dataSizes) { const arrMemEnd = process.memoryUsage().heapUsed; measurements.push({ name: `Array memory (${size} records)`, - memoryUsed: (arrMemEnd - arrMemStart) / 1024 / 1024 // MB + memoryUsed: (arrMemEnd - arrMemStart) / 1024 / 1024, // MB }); results.push(...measurements); @@ -436,60 +462,70 @@ function benchmarkMemoryComparison (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkAdvancedFeatures (dataSizes) { +function benchmarkAdvancedFeatures(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Haro advanced 
features - const haroAdvancedResult = benchmark(`Haro ADVANCED features (${size} records)`, () => { - const store = haro(testData, { - index: ["category", "status", "category|status"], - versioning: true - }); - - // Use advanced features - store.find({ category: "A", status: "active" }); - store.search(/^A/, "category"); - store.where({ category: ["A", "B"] }); - store.sortBy("category"); - store.limit(10, 20); - - return store; - }, 10); + const haroAdvancedResult = benchmark( + `Haro ADVANCED features (${size} records)`, + () => { + const store = haro(testData, { + index: ["category", "status", "category|status"], + versioning: true, + }); + + // Use advanced features + store.find({ category: "A", status: "active" }); + store.search(/^A/, "category"); + store.where({ category: ["A", "B"] }); + store.sortBy("category"); + store.limit(10, 20); + + return store; + }, + 10, + ); results.push(haroAdvancedResult); // Simulate similar operations with native structures - const nativeAdvancedResult = benchmark(`Native ADVANCED simulation (${size} records)`, () => { - const store = [...testData]; - - // Category index simulation - const categoryIndex = new Map(); - store.forEach(record => { - if (!categoryIndex.has(record.category)) { - categoryIndex.set(record.category, []); - } - categoryIndex.get(record.category).push(record); - }); - - // Find simulation - const found = store.filter(record => record.category === "A" && record.status === "active"); - - // Search simulation - const searched = store.filter(record => (/^A/).test(record.category)); - - // Where simulation - const where = store.filter(record => ["A", "B"].includes(record.category)); - - // Sort simulation - const sorted = [...store].sort((a, b) => a.category.localeCompare(b.category)); - - // Limit simulation - const limited = sorted.slice(10, 30); - - return { found, searched, where, sorted, limited }; - }, 10); + const nativeAdvancedResult = benchmark( + `Native ADVANCED simulation (${size} records)`, + 
() => { + const store = [...testData]; + + // Category index simulation + const categoryIndex = new Map(); + store.forEach((record) => { + if (!categoryIndex.has(record.category)) { + categoryIndex.set(record.category, []); + } + categoryIndex.get(record.category).push(record); + }); + + // Find simulation + const found = store.filter( + (record) => record.category === "A" && record.status === "active", + ); + + // Search simulation + const searched = store.filter((record) => /^A/.test(record.category)); + + // Where simulation + const where = store.filter((record) => ["A", "B"].includes(record.category)); + + // Sort simulation + const sorted = [...store].sort((a, b) => a.category.localeCompare(b.category)); + + // Limit simulation + const limited = sorted.slice(10, 30); + + return { found, searched, where, sorted, limited }; + }, + 10, + ); results.push(nativeAdvancedResult); }); @@ -501,13 +537,19 @@ function benchmarkAdvancedFeatures (dataSizes) { * @param {Array} results - Array of benchmark results * @param {string} title - Title for the results section */ -function printResults (results, title) { +function printResults(results, title) { console.log(`\n=== ${title} ===\n`); - console.log("Operation".padEnd(40) + "Iterations".padEnd(12) + "Total Time (ms)".padEnd(18) + "Avg Time (ms)".padEnd(16) + "Ops/Second"); + console.log( + "Operation".padEnd(40) + + "Iterations".padEnd(12) + + "Total Time (ms)".padEnd(18) + + "Avg Time (ms)".padEnd(16) + + "Ops/Second", + ); console.log("-".repeat(98)); - results.forEach(result => { + results.forEach((result) => { const name = result.name.padEnd(40); const iterations = result.iterations.toString().padEnd(12); const totalTime = result.totalTime.toFixed(2).padEnd(18); @@ -524,13 +566,13 @@ function printResults (results, title) { * Prints memory comparison results * @param {Array} results - Array of memory measurements */ -function printMemoryResults (results) { +function printMemoryResults(results) { console.log("\n=== 
MEMORY USAGE COMPARISON ===\n"); console.log("Data Structure".padEnd(40) + "Memory Used (MB)"); console.log("-".repeat(60)); - results.forEach(result => { + results.forEach((result) => { const name = result.name.padEnd(40); const memoryUsed = result.memoryUsed.toFixed(2); @@ -543,7 +585,7 @@ function printMemoryResults (results) { /** * Main function to run all comparison benchmarks */ -function runComparisonBenchmarks () { +function runComparisonBenchmarks() { console.log("⚡ Running Haro vs Native Structures Comparison...\n"); const dataSizes = [1000, 10000, 50000]; @@ -587,7 +629,7 @@ function runComparisonBenchmarks () { ...deletionResults, ...aggregationResults, ...sortingResults, - ...advancedResults + ...advancedResults, ]; return { allResults, memoryResults }; diff --git a/benchmarks/immutable-comparison.js b/benchmarks/immutable-comparison.js index 602d54de..090c3ce9 100644 --- a/benchmarks/immutable-comparison.js +++ b/benchmarks/immutable-comparison.js @@ -6,7 +6,7 @@ import { haro } from "../dist/haro.js"; * @param {number} size - Number of records to generate * @returns {Array} Array of test records */ -function generateComparisonTestData (size) { +function generateComparisonTestData(size) { const data = []; for (let i = 0; i < size; i++) { data.push({ @@ -24,14 +24,14 @@ function generateComparisonTestData (size) { preferences: { theme: i % 2 === 0 ? 
"dark" : "light", notifications: Math.random() > 0.5, - language: ["en", "es", "fr"][i % 3] - } + language: ["en", "es", "fr"][i % 3], + }, }, - history: Array.from({ length: Math.min(i % 10 + 1, 5) }, (_, j) => ({ + history: Array.from({ length: Math.min((i % 10) + 1, 5) }, (_, j) => ({ action: `action_${j}`, timestamp: new Date(Date.now() - j * 86400000), - value: Math.random() * 1000 - })) + value: Math.random() * 1000, + })), }); } @@ -45,7 +45,7 @@ function generateComparisonTestData (size) { * @param {number} iterations - Number of iterations to run * @returns {Object} Benchmark results */ -function benchmark (name, fn, iterations = 100) { +function benchmark(name, fn, iterations = 100) { const start = performance.now(); for (let i = 0; i < iterations; i++) { fn(); @@ -59,7 +59,7 @@ function benchmark (name, fn, iterations = 100) { iterations, totalTime: total, avgTime, - opsPerSecond: Math.floor(1000 / avgTime) + opsPerSecond: Math.floor(1000 / avgTime), }; } @@ -68,26 +68,36 @@ function benchmark (name, fn, iterations = 100) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkStoreCreation (dataSizes) { +function benchmarkStoreCreation(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateComparisonTestData(size); // Mutable store creation - const mutableCreationResult = benchmark(`Store creation MUTABLE (${size} records)`, () => { - return haro(testData, { immutable: false, index: ["department", "active", "tags"] }); - }, 10); + const mutableCreationResult = benchmark( + `Store creation MUTABLE (${size} records)`, + () => { + return haro(testData, { immutable: false, index: ["department", "active", "tags"] }); + }, + 10, + ); results.push(mutableCreationResult); // Immutable store creation - const immutableCreationResult = benchmark(`Store creation IMMUTABLE (${size} records)`, () => { - return haro(testData, { 
immutable: true, index: ["department", "active", "tags"] }); - }, 10); + const immutableCreationResult = benchmark( + `Store creation IMMUTABLE (${size} records)`, + () => { + return haro(testData, { immutable: true, index: ["department", "active", "tags"] }); + }, + 10, + ); results.push(immutableCreationResult); // Performance comparison - const performanceRatio = (mutableCreationResult.opsPerSecond / immutableCreationResult.opsPerSecond).toFixed(2); + const performanceRatio = ( + mutableCreationResult.opsPerSecond / immutableCreationResult.opsPerSecond + ).toFixed(2); results.push({ name: `Creation performance ratio (${size} records)`, iterations: 1, @@ -97,7 +107,10 @@ function benchmarkStoreCreation (dataSizes) { mutableOps: mutableCreationResult.opsPerSecond, immutableOps: immutableCreationResult.opsPerSecond, ratio: `${performanceRatio}x faster (mutable)`, - recommendation: parseFloat(performanceRatio) > 1.5 ? "Use mutable for creation-heavy workloads" : "Performance difference minimal" + recommendation: + parseFloat(performanceRatio) > 1.5 + ? 
"Use mutable for creation-heavy workloads" + : "Performance difference minimal", }); }); @@ -109,10 +122,10 @@ function benchmarkStoreCreation (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkCrudOperations (dataSizes) { +function benchmarkCrudOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateComparisonTestData(size); // Create stores @@ -151,24 +164,34 @@ function benchmarkCrudOperations (dataSizes) { // DELETE operations (using a subset to avoid depleting data) const deleteCount = Math.min(10, size); - const mutableDeleteResult = benchmark(`DELETE operation MUTABLE (${deleteCount} deletes)`, () => { - const randomId = Math.floor(Math.random() * (size - deleteCount)).toString(); - try { - mutableStore.delete(randomId); - } catch (e) { // eslint-disable-line no-unused-vars - // Record might not exist - } - }, deleteCount); + const mutableDeleteResult = benchmark( + `DELETE operation MUTABLE (${deleteCount} deletes)`, + () => { + const randomId = Math.floor(Math.random() * (size - deleteCount)).toString(); + try { + mutableStore.delete(randomId); + } catch (e) { + // eslint-disable-line no-unused-vars + // Record might not exist + } + }, + deleteCount, + ); results.push(mutableDeleteResult); - const immutableDeleteResult = benchmark(`DELETE operation IMMUTABLE (${deleteCount} deletes)`, () => { - const randomId = Math.floor(Math.random() * (size - deleteCount)).toString(); - try { - immutableStore.delete(randomId); - } catch (e) { // eslint-disable-line no-unused-vars - // Record might not exist - } - }, deleteCount); + const immutableDeleteResult = benchmark( + `DELETE operation IMMUTABLE (${deleteCount} deletes)`, + () => { + const randomId = Math.floor(Math.random() * (size - deleteCount)).toString(); + try { + immutableStore.delete(randomId); + } catch (e) { + // eslint-disable-line 
no-unused-vars + // Record might not exist + } + }, + deleteCount, + ); results.push(immutableDeleteResult); }); @@ -180,20 +203,20 @@ function benchmarkCrudOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkQueryOperations (dataSizes) { +function benchmarkQueryOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateComparisonTestData(size); // Create stores with extensive indexing const mutableStore = haro(testData, { immutable: false, - index: ["department", "active", "tags", "age", "department|active"] + index: ["department", "active", "tags", "age", "department|active"], }); const immutableStore = haro(testData, { immutable: true, - index: ["department", "active", "tags", "age", "department|active"] + index: ["department", "active", "tags", "age", "department|active"], }); // FIND operations @@ -209,12 +232,12 @@ function benchmarkQueryOperations (dataSizes) { // FILTER operations const mutableFilterResult = benchmark(`FILTER operation MUTABLE (${size} records)`, () => { - return mutableStore.filter(record => record.age > 30); + return mutableStore.filter((record) => record.age > 30); }); results.push(mutableFilterResult); const immutableFilterResult = benchmark(`FILTER operation IMMUTABLE (${size} records)`, () => { - return immutableStore.filter(record => record.age > 30); + return immutableStore.filter((record) => record.age > 30); }); results.push(immutableFilterResult); @@ -222,7 +245,7 @@ function benchmarkQueryOperations (dataSizes) { const mutableWhereResult = benchmark(`WHERE operation MUTABLE (${size} records)`, () => { return mutableStore.where({ department: ["Dept 0", "Dept 1"], - active: true + active: true, }); }); results.push(mutableWhereResult); @@ -230,7 +253,7 @@ function benchmarkQueryOperations (dataSizes) { const immutableWhereResult = benchmark(`WHERE operation 
IMMUTABLE (${size} records)`, () => { return immutableStore.where({ department: ["Dept 0", "Dept 1"], - active: true + active: true, }); }); results.push(immutableWhereResult); @@ -255,10 +278,10 @@ function benchmarkQueryOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkTransformationOperations (dataSizes) { +function benchmarkTransformationOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateComparisonTestData(size); // Create stores @@ -267,68 +290,45 @@ function benchmarkTransformationOperations (dataSizes) { // MAP operations const mutableMapResult = benchmark(`MAP operation MUTABLE (${size} records)`, () => { - return mutableStore.map(record => ({ + return mutableStore.map((record) => ({ id: record.id, name: record.name, - summary: `${record.name} - ${record.department}` + summary: `${record.name} - ${record.department}`, })); }); results.push(mutableMapResult); const immutableMapResult = benchmark(`MAP operation IMMUTABLE (${size} records)`, () => { - return immutableStore.map(record => ({ + return immutableStore.map((record) => ({ id: record.id, name: record.name, - summary: `${record.name} - ${record.department}` + summary: `${record.name} - ${record.department}`, })); }); results.push(immutableMapResult); - // REDUCE operations - const mutableReduceResult = benchmark(`REDUCE operation MUTABLE (${size} records)`, () => { - return mutableStore.reduce((acc, record) => { - acc[record.department] = (acc[record.department] || 0) + 1; - - return acc; - }, {}); - }); - results.push(mutableReduceResult); - - const immutableReduceResult = benchmark(`REDUCE operation IMMUTABLE (${size} records)`, () => { - return immutableStore.reduce((acc, record) => { - acc[record.department] = (acc[record.department] || 0) + 1; - - return acc; - }, {}); - }); - results.push(immutableReduceResult); - - // 
SORT operations - const mutableSortResult = benchmark(`SORT operation MUTABLE (${size} records)`, () => { - return mutableStore.sort((a, b) => a.score - b.score); - }, 10); - results.push(mutableSortResult); - - const immutableSortResult = benchmark(`SORT operation IMMUTABLE (${size} records)`, () => { - return immutableStore.sort((a, b) => a.score - b.score); - }, 10); - results.push(immutableSortResult); - // forEach operations const mutableForEachResult = benchmark(`forEach operation MUTABLE (${size} records)`, () => { let count = 0; - mutableStore.forEach(() => { count++; }); + mutableStore.forEach(() => { + count++; + }); return count; }); results.push(mutableForEachResult); - const immutableForEachResult = benchmark(`forEach operation IMMUTABLE (${size} records)`, () => { - let count = 0; - immutableStore.forEach(() => { count++; }); + const immutableForEachResult = benchmark( + `forEach operation IMMUTABLE (${size} records)`, + () => { + let count = 0; + immutableStore.forEach(() => { + count++; + }); - return count; - }); + return count; + }, + ); results.push(immutableForEachResult); }); @@ -340,10 +340,10 @@ function benchmarkTransformationOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkMemoryUsage (dataSizes) { +function benchmarkMemoryUsage(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateComparisonTestData(size); // Test memory usage for mutable store @@ -405,7 +405,7 @@ function benchmarkMemoryUsage (dataSizes) { mutableOpsMemory: (memAfterMutableOps - memAfterMutable) / 1024 / 1024, // MB immutableOpsMemory: (memAfterImmutableOps - memAfterMutableOps) / 1024 / 1024, // MB totalMutableMemory: (memAfterMutableOps - memBefore) / 1024 / 1024, // MB - totalImmutableMemory: (memAfterImmutableOps - memAfterMutable) / 1024 / 1024 // MB + totalImmutableMemory: (memAfterImmutableOps - 
memAfterMutable) / 1024 / 1024, // MB }); }); @@ -417,10 +417,10 @@ function benchmarkMemoryUsage (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkDataSafety (dataSizes) { +function benchmarkDataSafety(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateComparisonTestData(Math.min(size, 100)); // Limit for safety tests // Create stores @@ -438,7 +438,8 @@ function benchmarkDataSafety (dataSizes) { // This should work for mutable mutableRecord.name = "MUTATED"; mutableRecord.tags.push("new-tag"); - } catch (e) { // eslint-disable-line no-unused-vars + } catch (e) { + // eslint-disable-line no-unused-vars // Mutation failed } @@ -446,7 +447,8 @@ function benchmarkDataSafety (dataSizes) { // This should fail for immutable immutableRecord.name = "MUTATED"; immutableRecord.tags.push("new-tag"); - } catch (e) { // eslint-disable-line no-unused-vars + } catch (e) { + // eslint-disable-line no-unused-vars // Expected failure for immutable } @@ -466,7 +468,7 @@ function benchmarkDataSafety (dataSizes) { immutableMutated: immutableRecordAfter.name === "MUTATED", mutableProtected: mutableRecordAfter.name !== "MUTATED", immutableProtected: immutableRecordAfter.name !== "MUTATED", - recommendation: "Use immutable mode for data safety in multi-consumer environments" + recommendation: "Use immutable mode for data safety in multi-consumer environments", }); }); @@ -478,17 +480,19 @@ function benchmarkDataSafety (dataSizes) { * @param {Array} results - All benchmark results * @returns {Object} Performance recommendations */ -function generatePerformanceRecommendations (results) { +function generatePerformanceRecommendations(results) { const recommendations = { general: [], mutableAdvantages: [], immutableAdvantages: [], - useCase: {} + useCase: {}, }; // Analyze results to generate recommendations - const mutableOps = 
results.filter(r => r.name.includes("MUTABLE")).map(r => r.opsPerSecond); - const immutableOps = results.filter(r => r.name.includes("IMMUTABLE")).map(r => r.opsPerSecond); + const mutableOps = results.filter((r) => r.name.includes("MUTABLE")).map((r) => r.opsPerSecond); + const immutableOps = results + .filter((r) => r.name.includes("IMMUTABLE")) + .map((r) => r.opsPerSecond); const avgMutablePerf = mutableOps.reduce((a, b) => a + b, 0) / mutableOps.length; const avgImmutablePerf = immutableOps.reduce((a, b) => a + b, 0) / immutableOps.length; @@ -509,7 +513,7 @@ function generatePerformanceRecommendations (results) { "Data safety critical": "Use immutable mode to prevent accidental mutations", "Multi-consumer reads": "Immutable mode provides safer concurrent access", "Memory constrained": "Mutable mode may use less memory", - "Development/debugging": "Immutable mode helps catch mutation bugs early" + "Development/debugging": "Immutable mode helps catch mutation bugs early", }; return recommendations; @@ -519,14 +523,14 @@ function generatePerformanceRecommendations (results) { * Prints formatted benchmark results with detailed analysis * @param {Array} results - Array of benchmark results */ -function printResults (results) { +function printResults(results) { console.log("\n" + "=".repeat(80)); console.log("IMMUTABLE vs MUTABLE COMPARISON RESULTS"); console.log("=".repeat(80)); // Group results by operation type const groupedResults = {}; - results.forEach(result => { + results.forEach((result) => { const operation = result.name.split(" ").slice(-2, -1)[0] || "Analysis"; if (!groupedResults[operation]) { groupedResults[operation] = []; @@ -534,18 +538,25 @@ function printResults (results) { groupedResults[operation].push(result); }); - Object.keys(groupedResults).forEach(operation => { + Object.keys(groupedResults).forEach((operation) => { console.log(`\n${operation.toUpperCase()} OPERATIONS:`); console.log("-".repeat(50)); - 
groupedResults[operation].forEach(result => { + groupedResults[operation].forEach((result) => { if (result.opsPerSecond > 0) { - const opsIndicator = result.opsPerSecond > 1000 ? "✅" : - result.opsPerSecond > 100 ? "🟡" : - result.opsPerSecond > 10 ? "🟠" : "🔴"; + const opsIndicator = + result.opsPerSecond > 1000 + ? "✅" + : result.opsPerSecond > 100 + ? "🟡" + : result.opsPerSecond > 10 + ? "🟠" + : "🔴"; console.log(`${opsIndicator} ${result.name}`); - console.log(` ${result.opsPerSecond.toLocaleString()} ops/sec | ${result.totalTime.toFixed(2)}ms total`); + console.log( + ` ${result.opsPerSecond.toLocaleString()} ops/sec | ${result.totalTime.toFixed(2)}ms total`, + ); } else { console.log(`📊 ${result.name}`); } @@ -557,12 +568,18 @@ function printResults (results) { } if (result.mutableStoreMemory !== undefined) { - console.log(` Memory - Mutable store: ${result.mutableStoreMemory.toFixed(2)}MB | Immutable store: ${result.immutableStoreMemory.toFixed(2)}MB`); - console.log(` Memory - Mutable ops: +${result.mutableOpsMemory.toFixed(2)}MB | Immutable ops: +${result.immutableOpsMemory.toFixed(2)}MB`); + console.log( + ` Memory - Mutable store: ${result.mutableStoreMemory.toFixed(2)}MB | Immutable store: ${result.immutableStoreMemory.toFixed(2)}MB`, + ); + console.log( + ` Memory - Mutable ops: +${result.mutableOpsMemory.toFixed(2)}MB | Immutable ops: +${result.immutableOpsMemory.toFixed(2)}MB`, + ); } if (result.mutableMutated !== undefined) { - console.log(` Mutable protection: ${result.mutableProtected ? "❌" : "✅"} | Immutable protection: ${result.immutableProtected ? "✅" : "❌"}`); + console.log( + ` Mutable protection: ${result.mutableProtected ? "❌" : "✅"} | Immutable protection: ${result.immutableProtected ? 
"✅" : "❌"}`, + ); console.log(` ${result.recommendation}`); } @@ -577,10 +594,10 @@ function printResults (results) { console.log("=".repeat(80)); console.log("\nGeneral Findings:"); - recommendations.general.forEach(rec => console.log(`• ${rec}`)); + recommendations.general.forEach((rec) => console.log(`• ${rec}`)); console.log("\nUse Case Recommendations:"); - Object.keys(recommendations.useCase).forEach(useCase => { + Object.keys(recommendations.useCase).forEach((useCase) => { console.log(`• ${useCase}: ${recommendations.useCase[useCase]}`); }); @@ -591,7 +608,7 @@ function printResults (results) { * Runs all immutable vs mutable comparison benchmarks * @returns {Array} Array of all benchmark results */ -function runImmutableComparisonBenchmarks () { +function runImmutableComparisonBenchmarks() { console.log("Starting Immutable vs Mutable Comparison Benchmarks...\n"); const dataSizes = [100, 1000, 5000]; diff --git a/benchmarks/index-operations.js b/benchmarks/index-operations.js index e4acf074..22078b81 100644 --- a/benchmarks/index-operations.js +++ b/benchmarks/index-operations.js @@ -6,7 +6,7 @@ import { haro } from "../dist/haro.js"; * @param {number} size - Number of records to generate * @returns {Array} Array of test records optimized for indexing */ -function generateIndexTestData (size) { +function generateIndexTestData(size) { const data = []; const categories = ["A", "B", "C", "D", "E"]; const statuses = ["active", "inactive", "pending", "suspended"]; @@ -24,21 +24,17 @@ function generateIndexTestData (size) { projectId: Math.floor(i / 100), // Creates groups of 100 timestamp: new Date(2024, 0, 1, 0, 0, 0, i * 1000), score: Math.floor(Math.random() * 1000), - tags: [ - `tag${i % 20}`, - `category${i % 10}`, - `type${i % 5}` - ], + tags: [`tag${i % 20}`, `category${i % 10}`, `type${i % 5}`], metadata: { level: Math.floor(Math.random() * 10), department: `Dept${i % 15}`, - location: `Location${i % 25}` + location: `Location${i % 25}`, }, flags: { 
isPublic: Math.random() > 0.5, isVerified: Math.random() > 0.3, - isUrgent: Math.random() > 0.9 - } + isUrgent: Math.random() > 0.9, + }, }); } @@ -52,7 +48,7 @@ function generateIndexTestData (size) { * @param {number} iterations - Number of iterations to run * @returns {Object} Benchmark results */ -function benchmark (name, fn, iterations = 100) { +function benchmark(name, fn, iterations = 100) { const start = performance.now(); for (let i = 0; i < iterations; i++) { fn(); @@ -66,7 +62,7 @@ function benchmark (name, fn, iterations = 100) { iterations, totalTime: total, avgTime, - opsPerSecond: Math.floor(1000 / avgTime) + opsPerSecond: Math.floor(1000 / avgTime), }; } @@ -75,33 +71,45 @@ function benchmark (name, fn, iterations = 100) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkSingleIndexOperations (dataSizes) { +function benchmarkSingleIndexOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Initial index creation during construction - const initialIndexResult = benchmark(`CREATE initial indexes (${size} records)`, () => { - const store = haro(testData, { - index: ["category", "status", "priority", "region", "userId"] - }); + const initialIndexResult = benchmark( + `CREATE initial indexes (${size} records)`, + () => { + const store = haro(testData, { + index: ["category", "status", "priority", "region", "userId"], + }); - return store; - }, 10); + return store; + }, + 10, + ); results.push(initialIndexResult); // Reindex single field const store = haro(testData, { index: ["category"] }); - const reindexSingleResult = benchmark(`REINDEX single field (${size} records)`, () => { - store.reindex("status"); - }, 10); + const reindexSingleResult = benchmark( + `REINDEX single field (${size} records)`, + () => { + store.reindex("status"); + }, + 10, + ); 
results.push(reindexSingleResult); // Reindex all fields - const reindexAllResult = benchmark(`REINDEX all fields (${size} records)`, () => { - store.reindex(); - }, 5); + const reindexAllResult = benchmark( + `REINDEX all fields (${size} records)`, + () => { + store.reindex(); + }, + 5, + ); results.push(reindexAllResult); }); @@ -113,31 +121,35 @@ function benchmarkSingleIndexOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkCompositeIndexOperations (dataSizes) { +function benchmarkCompositeIndexOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Create composite indexes - const compositeIndexResult = benchmark(`CREATE composite indexes (${size} records)`, () => { - const store = haro(testData, { - index: [ - "category|status", - "region|priority", - "userId|projectId", - "category|status|priority", - "region|category|status" - ] - }); - - return store; - }, 5); + const compositeIndexResult = benchmark( + `CREATE composite indexes (${size} records)`, + () => { + const store = haro(testData, { + index: [ + "category|status", + "region|priority", + "userId|projectId", + "category|status|priority", + "region|category|status", + ], + }); + + return store; + }, + 5, + ); results.push(compositeIndexResult); // Query composite indexes const store = haro(testData, { - index: ["category|status", "region|priority", "userId|projectId"] + index: ["category|status", "region|priority", "userId|projectId"], }); const queryCompositeResult = benchmark(`QUERY composite index (${size} records)`, () => { @@ -159,20 +171,24 @@ function benchmarkCompositeIndexOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkArrayIndexOperations (dataSizes) { +function 
benchmarkArrayIndexOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Create array field indexes - const arrayIndexResult = benchmark(`CREATE array indexes (${size} records)`, () => { - const store = haro(testData, { - index: ["tags", "tags|category", "tags|status"] - }); + const arrayIndexResult = benchmark( + `CREATE array indexes (${size} records)`, + () => { + const store = haro(testData, { + index: ["tags", "tags|category", "tags|status"], + }); - return store; - }, 5); + return store; + }, + 5, + ); results.push(arrayIndexResult); // Query array indexes @@ -197,20 +213,24 @@ function benchmarkArrayIndexOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkNestedIndexOperations (dataSizes) { +function benchmarkNestedIndexOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Create nested field indexes (simulated with dot notation) - const nestedIndexResult = benchmark(`CREATE nested indexes (${size} records)`, () => { - const store = haro(testData, { - index: ["metadata.level", "metadata.department", "flags.isPublic"] - }); + const nestedIndexResult = benchmark( + `CREATE nested indexes (${size} records)`, + () => { + const store = haro(testData, { + index: ["metadata.level", "metadata.department", "flags.isPublic"], + }); - return store; - }, 5); + return store; + }, + 5, + ); results.push(nestedIndexResult); }); @@ -222,50 +242,58 @@ function benchmarkNestedIndexOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkIndexModificationOperations (dataSizes) { +function benchmarkIndexModificationOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + 
dataSizes.forEach((size) => { const testData = generateIndexTestData(size); const store = haro(testData, { - index: ["category", "status", "priority", "category|status", "userId"] + index: ["category", "status", "priority", "category|status", "userId"], }); // Benchmark SET operations with existing indexes - const setWithIndexResult = benchmark(`SET with indexes (${size} records)`, () => { - const randomId = Math.floor(Math.random() * size); - store.set(randomId, { - ...testData[randomId], - category: "Z", - status: "updated", - timestamp: new Date() - }); - }, 100); + const setWithIndexResult = benchmark( + `SET with indexes (${size} records)`, + () => { + const randomId = Math.floor(Math.random() * size); + store.set(randomId, { + ...testData[randomId], + category: "Z", + status: "updated", + timestamp: new Date(), + }); + }, + 100, + ); results.push(setWithIndexResult); // Benchmark DELETE operations with existing indexes - const deleteWithIndexResult = benchmark(`DELETE with indexes (${size} records)`, () => { - const keys = Array.from(store.keys()); - if (keys.length > 0) { - const randomKey = keys[Math.floor(Math.random() * keys.length)]; - try { - store.del(randomKey); - } catch (e) { // eslint-disable-line no-unused-vars - // Record might already be deleted + const deleteWithIndexResult = benchmark( + `DELETE with indexes (${size} records)`, + () => { + const keys = Array.from(store.keys()); + if (keys.length > 0) { + const randomKey = keys[Math.floor(Math.random() * keys.length)]; + store.delete(randomKey); } - } - }, 50); + }, + 50, + ); results.push(deleteWithIndexResult); // Benchmark BATCH operations with existing indexes - const batchWithIndexResult = benchmark(`BATCH with indexes (${size} records)`, () => { - const batchData = testData.slice(0, 10).map(item => ({ - ...item, - category: "BATCH", - status: "batch_updated" - })); - store.batch(batchData, "set"); - }, 10); + const batchWithIndexResult = benchmark( + `BATCH with indexes (${size} 
records)`, + () => { + const batchData = testData.slice(0, 10).map((item) => ({ + ...item, + category: "BATCH", + status: "batch_updated", + })); + store.batch(batchData, "set"); + }, + 10, + ); results.push(batchWithIndexResult); }); @@ -277,41 +305,61 @@ function benchmarkIndexModificationOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkIndexMemoryOperations (dataSizes) { +function benchmarkIndexMemoryOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); const store = haro(testData, { - index: ["category", "status", "priority", "region", "userId", "category|status", "region|priority"] + index: [ + "category", + "status", + "priority", + "region", + "userId", + "category|status", + "region|priority", + ], }); // Benchmark index dump operations - const dumpIndexResult = benchmark(`DUMP indexes (${size} records)`, () => { - store.dump("indexes"); - }, 10); + const dumpIndexResult = benchmark( + `DUMP indexes (${size} records)`, + () => { + store.dump("indexes"); + }, + 10, + ); results.push(dumpIndexResult); // Benchmark index override operations const indexData = store.dump("indexes"); - const overrideIndexResult = benchmark(`OVERRIDE indexes (${size} records)`, () => { - const newStore = haro(); - newStore.override(indexData, "indexes"); - }, 10); + const overrideIndexResult = benchmark( + `OVERRIDE indexes (${size} records)`, + () => { + const newStore = haro(); + newStore.override(indexData, "indexes"); + }, + 10, + ); results.push(overrideIndexResult); // Benchmark index size measurement - const indexSizeResult = benchmark(`INDEX size check (${size} records)`, () => { - const indexes = store.indexes; - let totalSize = 0; - indexes.forEach(index => { - index.forEach(set => { - totalSize += set.size; + const indexSizeResult = benchmark( + `INDEX size check (${size} 
records)`, + () => { + const indexes = store.indexes; + let totalSize = 0; + indexes.forEach((index) => { + index.forEach((set) => { + totalSize += set.size; + }); }); - }); - return totalSize; - }, 100); + return totalSize; + }, + 100, + ); results.push(indexSizeResult); }); @@ -323,45 +371,62 @@ function benchmarkIndexMemoryOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkIndexComparison (dataSizes) { +function benchmarkIndexComparison(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Store without indexes const storeNoIndex = haro(testData); - const filterNoIndexResult = benchmark(`FILTER no index (${size} records)`, () => { - storeNoIndex.filter(record => record.category === "A"); - }, 10); + const filterNoIndexResult = benchmark( + `FILTER no index (${size} records)`, + () => { + storeNoIndex.filter((record) => record.category === "A"); + }, + 10, + ); results.push(filterNoIndexResult); // Store with indexes const storeWithIndex = haro(testData, { index: ["category"] }); - const findWithIndexResult = benchmark(`FIND with index (${size} records)`, () => { - storeWithIndex.find({ category: "A" }); - }, 100); + const findWithIndexResult = benchmark( + `FIND with index (${size} records)`, + () => { + storeWithIndex.find({ category: "A" }); + }, + 100, + ); results.push(findWithIndexResult); // Complex query without indexes - const complexFilterResult = benchmark(`COMPLEX filter no index (${size} records)`, () => { - storeNoIndex.filter(record => - record.category === "A" && - record.status === "active" && - record.priority === "high" - ); - }, 10); + const complexFilterResult = benchmark( + `COMPLEX filter no index (${size} records)`, + () => { + storeNoIndex.filter( + (record) => + record.category === "A" && record.status === "active" && record.priority === "high", + ); + 
}, + 10, + ); results.push(complexFilterResult); // Complex query with indexes - const storeComplexIndex = haro(testData, { index: ["category", "status", "priority", "category|status|priority"] }); - const complexFindResult = benchmark(`COMPLEX find with index (${size} records)`, () => { - storeComplexIndex.find({ - category: "A", - status: "active", - priority: "high" - }); - }, 100); + const storeComplexIndex = haro(testData, { + index: ["category", "status", "priority", "category|status|priority"], + }); + const complexFindResult = benchmark( + `COMPLEX find with index (${size} records)`, + () => { + storeComplexIndex.find({ + category: "A", + status: "active", + priority: "high", + }); + }, + 100, + ); results.push(complexFindResult); }); @@ -372,13 +437,19 @@ function benchmarkIndexComparison (dataSizes) { * Prints benchmark results in a formatted table * @param {Array} results - Array of benchmark results */ -function printResults (results) { +function printResults(results) { console.log("\n=== INDEX OPERATIONS BENCHMARK RESULTS ===\n"); - console.log("Operation".padEnd(40) + "Iterations".padEnd(12) + "Total Time (ms)".padEnd(18) + "Avg Time (ms)".padEnd(16) + "Ops/Second"); + console.log( + "Operation".padEnd(40) + + "Iterations".padEnd(12) + + "Total Time (ms)".padEnd(18) + + "Avg Time (ms)".padEnd(16) + + "Ops/Second", + ); console.log("-".repeat(98)); - results.forEach(result => { + results.forEach((result) => { const name = result.name.padEnd(40); const iterations = result.iterations.toString().padEnd(12); const totalTime = result.totalTime.toFixed(2).padEnd(18); @@ -394,7 +465,7 @@ function printResults (results) { /** * Main function to run all index operations benchmarks */ -function runIndexOperationsBenchmarks () { +function runIndexOperationsBenchmarks() { console.log("📊 Running Index Operations Benchmarks...\n"); const dataSizes = [1000, 10000, 50000]; diff --git a/benchmarks/index.js b/benchmarks/index.js index 064bf35a..12e4eb68 100644 --- 
a/benchmarks/index.js +++ b/benchmarks/index.js @@ -1,360 +1,325 @@ -import { runBasicOperationsBenchmarks } from "./basic-operations.js"; -import { runSearchFilterBenchmarks } from "./search-filter.js"; -import { runIndexOperationsBenchmarks } from "./index-operations.js"; -import { runMemoryBenchmarks } from "./memory-usage.js"; -import { runComparisonBenchmarks } from "./comparison.js"; -import { runUtilityOperationsBenchmarks } from "./utility-operations.js"; -import { runPaginationBenchmarks } from "./pagination.js"; -import { runPersistenceBenchmarks } from "./persistence.js"; -import { runImmutableComparisonBenchmarks } from "./immutable-comparison.js"; +import { Bench } from "tinybench"; +import { haro } from "../dist/haro.js"; /** - * Formats duration in milliseconds to human-readable format - * @param {number} ms - Duration in milliseconds - * @returns {string} Formatted duration string + * Creates a benchmark suite for basic CRUD operations + * @param {number} size - Number of records to test + * @returns {Bench} Configured benchmark suite */ -function formatDuration (ms) { - if (ms < 1000) { - return `${ms.toFixed(0)}ms`; - } else if (ms < 60000) { - return `${(ms / 1000).toFixed(1)}s`; - } else { - return `${(ms / 60000).toFixed(1)}m`; - } -} - -/** - * Generates a summary report of all benchmark results - * @param {Object} results - All benchmark results - * @returns {Object} Summary report - */ -function generateSummaryReport (results) { - const { basicOps, searchFilter, indexOps, memory, comparison, utilities, pagination, persistence, immutableComparison } = results; - - const summary = { - totalTests: 0, - totalTime: 0, - categories: {}, - performance: { - fastest: { name: "", opsPerSecond: 0 }, - slowest: { name: "", opsPerSecond: Infinity }, - mostMemoryEfficient: { name: "", memoryUsed: Infinity }, - leastMemoryEfficient: { name: "", memoryUsed: 0 } - }, - recommendations: [] - }; - - // Process basic operations - if (basicOps && basicOps.length 
> 0) { - summary.categories.basicOperations = { - testCount: basicOps.length, - totalTime: basicOps.reduce((sum, test) => sum + test.totalTime, 0), - avgOpsPerSecond: basicOps.reduce((sum, test) => sum + test.opsPerSecond, 0) / basicOps.length - }; - - // Find fastest and slowest operations - basicOps.forEach(test => { - if (test.opsPerSecond > summary.performance.fastest.opsPerSecond) { - summary.performance.fastest = { name: test.name, opsPerSecond: test.opsPerSecond }; +function createBasicOperationsBench(size = 10000) { + const bench = new Bench({ time: 500 }); + const testData = Array.from({ length: size }, (_, i) => ({ + id: i, + name: `test${i}`, + category: "A", + })); + const store = haro(testData); + + bench + .add(`store.set() ${size} records`, () => { + const newStore = haro(); + for (let i = 0; i < size; i++) { + newStore.set(i, testData[i]); } - if (test.opsPerSecond < summary.performance.slowest.opsPerSecond) { - summary.performance.slowest = { name: test.name, opsPerSecond: test.opsPerSecond }; + }) + .add(`store.get() ${size} records`, () => { + for (let i = 0; i < size; i++) { + store.get(i); } - }); - } - - // Process search and filter operations - if (searchFilter && searchFilter.length > 0) { - summary.categories.searchFilter = { - testCount: searchFilter.length, - totalTime: searchFilter.reduce((sum, test) => sum + test.totalTime, 0), - avgOpsPerSecond: searchFilter.reduce((sum, test) => sum + test.opsPerSecond, 0) / searchFilter.length - }; - } - - // Process index operations - if (indexOps && indexOps.length > 0) { - summary.categories.indexOperations = { - testCount: indexOps.length, - totalTime: indexOps.reduce((sum, test) => sum + test.totalTime, 0), - avgOpsPerSecond: indexOps.reduce((sum, test) => sum + test.opsPerSecond, 0) / indexOps.length - }; - } - - // Process memory results - if (memory && memory.results && memory.results.length > 0) { - summary.categories.memoryUsage = { - testCount: memory.results.length, - totalTime: 
memory.results.reduce((sum, test) => sum + test.executionTime, 0), - avgHeapDelta: memory.results.reduce((sum, test) => sum + test.memoryDelta.heapUsed, 0) / memory.results.length - }; - - // Find memory efficiency - memory.results.forEach(test => { - if (test.memoryDelta.heapUsed < summary.performance.mostMemoryEfficient.memoryUsed) { - summary.performance.mostMemoryEfficient = { - name: test.description, - memoryUsed: test.memoryDelta.heapUsed - }; + }) + .add(`store.has() ${size} keys`, () => { + for (let i = 0; i < size; i++) { + store.has(i); } - if (test.memoryDelta.heapUsed > summary.performance.leastMemoryEfficient.memoryUsed) { - summary.performance.leastMemoryEfficient = { - name: test.description, - memoryUsed: test.memoryDelta.heapUsed - }; + }) + .add(`store.delete() ${size} records`, () => { + const deleteStore = haro(testData); + for (let i = 0; i < size; i++) { + deleteStore.delete(i); } }); - } - - // Process comparison results - if (comparison && comparison.allResults && comparison.allResults.length > 0) { - summary.categories.comparison = { - testCount: comparison.allResults.length, - totalTime: comparison.allResults.reduce((sum, test) => sum + test.totalTime, 0), - avgOpsPerSecond: comparison.allResults.reduce((sum, test) => sum + test.opsPerSecond, 0) / comparison.allResults.length - }; - } - - // Process utility operations - if (utilities && utilities.length > 0) { - summary.categories.utilityOperations = { - testCount: utilities.length, - totalTime: utilities.reduce((sum, test) => sum + test.totalTime, 0), - avgOpsPerSecond: utilities.reduce((sum, test) => sum + test.opsPerSecond, 0) / utilities.length - }; - } - - // Process pagination results - if (pagination && pagination.length > 0) { - summary.categories.pagination = { - testCount: pagination.length, - totalTime: pagination.reduce((sum, test) => sum + test.totalTime, 0), - avgOpsPerSecond: pagination.reduce((sum, test) => sum + test.opsPerSecond, 0) / pagination.length - }; - } - - // 
Process persistence results - if (persistence && persistence.length > 0) { - summary.categories.persistence = { - testCount: persistence.length, - totalTime: persistence.reduce((sum, test) => sum + test.totalTime, 0), - avgOpsPerSecond: persistence.filter(test => test.opsPerSecond > 0).reduce((sum, test) => sum + test.opsPerSecond, 0) / persistence.filter(test => test.opsPerSecond > 0).length || 0 - }; - } - // Process immutable comparison results - if (immutableComparison && immutableComparison.length > 0) { - summary.categories.immutableComparison = { - testCount: immutableComparison.length, - totalTime: immutableComparison.reduce((sum, test) => sum + test.totalTime, 0), - avgOpsPerSecond: immutableComparison.filter(test => test.opsPerSecond > 0).reduce((sum, test) => sum + test.opsPerSecond, 0) / immutableComparison.filter(test => test.opsPerSecond > 0).length || 0 - }; - } - - // Calculate totals - summary.totalTests = Object.values(summary.categories).reduce((sum, cat) => sum + cat.testCount, 0); - summary.totalTime = Object.values(summary.categories).reduce((sum, cat) => sum + cat.totalTime, 0); - - // Generate recommendations - if (summary.categories.basicOperations && summary.categories.basicOperations.avgOpsPerSecond > 10000) { - summary.recommendations.push("✅ Basic operations performance is excellent for most use cases"); - } - - if (summary.categories.indexOperations && summary.categories.searchFilter) { - const indexAvg = summary.categories.indexOperations.avgOpsPerSecond; - const searchAvg = summary.categories.searchFilter.avgOpsPerSecond; - if (indexAvg > searchAvg * 2) { - summary.recommendations.push("💡 Consider using indexed queries (find) instead of filters for better performance"); - } - } + return bench; +} - if (summary.categories.memoryUsage && summary.categories.memoryUsage.avgHeapDelta < 10) { - summary.recommendations.push("✅ Memory usage is efficient for typical workloads"); - } else if (summary.categories.memoryUsage && 
summary.categories.memoryUsage.avgHeapDelta > 50) { - summary.recommendations.push("⚠️ Consider optimizing memory usage for large datasets"); - } +/** + * Creates a benchmark suite for search and filter operations + * @param {number} size - Number of records to test + * @returns {Bench} Configured benchmark suite + */ +function createSearchFilterBench(size = 10000) { + const bench = new Bench({ time: 500 }); + const testData = Array.from({ length: size }, (_, i) => ({ + id: i, + name: `User ${i}`, + department: i % 5 === 0 ? "Engineering" : "Marketing", + skills: ["JavaScript", "Python"], + city: "New York", + active: true, + tags: [`tag${i % 10}`], + age: 25 + (i % 30), + salary: 50000 + (i % 100000), + })); + + const store = haro(testData, { + index: ["department", "skills", "city", "active", "tags", "age", "salary"], + warnOnFullScan: false, + }); - if (summary.categories.comparison) { - summary.recommendations.push("📊 Review comparison results to understand trade-offs vs native structures"); - } + bench + .add(`FIND by indexed field (${size} records)`, () => { + store.find({ department: "Engineering" }); + }) + .add(`WHERE by indexed field (${size} records)`, () => { + store.where({ department: "Engineering" }); + }) + .add(`SEARCH in index (${size} records)`, () => { + store.search("Engineering", "department"); + }) + .add(`FILTER all records (${size} records)`, () => { + store.filter((record) => record.active === true); + }); - if (summary.categories.utilityOperations && summary.categories.utilityOperations.avgOpsPerSecond > 1000) { - summary.recommendations.push("✅ Utility operations (clone, merge, freeze) perform well"); - } + return bench; +} - if (summary.categories.pagination && summary.categories.pagination.avgOpsPerSecond > 100) { - summary.recommendations.push("✅ Pagination performance is suitable for typical UI requirements"); - } +/** + * Creates a benchmark suite for index operations + * @param {number} size - Number of records to test + * @returns 
{Bench} Configured benchmark suite + */ +function createIndexOperationsBench(size = 10000) { + const bench = new Bench({ time: 500 }); + const testData = Array.from({ length: size }, (_, i) => ({ + id: i, + category: i % 5 === 0 ? "A" : "B", + status: "active", + priority: "high", + region: "north", + userId: Math.floor(i / 10), + timestamp: new Date(), + score: Math.floor(Math.random() * 1000), + tags: [`tag${i % 20}`], + })); + + bench + .add(`CREATE indexes (${size} records)`, () => { + const store = haro(testData, { + index: ["category", "status", "priority", "region", "userId"], + }); + return store; + }) + .add(`FIND with index (${size} records)`, () => { + const store = haro(testData, { index: ["category"] }); + store.find({ category: "A" }); + }) + .add(`REINDEX single field (${size} records)`, () => { + const store = haro(testData, { index: ["category"] }); + store.reindex("status"); + }); - if (summary.categories.persistence) { - summary.recommendations.push("💾 Persistence operations available for data serialization needs"); - } + return bench; +} - if (summary.categories.immutableComparison) { - summary.recommendations.push("🔒 Review immutable vs mutable comparison for data safety vs performance trade-offs"); - } +/** + * Creates a benchmark suite for utility operations + * @param {number} size - Number of records to test + * @returns {Bench} Configured benchmark suite + */ +function createUtilityOperationsBench(size = 1000) { + const bench = new Bench({ time: 500 }); + const store = haro(); + const testData = { id: 1, name: "test", tags: ["a", "b", "c"] }; + + bench + .add(`toArray() (${size} iterations)`, () => { + for (let i = 0; i < size; i++) { + store.toArray(); + } + }) + .add(`entries() (${size} iterations)`, () => { + for (let i = 0; i < size; i++) { + Array.from(store.entries()); + } + }) + .add(`keys() (${size} iterations)`, () => { + for (let i = 0; i < size; i++) { + Array.from(store.keys()); + } + }) + .add(`values() (${size} iterations)`, 
() => { + for (let i = 0; i < size; i++) { + Array.from(store.values()); + } + }); - return summary; + return bench; } /** - * Prints the summary report - * @param {Object} summary - Summary report object + * Creates a benchmark suite for pagination operations + * @param {number} size - Number of records to test + * @returns {Bench} Configured benchmark suite */ -function printSummaryReport (summary) { - console.log("\n" + "=".repeat(80)); - console.log("🎯 HARO BENCHMARK SUMMARY REPORT"); - console.log("=".repeat(80)); - - console.log("\n📊 OVERVIEW:"); - console.log(` Total Tests: ${summary.totalTests}`); - console.log(` Total Time: ${formatDuration(summary.totalTime)}`); - console.log(` Categories: ${Object.keys(summary.categories).length}`); - - console.log("\n🏆 PERFORMANCE HIGHLIGHTS:"); - console.log(` Fastest Operation: ${summary.performance.fastest.name}`); - console.log(` └── ${summary.performance.fastest.opsPerSecond.toLocaleString()} ops/second`); - console.log(` Slowest Operation: ${summary.performance.slowest.name}`); - console.log(` └── ${summary.performance.slowest.opsPerSecond.toLocaleString()} ops/second`); - - if (summary.performance.mostMemoryEfficient.memoryUsed !== Infinity) { - console.log("\n💾 MEMORY EFFICIENCY:"); - console.log(` Most Efficient: ${summary.performance.mostMemoryEfficient.name}`); - console.log(` └── ${summary.performance.mostMemoryEfficient.memoryUsed.toFixed(2)} MB`); - console.log(` Least Efficient: ${summary.performance.leastMemoryEfficient.name}`); - console.log(` └── ${summary.performance.leastMemoryEfficient.memoryUsed.toFixed(2)} MB`); - } +function createPaginationBench(size = 10000) { + const bench = new Bench({ time: 500 }); + const testData = Array.from({ length: size }, (_, i) => ({ + id: i, + name: `Item ${i}`, + category: `cat${i % 10}`, + })); + const store = haro(testData); + + bench + .add(`LIMIT 10 (${size} records)`, () => { + store.limit(0, 10); + }) + .add(`LIMIT 50 (${size} records)`, () => { + 
store.limit(0, 50); + }) + .add(`LIMIT 100 (${size} records)`, () => { + store.limit(0, 100); + }) + .add(`LIMIT with offset (${size} records)`, () => { + store.limit(500, 50); + }); - console.log("\n📋 CATEGORY BREAKDOWN:"); - Object.entries(summary.categories).forEach(([category, stats]) => { - console.log(` ${category}:`); - console.log(` ├── Tests: ${stats.testCount}`); - console.log(` ├── Time: ${formatDuration(stats.totalTime)}`); - if (stats.avgOpsPerSecond) { - console.log(` └── Avg Performance: ${stats.avgOpsPerSecond.toFixed(0)} ops/second`); - } else if (stats.avgHeapDelta) { - console.log(` └── Avg Memory: ${stats.avgHeapDelta.toFixed(2)} MB`); - } - }); + return bench; +} - if (summary.recommendations.length > 0) { - console.log("\n💡 RECOMMENDATIONS:"); - summary.recommendations.forEach(rec => { - console.log(` ${rec}`); +/** + * Creates a benchmark suite for persistence operations + * @param {number} size - Number of records to test + * @returns {Bench} Configured benchmark suite + */ +function createPersistenceBench(size = 5000) { + const bench = new Bench({ time: 500 }); + const testData = Array.from({ length: size }, (_, i) => ({ + id: i, + name: `Record ${i}`, + department: `Dept${i % 10}`, + location: `Loc${i % 5}`, + active: true, + })); + const store = haro(testData, { index: ["department", "location", "active"] }); + + bench + .add(`DUMP records (${size} records)`, () => { + store.dump("records"); + }) + .add(`DUMP indexes (${size} records)`, () => { + store.dump("indexes"); + }) + .add(`OVERRIDE records (${size} records)`, () => { + const dump = store.dump("records"); + const newStore = haro(); + newStore.override(dump, "records"); }); - } - console.log("\n" + "=".repeat(80)); - console.log("🏁 BENCHMARK COMPLETE"); - console.log("=".repeat(80) + "\n"); + return bench; } /** - * Main function to run all benchmarks + * Runs all benchmark suites and displays results * @param {Object} options - Benchmark options - * @returns {Object} All benchmark 
results + * @returns {Promise} All benchmark results */ -async function runAllBenchmarks (options = {}) { +async function runAllBenchmarks(options = {}) { const { includeBasic = true, includeSearch = true, includeIndex = true, - includeMemory = true, - includeComparison = true, includeUtilities = true, includePagination = true, includePersistence = true, - includeImmutableComparison = true, - verbose = true + verbose = true, } = options; const results = {}; - const startTime = Date.now(); + const sizes = { + basic: 10000, + search: 10000, + index: 10000, + utility: 1000, + pagination: 10000, + persistence: 5000, + }; - console.log("🚀 Starting Haro Benchmark Suite...\n"); - console.log("📋 Benchmark Configuration:"); - console.log(` Node.js Version: ${process.version}`); - console.log(` Platform: ${process.platform}`); - console.log(` Architecture: ${process.arch}`); - console.log(` Memory: ${Math.round(process.memoryUsage().heapTotal / 1024 / 1024)} MB available\n`); + console.log("🚀 Starting Haro Benchmark Suite (tinybench)...\n"); + console.log(`Node.js: ${process.version}\n`); try { - // Run basic operations benchmarks if (includeBasic) { - if (verbose) console.log("⏳ Running basic operations benchmarks..."); - results.basicOps = runBasicOperationsBenchmarks(); - if (verbose) console.log("✅ Basic operations benchmarks completed\n"); + if (verbose) console.log("⏳ Running basic operations..."); + const bench = createBasicOperationsBench(sizes.basic); + await bench.run(); + results.basicOps = bench; + if (verbose) { + console.log("\n📊 BASIC OPERATIONS:"); + console.table(bench.table()); + } } - // Run search and filter benchmarks if (includeSearch) { - if (verbose) console.log("⏳ Running search and filter benchmarks..."); - results.searchFilter = runSearchFilterBenchmarks(); - if (verbose) console.log("✅ Search and filter benchmarks completed\n"); + if (verbose) console.log("⏳ Running search/filter operations..."); + const bench = 
createSearchFilterBench(sizes.search); + await bench.run(); + results.searchFilter = bench; + if (verbose) { + console.log("\n📊 SEARCH & FILTER:"); + console.table(bench.table()); + } } - // Run index operations benchmarks if (includeIndex) { - if (verbose) console.log("⏳ Running index operations benchmarks..."); - results.indexOps = runIndexOperationsBenchmarks(); - if (verbose) console.log("✅ Index operations benchmarks completed\n"); - } - - // Run memory benchmarks - if (includeMemory) { - if (verbose) console.log("⏳ Running memory usage benchmarks..."); - results.memory = runMemoryBenchmarks(); - if (verbose) console.log("✅ Memory usage benchmarks completed\n"); - } - - // Run comparison benchmarks - if (includeComparison) { - if (verbose) console.log("⏳ Running comparison benchmarks..."); - results.comparison = runComparisonBenchmarks(); - if (verbose) console.log("✅ Comparison benchmarks completed\n"); + if (verbose) console.log("⏳ Running index operations..."); + const bench = createIndexOperationsBench(sizes.index); + await bench.run(); + results.indexOps = bench; + if (verbose) { + console.log("\n📊 INDEX OPERATIONS:"); + console.table(bench.table()); + } } - // Run utility operations benchmarks if (includeUtilities) { - if (verbose) console.log("⏳ Running utility operations benchmarks..."); - results.utilities = runUtilityOperationsBenchmarks(); - if (verbose) console.log("✅ Utility operations benchmarks completed\n"); + if (verbose) console.log("⏳ Running utility operations..."); + const bench = createUtilityOperationsBench(sizes.utility); + await bench.run(); + results.utilities = bench; + if (verbose) { + console.log("\n📊 UTILITY OPERATIONS:"); + console.table(bench.table()); + } } - // Run pagination benchmarks if (includePagination) { - if (verbose) console.log("⏳ Running pagination benchmarks..."); - results.pagination = runPaginationBenchmarks(); - if (verbose) console.log("✅ Pagination benchmarks completed\n"); + if (verbose) console.log("⏳ 
Running pagination operations..."); + const bench = createPaginationBench(sizes.pagination); + await bench.run(); + results.pagination = bench; + if (verbose) { + console.log("\n📊 PAGINATION:"); + console.table(bench.table()); + } } - // Run persistence benchmarks if (includePersistence) { - if (verbose) console.log("⏳ Running persistence benchmarks..."); - results.persistence = runPersistenceBenchmarks(); - if (verbose) console.log("✅ Persistence benchmarks completed\n"); - } - - // Run immutable vs mutable comparison benchmarks - if (includeImmutableComparison) { - if (verbose) console.log("⏳ Running immutable vs mutable comparison benchmarks..."); - results.immutableComparison = runImmutableComparisonBenchmarks(); - if (verbose) console.log("✅ Immutable vs mutable comparison benchmarks completed\n"); - } - - const endTime = Date.now(); - const totalDuration = endTime - startTime; - - // Generate and print summary - const summary = generateSummaryReport(results); - summary.totalDuration = totalDuration; - - if (verbose) { - printSummaryReport(summary); + if (verbose) console.log("⏳ Running persistence operations..."); + const bench = createPersistenceBench(sizes.persistence); + await bench.run(); + results.persistence = bench; + if (verbose) { + console.log("\n📊 PERSISTENCE:"); + console.table(bench.table()); + } } - return { results, summary }; + console.log("\n" + "=".repeat(80)); + console.log("🏁 BENCHMARK COMPLETE"); + console.log("=".repeat(80) + "\n"); + return results; } catch (error) { console.error("❌ Benchmark suite failed:", error); throw error; @@ -365,32 +330,28 @@ async function runAllBenchmarks (options = {}) { * CLI argument parser * @returns {Object} Parsed CLI options */ -function parseCliArguments () { +function parseCliArguments() { const args = process.argv.slice(2); const options = { includeBasic: true, includeSearch: true, includeIndex: true, - includeMemory: true, - includeComparison: true, includeUtilities: true, includePagination: true, 
includePersistence: true, - includeImmutableComparison: true, - verbose: true + verbose: true, }; - // Helper function to disable all categories except the specified one - const runOnlyCategory = category => { - Object.keys(options).forEach(key => { + const runOnlyCategory = (category) => { + Object.keys(options).forEach((key) => { if (key.startsWith("include") && key !== category) { options[key] = false; } }); }; - args.forEach(arg => { - switch (arg) { // eslint-disable-line default-case + args.forEach((arg) => { + switch (arg) { case "--basic-only": runOnlyCategory("includeBasic"); break; @@ -400,12 +361,6 @@ function parseCliArguments () { case "--index-only": runOnlyCategory("includeIndex"); break; - case "--memory-only": - runOnlyCategory("includeMemory"); - break; - case "--comparison-only": - runOnlyCategory("includeComparison"); - break; case "--utilities-only": runOnlyCategory("includeUtilities"); break; @@ -415,23 +370,10 @@ function parseCliArguments () { case "--persistence-only": runOnlyCategory("includePersistence"); break; - case "--immutable-only": - runOnlyCategory("includeImmutableComparison"); - break; case "--core-only": - // Run only core benchmarks (basic, search, index) - options.includeMemory = false; - options.includeComparison = false; options.includeUtilities = false; options.includePagination = false; options.includePersistence = false; - options.includeImmutableComparison = false; - break; - case "--advanced-only": - // Run only advanced benchmarks - options.includeBasic = false; - options.includeSearch = false; - options.includeIndex = false; break; case "--no-basic": options.includeBasic = false; @@ -442,12 +384,6 @@ function parseCliArguments () { case "--no-index": options.includeIndex = false; break; - case "--no-memory": - options.includeMemory = false; - break; - case "--no-comparison": - options.includeComparison = false; - break; case "--no-utilities": options.includeUtilities = false; break; @@ -457,15 +393,12 @@ function 
parseCliArguments () { case "--no-persistence": options.includePersistence = false; break; - case "--no-immutable": - options.includeImmutableComparison = false; - break; case "--quiet": options.verbose = false; break; case "--help": console.log(` -Haro Benchmark Suite v16.0.0 +Haro Benchmark Suite v17.0.0 (tinybench) Usage: node benchmarks/index.js [options] @@ -473,50 +406,30 @@ SINGLE CATEGORY OPTIONS: --basic-only Run only basic CRUD operations benchmarks --search-only Run only search and filter benchmarks --index-only Run only index operations benchmarks - --memory-only Run only memory usage benchmarks - --comparison-only Run only vs native structures benchmarks - --utilities-only Run only utility operations benchmarks (clone, merge, freeze, etc.) - --pagination-only Run only pagination/limit benchmarks - --persistence-only Run only dump/override persistence benchmarks - --immutable-only Run only immutable vs mutable comparison benchmarks + --utilities-only Run only utility operations benchmarks + --pagination-only Run only pagination benchmarks + --persistence-only Run only persistence benchmarks CATEGORY GROUP OPTIONS: --core-only Run only core benchmarks (basic, search, index) - --advanced-only Run only advanced benchmarks (memory, comparison, utilities, etc.) 
EXCLUSION OPTIONS: --no-basic Exclude basic operations benchmarks --no-search Exclude search and filter benchmarks --no-index Exclude index operations benchmarks - --no-memory Exclude memory usage benchmarks - --no-comparison Exclude comparison benchmarks --no-utilities Exclude utility operations benchmarks --no-pagination Exclude pagination benchmarks --no-persistence Exclude persistence benchmarks - --no-immutable Exclude immutable vs mutable benchmarks OUTPUT OPTIONS: --quiet Suppress verbose output --help Show this help message -BENCHMARK CATEGORIES: - Basic Operations CRUD operations (set, get, delete, batch) - Search & Filter Query operations (find, filter, search, where) - Index Operations Indexing performance and benefits - Memory Usage Memory consumption and efficiency analysis - Comparison Performance vs native JavaScript structures - Utility Operations Helper methods (clone, merge, freeze, forEach, uuid) - Pagination Limit-based pagination performance - Persistence Dump/override operations for data serialization - Immutable Comparison Performance comparison between mutable and immutable modes - Examples: node benchmarks/index.js # Run all benchmarks node benchmarks/index.js --basic-only # Run basic operations only node benchmarks/index.js --core-only # Run core benchmarks only - node benchmarks/index.js --no-memory # Run all except memory benchmarks node benchmarks/index.js --quiet # Run all benchmarks quietly - node benchmarks/index.js --utilities-only # Test utility methods only `); process.exit(0); break; @@ -529,10 +442,10 @@ Examples: // Run benchmarks if this file is executed directly if (import.meta.url === `file://${process.argv[1]}`) { const options = parseCliArguments(); - runAllBenchmarks(options).catch(error => { + runAllBenchmarks(options).catch((error) => { console.error("Fatal error:", error); process.exit(1); }); } -export { runAllBenchmarks, generateSummaryReport }; +export { runAllBenchmarks }; diff --git a/benchmarks/memory-usage.js 
b/benchmarks/memory-usage.js index edbd37a0..2f6d177f 100644 --- a/benchmarks/memory-usage.js +++ b/benchmarks/memory-usage.js @@ -6,7 +6,7 @@ import { generateIndexTestData } from "./index-operations.js"; * Gets current memory usage information * @returns {Object} Memory usage information */ -function getMemoryUsage () { +function getMemoryUsage() { const memUsage = process.memoryUsage(); return { @@ -14,14 +14,14 @@ function getMemoryUsage () { heapUsed: memUsage.heapUsed / 1024 / 1024, // MB heapTotal: memUsage.heapTotal / 1024 / 1024, // MB external: memUsage.external / 1024 / 1024, // MB - arrayBuffers: memUsage.arrayBuffers / 1024 / 1024 // MB + arrayBuffers: memUsage.arrayBuffers / 1024 / 1024, // MB }; } /** * Forces garbage collection if possible */ -function forceGC () { +function forceGC() { if (global.gc) { global.gc(); } @@ -33,7 +33,7 @@ function forceGC () { * @param {string} description - Description of the test * @returns {Object} Memory usage results */ -function measureMemory (fn, description) { +function measureMemory(fn, description) { forceGC(); const startMemory = getMemoryUsage(); @@ -54,9 +54,9 @@ function measureMemory (fn, description) { heapUsed: endMemory.heapUsed - startMemory.heapUsed, heapTotal: endMemory.heapTotal - startMemory.heapTotal, external: endMemory.external - startMemory.external, - arrayBuffers: endMemory.arrayBuffers - startMemory.arrayBuffers + arrayBuffers: endMemory.arrayBuffers - startMemory.arrayBuffers, }, - result + result, }; } @@ -65,10 +65,10 @@ function measureMemory (fn, description) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of memory benchmark results */ -function benchmarkCreationMemory (dataSizes) { +function benchmarkCreationMemory(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Test basic store creation @@ -80,7 +80,7 @@ function benchmarkCreationMemory (dataSizes) { 
// Test store creation with indexes const indexedCreationResult = measureMemory(() => { return haro(testData, { - index: ["category", "status", "priority", "region", "userId"] + index: ["category", "status", "priority", "region", "userId"], }); }, `Indexed store creation (${size} records)`); results.push(indexedCreationResult); @@ -89,10 +89,16 @@ function benchmarkCreationMemory (dataSizes) { const complexIndexCreationResult = measureMemory(() => { return haro(testData, { index: [ - "category", "status", "priority", "region", "userId", - "category|status", "region|priority", "userId|category", - "category|status|priority" - ] + "category", + "status", + "priority", + "region", + "userId", + "category|status", + "region|priority", + "userId|category", + "category|status|priority", + ], }); }, `Complex indexed store creation (${size} records)`); results.push(complexIndexCreationResult); @@ -101,7 +107,7 @@ function benchmarkCreationMemory (dataSizes) { const versioningCreationResult = measureMemory(() => { return haro(testData, { versioning: true, - index: ["category", "status"] + index: ["category", "status"], }); }, `Versioning store creation (${size} records)`); results.push(versioningCreationResult); @@ -115,21 +121,24 @@ function benchmarkCreationMemory (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of memory benchmark results */ -function benchmarkOperationMemory (dataSizes) { +function benchmarkOperationMemory(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Test SET operations memory usage - const setOperationResult = measureMemory(() => { - const store = haro(); - for (let i = 0; i < Math.min(size, 1000); i++) { - store.set(i, testData[i]); - } + const setOperationResult = measureMemory( + () => { + const store = haro(); + for (let i = 0; i < Math.min(size, 1000); i++) { + store.set(i, testData[i]); + } - return 
store; - }, `SET operations memory (${Math.min(size, 1000)} records)`); + return store; + }, + `SET operations memory (${Math.min(size, 1000)} records)`, + ); results.push(setOperationResult); // Test BATCH operations memory usage @@ -142,19 +151,18 @@ function benchmarkOperationMemory (dataSizes) { results.push(batchOperationResult); // Test DELETE operations memory usage - const deleteOperationResult = measureMemory(() => { - const store = haro(testData); - const keys = Array.from(store.keys()); - for (let i = 0; i < Math.min(keys.length, 100); i++) { - try { - store.del(keys[i]); - } catch (e) { // eslint-disable-line no-unused-vars - // Record might already be deleted + const deleteOperationResult = measureMemory( + () => { + const store = haro(testData); + const keys = Array.from(store.keys()); + for (let i = 0; i < Math.min(keys.length, 100); i++) { + store.delete(keys[i]); } - } - return store; - }, `DELETE operations memory (${Math.min(size, 100)} deletions)`); + return store; + }, + `DELETE operations memory (${Math.min(size, 100)} deletions)`, + ); results.push(deleteOperationResult); // Test CLEAR operations memory usage @@ -175,13 +183,13 @@ function benchmarkOperationMemory (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of memory benchmark results */ -function benchmarkQueryMemory (dataSizes) { +function benchmarkQueryMemory(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); const store = haro(testData, { - index: ["category", "status", "priority", "category|status"] + index: ["category", "status", "priority", "category|status"], }); // Test FIND operations memory usage @@ -199,7 +207,9 @@ function benchmarkQueryMemory (dataSizes) { const filterOperationResult = measureMemory(() => { const results = []; // eslint-disable-line no-shadow for (let i = 0; i < 10; i++) { - results.push(store.filter(record => 
record.category === "A" && record.status === "active")); + results.push( + store.filter((record) => record.category === "A" && record.status === "active"), + ); } return results; @@ -219,10 +229,10 @@ function benchmarkQueryMemory (dataSizes) { // Test MAP operations memory usage const mapOperationResult = measureMemory(() => { - return store.map(record => ({ + return store.map((record) => ({ id: record.id, category: record.category, - status: record.status + status: record.status, })); }, `MAP operations memory (${size} records)`); results.push(mapOperationResult); @@ -236,10 +246,10 @@ function benchmarkQueryMemory (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of memory benchmark results */ -function benchmarkIndexMemory (dataSizes) { +function benchmarkIndexMemory(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Test index creation memory usage @@ -266,7 +276,7 @@ function benchmarkIndexMemory (dataSizes) { // Test index dump memory usage const indexDumpResult = measureMemory(() => { const store = haro(testData, { - index: ["category", "status", "priority", "category|status"] + index: ["category", "status", "priority", "category|status"], }); return store.dump("indexes"); @@ -276,7 +286,7 @@ function benchmarkIndexMemory (dataSizes) { // Test index override memory usage const indexOverrideResult = measureMemory(() => { const store = haro(testData, { - index: ["category", "status", "priority"] + index: ["category", "status", "priority"], }); const indexData = store.dump("indexes"); const newStore = haro(); @@ -295,10 +305,10 @@ function benchmarkIndexMemory (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of memory benchmark results */ -function benchmarkVersioningMemory (dataSizes) { +function benchmarkVersioningMemory(dataSizes) { const results = []; - 
dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Test versioning store creation @@ -308,22 +318,25 @@ function benchmarkVersioningMemory (dataSizes) { results.push(versioningCreationResult); // Test versioning with updates - const versioningUpdatesResult = measureMemory(() => { - const store = haro(testData, { versioning: true }); - - // Update records multiple times to create versions - for (let i = 0; i < Math.min(size, 100); i++) { - for (let version = 0; version < 5; version++) { - store.set(i, { - ...testData[i], - version: version, - updated: new Date() - }); + const versioningUpdatesResult = measureMemory( + () => { + const store = haro(testData, { versioning: true }); + + // Update records multiple times to create versions + for (let i = 0; i < Math.min(size, 100); i++) { + for (let version = 0; version < 5; version++) { + store.set(i, { + ...testData[i], + version: version, + updated: new Date(), + }); + } } - } - return store; - }, `Versioning with updates (${Math.min(size, 100)} records, 5 versions each)`); + return store; + }, + `Versioning with updates (${Math.min(size, 100)} records, 5 versions each)`, + ); results.push(versioningUpdatesResult); }); @@ -335,10 +348,10 @@ function benchmarkVersioningMemory (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of memory benchmark results */ -function benchmarkStressMemory (dataSizes) { +function benchmarkStressMemory(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateIndexTestData(size); // Test rapid creation and destruction @@ -349,7 +362,7 @@ function benchmarkStressMemory (dataSizes) { } // Clear all stores - stores.forEach(store => store.clear()); + stores.forEach((store) => store.clear()); return stores; }, `Rapid store cycles (${size} records)`); @@ -380,7 +393,7 @@ function benchmarkStressMemory (dataSizes) { * @param {number} 
dataSize - Size of test data * @returns {Object} Memory growth analysis */ -function analyzeMemoryGrowth (dataSize) { +function analyzeMemoryGrowth(dataSize) { const testData = generateIndexTestData(dataSize); const memorySnapshots = []; @@ -388,7 +401,7 @@ function analyzeMemoryGrowth (dataSize) { forceGC(); memorySnapshots.push({ operation: "Initial", - memory: getMemoryUsage() + memory: getMemoryUsage(), }); // Create store @@ -396,7 +409,7 @@ function analyzeMemoryGrowth (dataSize) { forceGC(); memorySnapshots.push({ operation: "Store created", - memory: getMemoryUsage() + memory: getMemoryUsage(), }); // Add data in batches @@ -408,7 +421,7 @@ function analyzeMemoryGrowth (dataSize) { forceGC(); memorySnapshots.push({ operation: `Batch ${i + 1} added`, - memory: getMemoryUsage() + memory: getMemoryUsage(), }); } @@ -420,19 +433,19 @@ function analyzeMemoryGrowth (dataSize) { forceGC(); memorySnapshots.push({ operation: "Indexes added", - memory: getMemoryUsage() + memory: getMemoryUsage(), }); // Perform queries for (let i = 0; i < 100; i++) { store.find({ category: "A" }); - store.filter(record => record.status === "active"); + store.filter((record) => record.status === "active"); } forceGC(); memorySnapshots.push({ operation: "After queries", - memory: getMemoryUsage() + memory: getMemoryUsage(), }); // Clear store @@ -441,14 +454,16 @@ function analyzeMemoryGrowth (dataSize) { forceGC(); memorySnapshots.push({ operation: "After clear", - memory: getMemoryUsage() + memory: getMemoryUsage(), }); return { dataSize, snapshots: memorySnapshots, - maxHeapUsed: Math.max(...memorySnapshots.map(s => s.memory.heapUsed)), - totalGrowth: memorySnapshots[memorySnapshots.length - 1].memory.heapUsed - memorySnapshots[0].memory.heapUsed + maxHeapUsed: Math.max(...memorySnapshots.map((s) => s.memory.heapUsed)), + totalGrowth: + memorySnapshots[memorySnapshots.length - 1].memory.heapUsed - + memorySnapshots[0].memory.heapUsed, }; } @@ -456,13 +471,18 @@ function 
analyzeMemoryGrowth (dataSize) { * Prints memory benchmark results * @param {Array} results - Array of memory benchmark results */ -function printMemoryResults (results) { +function printMemoryResults(results) { console.log("\n=== MEMORY USAGE BENCHMARK RESULTS ===\n"); - console.log("Operation".padEnd(50) + "Execution Time".padEnd(16) + "Heap Delta (MB)".padEnd(16) + "RSS Delta (MB)"); + console.log( + "Operation".padEnd(50) + + "Execution Time".padEnd(16) + + "Heap Delta (MB)".padEnd(16) + + "RSS Delta (MB)", + ); console.log("-".repeat(98)); - results.forEach(result => { + results.forEach((result) => { const name = result.description.padEnd(50); const execTime = result.executionTime.toFixed(2).padEnd(16); const heapDelta = result.memoryDelta.heapUsed.toFixed(2).padEnd(16); @@ -478,7 +498,7 @@ function printMemoryResults (results) { * Prints memory growth analysis * @param {Object} analysis - Memory growth analysis results */ -function printMemoryGrowthAnalysis (analysis) { +function printMemoryGrowthAnalysis(analysis) { console.log("\n=== MEMORY GROWTH ANALYSIS ===\n"); console.log(`Data Size: ${analysis.dataSize} records`); console.log(`Max Heap Used: ${analysis.maxHeapUsed.toFixed(2)} MB`); @@ -489,9 +509,10 @@ function printMemoryGrowthAnalysis (analysis) { const operation = snapshot.operation.padEnd(20); const heapUsed = snapshot.memory.heapUsed.toFixed(2).padEnd(10); const rss = snapshot.memory.rss.toFixed(2).padEnd(10); - const delta = index > 0 ? - (snapshot.memory.heapUsed - analysis.snapshots[index - 1].memory.heapUsed).toFixed(2) : - "0.00"; + const delta = + index > 0 + ? 
(snapshot.memory.heapUsed - analysis.snapshots[index - 1].memory.heapUsed).toFixed(2) + : "0.00"; console.log(`${operation} | Heap: ${heapUsed} MB | RSS: ${rss} MB | Delta: ${delta} MB`); }); @@ -502,7 +523,7 @@ function printMemoryGrowthAnalysis (analysis) { /** * Main function to run all memory benchmarks */ -function runMemoryBenchmarks () { +function runMemoryBenchmarks() { console.log("💾 Running Memory Usage Benchmarks...\n"); const dataSizes = [1000, 10000, 25000]; diff --git a/benchmarks/pagination.js b/benchmarks/pagination.js index 8d0332f4..53280ce6 100644 --- a/benchmarks/pagination.js +++ b/benchmarks/pagination.js @@ -6,7 +6,7 @@ import { haro } from "../dist/haro.js"; * @param {number} size - Number of records to generate * @returns {Array} Array of test records optimized for pagination testing */ -function generatePaginationTestData (size) { +function generatePaginationTestData(size) { const data = []; const categories = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J"]; const statuses = ["active", "inactive", "pending", "archived"]; @@ -25,8 +25,8 @@ function generatePaginationTestData (size) { metadata: { level: Math.floor(i / 100), region: `Region ${i % 5}`, - department: `Dept ${i % 15}` - } + department: `Dept ${i % 15}`, + }, }); } @@ -40,7 +40,7 @@ function generatePaginationTestData (size) { * @param {number} iterations - Number of iterations to run * @returns {Object} Benchmark results */ -function benchmark (name, fn, iterations = 100) { +function benchmark(name, fn, iterations = 100) { const start = performance.now(); for (let i = 0; i < iterations; i++) { fn(); @@ -54,7 +54,7 @@ function benchmark (name, fn, iterations = 100) { iterations, totalTime: total, avgTime, - opsPerSecond: Math.floor(1000 / avgTime) + opsPerSecond: Math.floor(1000 / avgTime), }; } @@ -63,10 +63,10 @@ function benchmark (name, fn, iterations = 100) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ 
-function benchmarkBasicLimitOperations (dataSizes) { +function benchmarkBasicLimitOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePaginationTestData(size); const store = haro(testData); @@ -89,9 +89,12 @@ function benchmarkBasicLimitOperations (dataSizes) { results.push(largePageResult); // Very large page sizes - const veryLargePageResult = benchmark(`LIMIT very large page (1000 items from ${size} records)`, () => { - store.limit(0, Math.min(1000, size)); - }); + const veryLargePageResult = benchmark( + `LIMIT very large page (1000 items from ${size} records)`, + () => { + store.limit(0, Math.min(1000, size)); + }, + ); results.push(veryLargePageResult); }); @@ -103,10 +106,10 @@ function benchmarkBasicLimitOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkOffsetPagination (dataSizes) { +function benchmarkOffsetPagination(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePaginationTestData(size); const store = haro(testData); const pageSize = 20; @@ -119,29 +122,41 @@ function benchmarkOffsetPagination (dataSizes) { // Middle page const middleOffset = Math.floor(size / 2); - const middlePageResult = benchmark(`LIMIT middle page (offset ${middleOffset}, ${pageSize} items)`, () => { - store.limit(middleOffset, pageSize); - }); + const middlePageResult = benchmark( + `LIMIT middle page (offset ${middleOffset}, ${pageSize} items)`, + () => { + store.limit(middleOffset, pageSize); + }, + ); results.push(middlePageResult); // Near end page const nearEndOffset = Math.max(0, size - pageSize * 2); - const nearEndPageResult = benchmark(`LIMIT near end page (offset ${nearEndOffset}, ${pageSize} items)`, () => { - store.limit(nearEndOffset, pageSize); - }); + const nearEndPageResult = benchmark( + `LIMIT near end page (offset 
${nearEndOffset}, ${pageSize} items)`, + () => { + store.limit(nearEndOffset, pageSize); + }, + ); results.push(nearEndPageResult); // Last page (potentially partial) const lastOffset = Math.max(0, size - pageSize); - const lastPageResult = benchmark(`LIMIT last page (offset ${lastOffset}, ${pageSize} items)`, () => { - store.limit(lastOffset, pageSize); - }); + const lastPageResult = benchmark( + `LIMIT last page (offset ${lastOffset}, ${pageSize} items)`, + () => { + store.limit(lastOffset, pageSize); + }, + ); results.push(lastPageResult); // Beyond data bounds (should return empty) - const beyondBoundsResult = benchmark(`LIMIT beyond bounds (offset ${size + 100}, ${pageSize} items)`, () => { - store.limit(size + 100, pageSize); - }); + const beyondBoundsResult = benchmark( + `LIMIT beyond bounds (offset ${size + 100}, ${pageSize} items)`, + () => { + store.limit(size + 100, pageSize); + }, + ); results.push(beyondBoundsResult); }); @@ -153,19 +168,22 @@ function benchmarkOffsetPagination (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkPageSizeOptimization (dataSizes) { +function benchmarkPageSizeOptimization(dataSizes) { const results = []; const pageSizes = [1, 5, 10, 20, 50, 100, 200, 500, 1000]; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePaginationTestData(size); const store = haro(testData); - pageSizes.forEach(pageSize => { + pageSizes.forEach((pageSize) => { if (pageSize <= size) { - const pageSizeResult = benchmark(`LIMIT page size ${pageSize} (${size} total records)`, () => { - store.limit(0, pageSize); - }); + const pageSizeResult = benchmark( + `LIMIT page size ${pageSize} (${size} total records)`, + () => { + store.limit(0, pageSize); + }, + ); results.push(pageSizeResult); } }); @@ -179,17 +197,20 @@ function benchmarkPageSizeOptimization (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * 
@returns {Array} Array of benchmark results */ -function benchmarkPaginationModes (dataSizes) { +function benchmarkPaginationModes(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePaginationTestData(size); // Test with immutable store const immutableStore = haro(testData, { immutable: true }); - const immutableResult = benchmark(`LIMIT immutable mode (50 items from ${size} records)`, () => { - immutableStore.limit(0, 50); - }); + const immutableResult = benchmark( + `LIMIT immutable mode (50 items from ${size} records)`, + () => { + immutableStore.limit(0, 50); + }, + ); results.push(immutableResult); // Test with mutable store @@ -206,9 +227,12 @@ function benchmarkPaginationModes (dataSizes) { results.push(rawResult); // Test with processed data - const processedResult = benchmark(`LIMIT processed data (50 items from ${size} records)`, () => { - mutableStore.limit(0, 50, false); - }); + const processedResult = benchmark( + `LIMIT processed data (50 items from ${size} records)`, + () => { + mutableStore.limit(0, 50, false); + }, + ); results.push(processedResult); }); @@ -220,10 +244,10 @@ function benchmarkPaginationModes (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkSequentialPagination (dataSizes) { +function benchmarkSequentialPagination(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePaginationTestData(size); const store = haro(testData); const pageSize = 25; @@ -231,22 +255,30 @@ function benchmarkSequentialPagination (dataSizes) { // Simulate browsing through first 10 pages const pagesToTest = Math.min(10, totalPages); - const sequentialResult = benchmark(`LIMIT sequential pagination (${pagesToTest} pages, ${pageSize} items each)`, () => { - for (let page = 0; page < pagesToTest; page++) { - const offset = page * pageSize; - 
store.limit(offset, pageSize); - } - }, 1); + const sequentialResult = benchmark( + `LIMIT sequential pagination (${pagesToTest} pages, ${pageSize} items each)`, + () => { + for (let page = 0; page < pagesToTest; page++) { + const offset = page * pageSize; + store.limit(offset, pageSize); + } + }, + 1, + ); results.push(sequentialResult); // Simulate random page access pattern - const randomPagesResult = benchmark(`LIMIT random page access (10 random pages, ${pageSize} items each)`, () => { - for (let i = 0; i < 10; i++) { - const randomPage = Math.floor(Math.random() * totalPages); - const offset = randomPage * pageSize; - store.limit(offset, pageSize); - } - }, 1); + const randomPagesResult = benchmark( + `LIMIT random page access (10 random pages, ${pageSize} items each)`, + () => { + for (let i = 0; i < 10; i++) { + const randomPage = Math.floor(Math.random() * totalPages); + const offset = randomPage * pageSize; + store.limit(offset, pageSize); + } + }, + 1, + ); results.push(randomPagesResult); }); @@ -258,18 +290,18 @@ function benchmarkSequentialPagination (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkPaginationWithOperations (dataSizes) { +function benchmarkPaginationWithOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePaginationTestData(size); const store = haro(testData, { - index: ["category", "status", "priority"] + index: ["category", "status", "priority"], }); // Pagination after filtering const paginateAfterFilterResult = benchmark(`LIMIT after filter (${size} records)`, () => { - const filtered = store.filter(record => record.priority > 3); + const filtered = store.filter((record) => record.priority > 3); // Simulate pagination on filtered results by taking first 20 return filtered.slice(0, 20); @@ -286,12 +318,15 @@ function benchmarkPaginationWithOperations (dataSizes) { 
results.push(paginateAfterFindResult); // Combined operations: find + sort + paginate simulation - const combinedOperationsResult = benchmark(`Combined find + sort + limit (${size} records)`, () => { - const found = store.find({ status: "active" }); - const sorted = found.sort((a, b) => b.score - a.score); - - return sorted.slice(0, 20); // Simulate limit - }); + const combinedOperationsResult = benchmark( + `Combined find + sort + limit (${size} records)`, + () => { + const found = store.find({ status: "active" }); + const sorted = found.sort((a, b) => b.score - a.score); + + return sorted.slice(0, 20); // Simulate limit + }, + ); results.push(combinedOperationsResult); }); @@ -303,10 +338,10 @@ function benchmarkPaginationWithOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkPaginationMemory (dataSizes) { +function benchmarkPaginationMemory(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePaginationTestData(size); const store = haro(testData); @@ -348,7 +383,7 @@ function benchmarkPaginationMemory (dataSizes) { memoryAllData: (memAfterAll - memBefore) / 1024 / 1024, // MB memoryChunked: (memAfterChunks - memAfterAll) / 1024 / 1024, // MB iterations: 1, - opsPerSecond: Math.floor(1000 / (allDataEnd - allDataStart + (chunksEnd - chunksStart))) + opsPerSecond: Math.floor(1000 / (allDataEnd - allDataStart + (chunksEnd - chunksStart))), }); }); @@ -359,23 +394,34 @@ function benchmarkPaginationMemory (dataSizes) { * Prints formatted benchmark results * @param {Array} results - Array of benchmark results */ -function printResults (results) { +function printResults(results) { console.log("\n" + "=".repeat(80)); console.log("PAGINATION BENCHMARK RESULTS"); console.log("=".repeat(80)); - results.forEach(result => { - const opsIndicator = result.opsPerSecond > 1000 ? "✅" : - result.opsPerSecond > 100 ? 
"🟡" : - result.opsPerSecond > 10 ? "🟠" : "🔴"; + results.forEach((result) => { + const opsIndicator = + result.opsPerSecond > 1000 + ? "✅" + : result.opsPerSecond > 100 + ? "🟡" + : result.opsPerSecond > 10 + ? "🟠" + : "🔴"; console.log(`${opsIndicator} ${result.name}`); - console.log(` ${result.opsPerSecond.toLocaleString()} ops/sec | ${result.totalTime.toFixed(2)}ms total | ${result.avgTime?.toFixed(4) || "N/A"}ms avg`); + console.log( + ` ${result.opsPerSecond.toLocaleString()} ops/sec | ${result.totalTime.toFixed(2)}ms total | ${result.avgTime?.toFixed(4) || "N/A"}ms avg`, + ); // Special formatting for memory results if (result.memoryAllData !== undefined) { - console.log(` All data: ${result.allDataTime.toFixed(2)}ms, ${result.memoryAllData.toFixed(2)}MB`); - console.log(` Chunked: ${result.chunkedTime.toFixed(2)}ms, ${result.memoryChunked.toFixed(2)}MB`); + console.log( + ` All data: ${result.allDataTime.toFixed(2)}ms, ${result.memoryAllData.toFixed(2)}MB`, + ); + console.log( + ` Chunked: ${result.chunkedTime.toFixed(2)}ms, ${result.memoryChunked.toFixed(2)}MB`, + ); } console.log(""); }); @@ -385,7 +431,7 @@ function printResults (results) { * Runs all pagination benchmarks * @returns {Array} Array of all benchmark results */ -function runPaginationBenchmarks () { +function runPaginationBenchmarks() { console.log("Starting Pagination Benchmarks...\n"); const dataSizes = [1000, 10000, 50000]; diff --git a/benchmarks/persistence.js b/benchmarks/persistence.js index fa73cc12..8ba663ee 100644 --- a/benchmarks/persistence.js +++ b/benchmarks/persistence.js @@ -6,7 +6,7 @@ import { haro } from "../dist/haro.js"; * @param {number} size - Number of records to generate * @returns {Array} Array of test records optimized for persistence testing */ -function generatePersistenceTestData (size) { +function generatePersistenceTestData(size) { const data = []; const departments = ["Engineering", "Marketing", "Sales", "HR", "Finance", "Operations"]; const locations = ["NYC", 
"SF", "LA", "Chicago", "Boston", "Austin"]; @@ -18,13 +18,17 @@ function generatePersistenceTestData (size) { email: `employee${i}@company.com`, department: departments[i % departments.length], location: locations[i % locations.length], - startDate: new Date(2020 + i % 4, i % 12, i % 28 + 1), - salary: 50000 + i % 100000, + startDate: new Date(2020 + (i % 4), i % 12, (i % 28) + 1), + salary: 50000 + (i % 100000), active: Math.random() > 0.1, - skills: Array.from({ length: Math.floor(Math.random() * 5) + 1 }, - (_, j) => `skill${(i + j) % 20}`), - projects: Array.from({ length: Math.floor(i % 10) + 1 }, - (_, j) => ({ id: `proj${i}-${j}`, name: `Project ${i}-${j}` })), + skills: Array.from( + { length: Math.floor(Math.random() * 5) + 1 }, + (_, j) => `skill${(i + j) % 20}`, + ), + projects: Array.from({ length: Math.floor(i % 10) + 1 }, (_, j) => ({ + id: `proj${i}-${j}`, + name: `Project ${i}-${j}`, + })), metadata: { created: new Date(), updated: new Date(), @@ -33,9 +37,9 @@ function generatePersistenceTestData (size) { preferences: { theme: i % 2 === 0 ? "dark" : "light", language: i % 3 === 0 ? "en" : i % 3 === 1 ? 
"es" : "fr", - timezone: `UTC${i % 24 - 12}` - } - } + timezone: `UTC${(i % 24) - 12}`, + }, + }, }); } @@ -49,7 +53,7 @@ function generatePersistenceTestData (size) { * @param {number} iterations - Number of iterations to run * @returns {Object} Benchmark results */ -function benchmark (name, fn, iterations = 10) { +function benchmark(name, fn, iterations = 10) { const start = performance.now(); for (let i = 0; i < iterations; i++) { fn(); @@ -63,7 +67,7 @@ function benchmark (name, fn, iterations = 10) { iterations, totalTime: total, avgTime, - opsPerSecond: Math.floor(1000 / avgTime) + opsPerSecond: Math.floor(1000 / avgTime), }; } @@ -72,13 +76,20 @@ function benchmark (name, fn, iterations = 10) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkDumpOperations (dataSizes) { +function benchmarkDumpOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePersistenceTestData(size); const store = haro(testData, { - index: ["department", "location", "active", "skills", "department|location", "active|department"] + index: [ + "department", + "location", + "active", + "skills", + "department|location", + "active|department", + ], }); // Dump records @@ -106,7 +117,7 @@ function benchmarkDumpOperations (dataSizes) { recordsSize: recordsDump.length, indexesSize: indexesDump.length, recordsDataSize: JSON.stringify(recordsDump).length, - indexesDataSize: JSON.stringify(indexesDump).length + indexesDataSize: JSON.stringify(indexesDump).length, }); }); @@ -118,13 +129,13 @@ function benchmarkDumpOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkOverrideOperations (dataSizes) { +function benchmarkOverrideOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = 
generatePersistenceTestData(size); const sourceStore = haro(testData, { - index: ["department", "location", "active", "skills"] + index: ["department", "location", "active", "skills"], }); // Get dump data for override testing @@ -173,7 +184,7 @@ function benchmarkOverrideOperations (dataSizes) { originalSize: sourceStore.size, restoredSize: targetStore.size, integrityMatch: sourceStore.size === targetStore.size, - sampleRecordMatch: JSON.stringify(sourceStore.get(0)) === JSON.stringify(targetStore.get(0)) + sampleRecordMatch: JSON.stringify(sourceStore.get(0)) === JSON.stringify(targetStore.get(0)), }; results.push(integrityResult); }); @@ -186,14 +197,14 @@ function benchmarkOverrideOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkRoundTripPersistence (dataSizes) { +function benchmarkRoundTripPersistence(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePersistenceTestData(size); const sourceStore = haro(testData, { index: ["department", "location", "active", "skills", "department|location"], - versioning: true + versioning: true, }); // Perform some operations to create versions @@ -228,18 +239,21 @@ function benchmarkRoundTripPersistence (dataSizes) { results.push(roundTripCompleteResult); // Test with different store configurations - const roundTripConfigResult = benchmark(`Round-trip with config restore (${size} records)`, () => { - const recordsDump = sourceStore.dump("records"); - const targetStore = haro(null, { - index: ["department", "location", "active"], - versioning: true, - immutable: true - }); - targetStore.override(recordsDump, "records"); - targetStore.reindex(); // Rebuild indexes with new config - - return targetStore; - }); + const roundTripConfigResult = benchmark( + `Round-trip with config restore (${size} records)`, + () => { + const recordsDump = 
sourceStore.dump("records"); + const targetStore = haro(null, { + index: ["department", "location", "active"], + versioning: true, + immutable: true, + }); + targetStore.override(recordsDump, "records"); + targetStore.reindex(); // Rebuild indexes with new config + + return targetStore; + }, + ); results.push(roundTripConfigResult); }); @@ -251,10 +265,10 @@ function benchmarkRoundTripPersistence (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkPersistenceMemory (dataSizes) { +function benchmarkPersistenceMemory(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generatePersistenceTestData(size); if (global.gc) { @@ -264,7 +278,7 @@ function benchmarkPersistenceMemory (dataSizes) { // Create store and measure memory const store = haro(testData, { - index: ["department", "location", "active", "skills"] + index: ["department", "location", "active", "skills"], }); if (global.gc) { @@ -309,7 +323,7 @@ function benchmarkPersistenceMemory (dataSizes) { dumpMemoryImpact: (memAfterDump - memAfterCreate) / 1024 / 1024, // MB overrideMemoryImpact: (memAfterOverride - memAfterDump) / 1024 / 1024, // MB finalMemory: (memAfterCleanup - memBefore) / 1024 / 1024, // MB - opsPerSecond: Math.floor(1000 / (dumpEnd - dumpStart + (overrideEnd - overrideStart))) + opsPerSecond: Math.floor(1000 / (dumpEnd - dumpStart + (overrideEnd - overrideStart))), }); }); @@ -321,10 +335,10 @@ function benchmarkPersistenceMemory (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkComplexObjectPersistence (dataSizes) { +function benchmarkComplexObjectPersistence(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { // Generate more complex test data const complexData = []; for (let i = 0; i < size; i++) { @@ -334,38 +348,38 @@ 
function benchmarkComplexObjectPersistence (dataSizes) { personal: { name: `User ${i}`, email: `user${i}@test.com`, - birth: new Date(1990 + i % 30, i % 12, i % 28 + 1) + birth: new Date(1990 + (i % 30), i % 12, (i % 28) + 1), }, professional: { title: `Title ${i % 20}`, department: `Dept ${i % 10}`, - experience: Array.from({ length: i % 5 + 1 }, (_, j) => ({ + experience: Array.from({ length: (i % 5) + 1 }, (_, j) => ({ company: `Company ${j}`, role: `Role ${j}`, - duration: `${j + 1} years` - })) - } + duration: `${j + 1} years`, + })), + }, }, - activities: Array.from({ length: i % 50 + 1 }, (_, j) => ({ + activities: Array.from({ length: (i % 50) + 1 }, (_, j) => ({ id: `activity_${i}_${j}`, type: `type_${j % 10}`, timestamp: new Date(Date.now() - j * 86400000), data: { action: `action_${j}`, - details: { value: Math.random() * 1000, category: `cat_${j % 5}` } - } + details: { value: Math.random() * 1000, category: `cat_${j % 5}` }, + }, })), settings: { preferences: Object.fromEntries( - Array.from({ length: 20 }, (_, j) => [`pref_${j}`, Math.random() > 0.5]) + Array.from({ length: 20 }, (_, j) => [`pref_${j}`, Math.random() > 0.5]), ), - permissions: Array.from({ length: 10 }, (_, j) => `perm_${j}`) - } + permissions: Array.from({ length: 10 }, (_, j) => `perm_${j}`), + }, }); } const store = haro(complexData, { - index: ["profile.professional.department", "settings.permissions"] + index: ["profile.professional.department", "settings.permissions"], }); // Dump complex objects @@ -393,7 +407,7 @@ function benchmarkComplexObjectPersistence (dataSizes) { opsPerSecond: 0, averageObjectSize: JSON.stringify(complexData[0]).length, totalDataSize: JSON.stringify(dump).length, - compressionRatio: JSON.stringify(dump).length / JSON.stringify(complexData).length + compressionRatio: JSON.stringify(dump).length / JSON.stringify(complexData).length, }; results.push(dataComplexityResult); }); @@ -405,42 +419,63 @@ function benchmarkComplexObjectPersistence (dataSizes) { * 
Prints formatted benchmark results * @param {Array} results - Array of benchmark results */ -function printResults (results) { +function printResults(results) { console.log("\n" + "=".repeat(80)); console.log("PERSISTENCE BENCHMARK RESULTS"); console.log("=".repeat(80)); - results.forEach(result => { - const opsIndicator = result.opsPerSecond > 100 ? "✅" : - result.opsPerSecond > 10 ? "🟡" : - result.opsPerSecond > 1 ? "🟠" : "🔴"; + results.forEach((result) => { + const opsIndicator = + result.opsPerSecond > 100 + ? "✅" + : result.opsPerSecond > 10 + ? "🟡" + : result.opsPerSecond > 1 + ? "🟠" + : "🔴"; if (result.opsPerSecond > 0) { console.log(`${opsIndicator} ${result.name}`); - console.log(` ${result.opsPerSecond.toLocaleString()} ops/sec | ${result.totalTime.toFixed(2)}ms total | ${result.avgTime?.toFixed(4) || "N/A"}ms avg`); + console.log( + ` ${result.opsPerSecond.toLocaleString()} ops/sec | ${result.totalTime.toFixed(2)}ms total | ${result.avgTime?.toFixed(4) || "N/A"}ms avg`, + ); } else { console.log(`📊 ${result.name}`); } // Special formatting for different result types if (result.recordsSize !== undefined) { - console.log(` Records: ${result.recordsSize} items, ${(result.recordsDataSize / 1024).toFixed(2)}KB`); - console.log(` Indexes: ${result.indexesSize} items, ${(result.indexesDataSize / 1024).toFixed(2)}KB`); + console.log( + ` Records: ${result.recordsSize} items, ${(result.recordsDataSize / 1024).toFixed(2)}KB`, + ); + console.log( + ` Indexes: ${result.indexesSize} items, ${(result.indexesDataSize / 1024).toFixed(2)}KB`, + ); } if (result.integrityMatch !== undefined) { console.log(` Original: ${result.originalSize} | Restored: ${result.restoredSize}`); - console.log(` Integrity: ${result.integrityMatch ? "✅" : "❌"} | Sample match: ${result.sampleRecordMatch ? "✅" : "❌"}`); + console.log( + ` Integrity: ${result.integrityMatch ? "✅" : "❌"} | Sample match: ${result.sampleRecordMatch ? 
"✅" : "❌"}`, + ); } if (result.originalMemory !== undefined) { - console.log(` Dump: ${result.dumpTime.toFixed(2)}ms | Override: ${result.overrideTime.toFixed(2)}ms`); - console.log(` Memory - Original: ${result.originalMemory.toFixed(2)}MB | Final: ${result.finalMemory.toFixed(2)}MB`); - console.log(` Memory Impact - Dump: ${result.dumpMemoryImpact.toFixed(2)}MB | Override: ${result.overrideMemoryImpact.toFixed(2)}MB`); + console.log( + ` Dump: ${result.dumpTime.toFixed(2)}ms | Override: ${result.overrideTime.toFixed(2)}ms`, + ); + console.log( + ` Memory - Original: ${result.originalMemory.toFixed(2)}MB | Final: ${result.finalMemory.toFixed(2)}MB`, + ); + console.log( + ` Memory Impact - Dump: ${result.dumpMemoryImpact.toFixed(2)}MB | Override: ${result.overrideMemoryImpact.toFixed(2)}MB`, + ); } if (result.averageObjectSize !== undefined) { - console.log(` Avg object: ${result.averageObjectSize} bytes | Total: ${(result.totalDataSize / 1024).toFixed(2)}KB`); + console.log( + ` Avg object: ${result.averageObjectSize} bytes | Total: ${(result.totalDataSize / 1024).toFixed(2)}KB`, + ); console.log(` Compression ratio: ${(result.compressionRatio * 100).toFixed(1)}%`); } @@ -452,7 +487,7 @@ function printResults (results) { * Runs all persistence benchmarks * @returns {Array} Array of all benchmark results */ -function runPersistenceBenchmarks () { +function runPersistenceBenchmarks() { console.log("Starting Persistence Benchmarks...\n"); const dataSizes = [100, 1000, 5000]; diff --git a/benchmarks/search-filter.js b/benchmarks/search-filter.js index b015a205..fc45f85c 100644 --- a/benchmarks/search-filter.js +++ b/benchmarks/search-filter.js @@ -6,7 +6,7 @@ import { haro } from "../dist/haro.js"; * @param {number} size - Number of records to generate * @returns {Array} Array of test records with searchable fields */ -function generateSearchTestData (size) { +function generateSearchTestData(size) { const data = []; const departments = ["Engineering", "Marketing", 
"Sales", "HR", "Finance"]; const skills = ["JavaScript", "Python", "Java", "React", "Node.js", "SQL", "Docker", "AWS"]; @@ -22,19 +22,23 @@ function generateSearchTestData (size) { skills: [ skills[i % skills.length], skills[(i + 1) % skills.length], - skills[(i + 2) % skills.length] + skills[(i + 2) % skills.length], ], city: cities[i % cities.length], active: Math.random() > 0.3, salary: Math.floor(Math.random() * 100000) + 50000, - joinDate: new Date(2020 + Math.floor(Math.random() * 4), Math.floor(Math.random() * 12), Math.floor(Math.random() * 28)), + joinDate: new Date( + 2020 + Math.floor(Math.random() * 4), + Math.floor(Math.random() * 12), + Math.floor(Math.random() * 28), + ), tags: [`tag${i % 10}`, `category${i % 5}`], metadata: { created: new Date(), score: Math.random() * 100, level: Math.floor(Math.random() * 10), - region: `Region ${i % 3}` - } + region: `Region ${i % 3}`, + }, }); } @@ -48,7 +52,7 @@ function generateSearchTestData (size) { * @param {number} iterations - Number of iterations to run * @returns {Object} Benchmark results */ -function benchmark (name, fn, iterations = 1000) { +function benchmark(name, fn, iterations = 1000) { const start = performance.now(); for (let i = 0; i < iterations; i++) { fn(); @@ -62,7 +66,7 @@ function benchmark (name, fn, iterations = 1000) { iterations, totalTime: total, avgTime, - opsPerSecond: Math.floor(1000 / avgTime) + opsPerSecond: Math.floor(1000 / avgTime), }; } @@ -71,13 +75,13 @@ function benchmark (name, fn, iterations = 1000) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkFindOperations (dataSizes) { +function benchmarkFindOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateSearchTestData(size); const store = haro(testData, { - index: ["department", "age", "city", "active", "active|department", "city|department"] + index: ["department", 
"age", "city", "active", "active|department", "city|department"], }); // Simple find operations @@ -111,38 +115,39 @@ function benchmarkFindOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkFilterOperations (dataSizes) { +function benchmarkFilterOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateSearchTestData(size); const store = haro(testData); // Simple filter operations const filterAgeResult = benchmark(`FILTER by age range (${size} records)`, () => { - store.filter(record => record.age >= 25 && record.age <= 35); + store.filter((record) => record.age >= 25 && record.age <= 35); }); results.push(filterAgeResult); const filterSalaryResult = benchmark(`FILTER by salary range (${size} records)`, () => { - store.filter(record => record.salary > 75000); + store.filter((record) => record.salary > 75000); }); results.push(filterSalaryResult); // Complex filter operations const filterComplexResult = benchmark(`FILTER complex condition (${size} records)`, () => { - store.filter(record => - record.active && - record.age > 30 && - record.department === "Engineering" && - record.skills.includes("JavaScript") + store.filter( + (record) => + record.active && + record.age > 30 && + record.department === "Engineering" && + record.skills.includes("JavaScript"), ); }); results.push(filterComplexResult); // Array filter operations const filterArrayResult = benchmark(`FILTER by array contains (${size} records)`, () => { - store.filter(record => record.skills.includes("React")); + store.filter((record) => record.skills.includes("React")); }); results.push(filterArrayResult); }); @@ -155,13 +160,13 @@ function benchmarkFilterOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkSearchOperations (dataSizes) { 
+function benchmarkSearchOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateSearchTestData(size); const store = haro(testData, { - index: ["department", "skills", "city", "name", "tags"] + index: ["department", "skills", "city", "name", "tags"], }); // String search operations @@ -203,13 +208,24 @@ function benchmarkSearchOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkWhereOperations (dataSizes) { +function benchmarkWhereOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateSearchTestData(size); const store = haro(testData, { - index: ["department", "skills", "city", "active", "tags", "age", "salary", "department|active", "city|department"] + index: [ + "department", + "skills", + "city", + "active", + "tags", + "age", + "salary", + "department|active", + "city|department", + ], + warnOnFullScan: false, }); // Simple where operations @@ -220,42 +236,57 @@ function benchmarkWhereOperations (dataSizes) { // Array where operations with OR (default) const whereArrayOrResult = benchmark(`WHERE array OR operation (${size} records)`, () => { - store.where({ - skills: ["JavaScript", "Python"] - }, "||"); + store.where( + { + skills: ["JavaScript", "Python"], + }, + "||", + ); }); results.push(whereArrayOrResult); // Array where operations with AND const whereArrayAndResult = benchmark(`WHERE array AND operation (${size} records)`, () => { - store.where({ - skills: ["JavaScript", "React"] - }, "&&"); + store.where( + { + skills: ["JavaScript", "React"], + }, + "&&", + ); }); results.push(whereArrayAndResult); // Multiple array fields with OR const whereMultiArrayOrResult = benchmark(`WHERE multiple arrays OR (${size} records)`, () => { - store.where({ - skills: ["JavaScript", "Python"], - tags: ["tag0", "tag1"] - }, 
"||"); + store.where( + { + skills: ["JavaScript", "Python"], + tags: ["tag0", "tag1"], + }, + "||", + ); }); results.push(whereMultiArrayOrResult); // Multiple array fields with AND - const whereMultiArrayAndResult = benchmark(`WHERE multiple arrays AND (${size} records)`, () => { - store.where({ - skills: ["JavaScript"], - tags: ["tag0"] - }, "&&"); - }); + const whereMultiArrayAndResult = benchmark( + `WHERE multiple arrays AND (${size} records)`, + () => { + store.where( + { + skills: ["JavaScript"], + tags: ["tag0"], + }, + "&&", + ); + }, + ); results.push(whereMultiArrayAndResult); // Regex where operations const whereRegexResult = benchmark(`WHERE with regex (${size} records)`, () => { store.where({ - department: /^Eng/ + department: /^Eng/, }); }); results.push(whereRegexResult); @@ -264,7 +295,7 @@ function benchmarkWhereOperations (dataSizes) { const whereMultiRegexResult = benchmark(`WHERE multiple regex (${size} records)`, () => { store.where({ department: /^(Engineering|Marketing)$/, - city: /^(New|San)/ + city: /^(New|San)/, }); }); results.push(whereMultiRegexResult); @@ -274,27 +305,33 @@ function benchmarkWhereOperations (dataSizes) { store.where({ department: "Engineering", active: true, - skills: ["JavaScript"] + skills: ["JavaScript"], }); }); results.push(whereComplexResult); // Very complex where with all predicate types - const whereVeryComplexResult = benchmark(`WHERE very complex predicates (${size} records)`, () => { - store.where({ - department: ["Engineering", "Marketing"], - active: true, - skills: ["JavaScript", "Python"], - city: /^(New|San)/ - }, "||"); - }); + const whereVeryComplexResult = benchmark( + `WHERE very complex predicates (${size} records)`, + () => { + store.where( + { + department: ["Engineering", "Marketing"], + active: true, + skills: ["JavaScript", "Python"], + city: /^(New|San)/, + }, + "||", + ); + }, + ); results.push(whereVeryComplexResult); // Nested field matching (if metadata fields are indexed) const 
whereNestedResult = benchmark(`WHERE nested field matching (${size} records)`, () => { store.where({ department: "Engineering", - tags: ["tag0", "tag1"] + tags: ["tag0", "tag1"], }); }); results.push(whereNestedResult); @@ -303,8 +340,8 @@ function benchmarkWhereOperations (dataSizes) { const whereVsFilterResult = benchmark(`WHERE vs FILTER comparison (${size} records)`, () => { const wherePredicate = { department: "Engineering", active: true }; const whereResults = store.where(wherePredicate); - const filterResults = store.filter(record => - record.department === "Engineering" && record.active === true + const filterResults = store.filter( + (record) => record.department === "Engineering" && record.active === true, ); return { whereCount: whereResults.length, filterCount: filterResults.length }; @@ -320,7 +357,7 @@ function benchmarkWhereOperations (dataSizes) { // Edge case: non-indexed field const whereNonIndexedResult = benchmark(`WHERE non-indexed field (${size} records)`, () => { store.where({ - email: "user0@example.com" + email: "user0@example.com", }); }); results.push(whereNonIndexedResult); @@ -334,33 +371,23 @@ function benchmarkWhereOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkMapReduceOperations (dataSizes) { +function benchmarkMapReduceOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateSearchTestData(size); const store = haro(testData); // Map operations const mapResult = benchmark(`MAP transformation (${size} records)`, () => { - store.map(record => ({ + store.map((record) => ({ id: record.id, name: record.name, - department: record.department + department: record.department, })); }); results.push(mapResult); - // Reduce operations - const reduceResult = benchmark(`REDUCE aggregation (${size} records)`, () => { - store.reduce((acc, record) => { - 
acc[record.department] = (acc[record.department] || 0) + 1; - - return acc; - }, {}); - }); - results.push(reduceResult); - // ForEach operations const forEachResult = benchmark(`FOREACH iteration (${size} records)`, () => { let count = 0; // eslint-disable-line no-unused-vars @@ -379,13 +406,13 @@ function benchmarkMapReduceOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkSortOperations (dataSizes) { +function benchmarkSortOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateSearchTestData(size); const store = haro(testData, { - index: ["age", "salary", "name", "department"] + index: ["age", "salary", "name", "department"], }); // Sort operations @@ -420,13 +447,19 @@ function benchmarkSortOperations (dataSizes) { * Prints benchmark results in a formatted table * @param {Array} results - Array of benchmark results */ -function printResults (results) { +function printResults(results) { console.log("\n=== SEARCH & FILTER BENCHMARK RESULTS ===\n"); - console.log("Operation".padEnd(40) + "Iterations".padEnd(12) + "Total Time (ms)".padEnd(18) + "Avg Time (ms)".padEnd(16) + "Ops/Second"); + console.log( + "Operation".padEnd(40) + + "Iterations".padEnd(12) + + "Total Time (ms)".padEnd(18) + + "Avg Time (ms)".padEnd(16) + + "Ops/Second", + ); console.log("-".repeat(98)); - results.forEach(result => { + results.forEach((result) => { const name = result.name.padEnd(40); const iterations = result.iterations.toString().padEnd(12); const totalTime = result.totalTime.toFixed(2).padEnd(18); @@ -442,7 +475,7 @@ function printResults (results) { /** * Main function to run all search and filter benchmarks */ -function runSearchFilterBenchmarks () { +function runSearchFilterBenchmarks() { console.log("🔍 Running Search & Filter Benchmarks...\n"); const dataSizes = [1000, 10000, 50000]; diff --git 
a/benchmarks/utility-operations.js b/benchmarks/utility-operations.js index 88a2a792..7178901f 100644 --- a/benchmarks/utility-operations.js +++ b/benchmarks/utility-operations.js @@ -6,7 +6,7 @@ import { haro } from "../dist/haro.js"; * @param {number} size - Number of records to generate * @returns {Array} Array of test records with complex nested structures */ -function generateUtilityTestData (size) { +function generateUtilityTestData(size) { const data = []; for (let i = 0; i < size; i++) { data.push({ @@ -24,15 +24,15 @@ function generateUtilityTestData (size) { notifications: Math.random() > 0.5, settings: { privacy: Math.random() > 0.3, - analytics: Math.random() > 0.7 - } - } + analytics: Math.random() > 0.7, + }, + }, }, - history: Array.from({ length: Math.min(i % 20 + 1, 10) }, (_, j) => ({ + history: Array.from({ length: Math.min((i % 20) + 1, 10) }, (_, j) => ({ action: `action_${j}`, timestamp: new Date(Date.now() - j * 1000 * 60), - data: { value: Math.random() * 1000 } - })) + data: { value: Math.random() * 1000 }, + })), }); } @@ -46,7 +46,7 @@ function generateUtilityTestData (size) { * @param {number} iterations - Number of iterations to run * @returns {Object} Benchmark results */ -function benchmark (name, fn, iterations = 1000) { +function benchmark(name, fn, iterations = 1000) { const start = performance.now(); for (let i = 0; i < iterations; i++) { fn(); @@ -60,7 +60,7 @@ function benchmark (name, fn, iterations = 1000) { iterations, totalTime: total, avgTime, - opsPerSecond: Math.floor(1000 / avgTime) + opsPerSecond: Math.floor(1000 / avgTime), }; } @@ -69,10 +69,10 @@ function benchmark (name, fn, iterations = 1000) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkCloneOperations (dataSizes) { +function benchmarkCloneOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = 
generateUtilityTestData(size); const store = haro(testData); @@ -92,9 +92,13 @@ function benchmarkCloneOperations (dataSizes) { // Clone arrays const arrayData = testData.slice(0, Math.min(100, size)); - const cloneArrayResult = benchmark(`Clone array (${arrayData.length} items, ${Math.min(100, size)} iterations)`, () => { - store.clone(arrayData); - }, Math.min(100, size)); + const cloneArrayResult = benchmark( + `Clone array (${arrayData.length} items, ${Math.min(100, size)} iterations)`, + () => { + store.clone(arrayData); + }, + Math.min(100, size), + ); results.push(cloneArrayResult); }); @@ -106,10 +110,10 @@ function benchmarkCloneOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkMergeOperations (dataSizes) { +function benchmarkMergeOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const store = haro(); // Merge simple objects @@ -125,12 +129,12 @@ function benchmarkMergeOperations (dataSizes) { id: 1, profile: { name: "John", age: 30 }, settings: { theme: "dark", notifications: true }, - tags: ["user", "admin"] + tags: ["user", "admin"], }; const complexUpdate = { profile: { age: 31, location: "NYC" }, settings: { privacy: true }, - tags: ["power-user"] + tags: ["power-user"], }; const mergeComplexResult = benchmark(`Merge complex objects (${size} iterations)`, () => { store.merge(store.clone(complexBase), complexUpdate); @@ -160,10 +164,10 @@ function benchmarkMergeOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkFreezeOperations (dataSizes) { +function benchmarkFreezeOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateUtilityTestData(size); const store = haro(); @@ -176,19 +180,27 @@ function benchmarkFreezeOperations 
(dataSizes) { // Freeze multiple objects const multipleObjects = testData.slice(0, Math.min(10, size)); - const freezeMultipleResult = benchmark(`Freeze multiple objects (${multipleObjects.length} objects, ${Math.min(100, size)} iterations)`, () => { - store.freeze(...multipleObjects); - }, Math.min(100, size)); + const freezeMultipleResult = benchmark( + `Freeze multiple objects (${multipleObjects.length} objects, ${Math.min(100, size)} iterations)`, + () => { + store.freeze(...multipleObjects); + }, + Math.min(100, size), + ); results.push(freezeMultipleResult); // Freeze nested structures const nestedStructure = { data: testData.slice(0, Math.min(50, size)), - metadata: { count: size, timestamp: new Date() } + metadata: { count: size, timestamp: new Date() }, }; - const freezeNestedResult = benchmark(`Freeze nested structure (${Math.min(10, size)} iterations)`, () => { - store.freeze(nestedStructure); - }, Math.min(10, size)); + const freezeNestedResult = benchmark( + `Freeze nested structure (${Math.min(10, size)} iterations)`, + () => { + store.freeze(nestedStructure); + }, + Math.min(10, size), + ); results.push(freezeNestedResult); }); @@ -200,43 +212,58 @@ function benchmarkFreezeOperations (dataSizes) { * @param {Array} dataSizes - Array of data sizes to test * @returns {Array} Array of benchmark results */ -function benchmarkForEachOperations (dataSizes) { +function benchmarkForEachOperations(dataSizes) { const results = []; - dataSizes.forEach(size => { + dataSizes.forEach((size) => { const testData = generateUtilityTestData(size); const store = haro(testData); // Simple forEach operation - const forEachSimpleResult = benchmark(`forEach simple operation (${size} records)`, () => { - let count = 0; // eslint-disable-line no-unused-vars - store.forEach(() => { count++; }); - }, 1); + const forEachSimpleResult = benchmark( + `forEach simple operation (${size} records)`, + () => { + let count = 0; // eslint-disable-line no-unused-vars + store.forEach(() => { 
+ count++; + }); + }, + 1, + ); results.push(forEachSimpleResult); // Complex forEach operation const aggregated = {}; - const forEachComplexResult = benchmark(`forEach complex operation (${size} records)`, () => { - store.forEach(record => { - const dept = record.metadata?.preferences?.theme || "unknown"; - aggregated[dept] = (aggregated[dept] || 0) + 1; - }); - }, 1); + const forEachComplexResult = benchmark( + `forEach complex operation (${size} records)`, + () => { + store.forEach((record) => { + const dept = record.metadata?.preferences?.theme || "unknown"; + aggregated[dept] = (aggregated[dept] || 0) + 1; + }); + }, + 1, + ); results.push(forEachComplexResult); // forEach with context const context = { processed: 0, errors: 0 }; - const forEachContextResult = benchmark(`forEach with context (${size} records)`, () => { - store.forEach(function (record) { - try { - if (record.age > 0) { - this.processed++; + const forEachContextResult = benchmark( + `forEach with context (${size} records)`, + () => { + store.forEach(function (record) { + try { + if (record.age > 0) { + this.processed++; + } + } catch (e) { + // eslint-disable-line no-unused-vars + this.errors++; } - } catch (e) { // eslint-disable-line no-unused-vars - this.errors++; - } - }, context); - }, 1); + }, context); + }, + 1, + ); results.push(forEachContextResult); }); @@ -248,15 +275,19 @@ function benchmarkForEachOperations (dataSizes) { * @param {Array} iterations - Array of iteration counts to test * @returns {Array} Array of benchmark results */ -function benchmarkUuidOperations (iterations) { +function benchmarkUuidOperations(iterations) { const results = []; const store = haro(); - iterations.forEach(count => { + iterations.forEach((count) => { // UUID generation - const uuidResult = benchmark(`UUID generation (${count} iterations)`, () => { - store.uuid(); - }, count); + const uuidResult = benchmark( + `UUID generation (${count} iterations)`, + () => { + store.uuid(); + }, + count, + ); 
results.push(uuidResult); // UUID uniqueness test (collect UUIDs and check for duplicates) @@ -273,7 +304,7 @@ function benchmarkUuidOperations (iterations) { avgTime: (uniquenessEnd - uniquenessStart) / count, opsPerSecond: Math.floor(count / ((uniquenessEnd - uniquenessStart) / 1000)), duplicates: count - uuids.size, - uniqueRatio: (uuids.size / count * 100).toFixed(2) + "%" + uniqueRatio: ((uuids.size / count) * 100).toFixed(2) + "%", }; results.push(uniquenessResult); }); @@ -285,18 +316,25 @@ function benchmarkUuidOperations (iterations) { * Prints formatted benchmark results * @param {Array} results - Array of benchmark results */ -function printResults (results) { +function printResults(results) { console.log("\n" + "=".repeat(80)); console.log("UTILITY OPERATIONS BENCHMARK RESULTS"); console.log("=".repeat(80)); - results.forEach(result => { - const opsIndicator = result.opsPerSecond > 10000 ? "✅" : - result.opsPerSecond > 1000 ? "🟡" : - result.opsPerSecond > 100 ? "🟠" : "🔴"; + results.forEach((result) => { + const opsIndicator = + result.opsPerSecond > 10000 + ? "✅" + : result.opsPerSecond > 1000 + ? "🟡" + : result.opsPerSecond > 100 + ? 
"🟠" + : "🔴"; console.log(`${opsIndicator} ${result.name}`); - console.log(` ${result.opsPerSecond.toLocaleString()} ops/sec | ${result.totalTime.toFixed(2)}ms total | ${result.avgTime.toFixed(4)}ms avg`); + console.log( + ` ${result.opsPerSecond.toLocaleString()} ops/sec | ${result.totalTime.toFixed(2)}ms total | ${result.avgTime.toFixed(4)}ms avg`, + ); if (result.duplicates !== undefined) { console.log(` Duplicates: ${result.duplicates} | Unique ratio: ${result.uniqueRatio}`); @@ -309,7 +347,7 @@ function printResults (results) { * Runs all utility operation benchmarks * @returns {Array} Array of all benchmark results */ -function runUtilityOperationsBenchmarks () { +function runUtilityOperationsBenchmarks() { console.log("Starting Utility Operations Benchmarks...\n"); const dataSizes = [100, 1000, 5000]; diff --git a/coverage.txt b/coverage.txt new file mode 100644 index 00000000..c03cd9f5 --- /dev/null +++ b/coverage.txt @@ -0,0 +1,11 @@ +ℹ start of coverage report +ℹ -------------------------------------------------------------- +ℹ file | line % | branch % | funcs % | uncovered lines +ℹ -------------------------------------------------------------- +ℹ src | | | | +ℹ constants.js | 100.00 | 100.00 | 100.00 | +ℹ haro.js | 100.00 | 97.92 | 98.73 | +ℹ -------------------------------------------------------------- +ℹ all files | 100.00 | 97.93 | 98.73 | +ℹ -------------------------------------------------------------- +ℹ end of coverage report diff --git a/dist/haro.cjs b/dist/haro.cjs index 342df23f..6e1d50a8 100644 --- a/dist/haro.cjs +++ b/dist/haro.cjs @@ -1,30 +1,28 @@ /** * haro * - * @copyright 2025 Jason Mulligan + * @copyright 2026 Jason Mulligan * @license BSD-3-Clause - * @version 16.0.0 + * @version 17.0.0 */ 'use strict'; -var crypto = require('crypto'); +var crypto$1 = require('crypto'); +var tinyLru = require('tiny-lru'); // String constants - Single characters and symbols const STRING_COMMA = ","; +const STRING_DOT = "."; const STRING_EMPTY = ""; 
const STRING_PIPE = "|"; const STRING_DOUBLE_PIPE = "||"; const STRING_DOUBLE_AND = "&&"; - -// String constants - Operation and type names -const STRING_ID = "id"; -const STRING_DEL = "del"; const STRING_FUNCTION = "function"; +const STRING_ID = "id"; const STRING_INDEXES = "indexes"; const STRING_OBJECT = "object"; const STRING_RECORDS = "records"; const STRING_REGISTRY = "registry"; -const STRING_SET = "set"; const STRING_SIZE = "size"; const STRING_STRING = "string"; const STRING_NUMBER = "number"; @@ -37,196 +35,346 @@ const STRING_RECORD_NOT_FOUND = "Record not found"; // Integer constants const INT_0 = 0; +const INT_2 = 2; + +// Number constants +const CACHE_SIZE_DEFAULT = 1000; + +// String constants - Cache and hashing +const STRING_CACHE_DOMAIN_SEARCH = "search"; +const STRING_CACHE_DOMAIN_WHERE = "where"; +const STRING_HASH_ALGORITHM = "SHA-256"; +const STRING_HEX_PAD = "0"; +const STRING_UNDERSCORE = "_"; + +// String constants - Security (prototype pollution protection) +const STRING_PROTO = "__proto__"; +const STRING_CONSTRUCTOR = "constructor"; +const STRING_PROTOTYPE = "prototype"; + +// String constants - Error messages +const STRING_ERROR_BATCH_SETMANY = "setMany: cannot call setMany within a batch operation"; +const STRING_ERROR_BATCH_DELETEMANY = + "deleteMany: cannot call deleteMany within a batch operation"; +const STRING_ERROR_DELETE_KEY_TYPE = "delete: key must be a string or number"; +const STRING_ERROR_FIND_WHERE_TYPE = "find: where must be an object"; +const STRING_ERROR_LIMIT_OFFSET_TYPE = "limit: offset must be a number"; +const STRING_ERROR_LIMIT_MAX_TYPE = "limit: max must be a number"; +const STRING_ERROR_SEARCH_VALUE = "search: value cannot be null or undefined"; +const STRING_ERROR_SET_KEY_TYPE = "set: key must be a string or number"; +const STRING_ERROR_SET_DATA_TYPE = "set: data must be an object"; +const STRING_ERROR_SORT_FN_TYPE = "sort: fn must be a function"; +const STRING_ERROR_WHERE_OP_TYPE = "where: op must be a string"; 
+const STRING_ERROR_WHERE_PREDICATE_TYPE = "where: predicate must be an object"; + +// String constants - Property names +const PROP_DELIMITER = "delimiter"; +const PROP_ID = "id"; +const PROP_IMMUTABLE = "immutable"; +const PROP_INDEX = "index"; +const PROP_KEY = "key"; +const PROP_VERSIONING = "versioning"; +const PROP_VERSIONS = "versions"; +const PROP_WARN_ON_FULL_SCAN = "warnOnFullScan"; /** - * Haro is a modern immutable DataStore for collections of records with indexing, - * versioning, and batch operations support. It provides a Map-like interface - * with advanced querying capabilities through indexes. + * Haro is an immutable DataStore with indexing, versioning, and batch operations. + * Provides a Map-like interface with advanced querying capabilities. * @class * @example - * const store = new Haro({ - * index: ['name', 'age'], - * key: 'id', - * versioning: true - * }); - * - * store.set(null, {name: 'John', age: 30}); + * const store = new Haro({ index: ['name'], key: 'id', versioning: true }); + * store.set(null, {name: 'John'}); * const results = store.find({name: 'John'}); */ class Haro { + #cache; + #cacheEnabled; + #data; + #delimiter; + #id; + #immutable; + #index; + #indexes; + #key; + #versions; + #versioning; + #warnOnFullScan; + #inBatch = false; + /** - * Creates a new Haro instance with specified configuration - * @param {Object} [config={}] - Configuration object for the store - * @param {string} [config.delimiter=STRING_PIPE] - Delimiter for composite indexes (default: '|') - * @param {string} [config.id] - Unique identifier for this instance (auto-generated if not provided) - * @param {boolean} [config.immutable=false] - Return frozen/immutable objects for data safety - * @param {string[]} [config.index=[]] - Array of field names to create indexes for - * @param {string} [config.key=STRING_ID] - Primary key field name used for record identification - * @param {boolean} [config.versioning=false] - Enable versioning to track record changes 
+ * Creates a new Haro instance. + * @param {Object} [config={}] - Configuration object + * @param {string} [config.delimiter=STRING_PIPE] - Delimiter for composite indexes + * @param {string} [config.id] - Unique instance identifier (auto-generated) + * @param {boolean} [config.immutable=false] - Return frozen objects + * @param {string[]} [config.index=[]] - Fields to index + * @param {string} [config.key=STRING_ID] - Primary key field name + * @param {boolean} [config.versioning=false] - Enable versioning + * @param {boolean} [config.warnOnFullScan=true] - Warn on full table scans * @constructor * @example - * const store = new Haro({ - * index: ['name', 'email', 'name|department'], - * key: 'userId', - * versioning: true, - * immutable: true - * }); + * const store = new Haro({ index: ['name', 'email'], key: 'userId', versioning: true }); */ - constructor ({delimiter = STRING_PIPE, id = this.uuid(), immutable = false, index = [], key = STRING_ID, versioning = false} = {}) { - this.data = new Map(); - this.delimiter = delimiter; - this.id = id; - this.immutable = immutable; - this.index = Array.isArray(index) ? [...index] : []; - this.indexes = new Map(); - this.key = key; - this.versions = new Map(); - this.versioning = versioning; + constructor({ + cache = false, + cacheSize = CACHE_SIZE_DEFAULT, + delimiter = STRING_PIPE, + id = crypto$1.randomUUID(), + immutable = false, + index = [], + key = STRING_ID, + versioning = false, + warnOnFullScan = true, + } = {}) { + this.#data = new Map(); + this.#cacheEnabled = cache === true; + this.#cache = cache === true ? tinyLru.lru(cacheSize) : null; + this.#delimiter = delimiter; + this.#id = id; + this.#immutable = immutable; + this.#index = Array.isArray(index) ? 
[...index] : []; + this.#indexes = new Map(); + this.#key = key; + this.#versions = new Map(); + this.#versioning = versioning; + this.#warnOnFullScan = warnOnFullScan; + this.#inBatch = false; Object.defineProperty(this, STRING_REGISTRY, { enumerable: true, - get: () => Array.from(this.data.keys()) + get: () => Array.from(this.#data.keys()), }); Object.defineProperty(this, STRING_SIZE, { enumerable: true, - get: () => this.data.size + get: () => this.#data.size, }); - - return this.reindex(); + Object.defineProperty(this, PROP_KEY, { + enumerable: true, + get: () => this.#key, + }); + Object.defineProperty(this, PROP_INDEX, { + enumerable: true, + get: () => [...this.#index], + }); + Object.defineProperty(this, PROP_DELIMITER, { + enumerable: true, + get: () => this.#delimiter, + }); + Object.defineProperty(this, PROP_IMMUTABLE, { + enumerable: true, + get: () => this.#immutable, + }); + Object.defineProperty(this, PROP_VERSIONING, { + enumerable: true, + get: () => this.#versioning, + }); + Object.defineProperty(this, PROP_WARN_ON_FULL_SCAN, { + enumerable: true, + get: () => this.#warnOnFullScan, + }); + Object.defineProperty(this, PROP_VERSIONS, { + enumerable: true, + get: () => this.#versions, + }); + Object.defineProperty(this, PROP_ID, { + enumerable: true, + get: () => this.#id, + }); + this.reindex(); } /** - * Performs batch operations on multiple records for efficient bulk processing - * @param {Array} args - Array of records to process - * @param {string} [type=STRING_SET] - Type of operation: 'set' for upsert, 'del' for delete - * @returns {Array} Array of results from the batch operation - * @throws {Error} Throws error if individual operations fail during batch processing + * Inserts or updates multiple records. 
+ * @param {Array} records - Records to insert or update + * @returns {Array} Stored records * @example - * const results = store.batch([ - * {id: 1, name: 'John'}, - * {id: 2, name: 'Jane'} - * ], 'set'); + * store.setMany([{id: 1, name: 'John'}, {id: 2, name: 'Jane'}]); */ - batch (args, type = STRING_SET) { - const fn = type === STRING_DEL ? i => this.delete(i, true) : i => this.set(null, i, true, true); + setMany(records) { + if (this.#inBatch) { + throw new Error(STRING_ERROR_BATCH_SETMANY); + } + this.#inBatch = true; + const results = records.map((i) => this.set(null, i, true)); + this.#inBatch = false; + this.reindex(); + this.#invalidateCache(); + return results; + } - return this.onbatch(this.beforeBatch(args, type).map(fn), type); + /** + * Deletes multiple records. + * @param {Array} keys - Keys to delete + * @returns {Array} + * @example + * store.deleteMany(['key1', 'key2']); + */ + deleteMany(keys) { + if (this.#inBatch) { + /* node:coverage ignore next */ throw new Error(STRING_ERROR_BATCH_DELETEMANY); + } + this.#inBatch = true; + const results = keys.map((i) => this.delete(i)); + this.#inBatch = false; + this.reindex(); + this.#invalidateCache(); + return results; } /** - * Lifecycle hook executed before batch operations for custom preprocessing - * @param {Array} arg - Arguments passed to batch operation - * @param {string} [type=STRING_EMPTY] - Type of batch operation ('set' or 'del') - * @returns {Array} The arguments array (possibly modified) to be processed + * Returns true if currently in a batch operation. 
+ * @returns {boolean} Batch operation status */ - beforeBatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - // Hook for custom logic before batch; override in subclass if needed - return arg; + get isBatching() { + return this.#inBatch; } /** - * Lifecycle hook executed before clear operation for custom preprocessing - * @returns {void} Override this method in subclasses to implement custom logic + * Removes all records, indexes, and versions. + * @returns {Haro} This instance * @example - * class MyStore extends Haro { - * beforeClear() { - * this.backup = this.toArray(); - * } - * } + * store.clear(); */ - beforeClear () { - // Hook for custom logic before clear; override in subclass if needed + clear() { + this.#data.clear(); + this.#indexes.clear(); + this.#versions.clear(); + this.#invalidateCache(); + + return this; } /** - * Lifecycle hook executed before delete operation for custom preprocessing - * @param {string} [key=STRING_EMPTY] - Key of record to delete - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic + * Creates a deep clone of a value. 
+ * @param {*} arg - Value to clone + * @returns {*} Deep clone */ - beforeDelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic before delete; override in subclass if needed + #clone(arg) { + if (typeof structuredClone === STRING_FUNCTION) { + return structuredClone(arg); + } + + /* node:coverage ignore */ return JSON.parse(JSON.stringify(arg)); } /** - * Lifecycle hook executed before set operation for custom preprocessing - * @param {string} [key=STRING_EMPTY] - Key of record to set - * @param {Object} [data={}] - Record data being set - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @param {boolean} [override=false] - Whether to override existing data - * @returns {void} Override this method in subclasses to implement custom logic + * Deletes a record and removes it from all indexes. + * @param {string} [key=STRING_EMPTY] - Key to delete + * @throws {Error} If key not found + * @example + * store.delete('user123'); */ - beforeSet (key = STRING_EMPTY, data = {}, batch = false, override = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic before set; override in subclass if needed + delete(key = STRING_EMPTY) { + if (typeof key !== STRING_STRING && typeof key !== STRING_NUMBER) { + throw new Error(STRING_ERROR_DELETE_KEY_TYPE); + } + if (!this.#data.has(key)) { + throw new Error(STRING_RECORD_NOT_FOUND); + } + const og = this.#data.get(key); + if (!this.#inBatch) { + this.#deleteIndex(key, og); + } + this.#data.delete(key); + if (this.#versioning && !this.#inBatch) { + this.#versions.delete(key); + } + this.#invalidateCache(); } /** - * Removes all records, indexes, and versions from the store - * @returns {Haro} This instance for method chaining - * @example - * store.clear(); - * console.log(store.size); // 0 + * Generates a cache key using SHA-256 hash. 
+ * @param {string} domain - Cache key prefix (e.g., 'search', 'where') + * @param {...*} args - Arguments to hash + * @returns {string} Cache key in format 'domain_HASH' */ - clear () { - this.beforeClear(); - this.data.clear(); - this.indexes.clear(); - this.versions.clear(); - this.reindex().onclear(); + async #getCacheKey(domain, ...args) { + const data = JSON.stringify(args); + const encoder = new TextEncoder(); + const hashBuffer = await crypto.subtle.digest(STRING_HASH_ALGORITHM, encoder.encode(data)); + const hashArray = Array.from(new Uint8Array(hashBuffer)); + const hashHex = hashArray.map((b) => b.toString(16).padStart(INT_2, STRING_HEX_PAD)).join(""); + return `${domain}${STRING_UNDERSCORE}${hashHex}`; + } + /** + * Clears the cache. + * @returns {Haro} This instance + */ + clearCache() { + if (this.#cacheEnabled) { + this.#cache.clear(); + } return this; } /** - * Creates a deep clone of the given value, handling objects, arrays, and primitives - * @param {*} arg - Value to clone (any type) - * @returns {*} Deep clone of the argument - * @example - * const original = {name: 'John', tags: ['user', 'admin']}; - * const cloned = store.clone(original); - * cloned.tags.push('new'); // original.tags is unchanged + * Returns the current cache size. + * @returns {number} Number of entries in cache + */ + getCacheSize() { + return this.#cacheEnabled ? this.#cache.size : 0; + } + + /** + * Returns cache statistics. + * @returns {Object|null} Stats object with hits, misses, sets, deletes, evictions */ - clone (arg) { - return structuredClone(arg); + getCacheStats() { + return this.#cacheEnabled ? this.#cache.stats() : null; } /** - * Deletes a record from the store and removes it from all indexes - * @param {string} [key=STRING_EMPTY] - Key of record to delete - * @param {boolean} [batch=false] - Whether this is part of a batch operation + * Invalidates the cache if enabled and not in batch mode. 
* @returns {void} - * @throws {Error} Throws error if record with the specified key is not found - * @example - * store.delete('user123'); - * // Throws error if 'user123' doesn't exist */ - delete (key = STRING_EMPTY, batch = false) { - if (!this.data.has(key)) { - throw new Error(STRING_RECORD_NOT_FOUND); + #invalidateCache() { + if (this.#cacheEnabled && !this.#inBatch) { + this.#cache.clear(); + } + } + + /** + * Retrieves a value from a nested object using dot notation. + * @param {Object} obj - Object to traverse + * @param {string} path - Dot-notation path (e.g., 'user.address.city') + * @returns {*} Value at path, or undefined if path doesn't exist + */ + #getNestedValue(obj, path) { + /* node:coverage ignore next 3 */ + if (obj === null || obj === undefined || path === STRING_EMPTY) { + return undefined; } - const og = this.get(key, true); - this.beforeDelete(key, batch); - this.deleteIndex(key, og); - this.data.delete(key); - this.ondelete(key, batch); - if (this.versioning) { - this.versions.delete(key); + const keys = path.split(STRING_DOT); + let result = obj; + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const key = keys[i]; + if (result === null || result === undefined || !(key in result)) { + return undefined; + } + result = result[key]; } + return result; } /** - * Internal method to remove entries from indexes for a deleted record - * @param {string} key - Key of record being deleted - * @param {Object} data - Data of record being deleted - * @returns {Haro} This instance for method chaining + * Removes a record from all indexes. + * @param {string} key - Record key + * @param {Object} data - Record data + * @returns {Haro} This instance */ - deleteIndex (key, data) { - this.index.forEach(i => { - const idx = this.indexes.get(i); + #deleteIndex(key, data) { + this.#index.forEach((i) => { + const idx = this.#indexes.get(i); if (!idx) return; - const values = i.includes(this.delimiter) ? 
- this.indexKeys(i, this.delimiter, data) : - Array.isArray(data[i]) ? data[i] : [data[i]]; - this.each(values, value => { + const values = i.includes(this.#delimiter) + ? this.#getIndexKeys(i, this.#delimiter, data) + : Array.isArray(this.#getNestedValue(data, i)) + ? this.#getNestedValue(data, i) + : [this.#getNestedValue(data, i)]; + const len = values.length; + for (let j = 0; j < len; j++) { + const value = values[j]; if (idx.has(value)) { const o = idx.get(value); o.delete(key); @@ -234,27 +382,26 @@ class Haro { idx.delete(value); } } - }); + } }); return this; } /** - * Exports complete store data or indexes for persistence or debugging - * @param {string} [type=STRING_RECORDS] - Type of data to export: 'records' or 'indexes' - * @returns {Array} Array of [key, value] pairs for records, or serialized index structure + * Exports store data or indexes. + * @param {string} [type=STRING_RECORDS] - Export type: 'records' or 'indexes' + * @returns {Array} Exported data * @example * const records = store.dump('records'); - * const indexes = store.dump('indexes'); */ - dump (type = STRING_RECORDS) { + dump(type = STRING_RECORDS) { let result; if (type === STRING_RECORDS) { result = Array.from(this.entries()); } else { - result = Array.from(this.indexes).map(i => { - i[1] = Array.from(i[1]).map(ii => { + result = Array.from(this.#indexes).map((i) => { + i[1] = Array.from(i[1]).map((ii) => { ii[1] = Array.from(ii[1]); return ii; @@ -268,110 +415,159 @@ class Haro { } /** - * Utility method to iterate over an array with a callback function - * @param {Array<*>} [arr=[]] - Array to iterate over - * @param {Function} fn - Function to call for each element (element, index) - * @returns {Array<*>} The original array for method chaining - * @example - * store.each([1, 2, 3], (item, index) => console.log(item, index)); + * Generates index keys for composite indexes from data object. 
+ * @param {string} arg - Composite index field names + * @param {string} delimiter - Field delimiter + * @param {Object} data - Data object + * @returns {string[]} Index keys */ - each (arr = [], fn) { - const len = arr.length; - for (let i = 0; i < len; i++) { - fn(arr[i], i); + #getIndexKeys(arg, delimiter, data) { + const fields = arg.split(this.#delimiter).sort(this.#sortKeys); + const result = [STRING_EMPTY]; + const fieldsLen = fields.length; + for (let i = 0; i < fieldsLen; i++) { + const field = fields[i]; + const fieldValue = this.#getNestedValue(data, field); + const values = Array.isArray(fieldValue) ? fieldValue : [fieldValue]; + const newResult = []; + const resultLen = result.length; + const valuesLen = values.length; + for (let j = 0; j < resultLen; j++) { + const existing = result[j]; + for (let k = 0; k < valuesLen; k++) { + const value = values[k]; + const newKey = i === 0 ? value : `${existing}${this.#delimiter}${value}`; + newResult.push(newKey); + } + } + result.length = 0; + result.push(...newResult); } + return result; + } - return arr; + /** + * Generates index keys for where object (handles both dot notation and direct access). + * @param {string} arg - Composite index field names + * @param {string} delimiter - Field delimiter + * @param {Object} where - Where object + * @returns {string[]} Index keys + */ + #getIndexKeysForWhere(arg, delimiter, where) { + const fields = arg.split(this.#delimiter).sort(this.#sortKeys); + const result = [STRING_EMPTY]; + const fieldsLen = fields.length; + for (let i = 0; i < fieldsLen; i++) { + const field = fields[i]; + // Check if field exists directly in where object first (for dot notation keys) + let fieldValue; + if (field in where) { + fieldValue = where[field]; + /* node:coverage ignore next 4 */ + } else { + fieldValue = this.#getNestedValue(where, field); + } + const values = Array.isArray(fieldValue) ? 
fieldValue : [fieldValue]; + const newResult = []; + const resultLen = result.length; + const valuesLen = values.length; + for (let j = 0; j < resultLen; j++) { + const existing = result[j]; + for (let k = 0; k < valuesLen; k++) { + const value = values[k]; + const newKey = i === 0 ? value : `${existing}${this.#delimiter}${value}`; + newResult.push(newKey); + } + } + result.length = 0; + result.push(...newResult); + } + return result; } /** - * Returns an iterator of [key, value] pairs for each record in the store - * @returns {Iterator>} Iterator of [key, value] pairs + * Returns an iterator of [key, value] pairs. + * @returns {Iterator>} Key-value pairs * @example - * for (const [key, value] of store.entries()) { - * console.log(key, value); - * } + * for (const [key, value] of store.entries()) { } */ - entries () { - return this.data.entries(); + entries() { + return this.#data.entries(); } /** - * Finds records matching the specified criteria using indexes for optimal performance - * @param {Object} [where={}] - Object with field-value pairs to match against - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of matching records (frozen if immutable mode) + * Finds records matching criteria using indexes. + * @param {Object} [where={}] - Field-value pairs to match + * @returns {Array} Matching records * @example - * const users = store.find({department: 'engineering', active: true}); - * const admins = store.find({role: 'admin'}); + * store.find({department: 'engineering', active: true}); */ - find (where = {}, raw = false) { - const key = Object.keys(where).sort(this.sortKeys).join(this.delimiter); - const index = this.indexes.get(key) ?? 
new Map(); - let result = []; - if (index.size > 0) { - const keys = this.indexKeys(key, this.delimiter, where); - result = Array.from(keys.reduce((a, v) => { + find(where = {}) { + if (typeof where !== STRING_OBJECT || where === null) { + throw new Error(STRING_ERROR_FIND_WHERE_TYPE); + } + const whereKeys = Object.keys(where).sort(this.#sortKeys); + const compositeKey = whereKeys.join(this.#delimiter); + const result = new Set(); + + const index = this.#indexes.get(compositeKey); + if (index) { + const keys = this.#getIndexKeysForWhere(compositeKey, this.#delimiter, where); + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const v = keys[i]; if (index.has(v)) { - index.get(v).forEach(k => a.add(k)); + const keySet = index.get(v); + for (const k of keySet) { + result.add(k); + } } - - return a; - }, new Set())).map(i => this.get(i, raw)); - } - if (!raw && this.immutable) { - result = Object.freeze(result); + } } - return result; + const records = Array.from(result, (i) => this.get(i)); + if (this.#immutable) { + return Object.freeze(records); + } + return records; } /** - * Filters records using a predicate function, similar to Array.filter - * @param {Function} fn - Predicate function to test each record (record, key, store) - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records that pass the predicate test - * @throws {Error} Throws error if fn is not a function + * Filters records using a predicate function. 
+ * @param {Function} fn - Predicate function (record, key, store) + * @returns {Array} Filtered records + * @throws {Error} If fn is not a function * @example - * const adults = store.filter(record => record.age >= 18); - * const recent = store.filter(record => record.created > Date.now() - 86400000); + * store.filter(record => record.age >= 18); */ - filter (fn, raw = false) { + filter(fn) { if (typeof fn !== STRING_FUNCTION) { throw new Error(STRING_INVALID_FUNCTION); } - let result = this.reduce((a, v) => { - if (fn(v)) { - a.push(v); - } - - return a; - }, []); - if (!raw) { - result = result.map(i => this.list(i)); - - if (this.immutable) { - result = Object.freeze(result); + const result = []; + this.#data.forEach((value, key) => { + if (fn(value, key, this)) { + result.push(value); } + }); + if (this.#immutable) { + return Object.freeze(result); } - return result; } /** - * Executes a function for each record in the store, similar to Array.forEach - * @param {Function} fn - Function to execute for each record (value, key) - * @param {*} [ctx] - Context object to use as 'this' when executing the function - * @returns {Haro} This instance for method chaining + * Executes a function for each record. 
+ * @param {Function} fn - Function (value, key) + * @param {*} [ctx] - Context for fn + * @returns {Haro} This instance * @example - * store.forEach((record, key) => { - * console.log(`${key}: ${record.name}`); - * }); + * store.forEach((record, key) => console.log(key, record)); */ - forEach (fn, ctx = this) { - this.data.forEach((value, key) => { - if (this.immutable) { - value = this.clone(value); + forEach(fn, ctx = this) { + this.#data.forEach((value, key) => { + if (this.#immutable) { + value = this.#clone(value); } fn.call(ctx, value, key); }, this); @@ -380,109 +576,61 @@ class Haro { } /** - * Creates a frozen array from the given arguments for immutable data handling - * @param {...*} args - Arguments to freeze into an array - * @returns {Array<*>} Frozen array containing frozen arguments - * @example - * const frozen = store.freeze(obj1, obj2, obj3); - * // Returns Object.freeze([Object.freeze(obj1), Object.freeze(obj2), Object.freeze(obj3)]) - */ - freeze (...args) { - return Object.freeze(args.map(i => Object.freeze(i))); - } - - /** - * Retrieves a record by its key - * @param {string} key - Key of record to retrieve - * @param {boolean} [raw=false] - Whether to return raw data (true) or processed/frozen data (false) - * @returns {Object|null} The record if found, null if not found + * Retrieves a record by key. + * @param {string} key - Record key + * @returns {Object|null} Record or null * @example - * const user = store.get('user123'); - * const rawUser = store.get('user123', true); + * store.get('user123'); */ - get (key, raw = false) { - let result = this.data.get(key) ?? 
null; - if (result !== null && !raw) { - result = this.list(result); - if (this.immutable) { - result = Object.freeze(result); - } + get(key) { + const result = this.#data.get(key); + if (result === undefined) { + return null; + } + if (this.#immutable) { + return Object.freeze(result); } - return result; } /** - * Checks if a record with the specified key exists in the store - * @param {string} key - Key to check for existence - * @returns {boolean} True if record exists, false otherwise - * @example - * if (store.has('user123')) { - * console.log('User exists'); - * } - */ - has (key) { - return this.data.has(key); - } - - /** - * Generates index keys for composite indexes from data values - * @param {string} [arg=STRING_EMPTY] - Composite index field names joined by delimiter - * @param {string} [delimiter=STRING_PIPE] - Delimiter used in composite index - * @param {Object} [data={}] - Data object to extract field values from - * @returns {string[]} Array of generated index keys + * Checks if a record exists. + * @param {string} key - Record key + * @returns {boolean} True if exists * @example - * // For index 'name|department' with data {name: 'John', department: 'IT'} - * const keys = store.indexKeys('name|department', '|', data); - * // Returns ['John|IT'] + * store.has('user123'); */ - indexKeys (arg = STRING_EMPTY, delimiter = STRING_PIPE, data = {}) { - const fields = arg.split(delimiter).sort(this.sortKeys); - const fieldsLen = fields.length; - let result = [""]; - for (let i = 0; i < fieldsLen; i++) { - const field = fields[i]; - const values = Array.isArray(data[field]) ? data[field] : [data[field]]; - const newResult = []; - const resultLen = result.length; - const valuesLen = values.length; - for (let j = 0; j < resultLen; j++) { - for (let k = 0; k < valuesLen; k++) { - const newKey = i === 0 ? 
values[k] : `${result[j]}${delimiter}${values[k]}`; - newResult.push(newKey); - } - } - result = newResult; - } - - return result; + has(key) { + return this.#data.has(key); } /** - * Returns an iterator of all keys in the store - * @returns {Iterator} Iterator of record keys + * Returns an iterator of all keys. + * @returns {Iterator} Keys * @example - * for (const key of store.keys()) { - * console.log(key); - * } + * for (const key of store.keys()) { } */ - keys () { - return this.data.keys(); + keys() { + return this.#data.keys(); } /** - * Returns a limited subset of records with offset support for pagination - * @param {number} [offset=INT_0] - Number of records to skip from the beginning - * @param {number} [max=INT_0] - Maximum number of records to return - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records within the specified range + * Returns a limited subset of records. + * @param {number} [offset=INT_0] - Records to skip + * @param {number} [max=INT_0] - Max records to return + * @returns {Array} Records * @example - * const page1 = store.limit(0, 10); // First 10 records - * const page2 = store.limit(10, 10); // Next 10 records + * store.limit(0, 10); */ - limit (offset = INT_0, max = INT_0, raw = false) { - let result = this.registry.slice(offset, offset + max).map(i => this.get(i, raw)); - if (!raw && this.immutable) { + limit(offset = INT_0, max = INT_0) { + if (typeof offset !== STRING_NUMBER) { + throw new Error(STRING_ERROR_LIMIT_OFFSET_TYPE); + } + if (typeof max !== STRING_NUMBER) { + throw new Error(STRING_ERROR_LIMIT_MAX_TYPE); + } + let result = this.registry.slice(offset, offset + max).map((i) => this.get(i)); + if (this.#immutable) { result = Object.freeze(result); } @@ -490,62 +638,51 @@ class Haro { } /** - * Converts a record into a [key, value] pair array format - * @param {Object} arg - Record object to convert to list format - * @returns {Array<*>} Array containing 
[key, record] where key is extracted from record's key field + * Transforms records using a mapping function. + * @param {Function} fn - Transform function (record, key) + * @returns {Array<*>} Transformed results + * @throws {Error} If fn is not a function * @example - * const record = {id: 'user123', name: 'John', age: 30}; - * const pair = store.list(record); // ['user123', {id: 'user123', name: 'John', age: 30}] + * store.map(record => record.name); */ - list (arg) { - const result = [arg[this.key], arg]; - - return this.immutable ? this.freeze(...result) : result; - } - - /** - * Transforms all records using a mapping function, similar to Array.map - * @param {Function} fn - Function to transform each record (record, key) - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array<*>} Array of transformed results - * @throws {Error} Throws error if fn is not a function - * @example - * const names = store.map(record => record.name); - * const summaries = store.map(record => ({id: record.id, name: record.name})); - */ - map (fn, raw = false) { + map(fn) { if (typeof fn !== STRING_FUNCTION) { throw new Error(STRING_INVALID_FUNCTION); } let result = []; this.forEach((value, key) => result.push(fn(value, key))); - if (!raw) { - result = result.map(i => this.list(i)); - if (this.immutable) { - result = Object.freeze(result); - } + if (this.#immutable) { + result = Object.freeze(result); } return result; } /** - * Merges two values together with support for arrays and objects - * @param {*} a - First value (target) - * @param {*} b - Second value (source) - * @param {boolean} [override=false] - Whether to override arrays instead of concatenating + * Merges two values. 
+ * @param {*} a - Target value + * @param {*} b - Source value + * @param {boolean} [override=false] - Override arrays * @returns {*} Merged result - * @example - * const merged = store.merge({a: 1}, {b: 2}); // {a: 1, b: 2} - * const arrays = store.merge([1, 2], [3, 4]); // [1, 2, 3, 4] */ - merge (a, b, override = false) { + #merge(a, b, override = false) { if (Array.isArray(a) && Array.isArray(b)) { a = override ? b : a.concat(b); - } else if (typeof a === STRING_OBJECT && a !== null && typeof b === STRING_OBJECT && b !== null) { - this.each(Object.keys(b), i => { - a[i] = this.merge(a[i], b[i], override); - }); + } else if ( + typeof a === STRING_OBJECT && + a !== null && + typeof b === STRING_OBJECT && + b !== null + ) { + const keys = Object.keys(b); + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const key = keys[i]; + if (key === STRING_PROTO || key === STRING_CONSTRUCTOR || key === STRING_PROTOTYPE) { + continue; + } + a[key] = this.#merge(a[key], b[key], override); + } } else { a = b; } @@ -554,271 +691,235 @@ class Haro { } /** - * Lifecycle hook executed after batch operations for custom postprocessing - * @param {Array} arg - Result of batch operation - * @param {string} [type=STRING_EMPTY] - Type of batch operation that was performed - * @returns {Array} Modified result (override this method to implement custom logic) - */ - onbatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - return arg; - } - - /** - * Lifecycle hook executed after clear operation for custom postprocessing - * @returns {void} Override this method in subclasses to implement custom logic + * Replaces store data or indexes. 
+ * @param {Array} data - Data to replace + * @param {string} [type=STRING_RECORDS] - Type: 'records' or 'indexes' + * @returns {boolean} Success + * @throws {Error} If type is invalid * @example - * class MyStore extends Haro { - * onclear() { - * console.log('Store cleared'); - * } - * } + * store.override([['key1', {name: 'John'}]], 'records'); */ - onclear () { - // Hook for custom logic after clear; override in subclass if needed - } - - /** - * Lifecycle hook executed after delete operation for custom postprocessing - * @param {string} [key=STRING_EMPTY] - Key of deleted record - * @param {boolean} [batch=false] - Whether this was part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic - */ - ondelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic after delete; override in subclass if needed - } - - /** - * Lifecycle hook executed after override operation for custom postprocessing - * @param {string} [type=STRING_EMPTY] - Type of override operation that was performed - * @returns {void} Override this method in subclasses to implement custom logic - */ - onoverride (type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - // Hook for custom logic after override; override in subclass if needed - } - - /** - * Lifecycle hook executed after set operation for custom postprocessing - * @param {Object} [arg={}] - Record that was set - * @param {boolean} [batch=false] - Whether this was part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic - */ - onset (arg = {}, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic after set; override in subclass if needed - } - - /** - * Replaces all store data or indexes with new data for bulk operations - * @param {Array} data - Data to replace with (format depends on type) - * @param {string} [type=STRING_RECORDS] - Type of data: 
'records' or 'indexes' - * @returns {boolean} True if operation succeeded - * @throws {Error} Throws error if type is invalid - * @example - * const records = [['key1', {name: 'John'}], ['key2', {name: 'Jane'}]]; - * store.override(records, 'records'); - */ - override (data, type = STRING_RECORDS) { + override(data, type = STRING_RECORDS) { const result = true; if (type === STRING_INDEXES) { - this.indexes = new Map(data.map(i => [i[0], new Map(i[1].map(ii => [ii[0], new Set(ii[1])]))])); + this.#indexes = new Map( + data.map((i) => [i[0], new Map(i[1].map((ii) => [ii[0], new Set(ii[1])]))]), + ); } else if (type === STRING_RECORDS) { - this.indexes.clear(); - this.data = new Map(data); + this.#indexes.clear(); + this.#data = new Map(data); } else { throw new Error(STRING_INVALID_TYPE); } - this.onoverride(type); + this.#invalidateCache(); return result; } /** - * Reduces all records to a single value using a reducer function - * @param {Function} fn - Reducer function (accumulator, value, key, store) - * @param {*} [accumulator] - Initial accumulator value - * @returns {*} Final reduced value + * Rebuilds indexes. + * @param {string|string[]} [index] - Field(s) to rebuild, or all + * @returns {Haro} This instance * @example - * const totalAge = store.reduce((sum, record) => sum + record.age, 0); - * const names = store.reduce((acc, record) => acc.concat(record.name), []); + * store.reindex(); + * store.reindex('name'); */ - reduce (fn, accumulator = []) { - let a = accumulator; - this.forEach((v, k) => { - a = fn(a, v, k, this); - }, this); + reindex(index) { + const indices = index ? (Array.isArray(index) ? 
index : [index]) : this.#index; + if (index && this.#index.includes(index) === false) { + this.#index.push(index); + } + const indicesLen = indices.length; + for (let i = 0; i < indicesLen; i++) { + this.#indexes.set(indices[i], new Map()); + } + this.forEach((data, key) => { + for (let i = 0; i < indicesLen; i++) { + this.#setIndex(key, data, indices[i]); + } + }); + this.#invalidateCache(); - return a; + return this; } /** - * Rebuilds indexes for specified fields or all fields for data consistency - * @param {string|string[]} [index] - Specific index field(s) to rebuild, or all if not specified - * @returns {Haro} This instance for method chaining + * Searches for records containing a value. + * @param {*} value - Search value (string, function, or RegExp) + * @param {string|string[]} [index] - Index(es) to search, or all + * @returns {Promise>} Matching records * @example - * store.reindex(); // Rebuild all indexes - * store.reindex('name'); // Rebuild only name index - * store.reindex(['name', 'email']); // Rebuild name and email indexes + * store.search('john'); + * store.search(/^admin/, 'role'); */ - reindex (index) { - const indices = index ? [index] : this.index; - if (index && this.index.includes(index) === false) { - this.index.push(index); + async search(value, index) { + if (value === null || value === undefined) { + throw new Error(STRING_ERROR_SEARCH_VALUE); } - this.each(indices, i => this.indexes.set(i, new Map())); - this.forEach((data, key) => this.each(indices, i => this.setIndex(key, data, i))); - return this; - } + let cacheKey; + if (this.#cacheEnabled) { + cacheKey = await this.#getCacheKey(STRING_CACHE_DOMAIN_SEARCH, value, index); + const cached = this.#cache.get(cacheKey); + if (cached !== undefined) { + return this.#immutable ? 
Object.freeze(cached) : this.#clone(cached); + } + } - /** - * Searches for records containing a value across specified indexes - * @param {*} value - Value to search for (string, function, or RegExp) - * @param {string|string[]} [index] - Index(es) to search in, or all if not specified - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of matching records - * @example - * const results = store.search('john'); // Search all indexes - * const nameResults = store.search('john', 'name'); // Search only name index - * const regexResults = store.search(/^admin/, 'role'); // Regex search - */ - search (value, index, raw = false) { - const result = new Set(); // Use Set for unique keys + const result = new Set(); const fn = typeof value === STRING_FUNCTION; const rgex = value && typeof value.test === STRING_FUNCTION; - if (!value) return this.immutable ? this.freeze() : []; - const indices = index ? Array.isArray(index) ? index : [index] : this.index; - for (const i of indices) { - const idx = this.indexes.get(i); - if (idx) { - for (const [lkey, lset] of idx) { - let match = false; - - if (fn) { - match = value(lkey, i); - } else if (rgex) { - match = value.test(Array.isArray(lkey) ? lkey.join(STRING_COMMA) : lkey); - } else { - match = lkey === value; - } + const indices = index ? (Array.isArray(index) ? index : [index]) : this.#index; + const indicesLen = indices.length; - if (match) { - for (const key of lset) { - if (this.data.has(key)) { - result.add(key); - } + for (let i = 0; i < indicesLen; i++) { + const idxName = indices[i]; + const idx = this.#indexes.get(idxName); + if (!idx) continue; + + for (const [lkey, lset] of idx) { + let match = false; + + if (fn) { + match = value(lkey, idxName); + } else if (rgex) { + match = value.test(Array.isArray(lkey) ? 
lkey.join(STRING_COMMA) : lkey); + } else { + match = lkey === value; + } + + if (match) { + for (const key of lset) { + if (this.#data.has(key)) { + result.add(key); } } } } } - let records = Array.from(result).map(key => this.get(key, raw)); - if (!raw && this.immutable) { - records = Object.freeze(records); + const records = Array.from(result, (key) => this.get(key)); + + if (this.#cacheEnabled) { + this.#cache.set(cacheKey, records); } + if (this.#immutable) { + return Object.freeze(records); + } return records; } /** - * Sets or updates a record in the store with automatic indexing - * @param {string|null} [key=null] - Key for the record, or null to use record's key field - * @param {Object} [data={}] - Record data to set - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @param {boolean} [override=false] - Whether to override existing data instead of merging - * @returns {Object} The stored record (frozen if immutable mode) + * Sets or updates a record with automatic indexing. + * @param {string|null} [key=null] - Record key, or null for auto-generate + * @param {Object} [data={}] - Record data + * @param {boolean} [override=false] - Override instead of merge + * @returns {Object} Stored record * @example - * const user = store.set(null, {name: 'John', age: 30}); // Auto-generate key - * const updated = store.set('user123', {age: 31}); // Update existing record + * store.set(null, {name: 'John'}); + * store.set('user123', {age: 31}); */ - set (key = null, data = {}, batch = false, override = false) { + set(key = null, data = {}, override = false) { + if (key !== null && typeof key !== STRING_STRING && typeof key !== STRING_NUMBER) { + throw new Error(STRING_ERROR_SET_KEY_TYPE); + } + if (typeof data !== STRING_OBJECT || data === null) { + throw new Error(STRING_ERROR_SET_DATA_TYPE); + } if (key === null) { - key = data[this.key] ?? this.uuid(); + key = data[this.#key] ?? 
crypto$1.randomUUID(); } - let x = {...data, [this.key]: key}; - this.beforeSet(key, x, batch, override); - if (!this.data.has(key)) { - if (this.versioning) { - this.versions.set(key, new Set()); + let x = { ...data, [this.#key]: key }; + if (!this.#data.has(key)) { + if (this.#versioning && !this.#inBatch) { + this.#versions.set(key, new Set()); } } else { - const og = this.get(key, true); - this.deleteIndex(key, og); - if (this.versioning) { - this.versions.get(key).add(Object.freeze(this.clone(og))); + const og = this.#data.get(key); + if (!this.#inBatch) { + this.#deleteIndex(key, og); + if (this.#versioning) { + this.#versions.get(key).add(Object.freeze(this.#clone(og))); + } } - if (!override) { - x = this.merge(this.clone(og), x); + if (!this.#inBatch && !override) { + x = this.#merge(this.#clone(og), x); } } - this.data.set(key, x); - this.setIndex(key, x, null); + this.#data.set(key, x); + + if (!this.#inBatch) { + this.#setIndex(key, x, null); + } + const result = this.get(key); - this.onset(result, batch); + this.#invalidateCache(); return result; } /** - * Internal method to add entries to indexes for a record - * @param {string} key - Key of record being indexed - * @param {Object} data - Data of record being indexed - * @param {string|null} indice - Specific index to update, or null for all - * @returns {Haro} This instance for method chaining + * Adds a record to indexes. + * @param {string} key - Record key + * @param {Object} data - Record data + * @param {string|null} indice - Index to update, or null for all + * @returns {Haro} This instance */ - setIndex (key, data, indice) { - this.each(indice === null ? this.index : [indice], i => { - let idx = this.indexes.get(i); + #setIndex(key, data, indice) { + const indices = indice === null ? 
this.#index : [indice]; + const indicesLen = indices.length; + for (let i = 0; i < indicesLen; i++) { + const field = indices[i]; + let idx = this.#indexes.get(field); if (!idx) { idx = new Map(); - this.indexes.set(i, idx); + this.#indexes.set(field, idx); } - const fn = c => { - if (!idx.has(c)) { - idx.set(c, new Set()); + const values = field.includes(this.#delimiter) + ? this.#getIndexKeys(field, this.#delimiter, data) + : Array.isArray(this.#getNestedValue(data, field)) + ? this.#getNestedValue(data, field) + : [this.#getNestedValue(data, field)]; + const valuesLen = values.length; + for (let j = 0; j < valuesLen; j++) { + const value = values[j]; + if (!idx.has(value)) { + idx.set(value, new Set()); } - idx.get(c).add(key); - }; - if (i.includes(this.delimiter)) { - this.each(this.indexKeys(i, this.delimiter, data), fn); - } else { - this.each(Array.isArray(data[i]) ? data[i] : [data[i]], fn); + idx.get(value).add(key); } - }); - + } return this; } /** - * Sorts all records using a comparator function - * @param {Function} fn - Comparator function for sorting (a, b) => number - * @param {boolean} [frozen=false] - Whether to return frozen records - * @returns {Array} Sorted array of records + * Sorts records using a comparator function. 
+ * @param {Function} fn - Comparator (a, b) => number + * @param {boolean} [frozen=false] - Return frozen records + * @returns {Array} Sorted records * @example - * const sorted = store.sort((a, b) => a.age - b.age); // Sort by age - * const names = store.sort((a, b) => a.name.localeCompare(b.name)); // Sort by name + * store.sort((a, b) => a.age - b.age); */ - sort (fn, frozen = false) { - const dataSize = this.data.size; - let result = this.limit(INT_0, dataSize, true).sort(fn); + sort(fn, frozen = false) { + if (typeof fn !== STRING_FUNCTION) { + throw new Error(STRING_ERROR_SORT_FN_TYPE); + } + const dataSize = this.#data.size; + let result = this.limit(INT_0, dataSize).sort(fn); if (frozen) { - result = this.freeze(...result); + result = Object.freeze(result); } return result; } /** - * Comparator function for sorting keys with type-aware comparison logic - * @param {*} a - First value to compare - * @param {*} b - Second value to compare - * @returns {number} Negative number if a < b, positive if a > b, zero if equal - * @example - * const keys = ['name', 'age', 'email']; - * keys.sort(store.sortKeys); // Alphabetical sort - * - * const mixed = [10, '5', 'abc', 3]; - * mixed.sort(store.sortKeys); // Type-aware sort: numbers first, then strings + * Sorts keys with type-aware comparison. 
+ * @param {*} a - First value + * @param {*} b - Second value + * @returns {number} Comparison result */ - sortKeys (a, b) { + #sortKeys(a, b) { // Handle string comparison if (typeof a === STRING_STRING && typeof b === STRING_STRING) { return a.localeCompare(b); @@ -829,50 +930,54 @@ class Haro { } // Handle mixed types or other types by converting to string - return String(a).localeCompare(String(b)); } /** - * Sorts records by a specific indexed field in ascending order - * @param {string} [index=STRING_EMPTY] - Index field name to sort by - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records sorted by the specified field - * @throws {Error} Throws error if index field is empty or invalid + * Sorts records by an indexed field. + * @param {string} [index=STRING_EMPTY] - Field to sort by + * @returns {Array} Sorted records + * @throws {Error} If index is empty * @example - * const byAge = store.sortBy('age'); - * const byName = store.sortBy('name'); + * store.sortBy('age'); */ - sortBy (index = STRING_EMPTY, raw = false) { + sortBy(index = STRING_EMPTY) { if (index === STRING_EMPTY) { throw new Error(STRING_INVALID_FIELD); } - let result = []; const keys = []; - if (this.indexes.has(index) === false) { + if (this.#indexes.has(index) === false) { this.reindex(index); } - const lindex = this.indexes.get(index); + const lindex = this.#indexes.get(index); lindex.forEach((idx, key) => keys.push(key)); - this.each(keys.sort(this.sortKeys), i => lindex.get(i).forEach(key => result.push(this.get(key, raw)))); - if (this.immutable) { - result = Object.freeze(result); - } + keys.sort(this.#sortKeys); + const result = keys.flatMap((i) => { + const inner = Array.from(lindex.get(i)); + const innerLen = inner.length; + const mapped = Array.from({ length: innerLen }, (_, j) => this.get(inner[j])); + return mapped; + }); + if (this.#immutable) { + return Object.freeze(result); + } return result; } /** - * Converts 
all store data to a plain array of records - * @returns {Array} Array containing all records in the store + * Converts store data to an array. + * @returns {Array} All records * @example - * const allRecords = store.toArray(); - * console.log(`Store contains ${allRecords.length} records`); + * store.toArray(); */ - toArray () { - const result = Array.from(this.data.values()); - if (this.immutable) { - this.each(result, i => Object.freeze(i)); + toArray() { + const result = Array.from(this.#data.values()); + if (this.#immutable) { + const resultLen = result.length; + for (let i = 0; i < resultLen; i++) { + Object.freeze(result[i]); + } Object.freeze(result); } @@ -880,88 +985,100 @@ class Haro { } /** - * Generates a RFC4122 v4 UUID for record identification - * @returns {string} UUID string in standard format - * @example - * const id = store.uuid(); // "f47ac10b-58cc-4372-a567-0e02b2c3d479" - */ - uuid () { - return crypto.randomUUID(); - } - - /** - * Returns an iterator of all values in the store - * @returns {Iterator} Iterator of record values + * Returns an iterator of all values. + * @returns {Iterator} Values * @example - * for (const record of store.values()) { - * console.log(record.name); - * } + * for (const record of store.values()) { } */ - values () { - return this.data.values(); + values() { + return this.#data.values(); } /** - * Internal helper method for predicate matching with support for arrays and regex - * @param {Object} record - Record to test against predicate - * @param {Object} predicate - Predicate object with field-value pairs - * @param {string} op - Operator for array matching ('||' for OR, '&&' for AND) - * @returns {boolean} True if record matches predicate criteria + * Matches a record against a predicate. 
+ * @param {Object} record - Record to test + * @param {Object} predicate - Predicate object + * @param {string} op - Operator: '||' or '&&' + * @returns {boolean} True if matches */ - matchesPredicate (record, predicate, op) { + #matchesPredicate(record, predicate, op) { const keys = Object.keys(predicate); - return keys.every(key => { + return keys.every((key) => { const pred = predicate[key]; - const val = record[key]; + // Use nested value extraction for dot notation paths + const val = this.#getNestedValue(record, key); if (Array.isArray(pred)) { if (Array.isArray(val)) { - return op === STRING_DOUBLE_AND ? pred.every(p => val.includes(p)) : pred.some(p => val.includes(p)); - } else { - return op === STRING_DOUBLE_AND ? pred.every(p => val === p) : pred.some(p => val === p); - } - } else if (pred instanceof RegExp) { - if (Array.isArray(val)) { - return op === STRING_DOUBLE_AND ? val.every(v => pred.test(v)) : val.some(v => pred.test(v)); - } else { - return pred.test(val); + return op === STRING_DOUBLE_AND + ? pred.every((p) => val.includes(p)) + : pred.some((p) => val.includes(p)); } - } else if (Array.isArray(val)) { - return val.includes(pred); - } else { - return val === pred; + return op === STRING_DOUBLE_AND + ? 
pred.every((p) => val === p) + : pred.some((p) => val === p); + } + if (Array.isArray(val)) { + return val.some((v) => { + if (pred instanceof RegExp) { + return pred.test(v); + } + if (v instanceof RegExp) { + return v.test(pred); + } + return v === pred; + }); } + if (pred instanceof RegExp) { + return pred.test(val); + } + return val === pred; }); } /** - * Advanced filtering with predicate logic supporting AND/OR operations on arrays - * @param {Object} [predicate={}] - Object with field-value pairs for filtering - * @param {string} [op=STRING_DOUBLE_PIPE] - Operator for array matching ('||' for OR, '&&' for AND) - * @returns {Array} Array of records matching the predicate criteria + * Filters records with predicate logic supporting AND/OR on arrays. + * @param {Object} [predicate={}] - Field-value pairs + * @param {string} [op=STRING_DOUBLE_PIPE] - Operator: '||' (OR) or '&&' (AND) + * @returns {Promise>} Matching records * @example - * // Find records with tags containing 'admin' OR 'user' - * const users = store.where({tags: ['admin', 'user']}, '||'); - * - * // Find records with ALL specified tags - * const powerUsers = store.where({tags: ['admin', 'power']}, '&&'); - * - * // Regex matching - * const emails = store.where({email: /^admin@/}); + * store.where({tags: ['admin', 'user']}, '||'); + * store.where({email: /^admin@/}); */ - where (predicate = {}, op = STRING_DOUBLE_PIPE) { - const keys = this.index.filter(i => i in predicate); - if (keys.length === 0) return []; + async where(predicate = {}, op = STRING_DOUBLE_PIPE) { + if (typeof predicate !== STRING_OBJECT || predicate === null) { + throw new Error(STRING_ERROR_WHERE_PREDICATE_TYPE); + } + if (typeof op !== STRING_STRING) { + throw new Error(STRING_ERROR_WHERE_OP_TYPE); + } + + let cacheKey; + if (this.#cacheEnabled) { + cacheKey = await this.#getCacheKey(STRING_CACHE_DOMAIN_WHERE, predicate, op); + const cached = this.#cache.get(cacheKey); + if (cached !== undefined) { + return this.#immutable ? 
Object.freeze(cached) : this.#clone(cached); + } + } + + const keys = this.#index.filter((i) => i in predicate); + if (keys.length === 0) { + if (this.#warnOnFullScan) { + console.warn("where(): performing full table scan - consider adding an index"); + } + return this.filter((a) => this.#matchesPredicate(a, predicate, op)); + } // Try to use indexes for better performance - const indexedKeys = keys.filter(k => this.indexes.has(k)); + const indexedKeys = keys.filter((k) => this.#indexes.has(k)); if (indexedKeys.length > 0) { // Use index-based filtering for better performance let candidateKeys = new Set(); let first = true; for (const key of indexedKeys) { const pred = predicate[key]; - const idx = this.indexes.get(key); + const idx = this.#indexes.get(key); const matchingKeys = new Set(); if (Array.isArray(pred)) { for (const p of pred) { @@ -971,9 +1088,29 @@ class Haro { } } } - } else if (idx.has(pred)) { - for (const k of idx.get(pred)) { - matchingKeys.add(k); + } else if (pred instanceof RegExp) { + for (const [indexKey, keySet] of idx) { + if (pred.test(indexKey)) { + for (const k of keySet) { + matchingKeys.add(k); + } + } + } + } else { + // Direct value lookup - works for both flat and nested fields + // Also check for RegExp keys that match the predicate + for (const [indexKey, keySet] of idx) { + if (indexKey instanceof RegExp) { + if (indexKey.test(pred)) { + for (const k of keySet) { + matchingKeys.add(k); + } + } + } else if (indexKey === pred) { + for (const k of keySet) { + matchingKeys.add(k); + } + } } } if (first) { @@ -981,45 +1118,43 @@ class Haro { first = false; } else { // AND operation across different fields - candidateKeys = new Set([...candidateKeys].filter(k => matchingKeys.has(k))); + candidateKeys = new Set([...candidateKeys].filter((k) => matchingKeys.has(k))); } } // Filter candidates with full predicate logic const results = []; for (const key of candidateKeys) { - const record = this.get(key, true); - if 
(this.matchesPredicate(record, predicate, op)) { - results.push(this.immutable ? this.get(key) : record); + const record = this.get(key); + if (this.#matchesPredicate(record, predicate, op)) { + results.push(record); } } - return this.immutable ? this.freeze(...results) : results; - } + if (this.#cacheEnabled) { + this.#cache.set(cacheKey, results); + } - // Fallback to full scan if no indexes available - return this.filter(a => this.matchesPredicate(a, predicate, op)); + if (this.#immutable) { + return Object.freeze(results); + } + return results; + } } } /** - * Factory function to create a new Haro instance with optional initial data - * @param {Array|null} [data=null] - Initial data to populate the store - * @param {Object} [config={}] - Configuration object passed to Haro constructor - * @returns {Haro} New Haro instance configured and optionally populated + * Factory function to create a Haro instance. + * @param {Array|null} [data=null] - Initial data + * @param {Object} [config={}] - Configuration + * @returns {Haro} New Haro instance * @example - * const store = haro([ - * {id: 1, name: 'John', age: 30}, - * {id: 2, name: 'Jane', age: 25} - * ], { - * index: ['name', 'age'], - * versioning: true - * }); + * const store = haro([{id: 1, name: 'John'}], {index: ['name']}); */ -function haro (data = null, config = {}) { +function haro(data = null, config = {}) { const obj = new Haro(config); if (Array.isArray(data)) { - obj.batch(data, STRING_SET); + obj.setMany(data); } return obj; diff --git a/dist/haro.js b/dist/haro.js index 26aed958..08af2345 100644 --- a/dist/haro.js +++ b/dist/haro.js @@ -1,26 +1,23 @@ /** * haro * - * @copyright 2025 Jason Mulligan + * @copyright 2026 Jason Mulligan * @license BSD-3-Clause - * @version 16.0.0 + * @version 17.0.0 */ -import {randomUUID}from'crypto';// String constants - Single characters and symbols +import {randomUUID}from'crypto';import {lru}from'tiny-lru';// String constants - Single characters and symbols const 
STRING_COMMA = ","; +const STRING_DOT = "."; const STRING_EMPTY = ""; const STRING_PIPE = "|"; const STRING_DOUBLE_PIPE = "||"; const STRING_DOUBLE_AND = "&&"; - -// String constants - Operation and type names -const STRING_ID = "id"; -const STRING_DEL = "del"; const STRING_FUNCTION = "function"; +const STRING_ID = "id"; const STRING_INDEXES = "indexes"; const STRING_OBJECT = "object"; const STRING_RECORDS = "records"; const STRING_REGISTRY = "registry"; -const STRING_SET = "set"; const STRING_SIZE = "size"; const STRING_STRING = "string"; const STRING_NUMBER = "number"; @@ -32,195 +29,345 @@ const STRING_INVALID_TYPE = "Invalid type"; const STRING_RECORD_NOT_FOUND = "Record not found"; // Integer constants -const INT_0 = 0;/** - * Haro is a modern immutable DataStore for collections of records with indexing, - * versioning, and batch operations support. It provides a Map-like interface - * with advanced querying capabilities through indexes. +const INT_0 = 0; +const INT_2 = 2; + +// Number constants +const CACHE_SIZE_DEFAULT = 1000; + +// String constants - Cache and hashing +const STRING_CACHE_DOMAIN_SEARCH = "search"; +const STRING_CACHE_DOMAIN_WHERE = "where"; +const STRING_HASH_ALGORITHM = "SHA-256"; +const STRING_HEX_PAD = "0"; +const STRING_UNDERSCORE = "_"; + +// String constants - Security (prototype pollution protection) +const STRING_PROTO = "__proto__"; +const STRING_CONSTRUCTOR = "constructor"; +const STRING_PROTOTYPE = "prototype"; + +// String constants - Error messages +const STRING_ERROR_BATCH_SETMANY = "setMany: cannot call setMany within a batch operation"; +const STRING_ERROR_BATCH_DELETEMANY = + "deleteMany: cannot call deleteMany within a batch operation"; +const STRING_ERROR_DELETE_KEY_TYPE = "delete: key must be a string or number"; +const STRING_ERROR_FIND_WHERE_TYPE = "find: where must be an object"; +const STRING_ERROR_LIMIT_OFFSET_TYPE = "limit: offset must be a number"; +const STRING_ERROR_LIMIT_MAX_TYPE = "limit: max must be a number"; 
+const STRING_ERROR_SEARCH_VALUE = "search: value cannot be null or undefined"; +const STRING_ERROR_SET_KEY_TYPE = "set: key must be a string or number"; +const STRING_ERROR_SET_DATA_TYPE = "set: data must be an object"; +const STRING_ERROR_SORT_FN_TYPE = "sort: fn must be a function"; +const STRING_ERROR_WHERE_OP_TYPE = "where: op must be a string"; +const STRING_ERROR_WHERE_PREDICATE_TYPE = "where: predicate must be an object"; + +// String constants - Property names +const PROP_DELIMITER = "delimiter"; +const PROP_ID = "id"; +const PROP_IMMUTABLE = "immutable"; +const PROP_INDEX = "index"; +const PROP_KEY = "key"; +const PROP_VERSIONING = "versioning"; +const PROP_VERSIONS = "versions"; +const PROP_WARN_ON_FULL_SCAN = "warnOnFullScan";/** + * Haro is an immutable DataStore with indexing, versioning, and batch operations. + * Provides a Map-like interface with advanced querying capabilities. * @class * @example - * const store = new Haro({ - * index: ['name', 'age'], - * key: 'id', - * versioning: true - * }); - * - * store.set(null, {name: 'John', age: 30}); + * const store = new Haro({ index: ['name'], key: 'id', versioning: true }); + * store.set(null, {name: 'John'}); * const results = store.find({name: 'John'}); */ class Haro { + #cache; + #cacheEnabled; + #data; + #delimiter; + #id; + #immutable; + #index; + #indexes; + #key; + #versions; + #versioning; + #warnOnFullScan; + #inBatch = false; + /** - * Creates a new Haro instance with specified configuration - * @param {Object} [config={}] - Configuration object for the store - * @param {string} [config.delimiter=STRING_PIPE] - Delimiter for composite indexes (default: '|') - * @param {string} [config.id] - Unique identifier for this instance (auto-generated if not provided) - * @param {boolean} [config.immutable=false] - Return frozen/immutable objects for data safety - * @param {string[]} [config.index=[]] - Array of field names to create indexes for - * @param {string} [config.key=STRING_ID] - Primary key 
field name used for record identification - * @param {boolean} [config.versioning=false] - Enable versioning to track record changes + * Creates a new Haro instance. + * @param {Object} [config={}] - Configuration object + * @param {string} [config.delimiter=STRING_PIPE] - Delimiter for composite indexes + * @param {string} [config.id] - Unique instance identifier (auto-generated) + * @param {boolean} [config.immutable=false] - Return frozen objects + * @param {string[]} [config.index=[]] - Fields to index + * @param {string} [config.key=STRING_ID] - Primary key field name + * @param {boolean} [config.versioning=false] - Enable versioning + * @param {boolean} [config.warnOnFullScan=true] - Warn on full table scans * @constructor * @example - * const store = new Haro({ - * index: ['name', 'email', 'name|department'], - * key: 'userId', - * versioning: true, - * immutable: true - * }); - */ - constructor ({delimiter = STRING_PIPE, id = this.uuid(), immutable = false, index = [], key = STRING_ID, versioning = false} = {}) { - this.data = new Map(); - this.delimiter = delimiter; - this.id = id; - this.immutable = immutable; - this.index = Array.isArray(index) ? [...index] : []; - this.indexes = new Map(); - this.key = key; - this.versions = new Map(); - this.versioning = versioning; + * const store = new Haro({ index: ['name', 'email'], key: 'userId', versioning: true }); + */ + constructor({ + cache = false, + cacheSize = CACHE_SIZE_DEFAULT, + delimiter = STRING_PIPE, + id = randomUUID(), + immutable = false, + index = [], + key = STRING_ID, + versioning = false, + warnOnFullScan = true, + } = {}) { + this.#data = new Map(); + this.#cacheEnabled = cache === true; + this.#cache = cache === true ? lru(cacheSize) : null; + this.#delimiter = delimiter; + this.#id = id; + this.#immutable = immutable; + this.#index = Array.isArray(index) ? 
[...index] : []; + this.#indexes = new Map(); + this.#key = key; + this.#versions = new Map(); + this.#versioning = versioning; + this.#warnOnFullScan = warnOnFullScan; + this.#inBatch = false; Object.defineProperty(this, STRING_REGISTRY, { enumerable: true, - get: () => Array.from(this.data.keys()) + get: () => Array.from(this.#data.keys()), }); Object.defineProperty(this, STRING_SIZE, { enumerable: true, - get: () => this.data.size + get: () => this.#data.size, }); - - return this.reindex(); + Object.defineProperty(this, PROP_KEY, { + enumerable: true, + get: () => this.#key, + }); + Object.defineProperty(this, PROP_INDEX, { + enumerable: true, + get: () => [...this.#index], + }); + Object.defineProperty(this, PROP_DELIMITER, { + enumerable: true, + get: () => this.#delimiter, + }); + Object.defineProperty(this, PROP_IMMUTABLE, { + enumerable: true, + get: () => this.#immutable, + }); + Object.defineProperty(this, PROP_VERSIONING, { + enumerable: true, + get: () => this.#versioning, + }); + Object.defineProperty(this, PROP_WARN_ON_FULL_SCAN, { + enumerable: true, + get: () => this.#warnOnFullScan, + }); + Object.defineProperty(this, PROP_VERSIONS, { + enumerable: true, + get: () => this.#versions, + }); + Object.defineProperty(this, PROP_ID, { + enumerable: true, + get: () => this.#id, + }); + this.reindex(); } /** - * Performs batch operations on multiple records for efficient bulk processing - * @param {Array} args - Array of records to process - * @param {string} [type=STRING_SET] - Type of operation: 'set' for upsert, 'del' for delete - * @returns {Array} Array of results from the batch operation - * @throws {Error} Throws error if individual operations fail during batch processing + * Inserts or updates multiple records. 
+ * @param {Array} records - Records to insert or update + * @returns {Array} Stored records * @example - * const results = store.batch([ - * {id: 1, name: 'John'}, - * {id: 2, name: 'Jane'} - * ], 'set'); + * store.setMany([{id: 1, name: 'John'}, {id: 2, name: 'Jane'}]); */ - batch (args, type = STRING_SET) { - const fn = type === STRING_DEL ? i => this.delete(i, true) : i => this.set(null, i, true, true); + setMany(records) { + if (this.#inBatch) { + throw new Error(STRING_ERROR_BATCH_SETMANY); + } + this.#inBatch = true; + const results = records.map((i) => this.set(null, i, true)); + this.#inBatch = false; + this.reindex(); + this.#invalidateCache(); + return results; + } - return this.onbatch(this.beforeBatch(args, type).map(fn), type); + /** + * Deletes multiple records. + * @param {Array} keys - Keys to delete + * @returns {Array} + * @example + * store.deleteMany(['key1', 'key2']); + */ + deleteMany(keys) { + if (this.#inBatch) { + /* node:coverage ignore next */ throw new Error(STRING_ERROR_BATCH_DELETEMANY); + } + this.#inBatch = true; + const results = keys.map((i) => this.delete(i)); + this.#inBatch = false; + this.reindex(); + this.#invalidateCache(); + return results; } /** - * Lifecycle hook executed before batch operations for custom preprocessing - * @param {Array} arg - Arguments passed to batch operation - * @param {string} [type=STRING_EMPTY] - Type of batch operation ('set' or 'del') - * @returns {Array} The arguments array (possibly modified) to be processed + * Returns true if currently in a batch operation. 
+ * @returns {boolean} Batch operation status */ - beforeBatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - // Hook for custom logic before batch; override in subclass if needed - return arg; + get isBatching() { + return this.#inBatch; } /** - * Lifecycle hook executed before clear operation for custom preprocessing - * @returns {void} Override this method in subclasses to implement custom logic + * Removes all records, indexes, and versions. + * @returns {Haro} This instance * @example - * class MyStore extends Haro { - * beforeClear() { - * this.backup = this.toArray(); - * } - * } + * store.clear(); */ - beforeClear () { - // Hook for custom logic before clear; override in subclass if needed + clear() { + this.#data.clear(); + this.#indexes.clear(); + this.#versions.clear(); + this.#invalidateCache(); + + return this; } /** - * Lifecycle hook executed before delete operation for custom preprocessing - * @param {string} [key=STRING_EMPTY] - Key of record to delete - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic + * Creates a deep clone of a value. 
+ * @param {*} arg - Value to clone + * @returns {*} Deep clone */ - beforeDelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic before delete; override in subclass if needed + #clone(arg) { + if (typeof structuredClone === STRING_FUNCTION) { + return structuredClone(arg); + } + + /* node:coverage ignore */ return JSON.parse(JSON.stringify(arg)); } /** - * Lifecycle hook executed before set operation for custom preprocessing - * @param {string} [key=STRING_EMPTY] - Key of record to set - * @param {Object} [data={}] - Record data being set - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @param {boolean} [override=false] - Whether to override existing data - * @returns {void} Override this method in subclasses to implement custom logic + * Deletes a record and removes it from all indexes. + * @param {string} [key=STRING_EMPTY] - Key to delete + * @throws {Error} If key not found + * @example + * store.delete('user123'); */ - beforeSet (key = STRING_EMPTY, data = {}, batch = false, override = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic before set; override in subclass if needed + delete(key = STRING_EMPTY) { + if (typeof key !== STRING_STRING && typeof key !== STRING_NUMBER) { + throw new Error(STRING_ERROR_DELETE_KEY_TYPE); + } + if (!this.#data.has(key)) { + throw new Error(STRING_RECORD_NOT_FOUND); + } + const og = this.#data.get(key); + if (!this.#inBatch) { + this.#deleteIndex(key, og); + } + this.#data.delete(key); + if (this.#versioning && !this.#inBatch) { + this.#versions.delete(key); + } + this.#invalidateCache(); } /** - * Removes all records, indexes, and versions from the store - * @returns {Haro} This instance for method chaining - * @example - * store.clear(); - * console.log(store.size); // 0 + * Generates a cache key using SHA-256 hash. 
+ * @param {string} domain - Cache key prefix (e.g., 'search', 'where') + * @param {...*} args - Arguments to hash + * @returns {string} Cache key in format 'domain_HASH' */ - clear () { - this.beforeClear(); - this.data.clear(); - this.indexes.clear(); - this.versions.clear(); - this.reindex().onclear(); + async #getCacheKey(domain, ...args) { + const data = JSON.stringify(args); + const encoder = new TextEncoder(); + const hashBuffer = await crypto.subtle.digest(STRING_HASH_ALGORITHM, encoder.encode(data)); + const hashArray = Array.from(new Uint8Array(hashBuffer)); + const hashHex = hashArray.map((b) => b.toString(16).padStart(INT_2, STRING_HEX_PAD)).join(""); + return `${domain}${STRING_UNDERSCORE}${hashHex}`; + } + /** + * Clears the cache. + * @returns {Haro} This instance + */ + clearCache() { + if (this.#cacheEnabled) { + this.#cache.clear(); + } return this; } /** - * Creates a deep clone of the given value, handling objects, arrays, and primitives - * @param {*} arg - Value to clone (any type) - * @returns {*} Deep clone of the argument - * @example - * const original = {name: 'John', tags: ['user', 'admin']}; - * const cloned = store.clone(original); - * cloned.tags.push('new'); // original.tags is unchanged + * Returns the current cache size. + * @returns {number} Number of entries in cache + */ + getCacheSize() { + return this.#cacheEnabled ? this.#cache.size : 0; + } + + /** + * Returns cache statistics. + * @returns {Object|null} Stats object with hits, misses, sets, deletes, evictions */ - clone (arg) { - return structuredClone(arg); + getCacheStats() { + return this.#cacheEnabled ? this.#cache.stats() : null; } /** - * Deletes a record from the store and removes it from all indexes - * @param {string} [key=STRING_EMPTY] - Key of record to delete - * @param {boolean} [batch=false] - Whether this is part of a batch operation + * Invalidates the cache if enabled and not in batch mode. 
* @returns {void} - * @throws {Error} Throws error if record with the specified key is not found - * @example - * store.delete('user123'); - * // Throws error if 'user123' doesn't exist */ - delete (key = STRING_EMPTY, batch = false) { - if (!this.data.has(key)) { - throw new Error(STRING_RECORD_NOT_FOUND); + #invalidateCache() { + if (this.#cacheEnabled && !this.#inBatch) { + this.#cache.clear(); + } + } + + /** + * Retrieves a value from a nested object using dot notation. + * @param {Object} obj - Object to traverse + * @param {string} path - Dot-notation path (e.g., 'user.address.city') + * @returns {*} Value at path, or undefined if path doesn't exist + */ + #getNestedValue(obj, path) { + /* node:coverage ignore next 3 */ + if (obj === null || obj === undefined || path === STRING_EMPTY) { + return undefined; } - const og = this.get(key, true); - this.beforeDelete(key, batch); - this.deleteIndex(key, og); - this.data.delete(key); - this.ondelete(key, batch); - if (this.versioning) { - this.versions.delete(key); + const keys = path.split(STRING_DOT); + let result = obj; + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const key = keys[i]; + if (result === null || result === undefined || !(key in result)) { + return undefined; + } + result = result[key]; } + return result; } /** - * Internal method to remove entries from indexes for a deleted record - * @param {string} key - Key of record being deleted - * @param {Object} data - Data of record being deleted - * @returns {Haro} This instance for method chaining + * Removes a record from all indexes. + * @param {string} key - Record key + * @param {Object} data - Record data + * @returns {Haro} This instance */ - deleteIndex (key, data) { - this.index.forEach(i => { - const idx = this.indexes.get(i); + #deleteIndex(key, data) { + this.#index.forEach((i) => { + const idx = this.#indexes.get(i); if (!idx) return; - const values = i.includes(this.delimiter) ? 
- this.indexKeys(i, this.delimiter, data) : - Array.isArray(data[i]) ? data[i] : [data[i]]; - this.each(values, value => { + const values = i.includes(this.#delimiter) + ? this.#getIndexKeys(i, this.#delimiter, data) + : Array.isArray(this.#getNestedValue(data, i)) + ? this.#getNestedValue(data, i) + : [this.#getNestedValue(data, i)]; + const len = values.length; + for (let j = 0; j < len; j++) { + const value = values[j]; if (idx.has(value)) { const o = idx.get(value); o.delete(key); @@ -228,27 +375,26 @@ class Haro { idx.delete(value); } } - }); + } }); return this; } /** - * Exports complete store data or indexes for persistence or debugging - * @param {string} [type=STRING_RECORDS] - Type of data to export: 'records' or 'indexes' - * @returns {Array} Array of [key, value] pairs for records, or serialized index structure + * Exports store data or indexes. + * @param {string} [type=STRING_RECORDS] - Export type: 'records' or 'indexes' + * @returns {Array} Exported data * @example * const records = store.dump('records'); - * const indexes = store.dump('indexes'); */ - dump (type = STRING_RECORDS) { + dump(type = STRING_RECORDS) { let result; if (type === STRING_RECORDS) { result = Array.from(this.entries()); } else { - result = Array.from(this.indexes).map(i => { - i[1] = Array.from(i[1]).map(ii => { + result = Array.from(this.#indexes).map((i) => { + i[1] = Array.from(i[1]).map((ii) => { ii[1] = Array.from(ii[1]); return ii; @@ -262,110 +408,159 @@ class Haro { } /** - * Utility method to iterate over an array with a callback function - * @param {Array<*>} [arr=[]] - Array to iterate over - * @param {Function} fn - Function to call for each element (element, index) - * @returns {Array<*>} The original array for method chaining - * @example - * store.each([1, 2, 3], (item, index) => console.log(item, index)); + * Generates index keys for composite indexes from data object. 
+ * @param {string} arg - Composite index field names + * @param {string} delimiter - Field delimiter + * @param {Object} data - Data object + * @returns {string[]} Index keys */ - each (arr = [], fn) { - const len = arr.length; - for (let i = 0; i < len; i++) { - fn(arr[i], i); + #getIndexKeys(arg, delimiter, data) { + const fields = arg.split(this.#delimiter).sort(this.#sortKeys); + const result = [STRING_EMPTY]; + const fieldsLen = fields.length; + for (let i = 0; i < fieldsLen; i++) { + const field = fields[i]; + const fieldValue = this.#getNestedValue(data, field); + const values = Array.isArray(fieldValue) ? fieldValue : [fieldValue]; + const newResult = []; + const resultLen = result.length; + const valuesLen = values.length; + for (let j = 0; j < resultLen; j++) { + const existing = result[j]; + for (let k = 0; k < valuesLen; k++) { + const value = values[k]; + const newKey = i === 0 ? value : `${existing}${this.#delimiter}${value}`; + newResult.push(newKey); + } + } + result.length = 0; + result.push(...newResult); } + return result; + } - return arr; + /** + * Generates index keys for where object (handles both dot notation and direct access). + * @param {string} arg - Composite index field names + * @param {string} delimiter - Field delimiter + * @param {Object} where - Where object + * @returns {string[]} Index keys + */ + #getIndexKeysForWhere(arg, delimiter, where) { + const fields = arg.split(this.#delimiter).sort(this.#sortKeys); + const result = [STRING_EMPTY]; + const fieldsLen = fields.length; + for (let i = 0; i < fieldsLen; i++) { + const field = fields[i]; + // Check if field exists directly in where object first (for dot notation keys) + let fieldValue; + if (field in where) { + fieldValue = where[field]; + /* node:coverage ignore next 4 */ + } else { + fieldValue = this.#getNestedValue(where, field); + } + const values = Array.isArray(fieldValue) ? 
fieldValue : [fieldValue]; + const newResult = []; + const resultLen = result.length; + const valuesLen = values.length; + for (let j = 0; j < resultLen; j++) { + const existing = result[j]; + for (let k = 0; k < valuesLen; k++) { + const value = values[k]; + const newKey = i === 0 ? value : `${existing}${this.#delimiter}${value}`; + newResult.push(newKey); + } + } + result.length = 0; + result.push(...newResult); + } + return result; } /** - * Returns an iterator of [key, value] pairs for each record in the store - * @returns {Iterator>} Iterator of [key, value] pairs + * Returns an iterator of [key, value] pairs. + * @returns {Iterator>} Key-value pairs * @example - * for (const [key, value] of store.entries()) { - * console.log(key, value); - * } + * for (const [key, value] of store.entries()) { } */ - entries () { - return this.data.entries(); + entries() { + return this.#data.entries(); } /** - * Finds records matching the specified criteria using indexes for optimal performance - * @param {Object} [where={}] - Object with field-value pairs to match against - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of matching records (frozen if immutable mode) + * Finds records matching criteria using indexes. + * @param {Object} [where={}] - Field-value pairs to match + * @returns {Array} Matching records * @example - * const users = store.find({department: 'engineering', active: true}); - * const admins = store.find({role: 'admin'}); + * store.find({department: 'engineering', active: true}); */ - find (where = {}, raw = false) { - const key = Object.keys(where).sort(this.sortKeys).join(this.delimiter); - const index = this.indexes.get(key) ?? 
new Map(); - let result = []; - if (index.size > 0) { - const keys = this.indexKeys(key, this.delimiter, where); - result = Array.from(keys.reduce((a, v) => { + find(where = {}) { + if (typeof where !== STRING_OBJECT || where === null) { + throw new Error(STRING_ERROR_FIND_WHERE_TYPE); + } + const whereKeys = Object.keys(where).sort(this.#sortKeys); + const compositeKey = whereKeys.join(this.#delimiter); + const result = new Set(); + + const index = this.#indexes.get(compositeKey); + if (index) { + const keys = this.#getIndexKeysForWhere(compositeKey, this.#delimiter, where); + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const v = keys[i]; if (index.has(v)) { - index.get(v).forEach(k => a.add(k)); + const keySet = index.get(v); + for (const k of keySet) { + result.add(k); + } } - - return a; - }, new Set())).map(i => this.get(i, raw)); - } - if (!raw && this.immutable) { - result = Object.freeze(result); + } } - return result; + const records = Array.from(result, (i) => this.get(i)); + if (this.#immutable) { + return Object.freeze(records); + } + return records; } /** - * Filters records using a predicate function, similar to Array.filter - * @param {Function} fn - Predicate function to test each record (record, key, store) - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records that pass the predicate test - * @throws {Error} Throws error if fn is not a function + * Filters records using a predicate function. 
+ * @param {Function} fn - Predicate function (record, key, store) + * @returns {Array} Filtered records + * @throws {Error} If fn is not a function * @example - * const adults = store.filter(record => record.age >= 18); - * const recent = store.filter(record => record.created > Date.now() - 86400000); + * store.filter(record => record.age >= 18); */ - filter (fn, raw = false) { + filter(fn) { if (typeof fn !== STRING_FUNCTION) { throw new Error(STRING_INVALID_FUNCTION); } - let result = this.reduce((a, v) => { - if (fn(v)) { - a.push(v); - } - - return a; - }, []); - if (!raw) { - result = result.map(i => this.list(i)); - - if (this.immutable) { - result = Object.freeze(result); + const result = []; + this.#data.forEach((value, key) => { + if (fn(value, key, this)) { + result.push(value); } + }); + if (this.#immutable) { + return Object.freeze(result); } - return result; } /** - * Executes a function for each record in the store, similar to Array.forEach - * @param {Function} fn - Function to execute for each record (value, key) - * @param {*} [ctx] - Context object to use as 'this' when executing the function - * @returns {Haro} This instance for method chaining + * Executes a function for each record. 
+ * @param {Function} fn - Function (value, key) + * @param {*} [ctx] - Context for fn + * @returns {Haro} This instance * @example - * store.forEach((record, key) => { - * console.log(`${key}: ${record.name}`); - * }); - */ - forEach (fn, ctx = this) { - this.data.forEach((value, key) => { - if (this.immutable) { - value = this.clone(value); + * store.forEach((record, key) => console.log(key, record)); + */ + forEach(fn, ctx = this) { + this.#data.forEach((value, key) => { + if (this.#immutable) { + value = this.#clone(value); } fn.call(ctx, value, key); }, this); @@ -374,109 +569,61 @@ class Haro { } /** - * Creates a frozen array from the given arguments for immutable data handling - * @param {...*} args - Arguments to freeze into an array - * @returns {Array<*>} Frozen array containing frozen arguments + * Retrieves a record by key. + * @param {string} key - Record key + * @returns {Object|null} Record or null * @example - * const frozen = store.freeze(obj1, obj2, obj3); - * // Returns Object.freeze([Object.freeze(obj1), Object.freeze(obj2), Object.freeze(obj3)]) + * store.get('user123'); */ - freeze (...args) { - return Object.freeze(args.map(i => Object.freeze(i))); - } - - /** - * Retrieves a record by its key - * @param {string} key - Key of record to retrieve - * @param {boolean} [raw=false] - Whether to return raw data (true) or processed/frozen data (false) - * @returns {Object|null} The record if found, null if not found - * @example - * const user = store.get('user123'); - * const rawUser = store.get('user123', true); - */ - get (key, raw = false) { - let result = this.data.get(key) ?? 
null; - if (result !== null && !raw) { - result = this.list(result); - if (this.immutable) { - result = Object.freeze(result); - } + get(key) { + const result = this.#data.get(key); + if (result === undefined) { + return null; + } + if (this.#immutable) { + return Object.freeze(result); } - return result; } /** - * Checks if a record with the specified key exists in the store - * @param {string} key - Key to check for existence - * @returns {boolean} True if record exists, false otherwise - * @example - * if (store.has('user123')) { - * console.log('User exists'); - * } - */ - has (key) { - return this.data.has(key); - } - - /** - * Generates index keys for composite indexes from data values - * @param {string} [arg=STRING_EMPTY] - Composite index field names joined by delimiter - * @param {string} [delimiter=STRING_PIPE] - Delimiter used in composite index - * @param {Object} [data={}] - Data object to extract field values from - * @returns {string[]} Array of generated index keys + * Checks if a record exists. + * @param {string} key - Record key + * @returns {boolean} True if exists * @example - * // For index 'name|department' with data {name: 'John', department: 'IT'} - * const keys = store.indexKeys('name|department', '|', data); - * // Returns ['John|IT'] + * store.has('user123'); */ - indexKeys (arg = STRING_EMPTY, delimiter = STRING_PIPE, data = {}) { - const fields = arg.split(delimiter).sort(this.sortKeys); - const fieldsLen = fields.length; - let result = [""]; - for (let i = 0; i < fieldsLen; i++) { - const field = fields[i]; - const values = Array.isArray(data[field]) ? data[field] : [data[field]]; - const newResult = []; - const resultLen = result.length; - const valuesLen = values.length; - for (let j = 0; j < resultLen; j++) { - for (let k = 0; k < valuesLen; k++) { - const newKey = i === 0 ? 
values[k] : `${result[j]}${delimiter}${values[k]}`; - newResult.push(newKey); - } - } - result = newResult; - } - - return result; + has(key) { + return this.#data.has(key); } /** - * Returns an iterator of all keys in the store - * @returns {Iterator} Iterator of record keys + * Returns an iterator of all keys. + * @returns {Iterator} Keys * @example - * for (const key of store.keys()) { - * console.log(key); - * } + * for (const key of store.keys()) { } */ - keys () { - return this.data.keys(); + keys() { + return this.#data.keys(); } /** - * Returns a limited subset of records with offset support for pagination - * @param {number} [offset=INT_0] - Number of records to skip from the beginning - * @param {number} [max=INT_0] - Maximum number of records to return - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records within the specified range + * Returns a limited subset of records. + * @param {number} [offset=INT_0] - Records to skip + * @param {number} [max=INT_0] - Max records to return + * @returns {Array} Records * @example - * const page1 = store.limit(0, 10); // First 10 records - * const page2 = store.limit(10, 10); // Next 10 records + * store.limit(0, 10); */ - limit (offset = INT_0, max = INT_0, raw = false) { - let result = this.registry.slice(offset, offset + max).map(i => this.get(i, raw)); - if (!raw && this.immutable) { + limit(offset = INT_0, max = INT_0) { + if (typeof offset !== STRING_NUMBER) { + throw new Error(STRING_ERROR_LIMIT_OFFSET_TYPE); + } + if (typeof max !== STRING_NUMBER) { + throw new Error(STRING_ERROR_LIMIT_MAX_TYPE); + } + let result = this.registry.slice(offset, offset + max).map((i) => this.get(i)); + if (this.#immutable) { result = Object.freeze(result); } @@ -484,62 +631,51 @@ class Haro { } /** - * Converts a record into a [key, value] pair array format - * @param {Object} arg - Record object to convert to list format - * @returns {Array<*>} Array containing 
[key, record] where key is extracted from record's key field + * Transforms records using a mapping function. + * @param {Function} fn - Transform function (record, key) + * @returns {Array<*>} Transformed results + * @throws {Error} If fn is not a function * @example - * const record = {id: 'user123', name: 'John', age: 30}; - * const pair = store.list(record); // ['user123', {id: 'user123', name: 'John', age: 30}] + * store.map(record => record.name); */ - list (arg) { - const result = [arg[this.key], arg]; - - return this.immutable ? this.freeze(...result) : result; - } - - /** - * Transforms all records using a mapping function, similar to Array.map - * @param {Function} fn - Function to transform each record (record, key) - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array<*>} Array of transformed results - * @throws {Error} Throws error if fn is not a function - * @example - * const names = store.map(record => record.name); - * const summaries = store.map(record => ({id: record.id, name: record.name})); - */ - map (fn, raw = false) { + map(fn) { if (typeof fn !== STRING_FUNCTION) { throw new Error(STRING_INVALID_FUNCTION); } let result = []; this.forEach((value, key) => result.push(fn(value, key))); - if (!raw) { - result = result.map(i => this.list(i)); - if (this.immutable) { - result = Object.freeze(result); - } + if (this.#immutable) { + result = Object.freeze(result); } return result; } /** - * Merges two values together with support for arrays and objects - * @param {*} a - First value (target) - * @param {*} b - Second value (source) - * @param {boolean} [override=false] - Whether to override arrays instead of concatenating + * Merges two values. 
+ * @param {*} a - Target value + * @param {*} b - Source value + * @param {boolean} [override=false] - Override arrays * @returns {*} Merged result - * @example - * const merged = store.merge({a: 1}, {b: 2}); // {a: 1, b: 2} - * const arrays = store.merge([1, 2], [3, 4]); // [1, 2, 3, 4] */ - merge (a, b, override = false) { + #merge(a, b, override = false) { if (Array.isArray(a) && Array.isArray(b)) { a = override ? b : a.concat(b); - } else if (typeof a === STRING_OBJECT && a !== null && typeof b === STRING_OBJECT && b !== null) { - this.each(Object.keys(b), i => { - a[i] = this.merge(a[i], b[i], override); - }); + } else if ( + typeof a === STRING_OBJECT && + a !== null && + typeof b === STRING_OBJECT && + b !== null + ) { + const keys = Object.keys(b); + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const key = keys[i]; + if (key === STRING_PROTO || key === STRING_CONSTRUCTOR || key === STRING_PROTOTYPE) { + continue; + } + a[key] = this.#merge(a[key], b[key], override); + } } else { a = b; } @@ -548,271 +684,235 @@ class Haro { } /** - * Lifecycle hook executed after batch operations for custom postprocessing - * @param {Array} arg - Result of batch operation - * @param {string} [type=STRING_EMPTY] - Type of batch operation that was performed - * @returns {Array} Modified result (override this method to implement custom logic) - */ - onbatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - return arg; - } - - /** - * Lifecycle hook executed after clear operation for custom postprocessing - * @returns {void} Override this method in subclasses to implement custom logic + * Replaces store data or indexes. 
+ * @param {Array} data - Data to replace + * @param {string} [type=STRING_RECORDS] - Type: 'records' or 'indexes' + * @returns {boolean} Success + * @throws {Error} If type is invalid * @example - * class MyStore extends Haro { - * onclear() { - * console.log('Store cleared'); - * } - * } + * store.override([['key1', {name: 'John'}]], 'records'); */ - onclear () { - // Hook for custom logic after clear; override in subclass if needed - } - - /** - * Lifecycle hook executed after delete operation for custom postprocessing - * @param {string} [key=STRING_EMPTY] - Key of deleted record - * @param {boolean} [batch=false] - Whether this was part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic - */ - ondelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic after delete; override in subclass if needed - } - - /** - * Lifecycle hook executed after override operation for custom postprocessing - * @param {string} [type=STRING_EMPTY] - Type of override operation that was performed - * @returns {void} Override this method in subclasses to implement custom logic - */ - onoverride (type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - // Hook for custom logic after override; override in subclass if needed - } - - /** - * Lifecycle hook executed after set operation for custom postprocessing - * @param {Object} [arg={}] - Record that was set - * @param {boolean} [batch=false] - Whether this was part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic - */ - onset (arg = {}, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic after set; override in subclass if needed - } - - /** - * Replaces all store data or indexes with new data for bulk operations - * @param {Array} data - Data to replace with (format depends on type) - * @param {string} [type=STRING_RECORDS] - Type of data: 
'records' or 'indexes' - * @returns {boolean} True if operation succeeded - * @throws {Error} Throws error if type is invalid - * @example - * const records = [['key1', {name: 'John'}], ['key2', {name: 'Jane'}]]; - * store.override(records, 'records'); - */ - override (data, type = STRING_RECORDS) { + override(data, type = STRING_RECORDS) { const result = true; if (type === STRING_INDEXES) { - this.indexes = new Map(data.map(i => [i[0], new Map(i[1].map(ii => [ii[0], new Set(ii[1])]))])); + this.#indexes = new Map( + data.map((i) => [i[0], new Map(i[1].map((ii) => [ii[0], new Set(ii[1])]))]), + ); } else if (type === STRING_RECORDS) { - this.indexes.clear(); - this.data = new Map(data); + this.#indexes.clear(); + this.#data = new Map(data); } else { throw new Error(STRING_INVALID_TYPE); } - this.onoverride(type); + this.#invalidateCache(); return result; } /** - * Reduces all records to a single value using a reducer function - * @param {Function} fn - Reducer function (accumulator, value, key, store) - * @param {*} [accumulator] - Initial accumulator value - * @returns {*} Final reduced value + * Rebuilds indexes. + * @param {string|string[]} [index] - Field(s) to rebuild, or all + * @returns {Haro} This instance * @example - * const totalAge = store.reduce((sum, record) => sum + record.age, 0); - * const names = store.reduce((acc, record) => acc.concat(record.name), []); + * store.reindex(); + * store.reindex('name'); */ - reduce (fn, accumulator = []) { - let a = accumulator; - this.forEach((v, k) => { - a = fn(a, v, k, this); - }, this); + reindex(index) { + const indices = index ? (Array.isArray(index) ? 
index : [index]) : this.#index; + if (index && this.#index.includes(index) === false) { + this.#index.push(index); + } + const indicesLen = indices.length; + for (let i = 0; i < indicesLen; i++) { + this.#indexes.set(indices[i], new Map()); + } + this.forEach((data, key) => { + for (let i = 0; i < indicesLen; i++) { + this.#setIndex(key, data, indices[i]); + } + }); + this.#invalidateCache(); - return a; + return this; } /** - * Rebuilds indexes for specified fields or all fields for data consistency - * @param {string|string[]} [index] - Specific index field(s) to rebuild, or all if not specified - * @returns {Haro} This instance for method chaining + * Searches for records containing a value. + * @param {*} value - Search value (string, function, or RegExp) + * @param {string|string[]} [index] - Index(es) to search, or all + * @returns {Promise>} Matching records * @example - * store.reindex(); // Rebuild all indexes - * store.reindex('name'); // Rebuild only name index - * store.reindex(['name', 'email']); // Rebuild name and email indexes + * store.search('john'); + * store.search(/^admin/, 'role'); */ - reindex (index) { - const indices = index ? [index] : this.index; - if (index && this.index.includes(index) === false) { - this.index.push(index); + async search(value, index) { + if (value === null || value === undefined) { + throw new Error(STRING_ERROR_SEARCH_VALUE); } - this.each(indices, i => this.indexes.set(i, new Map())); - this.forEach((data, key) => this.each(indices, i => this.setIndex(key, data, i))); - return this; - } + let cacheKey; + if (this.#cacheEnabled) { + cacheKey = await this.#getCacheKey(STRING_CACHE_DOMAIN_SEARCH, value, index); + const cached = this.#cache.get(cacheKey); + if (cached !== undefined) { + return this.#immutable ? 
Object.freeze(cached) : this.#clone(cached); + } + } - /** - * Searches for records containing a value across specified indexes - * @param {*} value - Value to search for (string, function, or RegExp) - * @param {string|string[]} [index] - Index(es) to search in, or all if not specified - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of matching records - * @example - * const results = store.search('john'); // Search all indexes - * const nameResults = store.search('john', 'name'); // Search only name index - * const regexResults = store.search(/^admin/, 'role'); // Regex search - */ - search (value, index, raw = false) { - const result = new Set(); // Use Set for unique keys + const result = new Set(); const fn = typeof value === STRING_FUNCTION; const rgex = value && typeof value.test === STRING_FUNCTION; - if (!value) return this.immutable ? this.freeze() : []; - const indices = index ? Array.isArray(index) ? index : [index] : this.index; - for (const i of indices) { - const idx = this.indexes.get(i); - if (idx) { - for (const [lkey, lset] of idx) { - let match = false; - - if (fn) { - match = value(lkey, i); - } else if (rgex) { - match = value.test(Array.isArray(lkey) ? lkey.join(STRING_COMMA) : lkey); - } else { - match = lkey === value; - } + const indices = index ? (Array.isArray(index) ? index : [index]) : this.#index; + const indicesLen = indices.length; - if (match) { - for (const key of lset) { - if (this.data.has(key)) { - result.add(key); - } + for (let i = 0; i < indicesLen; i++) { + const idxName = indices[i]; + const idx = this.#indexes.get(idxName); + if (!idx) continue; + + for (const [lkey, lset] of idx) { + let match = false; + + if (fn) { + match = value(lkey, idxName); + } else if (rgex) { + match = value.test(Array.isArray(lkey) ? 
lkey.join(STRING_COMMA) : lkey); + } else { + match = lkey === value; + } + + if (match) { + for (const key of lset) { + if (this.#data.has(key)) { + result.add(key); } } } } } - let records = Array.from(result).map(key => this.get(key, raw)); - if (!raw && this.immutable) { - records = Object.freeze(records); + const records = Array.from(result, (key) => this.get(key)); + + if (this.#cacheEnabled) { + this.#cache.set(cacheKey, records); } + if (this.#immutable) { + return Object.freeze(records); + } return records; } /** - * Sets or updates a record in the store with automatic indexing - * @param {string|null} [key=null] - Key for the record, or null to use record's key field - * @param {Object} [data={}] - Record data to set - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @param {boolean} [override=false] - Whether to override existing data instead of merging - * @returns {Object} The stored record (frozen if immutable mode) + * Sets or updates a record with automatic indexing. + * @param {string|null} [key=null] - Record key, or null for auto-generate + * @param {Object} [data={}] - Record data + * @param {boolean} [override=false] - Override instead of merge + * @returns {Object} Stored record * @example - * const user = store.set(null, {name: 'John', age: 30}); // Auto-generate key - * const updated = store.set('user123', {age: 31}); // Update existing record + * store.set(null, {name: 'John'}); + * store.set('user123', {age: 31}); */ - set (key = null, data = {}, batch = false, override = false) { + set(key = null, data = {}, override = false) { + if (key !== null && typeof key !== STRING_STRING && typeof key !== STRING_NUMBER) { + throw new Error(STRING_ERROR_SET_KEY_TYPE); + } + if (typeof data !== STRING_OBJECT || data === null) { + throw new Error(STRING_ERROR_SET_DATA_TYPE); + } if (key === null) { - key = data[this.key] ?? this.uuid(); + key = data[this.#key] ?? 
randomUUID(); } - let x = {...data, [this.key]: key}; - this.beforeSet(key, x, batch, override); - if (!this.data.has(key)) { - if (this.versioning) { - this.versions.set(key, new Set()); + let x = { ...data, [this.#key]: key }; + if (!this.#data.has(key)) { + if (this.#versioning && !this.#inBatch) { + this.#versions.set(key, new Set()); } } else { - const og = this.get(key, true); - this.deleteIndex(key, og); - if (this.versioning) { - this.versions.get(key).add(Object.freeze(this.clone(og))); + const og = this.#data.get(key); + if (!this.#inBatch) { + this.#deleteIndex(key, og); + if (this.#versioning) { + this.#versions.get(key).add(Object.freeze(this.#clone(og))); + } } - if (!override) { - x = this.merge(this.clone(og), x); + if (!this.#inBatch && !override) { + x = this.#merge(this.#clone(og), x); } } - this.data.set(key, x); - this.setIndex(key, x, null); + this.#data.set(key, x); + + if (!this.#inBatch) { + this.#setIndex(key, x, null); + } + const result = this.get(key); - this.onset(result, batch); + this.#invalidateCache(); return result; } /** - * Internal method to add entries to indexes for a record - * @param {string} key - Key of record being indexed - * @param {Object} data - Data of record being indexed - * @param {string|null} indice - Specific index to update, or null for all - * @returns {Haro} This instance for method chaining + * Adds a record to indexes. + * @param {string} key - Record key + * @param {Object} data - Record data + * @param {string|null} indice - Index to update, or null for all + * @returns {Haro} This instance */ - setIndex (key, data, indice) { - this.each(indice === null ? this.index : [indice], i => { - let idx = this.indexes.get(i); + #setIndex(key, data, indice) { + const indices = indice === null ? 
this.#index : [indice]; + const indicesLen = indices.length; + for (let i = 0; i < indicesLen; i++) { + const field = indices[i]; + let idx = this.#indexes.get(field); if (!idx) { idx = new Map(); - this.indexes.set(i, idx); + this.#indexes.set(field, idx); } - const fn = c => { - if (!idx.has(c)) { - idx.set(c, new Set()); + const values = field.includes(this.#delimiter) + ? this.#getIndexKeys(field, this.#delimiter, data) + : Array.isArray(this.#getNestedValue(data, field)) + ? this.#getNestedValue(data, field) + : [this.#getNestedValue(data, field)]; + const valuesLen = values.length; + for (let j = 0; j < valuesLen; j++) { + const value = values[j]; + if (!idx.has(value)) { + idx.set(value, new Set()); } - idx.get(c).add(key); - }; - if (i.includes(this.delimiter)) { - this.each(this.indexKeys(i, this.delimiter, data), fn); - } else { - this.each(Array.isArray(data[i]) ? data[i] : [data[i]], fn); + idx.get(value).add(key); } - }); - + } return this; } /** - * Sorts all records using a comparator function - * @param {Function} fn - Comparator function for sorting (a, b) => number - * @param {boolean} [frozen=false] - Whether to return frozen records - * @returns {Array} Sorted array of records + * Sorts records using a comparator function. 
+ * @param {Function} fn - Comparator (a, b) => number + * @param {boolean} [frozen=false] - Return frozen records + * @returns {Array} Sorted records * @example - * const sorted = store.sort((a, b) => a.age - b.age); // Sort by age - * const names = store.sort((a, b) => a.name.localeCompare(b.name)); // Sort by name + * store.sort((a, b) => a.age - b.age); */ - sort (fn, frozen = false) { - const dataSize = this.data.size; - let result = this.limit(INT_0, dataSize, true).sort(fn); + sort(fn, frozen = false) { + if (typeof fn !== STRING_FUNCTION) { + throw new Error(STRING_ERROR_SORT_FN_TYPE); + } + const dataSize = this.#data.size; + let result = this.limit(INT_0, dataSize).sort(fn); if (frozen) { - result = this.freeze(...result); + result = Object.freeze(result); } return result; } /** - * Comparator function for sorting keys with type-aware comparison logic - * @param {*} a - First value to compare - * @param {*} b - Second value to compare - * @returns {number} Negative number if a < b, positive if a > b, zero if equal - * @example - * const keys = ['name', 'age', 'email']; - * keys.sort(store.sortKeys); // Alphabetical sort - * - * const mixed = [10, '5', 'abc', 3]; - * mixed.sort(store.sortKeys); // Type-aware sort: numbers first, then strings + * Sorts keys with type-aware comparison. 
+ * @param {*} a - First value + * @param {*} b - Second value + * @returns {number} Comparison result */ - sortKeys (a, b) { + #sortKeys(a, b) { // Handle string comparison if (typeof a === STRING_STRING && typeof b === STRING_STRING) { return a.localeCompare(b); @@ -823,50 +923,54 @@ class Haro { } // Handle mixed types or other types by converting to string - return String(a).localeCompare(String(b)); } /** - * Sorts records by a specific indexed field in ascending order - * @param {string} [index=STRING_EMPTY] - Index field name to sort by - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records sorted by the specified field - * @throws {Error} Throws error if index field is empty or invalid + * Sorts records by an indexed field. + * @param {string} [index=STRING_EMPTY] - Field to sort by + * @returns {Array} Sorted records + * @throws {Error} If index is empty * @example - * const byAge = store.sortBy('age'); - * const byName = store.sortBy('name'); + * store.sortBy('age'); */ - sortBy (index = STRING_EMPTY, raw = false) { + sortBy(index = STRING_EMPTY) { if (index === STRING_EMPTY) { throw new Error(STRING_INVALID_FIELD); } - let result = []; const keys = []; - if (this.indexes.has(index) === false) { + if (this.#indexes.has(index) === false) { this.reindex(index); } - const lindex = this.indexes.get(index); + const lindex = this.#indexes.get(index); lindex.forEach((idx, key) => keys.push(key)); - this.each(keys.sort(this.sortKeys), i => lindex.get(i).forEach(key => result.push(this.get(key, raw)))); - if (this.immutable) { - result = Object.freeze(result); - } + keys.sort(this.#sortKeys); + const result = keys.flatMap((i) => { + const inner = Array.from(lindex.get(i)); + const innerLen = inner.length; + const mapped = Array.from({ length: innerLen }, (_, j) => this.get(inner[j])); + return mapped; + }); + if (this.#immutable) { + return Object.freeze(result); + } return result; } /** - * Converts 
all store data to a plain array of records - * @returns {Array} Array containing all records in the store + * Converts store data to an array. + * @returns {Array} All records * @example - * const allRecords = store.toArray(); - * console.log(`Store contains ${allRecords.length} records`); + * store.toArray(); */ - toArray () { - const result = Array.from(this.data.values()); - if (this.immutable) { - this.each(result, i => Object.freeze(i)); + toArray() { + const result = Array.from(this.#data.values()); + if (this.#immutable) { + const resultLen = result.length; + for (let i = 0; i < resultLen; i++) { + Object.freeze(result[i]); + } Object.freeze(result); } @@ -874,88 +978,100 @@ class Haro { } /** - * Generates a RFC4122 v4 UUID for record identification - * @returns {string} UUID string in standard format - * @example - * const id = store.uuid(); // "f47ac10b-58cc-4372-a567-0e02b2c3d479" - */ - uuid () { - return randomUUID(); - } - - /** - * Returns an iterator of all values in the store - * @returns {Iterator} Iterator of record values + * Returns an iterator of all values. + * @returns {Iterator} Values * @example - * for (const record of store.values()) { - * console.log(record.name); - * } + * for (const record of store.values()) { } */ - values () { - return this.data.values(); + values() { + return this.#data.values(); } /** - * Internal helper method for predicate matching with support for arrays and regex - * @param {Object} record - Record to test against predicate - * @param {Object} predicate - Predicate object with field-value pairs - * @param {string} op - Operator for array matching ('||' for OR, '&&' for AND) - * @returns {boolean} True if record matches predicate criteria + * Matches a record against a predicate. 
+ * @param {Object} record - Record to test + * @param {Object} predicate - Predicate object + * @param {string} op - Operator: '||' or '&&' + * @returns {boolean} True if matches */ - matchesPredicate (record, predicate, op) { + #matchesPredicate(record, predicate, op) { const keys = Object.keys(predicate); - return keys.every(key => { + return keys.every((key) => { const pred = predicate[key]; - const val = record[key]; + // Use nested value extraction for dot notation paths + const val = this.#getNestedValue(record, key); if (Array.isArray(pred)) { if (Array.isArray(val)) { - return op === STRING_DOUBLE_AND ? pred.every(p => val.includes(p)) : pred.some(p => val.includes(p)); - } else { - return op === STRING_DOUBLE_AND ? pred.every(p => val === p) : pred.some(p => val === p); - } - } else if (pred instanceof RegExp) { - if (Array.isArray(val)) { - return op === STRING_DOUBLE_AND ? val.every(v => pred.test(v)) : val.some(v => pred.test(v)); - } else { - return pred.test(val); + return op === STRING_DOUBLE_AND + ? pred.every((p) => val.includes(p)) + : pred.some((p) => val.includes(p)); } - } else if (Array.isArray(val)) { - return val.includes(pred); - } else { - return val === pred; + return op === STRING_DOUBLE_AND + ? 
pred.every((p) => val === p) + : pred.some((p) => val === p); + } + if (Array.isArray(val)) { + return val.some((v) => { + if (pred instanceof RegExp) { + return pred.test(v); + } + if (v instanceof RegExp) { + return v.test(pred); + } + return v === pred; + }); + } + if (pred instanceof RegExp) { + return pred.test(val); } + return val === pred; }); } /** - * Advanced filtering with predicate logic supporting AND/OR operations on arrays - * @param {Object} [predicate={}] - Object with field-value pairs for filtering - * @param {string} [op=STRING_DOUBLE_PIPE] - Operator for array matching ('||' for OR, '&&' for AND) - * @returns {Array} Array of records matching the predicate criteria + * Filters records with predicate logic supporting AND/OR on arrays. + * @param {Object} [predicate={}] - Field-value pairs + * @param {string} [op=STRING_DOUBLE_PIPE] - Operator: '||' (OR) or '&&' (AND) + * @returns {Promise>} Matching records * @example - * // Find records with tags containing 'admin' OR 'user' - * const users = store.where({tags: ['admin', 'user']}, '||'); - * - * // Find records with ALL specified tags - * const powerUsers = store.where({tags: ['admin', 'power']}, '&&'); - * - * // Regex matching - * const emails = store.where({email: /^admin@/}); - */ - where (predicate = {}, op = STRING_DOUBLE_PIPE) { - const keys = this.index.filter(i => i in predicate); - if (keys.length === 0) return []; + * store.where({tags: ['admin', 'user']}, '||'); + * store.where({email: /^admin@/}); + */ + async where(predicate = {}, op = STRING_DOUBLE_PIPE) { + if (typeof predicate !== STRING_OBJECT || predicate === null) { + throw new Error(STRING_ERROR_WHERE_PREDICATE_TYPE); + } + if (typeof op !== STRING_STRING) { + throw new Error(STRING_ERROR_WHERE_OP_TYPE); + } + + let cacheKey; + if (this.#cacheEnabled) { + cacheKey = await this.#getCacheKey(STRING_CACHE_DOMAIN_WHERE, predicate, op); + const cached = this.#cache.get(cacheKey); + if (cached !== undefined) { + return 
this.#immutable ? Object.freeze(cached) : this.#clone(cached); + } + } + + const keys = this.#index.filter((i) => i in predicate); + if (keys.length === 0) { + if (this.#warnOnFullScan) { + console.warn("where(): performing full table scan - consider adding an index"); + } + return this.filter((a) => this.#matchesPredicate(a, predicate, op)); + } // Try to use indexes for better performance - const indexedKeys = keys.filter(k => this.indexes.has(k)); + const indexedKeys = keys.filter((k) => this.#indexes.has(k)); if (indexedKeys.length > 0) { // Use index-based filtering for better performance let candidateKeys = new Set(); let first = true; for (const key of indexedKeys) { const pred = predicate[key]; - const idx = this.indexes.get(key); + const idx = this.#indexes.get(key); const matchingKeys = new Set(); if (Array.isArray(pred)) { for (const p of pred) { @@ -965,9 +1081,29 @@ class Haro { } } } - } else if (idx.has(pred)) { - for (const k of idx.get(pred)) { - matchingKeys.add(k); + } else if (pred instanceof RegExp) { + for (const [indexKey, keySet] of idx) { + if (pred.test(indexKey)) { + for (const k of keySet) { + matchingKeys.add(k); + } + } + } + } else { + // Direct value lookup - works for both flat and nested fields + // Also check for RegExp keys that match the predicate + for (const [indexKey, keySet] of idx) { + if (indexKey instanceof RegExp) { + if (indexKey.test(pred)) { + for (const k of keySet) { + matchingKeys.add(k); + } + } + } else if (indexKey === pred) { + for (const k of keySet) { + matchingKeys.add(k); + } + } } } if (first) { @@ -975,45 +1111,43 @@ class Haro { first = false; } else { // AND operation across different fields - candidateKeys = new Set([...candidateKeys].filter(k => matchingKeys.has(k))); + candidateKeys = new Set([...candidateKeys].filter((k) => matchingKeys.has(k))); } } // Filter candidates with full predicate logic const results = []; for (const key of candidateKeys) { - const record = this.get(key, true); - if 
(this.matchesPredicate(record, predicate, op)) { - results.push(this.immutable ? this.get(key) : record); + const record = this.get(key); + if (this.#matchesPredicate(record, predicate, op)) { + results.push(record); } } - return this.immutable ? this.freeze(...results) : results; - } + if (this.#cacheEnabled) { + this.#cache.set(cacheKey, results); + } - // Fallback to full scan if no indexes available - return this.filter(a => this.matchesPredicate(a, predicate, op)); + if (this.#immutable) { + return Object.freeze(results); + } + return results; + } } } /** - * Factory function to create a new Haro instance with optional initial data - * @param {Array|null} [data=null] - Initial data to populate the store - * @param {Object} [config={}] - Configuration object passed to Haro constructor - * @returns {Haro} New Haro instance configured and optionally populated + * Factory function to create a Haro instance. + * @param {Array|null} [data=null] - Initial data + * @param {Object} [config={}] - Configuration + * @returns {Haro} New Haro instance * @example - * const store = haro([ - * {id: 1, name: 'John', age: 30}, - * {id: 2, name: 'Jane', age: 25} - * ], { - * index: ['name', 'age'], - * versioning: true - * }); + * const store = haro([{id: 1, name: 'John'}], {index: ['name']}); */ -function haro (data = null, config = {}) { +function haro(data = null, config = {}) { const obj = new Haro(config); if (Array.isArray(data)) { - obj.batch(data, STRING_SET); + obj.setMany(data); } return obj; diff --git a/dist/haro.min.js b/dist/haro.min.js deleted file mode 100644 index 6bd0e2bd..00000000 --- a/dist/haro.min.js +++ /dev/null @@ -1,5 +0,0 @@ -/*! 
- 2025 Jason Mulligan - @version 16.0.0 -*/ -import{randomUUID as e}from"crypto";const t="",s="&&",r="function",i="object",n="records",h="string",a="number",o="Invalid function";class l{constructor({delimiter:e="|",id:t=this.uuid(),immutable:s=!1,index:r=[],key:i="id",versioning:n=!1}={}){return this.data=new Map,this.delimiter=e,this.id=t,this.immutable=s,this.index=Array.isArray(r)?[...r]:[],this.indexes=new Map,this.key=i,this.versions=new Map,this.versioning=n,Object.defineProperty(this,"registry",{enumerable:!0,get:()=>Array.from(this.data.keys())}),Object.defineProperty(this,"size",{enumerable:!0,get:()=>this.data.size}),this.reindex()}batch(e,t="set"){const s="del"===t?e=>this.delete(e,!0):e=>this.set(null,e,!0,!0);return this.onbatch(this.beforeBatch(e,t).map(s),t)}beforeBatch(e,t=""){return e}beforeClear(){}beforeDelete(e="",t=!1){}beforeSet(e="",t={},s=!1,r=!1){}clear(){return this.beforeClear(),this.data.clear(),this.indexes.clear(),this.versions.clear(),this.reindex().onclear(),this}clone(e){return structuredClone(e)}delete(e="",t=!1){if(!this.data.has(e))throw new Error("Record not found");const s=this.get(e,!0);this.beforeDelete(e,t),this.deleteIndex(e,s),this.data.delete(e),this.ondelete(e,t),this.versioning&&this.versions.delete(e)}deleteIndex(e,t){return this.index.forEach(s=>{const r=this.indexes.get(s);if(!r)return;const i=s.includes(this.delimiter)?this.indexKeys(s,this.delimiter,t):Array.isArray(t[s])?t[s]:[t[s]];this.each(i,t=>{if(r.has(t)){const s=r.get(t);s.delete(e),0===s.size&&r.delete(t)}})}),this}dump(e=n){let t;return t=e===n?Array.from(this.entries()):Array.from(this.indexes).map(e=>(e[1]=Array.from(e[1]).map(e=>(e[1]=Array.from(e[1]),e)),e)),t}each(e=[],t){const s=e.length;for(let r=0;r0){const n=this.indexKeys(s,this.delimiter,e);i=Array.from(n.reduce((e,t)=>(r.has(t)&&r.get(t).forEach(t=>e.add(t)),e),new Set)).map(e=>this.get(e,t))}return!t&&this.immutable&&(i=Object.freeze(i)),i}filter(e,t=!1){if(typeof e!==r)throw new Error(o);let 
s=this.reduce((t,s)=>(e(s)&&t.push(s),t),[]);return t||(s=s.map(e=>this.list(e)),this.immutable&&(s=Object.freeze(s))),s}forEach(e,t=this){return this.data.forEach((s,r)=>{this.immutable&&(s=this.clone(s)),e.call(t,s,r)},this),this}freeze(...e){return Object.freeze(e.map(e=>Object.freeze(e)))}get(e,t=!1){let s=this.data.get(e)??null;return null===s||t||(s=this.list(s),this.immutable&&(s=Object.freeze(s))),s}has(e){return this.data.has(e)}indexKeys(e="",t="|",s={}){const r=e.split(t).sort(this.sortKeys),i=r.length;let n=[""];for(let e=0;ethis.get(e,s));return!s&&this.immutable&&(r=Object.freeze(r)),r}list(e){const t=[e[this.key],e];return this.immutable?this.freeze(...t):t}map(e,t=!1){if(typeof e!==r)throw new Error(o);let s=[];return this.forEach((t,r)=>s.push(e(t,r))),t||(s=s.map(e=>this.list(e)),this.immutable&&(s=Object.freeze(s))),s}merge(e,t,s=!1){return Array.isArray(e)&&Array.isArray(t)?e=s?t:e.concat(t):typeof e===i&&null!==e&&typeof t===i&&null!==t?this.each(Object.keys(t),r=>{e[r]=this.merge(e[r],t[r],s)}):e=t,e}onbatch(e,t=""){return e}onclear(){}ondelete(e="",t=!1){}onoverride(e=""){}onset(e={},t=!1){}override(e,t=n){if("indexes"===t)this.indexes=new Map(e.map(e=>[e[0],new Map(e[1].map(e=>[e[0],new Set(e[1])]))]));else{if(t!==n)throw new Error("Invalid type");this.indexes.clear(),this.data=new Map(e)}return this.onoverride(t),!0}reduce(e,t=[]){let s=t;return this.forEach((t,r)=>{s=e(s,t,r,this)},this),s}reindex(e){const t=e?[e]:this.index;return e&&!1===this.index.includes(e)&&this.index.push(e),this.each(t,e=>this.indexes.set(e,new Map)),this.forEach((e,s)=>this.each(t,t=>this.setIndex(s,e,t))),this}search(e,t,s=!1){const i=new Set,n=typeof e===r,h=e&&typeof e.test===r;if(!e)return this.immutable?this.freeze():[];const a=t?Array.isArray(t)?t:[t]:this.index;for(const t of a){const s=this.indexes.get(t);if(s)for(const[r,a]of s){let s=!1;if(s=n?e(r,t):h?e.test(Array.isArray(r)?r.join(","):r):r===e,s)for(const e of a)this.data.has(e)&&i.add(e)}}let 
o=Array.from(i).map(e=>this.get(e,s));return!s&&this.immutable&&(o=Object.freeze(o)),o}set(e=null,t={},s=!1,r=!1){null===e&&(e=t[this.key]??this.uuid());let i={...t,[this.key]:e};if(this.beforeSet(e,i,s,r),this.data.has(e)){const t=this.get(e,!0);this.deleteIndex(e,t),this.versioning&&this.versions.get(e).add(Object.freeze(this.clone(t))),r||(i=this.merge(this.clone(t),i))}else this.versioning&&this.versions.set(e,new Set);this.data.set(e,i),this.setIndex(e,i,null);const n=this.get(e);return this.onset(n,s),n}setIndex(e,t,s){return this.each(null===s?this.index:[s],s=>{let r=this.indexes.get(s);r||(r=new Map,this.indexes.set(s,r));const i=t=>{r.has(t)||r.set(t,new Set),r.get(t).add(e)};s.includes(this.delimiter)?this.each(this.indexKeys(s,this.delimiter,t),i):this.each(Array.isArray(t[s])?t[s]:[t[s]],i)}),this}sort(e,t=!1){const s=this.data.size;let r=this.limit(0,s,!0).sort(e);return t&&(r=this.freeze(...r)),r}sortKeys(e,t){return typeof e===h&&typeof t===h?e.localeCompare(t):typeof e===a&&typeof t===a?e-t:String(e).localeCompare(String(t))}sortBy(e="",s=!1){if(e===t)throw new Error("Invalid field");let r=[];const i=[];!1===this.indexes.has(e)&&this.reindex(e);const n=this.indexes.get(e);return n.forEach((e,t)=>i.push(t)),this.each(i.sort(this.sortKeys),e=>n.get(e).forEach(e=>r.push(this.get(e,s)))),this.immutable&&(r=Object.freeze(r)),r}toArray(){const e=Array.from(this.data.values());return this.immutable&&(this.each(e,e=>Object.freeze(e)),Object.freeze(e)),e}uuid(){return e()}values(){return this.data.values()}matchesPredicate(e,t,r){return Object.keys(t).every(i=>{const n=t[i],h=e[i];return Array.isArray(n)?Array.isArray(h)?r===s?n.every(e=>h.includes(e)):n.some(e=>h.includes(e)):r===s?n.every(e=>h===e):n.some(e=>h===e):n instanceof RegExp?Array.isArray(h)?r===s?h.every(e=>n.test(e)):h.some(e=>n.test(e)):n.test(h):Array.isArray(h)?h.includes(n):h===n})}where(e={},t="||"){const s=this.index.filter(t=>t in e);if(0===s.length)return[];const 
r=s.filter(e=>this.indexes.has(e));if(r.length>0){let s=new Set,i=!0;for(const t of r){const r=e[t],n=this.indexes.get(t),h=new Set;if(Array.isArray(r)){for(const e of r)if(n.has(e))for(const t of n.get(e))h.add(t)}else if(n.has(r))for(const e of n.get(r))h.add(e);i?(s=h,i=!1):s=new Set([...s].filter(e=>h.has(e)))}const n=[];for(const r of s){const s=this.get(r,!0);this.matchesPredicate(s,e,t)&&n.push(this.immutable?this.get(r):s)}return this.immutable?this.freeze(...n):n}return this.filter(s=>this.matchesPredicate(s,e,t))}}function c(e=null,t={}){const s=new l(t);return Array.isArray(e)&&s.batch(e,"set"),s}export{l as Haro,c as haro};//# sourceMappingURL=haro.min.js.map diff --git a/dist/haro.min.js.map b/dist/haro.min.js.map deleted file mode 100644 index 5447b886..00000000 --- a/dist/haro.min.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"haro.min.js","sources":["../src/constants.js","../src/haro.js"],"sourcesContent":["// String constants - Single characters and symbols\nexport const STRING_COMMA = \",\";\nexport const STRING_EMPTY = \"\";\nexport const STRING_PIPE = \"|\";\nexport const STRING_DOUBLE_PIPE = \"||\";\nexport const STRING_DOUBLE_AND = \"&&\";\n\n// String constants - Operation and type names\nexport const STRING_ID = \"id\";\nexport const STRING_DEL = \"del\";\nexport const STRING_FUNCTION = \"function\";\nexport const STRING_INDEXES = \"indexes\";\nexport const STRING_OBJECT = \"object\";\nexport const STRING_RECORDS = \"records\";\nexport const STRING_REGISTRY = \"registry\";\nexport const STRING_SET = \"set\";\nexport const STRING_SIZE = \"size\";\nexport const STRING_STRING = \"string\";\nexport const STRING_NUMBER = \"number\";\n\n// String constants - Error messages\nexport const STRING_INVALID_FIELD = \"Invalid field\";\nexport const STRING_INVALID_FUNCTION = \"Invalid function\";\nexport const STRING_INVALID_TYPE = \"Invalid type\";\nexport const STRING_RECORD_NOT_FOUND = \"Record not found\";\n\n// Integer constants\nexport 
const INT_0 = 0;\n","import {randomUUID as uuid} from \"crypto\";\nimport {\n\tINT_0,\n\tSTRING_COMMA,\n\tSTRING_DEL, STRING_DOUBLE_AND,\n\tSTRING_DOUBLE_PIPE,\n\tSTRING_EMPTY,\n\tSTRING_FUNCTION,\n\tSTRING_ID,\n\tSTRING_INDEXES,\n\tSTRING_INVALID_FIELD,\n\tSTRING_INVALID_FUNCTION,\n\tSTRING_INVALID_TYPE, STRING_NUMBER, STRING_OBJECT,\n\tSTRING_PIPE,\n\tSTRING_RECORD_NOT_FOUND,\n\tSTRING_RECORDS,\n\tSTRING_REGISTRY,\n\tSTRING_SET,\n\tSTRING_SIZE, STRING_STRING\n} from \"./constants.js\";\n\n/**\n * Haro is a modern immutable DataStore for collections of records with indexing,\n * versioning, and batch operations support. It provides a Map-like interface\n * with advanced querying capabilities through indexes.\n * @class\n * @example\n * const store = new Haro({\n * index: ['name', 'age'],\n * key: 'id',\n * versioning: true\n * });\n *\n * store.set(null, {name: 'John', age: 30});\n * const results = store.find({name: 'John'});\n */\nexport class Haro {\n\t/**\n\t * Creates a new Haro instance with specified configuration\n\t * @param {Object} [config={}] - Configuration object for the store\n\t * @param {string} [config.delimiter=STRING_PIPE] - Delimiter for composite indexes (default: '|')\n\t * @param {string} [config.id] - Unique identifier for this instance (auto-generated if not provided)\n\t * @param {boolean} [config.immutable=false] - Return frozen/immutable objects for data safety\n\t * @param {string[]} [config.index=[]] - Array of field names to create indexes for\n\t * @param {string} [config.key=STRING_ID] - Primary key field name used for record identification\n\t * @param {boolean} [config.versioning=false] - Enable versioning to track record changes\n\t * @constructor\n\t * @example\n\t * const store = new Haro({\n\t * index: ['name', 'email', 'name|department'],\n\t * key: 'userId',\n\t * versioning: true,\n\t * immutable: true\n\t * });\n\t */\n\tconstructor ({delimiter = STRING_PIPE, id = this.uuid(), immutable = false, index = [], key = 
STRING_ID, versioning = false} = {}) {\n\t\tthis.data = new Map();\n\t\tthis.delimiter = delimiter;\n\t\tthis.id = id;\n\t\tthis.immutable = immutable;\n\t\tthis.index = Array.isArray(index) ? [...index] : [];\n\t\tthis.indexes = new Map();\n\t\tthis.key = key;\n\t\tthis.versions = new Map();\n\t\tthis.versioning = versioning;\n\t\tObject.defineProperty(this, STRING_REGISTRY, {\n\t\t\tenumerable: true,\n\t\t\tget: () => Array.from(this.data.keys())\n\t\t});\n\t\tObject.defineProperty(this, STRING_SIZE, {\n\t\t\tenumerable: true,\n\t\t\tget: () => this.data.size\n\t\t});\n\n\t\treturn this.reindex();\n\t}\n\n\t/**\n\t * Performs batch operations on multiple records for efficient bulk processing\n\t * @param {Array} args - Array of records to process\n\t * @param {string} [type=STRING_SET] - Type of operation: 'set' for upsert, 'del' for delete\n\t * @returns {Array} Array of results from the batch operation\n\t * @throws {Error} Throws error if individual operations fail during batch processing\n\t * @example\n\t * const results = store.batch([\n\t * {id: 1, name: 'John'},\n\t * {id: 2, name: 'Jane'}\n\t * ], 'set');\n\t */\n\tbatch (args, type = STRING_SET) {\n\t\tconst fn = type === STRING_DEL ? 
i => this.delete(i, true) : i => this.set(null, i, true, true);\n\n\t\treturn this.onbatch(this.beforeBatch(args, type).map(fn), type);\n\t}\n\n\t/**\n\t * Lifecycle hook executed before batch operations for custom preprocessing\n\t * @param {Array} arg - Arguments passed to batch operation\n\t * @param {string} [type=STRING_EMPTY] - Type of batch operation ('set' or 'del')\n\t * @returns {Array} The arguments array (possibly modified) to be processed\n\t */\n\tbeforeBatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic before batch; override in subclass if needed\n\t\treturn arg;\n\t}\n\n\t/**\n\t * Lifecycle hook executed before clear operation for custom preprocessing\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t * @example\n\t * class MyStore extends Haro {\n\t * beforeClear() {\n\t * this.backup = this.toArray();\n\t * }\n\t * }\n\t */\n\tbeforeClear () {\n\t\t// Hook for custom logic before clear; override in subclass if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed before delete operation for custom preprocessing\n\t * @param {string} [key=STRING_EMPTY] - Key of record to delete\n\t * @param {boolean} [batch=false] - Whether this is part of a batch operation\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t */\n\tbeforeDelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic before delete; override in subclass if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed before set operation for custom preprocessing\n\t * @param {string} [key=STRING_EMPTY] - Key of record to set\n\t * @param {Object} [data={}] - Record data being set\n\t * @param {boolean} [batch=false] - Whether this is part of a batch operation\n\t * @param {boolean} [override=false] - Whether to override existing data\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t 
*/\n\tbeforeSet (key = STRING_EMPTY, data = {}, batch = false, override = false) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic before set; override in subclass if needed\n\t}\n\n\t/**\n\t * Removes all records, indexes, and versions from the store\n\t * @returns {Haro} This instance for method chaining\n\t * @example\n\t * store.clear();\n\t * console.log(store.size); // 0\n\t */\n\tclear () {\n\t\tthis.beforeClear();\n\t\tthis.data.clear();\n\t\tthis.indexes.clear();\n\t\tthis.versions.clear();\n\t\tthis.reindex().onclear();\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Creates a deep clone of the given value, handling objects, arrays, and primitives\n\t * @param {*} arg - Value to clone (any type)\n\t * @returns {*} Deep clone of the argument\n\t * @example\n\t * const original = {name: 'John', tags: ['user', 'admin']};\n\t * const cloned = store.clone(original);\n\t * cloned.tags.push('new'); // original.tags is unchanged\n\t */\n\tclone (arg) {\n\t\treturn structuredClone(arg);\n\t}\n\n\t/**\n\t * Deletes a record from the store and removes it from all indexes\n\t * @param {string} [key=STRING_EMPTY] - Key of record to delete\n\t * @param {boolean} [batch=false] - Whether this is part of a batch operation\n\t * @returns {void}\n\t * @throws {Error} Throws error if record with the specified key is not found\n\t * @example\n\t * store.delete('user123');\n\t * // Throws error if 'user123' doesn't exist\n\t */\n\tdelete (key = STRING_EMPTY, batch = false) {\n\t\tif (!this.data.has(key)) {\n\t\t\tthrow new Error(STRING_RECORD_NOT_FOUND);\n\t\t}\n\t\tconst og = this.get(key, true);\n\t\tthis.beforeDelete(key, batch);\n\t\tthis.deleteIndex(key, og);\n\t\tthis.data.delete(key);\n\t\tthis.ondelete(key, batch);\n\t\tif (this.versioning) {\n\t\t\tthis.versions.delete(key);\n\t\t}\n\t}\n\n\t/**\n\t * Internal method to remove entries from indexes for a deleted record\n\t * @param {string} key - Key of record being deleted\n\t * @param {Object} data - Data 
of record being deleted\n\t * @returns {Haro} This instance for method chaining\n\t */\n\tdeleteIndex (key, data) {\n\t\tthis.index.forEach(i => {\n\t\t\tconst idx = this.indexes.get(i);\n\t\t\tif (!idx) return;\n\t\t\tconst values = i.includes(this.delimiter) ?\n\t\t\t\tthis.indexKeys(i, this.delimiter, data) :\n\t\t\t\tArray.isArray(data[i]) ? data[i] : [data[i]];\n\t\t\tthis.each(values, value => {\n\t\t\t\tif (idx.has(value)) {\n\t\t\t\t\tconst o = idx.get(value);\n\t\t\t\t\to.delete(key);\n\t\t\t\t\tif (o.size === INT_0) {\n\t\t\t\t\t\tidx.delete(value);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\t\t});\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Exports complete store data or indexes for persistence or debugging\n\t * @param {string} [type=STRING_RECORDS] - Type of data to export: 'records' or 'indexes'\n\t * @returns {Array} Array of [key, value] pairs for records, or serialized index structure\n\t * @example\n\t * const records = store.dump('records');\n\t * const indexes = store.dump('indexes');\n\t */\n\tdump (type = STRING_RECORDS) {\n\t\tlet result;\n\t\tif (type === STRING_RECORDS) {\n\t\t\tresult = Array.from(this.entries());\n\t\t} else {\n\t\t\tresult = Array.from(this.indexes).map(i => {\n\t\t\t\ti[1] = Array.from(i[1]).map(ii => {\n\t\t\t\t\tii[1] = Array.from(ii[1]);\n\n\t\t\t\t\treturn ii;\n\t\t\t\t});\n\n\t\t\t\treturn i;\n\t\t\t});\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Utility method to iterate over an array with a callback function\n\t * @param {Array<*>} [arr=[]] - Array to iterate over\n\t * @param {Function} fn - Function to call for each element (element, index)\n\t * @returns {Array<*>} The original array for method chaining\n\t * @example\n\t * store.each([1, 2, 3], (item, index) => console.log(item, index));\n\t */\n\teach (arr = [], fn) {\n\t\tconst len = arr.length;\n\t\tfor (let i = 0; i < len; i++) {\n\t\t\tfn(arr[i], i);\n\t\t}\n\n\t\treturn arr;\n\t}\n\n\t/**\n\t * Returns an iterator of [key, value] pairs for each record in the 
store\n\t * @returns {Iterator>} Iterator of [key, value] pairs\n\t * @example\n\t * for (const [key, value] of store.entries()) {\n\t * console.log(key, value);\n\t * }\n\t */\n\tentries () {\n\t\treturn this.data.entries();\n\t}\n\n\t/**\n\t * Finds records matching the specified criteria using indexes for optimal performance\n\t * @param {Object} [where={}] - Object with field-value pairs to match against\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of matching records (frozen if immutable mode)\n\t * @example\n\t * const users = store.find({department: 'engineering', active: true});\n\t * const admins = store.find({role: 'admin'});\n\t */\n\tfind (where = {}, raw = false) {\n\t\tconst key = Object.keys(where).sort(this.sortKeys).join(this.delimiter);\n\t\tconst index = this.indexes.get(key) ?? new Map();\n\t\tlet result = [];\n\t\tif (index.size > 0) {\n\t\t\tconst keys = this.indexKeys(key, this.delimiter, where);\n\t\t\tresult = Array.from(keys.reduce((a, v) => {\n\t\t\t\tif (index.has(v)) {\n\t\t\t\t\tindex.get(v).forEach(k => a.add(k));\n\t\t\t\t}\n\n\t\t\t\treturn a;\n\t\t\t}, new Set())).map(i => this.get(i, raw));\n\t\t}\n\t\tif (!raw && this.immutable) {\n\t\t\tresult = Object.freeze(result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Filters records using a predicate function, similar to Array.filter\n\t * @param {Function} fn - Predicate function to test each record (record, key, store)\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of records that pass the predicate test\n\t * @throws {Error} Throws error if fn is not a function\n\t * @example\n\t * const adults = store.filter(record => record.age >= 18);\n\t * const recent = store.filter(record => record.created > Date.now() - 86400000);\n\t */\n\tfilter (fn, raw = false) {\n\t\tif (typeof fn !== STRING_FUNCTION) {\n\t\t\tthrow new 
Error(STRING_INVALID_FUNCTION);\n\t\t}\n\t\tlet result = this.reduce((a, v) => {\n\t\t\tif (fn(v)) {\n\t\t\t\ta.push(v);\n\t\t\t}\n\n\t\t\treturn a;\n\t\t}, []);\n\t\tif (!raw) {\n\t\t\tresult = result.map(i => this.list(i));\n\n\t\t\tif (this.immutable) {\n\t\t\t\tresult = Object.freeze(result);\n\t\t\t}\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Executes a function for each record in the store, similar to Array.forEach\n\t * @param {Function} fn - Function to execute for each record (value, key)\n\t * @param {*} [ctx] - Context object to use as 'this' when executing the function\n\t * @returns {Haro} This instance for method chaining\n\t * @example\n\t * store.forEach((record, key) => {\n\t * console.log(`${key}: ${record.name}`);\n\t * });\n\t */\n\tforEach (fn, ctx = this) {\n\t\tthis.data.forEach((value, key) => {\n\t\t\tif (this.immutable) {\n\t\t\t\tvalue = this.clone(value);\n\t\t\t}\n\t\t\tfn.call(ctx, value, key);\n\t\t}, this);\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Creates a frozen array from the given arguments for immutable data handling\n\t * @param {...*} args - Arguments to freeze into an array\n\t * @returns {Array<*>} Frozen array containing frozen arguments\n\t * @example\n\t * const frozen = store.freeze(obj1, obj2, obj3);\n\t * // Returns Object.freeze([Object.freeze(obj1), Object.freeze(obj2), Object.freeze(obj3)])\n\t */\n\tfreeze (...args) {\n\t\treturn Object.freeze(args.map(i => Object.freeze(i)));\n\t}\n\n\t/**\n\t * Retrieves a record by its key\n\t * @param {string} key - Key of record to retrieve\n\t * @param {boolean} [raw=false] - Whether to return raw data (true) or processed/frozen data (false)\n\t * @returns {Object|null} The record if found, null if not found\n\t * @example\n\t * const user = store.get('user123');\n\t * const rawUser = store.get('user123', true);\n\t */\n\tget (key, raw = false) {\n\t\tlet result = this.data.get(key) ?? 
null;\n\t\tif (result !== null && !raw) {\n\t\t\tresult = this.list(result);\n\t\t\tif (this.immutable) {\n\t\t\t\tresult = Object.freeze(result);\n\t\t\t}\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Checks if a record with the specified key exists in the store\n\t * @param {string} key - Key to check for existence\n\t * @returns {boolean} True if record exists, false otherwise\n\t * @example\n\t * if (store.has('user123')) {\n\t * console.log('User exists');\n\t * }\n\t */\n\thas (key) {\n\t\treturn this.data.has(key);\n\t}\n\n\t/**\n\t * Generates index keys for composite indexes from data values\n\t * @param {string} [arg=STRING_EMPTY] - Composite index field names joined by delimiter\n\t * @param {string} [delimiter=STRING_PIPE] - Delimiter used in composite index\n\t * @param {Object} [data={}] - Data object to extract field values from\n\t * @returns {string[]} Array of generated index keys\n\t * @example\n\t * // For index 'name|department' with data {name: 'John', department: 'IT'}\n\t * const keys = store.indexKeys('name|department', '|', data);\n\t * // Returns ['John|IT']\n\t */\n\tindexKeys (arg = STRING_EMPTY, delimiter = STRING_PIPE, data = {}) {\n\t\tconst fields = arg.split(delimiter).sort(this.sortKeys);\n\t\tconst fieldsLen = fields.length;\n\t\tlet result = [\"\"];\n\t\tfor (let i = 0; i < fieldsLen; i++) {\n\t\t\tconst field = fields[i];\n\t\t\tconst values = Array.isArray(data[field]) ? data[field] : [data[field]];\n\t\t\tconst newResult = [];\n\t\t\tconst resultLen = result.length;\n\t\t\tconst valuesLen = values.length;\n\t\t\tfor (let j = 0; j < resultLen; j++) {\n\t\t\t\tfor (let k = 0; k < valuesLen; k++) {\n\t\t\t\t\tconst newKey = i === 0 ? 
values[k] : `${result[j]}${delimiter}${values[k]}`;\n\t\t\t\t\tnewResult.push(newKey);\n\t\t\t\t}\n\t\t\t}\n\t\t\tresult = newResult;\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Returns an iterator of all keys in the store\n\t * @returns {Iterator} Iterator of record keys\n\t * @example\n\t * for (const key of store.keys()) {\n\t * console.log(key);\n\t * }\n\t */\n\tkeys () {\n\t\treturn this.data.keys();\n\t}\n\n\t/**\n\t * Returns a limited subset of records with offset support for pagination\n\t * @param {number} [offset=INT_0] - Number of records to skip from the beginning\n\t * @param {number} [max=INT_0] - Maximum number of records to return\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of records within the specified range\n\t * @example\n\t * const page1 = store.limit(0, 10); // First 10 records\n\t * const page2 = store.limit(10, 10); // Next 10 records\n\t */\n\tlimit (offset = INT_0, max = INT_0, raw = false) {\n\t\tlet result = this.registry.slice(offset, offset + max).map(i => this.get(i, raw));\n\t\tif (!raw && this.immutable) {\n\t\t\tresult = Object.freeze(result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Converts a record into a [key, value] pair array format\n\t * @param {Object} arg - Record object to convert to list format\n\t * @returns {Array<*>} Array containing [key, record] where key is extracted from record's key field\n\t * @example\n\t * const record = {id: 'user123', name: 'John', age: 30};\n\t * const pair = store.list(record); // ['user123', {id: 'user123', name: 'John', age: 30}]\n\t */\n\tlist (arg) {\n\t\tconst result = [arg[this.key], arg];\n\n\t\treturn this.immutable ? 
this.freeze(...result) : result;\n\t}\n\n\t/**\n\t * Transforms all records using a mapping function, similar to Array.map\n\t * @param {Function} fn - Function to transform each record (record, key)\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array<*>} Array of transformed results\n\t * @throws {Error} Throws error if fn is not a function\n\t * @example\n\t * const names = store.map(record => record.name);\n\t * const summaries = store.map(record => ({id: record.id, name: record.name}));\n\t */\n\tmap (fn, raw = false) {\n\t\tif (typeof fn !== STRING_FUNCTION) {\n\t\t\tthrow new Error(STRING_INVALID_FUNCTION);\n\t\t}\n\t\tlet result = [];\n\t\tthis.forEach((value, key) => result.push(fn(value, key)));\n\t\tif (!raw) {\n\t\t\tresult = result.map(i => this.list(i));\n\t\t\tif (this.immutable) {\n\t\t\t\tresult = Object.freeze(result);\n\t\t\t}\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Merges two values together with support for arrays and objects\n\t * @param {*} a - First value (target)\n\t * @param {*} b - Second value (source)\n\t * @param {boolean} [override=false] - Whether to override arrays instead of concatenating\n\t * @returns {*} Merged result\n\t * @example\n\t * const merged = store.merge({a: 1}, {b: 2}); // {a: 1, b: 2}\n\t * const arrays = store.merge([1, 2], [3, 4]); // [1, 2, 3, 4]\n\t */\n\tmerge (a, b, override = false) {\n\t\tif (Array.isArray(a) && Array.isArray(b)) {\n\t\t\ta = override ? 
b : a.concat(b);\n\t\t} else if (typeof a === STRING_OBJECT && a !== null && typeof b === STRING_OBJECT && b !== null) {\n\t\t\tthis.each(Object.keys(b), i => {\n\t\t\t\ta[i] = this.merge(a[i], b[i], override);\n\t\t\t});\n\t\t} else {\n\t\t\ta = b;\n\t\t}\n\n\t\treturn a;\n\t}\n\n\t/**\n\t * Lifecycle hook executed after batch operations for custom postprocessing\n\t * @param {Array} arg - Result of batch operation\n\t * @param {string} [type=STRING_EMPTY] - Type of batch operation that was performed\n\t * @returns {Array} Modified result (override this method to implement custom logic)\n\t */\n\tonbatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars\n\t\treturn arg;\n\t}\n\n\t/**\n\t * Lifecycle hook executed after clear operation for custom postprocessing\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t * @example\n\t * class MyStore extends Haro {\n\t * onclear() {\n\t * console.log('Store cleared');\n\t * }\n\t * }\n\t */\n\tonclear () {\n\t\t// Hook for custom logic after clear; override in subclass if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed after delete operation for custom postprocessing\n\t * @param {string} [key=STRING_EMPTY] - Key of deleted record\n\t * @param {boolean} [batch=false] - Whether this was part of a batch operation\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t */\n\tondelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic after delete; override in subclass if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed after override operation for custom postprocessing\n\t * @param {string} [type=STRING_EMPTY] - Type of override operation that was performed\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t */\n\tonoverride (type = STRING_EMPTY) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic after override; override in subclass 
if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed after set operation for custom postprocessing\n\t * @param {Object} [arg={}] - Record that was set\n\t * @param {boolean} [batch=false] - Whether this was part of a batch operation\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t */\n\tonset (arg = {}, batch = false) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic after set; override in subclass if needed\n\t}\n\n\t/**\n\t * Replaces all store data or indexes with new data for bulk operations\n\t * @param {Array} data - Data to replace with (format depends on type)\n\t * @param {string} [type=STRING_RECORDS] - Type of data: 'records' or 'indexes'\n\t * @returns {boolean} True if operation succeeded\n\t * @throws {Error} Throws error if type is invalid\n\t * @example\n\t * const records = [['key1', {name: 'John'}], ['key2', {name: 'Jane'}]];\n\t * store.override(records, 'records');\n\t */\n\toverride (data, type = STRING_RECORDS) {\n\t\tconst result = true;\n\t\tif (type === STRING_INDEXES) {\n\t\t\tthis.indexes = new Map(data.map(i => [i[0], new Map(i[1].map(ii => [ii[0], new Set(ii[1])]))]));\n\t\t} else if (type === STRING_RECORDS) {\n\t\t\tthis.indexes.clear();\n\t\t\tthis.data = new Map(data);\n\t\t} else {\n\t\t\tthrow new Error(STRING_INVALID_TYPE);\n\t\t}\n\t\tthis.onoverride(type);\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Reduces all records to a single value using a reducer function\n\t * @param {Function} fn - Reducer function (accumulator, value, key, store)\n\t * @param {*} [accumulator] - Initial accumulator value\n\t * @returns {*} Final reduced value\n\t * @example\n\t * const totalAge = store.reduce((sum, record) => sum + record.age, 0);\n\t * const names = store.reduce((acc, record) => acc.concat(record.name), []);\n\t */\n\treduce (fn, accumulator = []) {\n\t\tlet a = accumulator;\n\t\tthis.forEach((v, k) => {\n\t\t\ta = fn(a, v, k, this);\n\t\t}, this);\n\n\t\treturn 
a;\n\t}\n\n\t/**\n\t * Rebuilds indexes for specified fields or all fields for data consistency\n\t * @param {string|string[]} [index] - Specific index field(s) to rebuild, or all if not specified\n\t * @returns {Haro} This instance for method chaining\n\t * @example\n\t * store.reindex(); // Rebuild all indexes\n\t * store.reindex('name'); // Rebuild only name index\n\t * store.reindex(['name', 'email']); // Rebuild name and email indexes\n\t */\n\treindex (index) {\n\t\tconst indices = index ? [index] : this.index;\n\t\tif (index && this.index.includes(index) === false) {\n\t\t\tthis.index.push(index);\n\t\t}\n\t\tthis.each(indices, i => this.indexes.set(i, new Map()));\n\t\tthis.forEach((data, key) => this.each(indices, i => this.setIndex(key, data, i)));\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Searches for records containing a value across specified indexes\n\t * @param {*} value - Value to search for (string, function, or RegExp)\n\t * @param {string|string[]} [index] - Index(es) to search in, or all if not specified\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of matching records\n\t * @example\n\t * const results = store.search('john'); // Search all indexes\n\t * const nameResults = store.search('john', 'name'); // Search only name index\n\t * const regexResults = store.search(/^admin/, 'role'); // Regex search\n\t */\n\tsearch (value, index, raw = false) {\n\t\tconst result = new Set(); // Use Set for unique keys\n\t\tconst fn = typeof value === STRING_FUNCTION;\n\t\tconst rgex = value && typeof value.test === STRING_FUNCTION;\n\t\tif (!value) return this.immutable ? this.freeze() : [];\n\t\tconst indices = index ? Array.isArray(index) ? 
index : [index] : this.index;\n\t\tfor (const i of indices) {\n\t\t\tconst idx = this.indexes.get(i);\n\t\t\tif (idx) {\n\t\t\t\tfor (const [lkey, lset] of idx) {\n\t\t\t\t\tlet match = false;\n\n\t\t\t\t\tif (fn) {\n\t\t\t\t\t\tmatch = value(lkey, i);\n\t\t\t\t\t} else if (rgex) {\n\t\t\t\t\t\tmatch = value.test(Array.isArray(lkey) ? lkey.join(STRING_COMMA) : lkey);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmatch = lkey === value;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (match) {\n\t\t\t\t\t\tfor (const key of lset) {\n\t\t\t\t\t\t\tif (this.data.has(key)) {\n\t\t\t\t\t\t\t\tresult.add(key);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tlet records = Array.from(result).map(key => this.get(key, raw));\n\t\tif (!raw && this.immutable) {\n\t\t\trecords = Object.freeze(records);\n\t\t}\n\n\t\treturn records;\n\t}\n\n\t/**\n\t * Sets or updates a record in the store with automatic indexing\n\t * @param {string|null} [key=null] - Key for the record, or null to use record's key field\n\t * @param {Object} [data={}] - Record data to set\n\t * @param {boolean} [batch=false] - Whether this is part of a batch operation\n\t * @param {boolean} [override=false] - Whether to override existing data instead of merging\n\t * @returns {Object} The stored record (frozen if immutable mode)\n\t * @example\n\t * const user = store.set(null, {name: 'John', age: 30}); // Auto-generate key\n\t * const updated = store.set('user123', {age: 31}); // Update existing record\n\t */\n\tset (key = null, data = {}, batch = false, override = false) {\n\t\tif (key === null) {\n\t\t\tkey = data[this.key] ?? 
this.uuid();\n\t\t}\n\t\tlet x = {...data, [this.key]: key};\n\t\tthis.beforeSet(key, x, batch, override);\n\t\tif (!this.data.has(key)) {\n\t\t\tif (this.versioning) {\n\t\t\t\tthis.versions.set(key, new Set());\n\t\t\t}\n\t\t} else {\n\t\t\tconst og = this.get(key, true);\n\t\t\tthis.deleteIndex(key, og);\n\t\t\tif (this.versioning) {\n\t\t\t\tthis.versions.get(key).add(Object.freeze(this.clone(og)));\n\t\t\t}\n\t\t\tif (!override) {\n\t\t\t\tx = this.merge(this.clone(og), x);\n\t\t\t}\n\t\t}\n\t\tthis.data.set(key, x);\n\t\tthis.setIndex(key, x, null);\n\t\tconst result = this.get(key);\n\t\tthis.onset(result, batch);\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Internal method to add entries to indexes for a record\n\t * @param {string} key - Key of record being indexed\n\t * @param {Object} data - Data of record being indexed\n\t * @param {string|null} indice - Specific index to update, or null for all\n\t * @returns {Haro} This instance for method chaining\n\t */\n\tsetIndex (key, data, indice) {\n\t\tthis.each(indice === null ? this.index : [indice], i => {\n\t\t\tlet idx = this.indexes.get(i);\n\t\t\tif (!idx) {\n\t\t\t\tidx = new Map();\n\t\t\t\tthis.indexes.set(i, idx);\n\t\t\t}\n\t\t\tconst fn = c => {\n\t\t\t\tif (!idx.has(c)) {\n\t\t\t\t\tidx.set(c, new Set());\n\t\t\t\t}\n\t\t\t\tidx.get(c).add(key);\n\t\t\t};\n\t\t\tif (i.includes(this.delimiter)) {\n\t\t\t\tthis.each(this.indexKeys(i, this.delimiter, data), fn);\n\t\t\t} else {\n\t\t\t\tthis.each(Array.isArray(data[i]) ? 
data[i] : [data[i]], fn);\n\t\t\t}\n\t\t});\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Sorts all records using a comparator function\n\t * @param {Function} fn - Comparator function for sorting (a, b) => number\n\t * @param {boolean} [frozen=false] - Whether to return frozen records\n\t * @returns {Array} Sorted array of records\n\t * @example\n\t * const sorted = store.sort((a, b) => a.age - b.age); // Sort by age\n\t * const names = store.sort((a, b) => a.name.localeCompare(b.name)); // Sort by name\n\t */\n\tsort (fn, frozen = false) {\n\t\tconst dataSize = this.data.size;\n\t\tlet result = this.limit(INT_0, dataSize, true).sort(fn);\n\t\tif (frozen) {\n\t\t\tresult = this.freeze(...result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Comparator function for sorting keys with type-aware comparison logic\n\t * @param {*} a - First value to compare\n\t * @param {*} b - Second value to compare\n\t * @returns {number} Negative number if a < b, positive if a > b, zero if equal\n\t * @example\n\t * const keys = ['name', 'age', 'email'];\n\t * keys.sort(store.sortKeys); // Alphabetical sort\n\t *\n\t * const mixed = [10, '5', 'abc', 3];\n\t * mixed.sort(store.sortKeys); // Type-aware sort: numbers first, then strings\n\t */\n\tsortKeys (a, b) {\n\t\t// Handle string comparison\n\t\tif (typeof a === STRING_STRING && typeof b === STRING_STRING) {\n\t\t\treturn a.localeCompare(b);\n\t\t}\n\t\t// Handle numeric comparison\n\t\tif (typeof a === STRING_NUMBER && typeof b === STRING_NUMBER) {\n\t\t\treturn a - b;\n\t\t}\n\n\t\t// Handle mixed types or other types by converting to string\n\n\t\treturn String(a).localeCompare(String(b));\n\t}\n\n\t/**\n\t * Sorts records by a specific indexed field in ascending order\n\t * @param {string} [index=STRING_EMPTY] - Index field name to sort by\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of records sorted by the specified field\n\t * @throws {Error} Throws error if 
index field is empty or invalid\n\t * @example\n\t * const byAge = store.sortBy('age');\n\t * const byName = store.sortBy('name');\n\t */\n\tsortBy (index = STRING_EMPTY, raw = false) {\n\t\tif (index === STRING_EMPTY) {\n\t\t\tthrow new Error(STRING_INVALID_FIELD);\n\t\t}\n\t\tlet result = [];\n\t\tconst keys = [];\n\t\tif (this.indexes.has(index) === false) {\n\t\t\tthis.reindex(index);\n\t\t}\n\t\tconst lindex = this.indexes.get(index);\n\t\tlindex.forEach((idx, key) => keys.push(key));\n\t\tthis.each(keys.sort(this.sortKeys), i => lindex.get(i).forEach(key => result.push(this.get(key, raw))));\n\t\tif (this.immutable) {\n\t\t\tresult = Object.freeze(result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Converts all store data to a plain array of records\n\t * @returns {Array} Array containing all records in the store\n\t * @example\n\t * const allRecords = store.toArray();\n\t * console.log(`Store contains ${allRecords.length} records`);\n\t */\n\ttoArray () {\n\t\tconst result = Array.from(this.data.values());\n\t\tif (this.immutable) {\n\t\t\tthis.each(result, i => Object.freeze(i));\n\t\t\tObject.freeze(result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Generates a RFC4122 v4 UUID for record identification\n\t * @returns {string} UUID string in standard format\n\t * @example\n\t * const id = store.uuid(); // \"f47ac10b-58cc-4372-a567-0e02b2c3d479\"\n\t */\n\tuuid () {\n\t\treturn uuid();\n\t}\n\n\t/**\n\t * Returns an iterator of all values in the store\n\t * @returns {Iterator} Iterator of record values\n\t * @example\n\t * for (const record of store.values()) {\n\t * console.log(record.name);\n\t * }\n\t */\n\tvalues () {\n\t\treturn this.data.values();\n\t}\n\n\t/**\n\t * Internal helper method for predicate matching with support for arrays and regex\n\t * @param {Object} record - Record to test against predicate\n\t * @param {Object} predicate - Predicate object with field-value pairs\n\t * @param {string} op - Operator for array matching 
('||' for OR, '&&' for AND)\n\t * @returns {boolean} True if record matches predicate criteria\n\t */\n\tmatchesPredicate (record, predicate, op) {\n\t\tconst keys = Object.keys(predicate);\n\n\t\treturn keys.every(key => {\n\t\t\tconst pred = predicate[key];\n\t\t\tconst val = record[key];\n\t\t\tif (Array.isArray(pred)) {\n\t\t\t\tif (Array.isArray(val)) {\n\t\t\t\t\treturn op === STRING_DOUBLE_AND ? pred.every(p => val.includes(p)) : pred.some(p => val.includes(p));\n\t\t\t\t} else {\n\t\t\t\t\treturn op === STRING_DOUBLE_AND ? pred.every(p => val === p) : pred.some(p => val === p);\n\t\t\t\t}\n\t\t\t} else if (pred instanceof RegExp) {\n\t\t\t\tif (Array.isArray(val)) {\n\t\t\t\t\treturn op === STRING_DOUBLE_AND ? val.every(v => pred.test(v)) : val.some(v => pred.test(v));\n\t\t\t\t} else {\n\t\t\t\t\treturn pred.test(val);\n\t\t\t\t}\n\t\t\t} else if (Array.isArray(val)) {\n\t\t\t\treturn val.includes(pred);\n\t\t\t} else {\n\t\t\t\treturn val === pred;\n\t\t\t}\n\t\t});\n\t}\n\n\t/**\n\t * Advanced filtering with predicate logic supporting AND/OR operations on arrays\n\t * @param {Object} [predicate={}] - Object with field-value pairs for filtering\n\t * @param {string} [op=STRING_DOUBLE_PIPE] - Operator for array matching ('||' for OR, '&&' for AND)\n\t * @returns {Array} Array of records matching the predicate criteria\n\t * @example\n\t * // Find records with tags containing 'admin' OR 'user'\n\t * const users = store.where({tags: ['admin', 'user']}, '||');\n\t *\n\t * // Find records with ALL specified tags\n\t * const powerUsers = store.where({tags: ['admin', 'power']}, '&&');\n\t *\n\t * // Regex matching\n\t * const emails = store.where({email: /^admin@/});\n\t */\n\twhere (predicate = {}, op = STRING_DOUBLE_PIPE) {\n\t\tconst keys = this.index.filter(i => i in predicate);\n\t\tif (keys.length === 0) return [];\n\n\t\t// Try to use indexes for better performance\n\t\tconst indexedKeys = keys.filter(k => this.indexes.has(k));\n\t\tif (indexedKeys.length 
> 0) {\n\t\t\t// Use index-based filtering for better performance\n\t\t\tlet candidateKeys = new Set();\n\t\t\tlet first = true;\n\t\t\tfor (const key of indexedKeys) {\n\t\t\t\tconst pred = predicate[key];\n\t\t\t\tconst idx = this.indexes.get(key);\n\t\t\t\tconst matchingKeys = new Set();\n\t\t\t\tif (Array.isArray(pred)) {\n\t\t\t\t\tfor (const p of pred) {\n\t\t\t\t\t\tif (idx.has(p)) {\n\t\t\t\t\t\t\tfor (const k of idx.get(p)) {\n\t\t\t\t\t\t\t\tmatchingKeys.add(k);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (idx.has(pred)) {\n\t\t\t\t\tfor (const k of idx.get(pred)) {\n\t\t\t\t\t\tmatchingKeys.add(k);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (first) {\n\t\t\t\t\tcandidateKeys = matchingKeys;\n\t\t\t\t\tfirst = false;\n\t\t\t\t} else {\n\t\t\t\t\t// AND operation across different fields\n\t\t\t\t\tcandidateKeys = new Set([...candidateKeys].filter(k => matchingKeys.has(k)));\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Filter candidates with full predicate logic\n\t\t\tconst results = [];\n\t\t\tfor (const key of candidateKeys) {\n\t\t\t\tconst record = this.get(key, true);\n\t\t\t\tif (this.matchesPredicate(record, predicate, op)) {\n\t\t\t\t\tresults.push(this.immutable ? this.get(key) : record);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn this.immutable ? 
this.freeze(...results) : results;\n\t\t}\n\n\t\t// Fallback to full scan if no indexes available\n\t\treturn this.filter(a => this.matchesPredicate(a, predicate, op));\n\t}\n}\n\n/**\n * Factory function to create a new Haro instance with optional initial data\n * @param {Array|null} [data=null] - Initial data to populate the store\n * @param {Object} [config={}] - Configuration object passed to Haro constructor\n * @returns {Haro} New Haro instance configured and optionally populated\n * @example\n * const store = haro([\n * {id: 1, name: 'John', age: 30},\n * {id: 2, name: 'Jane', age: 25}\n * ], {\n * index: ['name', 'age'],\n * versioning: true\n * });\n */\nexport function haro (data = null, config = {}) {\n\tconst obj = new Haro(config);\n\n\tif (Array.isArray(data)) {\n\t\tobj.batch(data, STRING_SET);\n\t}\n\n\treturn obj;\n}\n"],"names":["randomUUID","STRING_EMPTY","STRING_DOUBLE_AND","STRING_FUNCTION","STRING_OBJECT","STRING_RECORDS","STRING_STRING","STRING_NUMBER","STRING_INVALID_FUNCTION","Haro","constructor","delimiter","id","this","uuid","immutable","index","key","versioning","data","Map","Array","isArray","indexes","versions","Object","defineProperty","enumerable","get","from","keys","size","reindex","batch","args","type","fn","i","delete","set","onbatch","beforeBatch","map","arg","beforeClear","beforeDelete","beforeSet","override","clear","onclear","clone","structuredClone","has","Error","og","deleteIndex","ondelete","forEach","idx","values","includes","indexKeys","each","value","o","dump","result","entries","ii","arr","len","length","find","where","raw","sort","sortKeys","join","reduce","a","v","k","add","Set","freeze","filter","push","list","ctx","call","fields","split","fieldsLen","field","newResult","resultLen","valuesLen","j","newKey","limit","offset","max","registry","slice","merge","b","concat","onoverride","onset","accumulator","indices","setIndex","search","rgex","test","lkey","lset","match","records","x","indice","c","frozen","dataSize","lo
caleCompare","String","sortBy","lindex","toArray","matchesPredicate","record","predicate","op","every","pred","val","p","some","RegExp","indexedKeys","candidateKeys","first","matchingKeys","results","haro","config","obj"],"mappings":";;;;qBAAAA,MAAA,SACO,MACMC,EAAe,GAGfC,EAAoB,KAKpBC,EAAkB,WAElBC,EAAgB,SAChBC,EAAiB,UAIjBC,EAAgB,SAChBC,EAAgB,SAIhBC,EAA0B,mBCchC,MAAMC,EAmBZ,WAAAC,EAAaC,UAACA,EDpDY,ICoDWC,GAAEA,EAAKC,KAAKC,OAAMC,UAAEA,GAAY,EAAKC,MAAEA,EAAQ,GAAEC,IAAEA,ED/ChE,KC+C+EC,WAAEA,GAAa,GAAS,IAmB9H,OAlBAL,KAAKM,KAAO,IAAIC,IAChBP,KAAKF,UAAYA,EACjBE,KAAKD,GAAKA,EACVC,KAAKE,UAAYA,EACjBF,KAAKG,MAAQK,MAAMC,QAAQN,GAAS,IAAIA,GAAS,GACjDH,KAAKU,QAAU,IAAIH,IACnBP,KAAKI,IAAMA,EACXJ,KAAKW,SAAW,IAAIJ,IACpBP,KAAKK,WAAaA,EAClBO,OAAOC,eAAeb,KDnDO,WCmDgB,CAC5Cc,YAAY,EACZC,IAAK,IAAMP,MAAMQ,KAAKhB,KAAKM,KAAKW,UAEjCL,OAAOC,eAAeb,KDrDG,OCqDgB,CACxCc,YAAY,EACZC,IAAK,IAAMf,KAAKM,KAAKY,OAGflB,KAAKmB,SACb,CAcA,KAAAC,CAAOC,EAAMC,ED1EY,OC2ExB,MAAMC,EDjFkB,QCiFbD,EAAsBE,GAAKxB,KAAKyB,OAAOD,GAAG,GAAQA,GAAKxB,KAAK0B,IAAI,KAAMF,GAAG,GAAM,GAE1F,OAAOxB,KAAK2B,QAAQ3B,KAAK4B,YAAYP,EAAMC,GAAMO,IAAIN,GAAKD,EAC3D,CAQA,WAAAM,CAAaE,EAAKR,EAAOlC,IAExB,OAAO0C,CACR,CAYA,WAAAC,GAEA,CAQA,YAAAC,CAAc5B,EAAMhB,GAAcgC,GAAQ,GAE1C,CAUA,SAAAa,CAAW7B,EAAMhB,GAAckB,EAAO,CAAA,EAAIc,GAAQ,EAAOc,GAAW,GAEpE,CASA,KAAAC,GAOC,OANAnC,KAAK+B,cACL/B,KAAKM,KAAK6B,QACVnC,KAAKU,QAAQyB,QACbnC,KAAKW,SAASwB,QACdnC,KAAKmB,UAAUiB,UAERpC,IACR,CAWA,KAAAqC,CAAOP,GACN,OAAOQ,gBAAgBR,EACxB,CAYA,OAAQ1B,EAAMhB,GAAcgC,GAAQ,GACnC,IAAKpB,KAAKM,KAAKiC,IAAInC,GAClB,MAAM,IAAIoC,MDhK0B,oBCkKrC,MAAMC,EAAKzC,KAAKe,IAAIX,GAAK,GACzBJ,KAAKgC,aAAa5B,EAAKgB,GACvBpB,KAAK0C,YAAYtC,EAAKqC,GACtBzC,KAAKM,KAAKmB,OAAOrB,GACjBJ,KAAK2C,SAASvC,EAAKgB,GACfpB,KAAKK,YACRL,KAAKW,SAASc,OAAOrB,EAEvB,CAQA,WAAAsC,CAAatC,EAAKE,GAkBjB,OAjBAN,KAAKG,MAAMyC,QAAQpB,IAClB,MAAMqB,EAAM7C,KAAKU,QAAQK,IAAIS,GAC7B,IAAKqB,EAAK,OACV,MAAMC,EAAStB,EAAEuB,SAAS/C,KAAKF,WAC9BE,KAAKgD,UAAUxB,EAAGxB,KAAKF,UAAWQ,GAClCE,MAAMC,QAAQH,EAAKkB,IAAMlB,EAAKkB,GAAK,CAAClB,EAAKkB,IAC1CxB,KAAKiD,KAAKH,EAA
QI,IACjB,GAAIL,EAAIN,IAAIW,GAAQ,CACnB,MAAMC,EAAIN,EAAI9B,IAAImC,GAClBC,EAAE1B,OAAOrB,GDzLO,IC0LZ+C,EAAEjC,MACL2B,EAAIpB,OAAOyB,EAEb,MAIKlD,IACR,CAUA,IAAAoD,CAAM9B,EAAO9B,GACZ,IAAI6D,EAeJ,OAbCA,EADG/B,IAAS9B,EACHgB,MAAMQ,KAAKhB,KAAKsD,WAEhB9C,MAAMQ,KAAKhB,KAAKU,SAASmB,IAAIL,IACrCA,EAAE,GAAKhB,MAAMQ,KAAKQ,EAAE,IAAIK,IAAI0B,IAC3BA,EAAG,GAAK/C,MAAMQ,KAAKuC,EAAG,IAEfA,IAGD/B,IAIF6B,CACR,CAUA,IAAAJ,CAAMO,EAAM,GAAIjC,GACf,MAAMkC,EAAMD,EAAIE,OAChB,IAAK,IAAIlC,EAAI,EAAGA,EAAIiC,EAAKjC,IACxBD,EAAGiC,EAAIhC,GAAIA,GAGZ,OAAOgC,CACR,CAUA,OAAAF,GACC,OAAOtD,KAAKM,KAAKgD,SAClB,CAWA,IAAAK,CAAMC,EAAQ,GAAIC,GAAM,GACvB,MAAMzD,EAAMQ,OAAOK,KAAK2C,GAAOE,KAAK9D,KAAK+D,UAAUC,KAAKhE,KAAKF,WACvDK,EAAQH,KAAKU,QAAQK,IAAIX,IAAQ,IAAIG,IAC3C,IAAI8C,EAAS,GACb,GAAIlD,EAAMe,KAAO,EAAG,CACnB,MAAMD,EAAOjB,KAAKgD,UAAU5C,EAAKJ,KAAKF,UAAW8D,GACjDP,EAAS7C,MAAMQ,KAAKC,EAAKgD,OAAO,CAACC,EAAGC,KAC/BhE,EAAMoC,IAAI4B,IACbhE,EAAMY,IAAIoD,GAAGvB,QAAQwB,GAAKF,EAAEG,IAAID,IAG1BF,GACL,IAAII,MAAQzC,IAAIL,GAAKxB,KAAKe,IAAIS,EAAGqC,GACrC,CAKA,OAJKA,GAAO7D,KAAKE,YAChBmD,EAASzC,OAAO2D,OAAOlB,IAGjBA,CACR,CAYA,MAAAmB,CAAQjD,EAAIsC,GAAM,GACjB,UAAWtC,IAAOjC,EACjB,MAAM,IAAIkD,MAAM7C,GAEjB,IAAI0D,EAASrD,KAAKiE,OAAO,CAACC,EAAGC,KACxB5C,EAAG4C,IACND,EAAEO,KAAKN,GAGDD,GACL,IASH,OARKL,IACJR,EAASA,EAAOxB,IAAIL,GAAKxB,KAAK0E,KAAKlD,IAE/BxB,KAAKE,YACRmD,EAASzC,OAAO2D,OAAOlB,KAIlBA,CACR,CAYA,OAAAT,CAASrB,EAAIoD,EAAM3E,MAQlB,OAPAA,KAAKM,KAAKsC,QAAQ,CAACM,EAAO9C,KACrBJ,KAAKE,YACRgD,EAAQlD,KAAKqC,MAAMa,IAEpB3B,EAAGqD,KAAKD,EAAKzB,EAAO9C,IAClBJ,MAEIA,IACR,CAUA,MAAAuE,IAAWlD,GACV,OAAOT,OAAO2D,OAAOlD,EAAKQ,IAAIL,GAAKZ,OAAO2D,OAAO/C,IAClD,CAWA,GAAAT,CAAKX,EAAKyD,GAAM,GACf,IAAIR,EAASrD,KAAKM,KAAKS,IAAIX,IAAQ,KAQnC,OAPe,OAAXiD,GAAoBQ,IACvBR,EAASrD,KAAK0E,KAAKrB,GACfrD,KAAKE,YACRmD,EAASzC,OAAO2D,OAAOlB,KAIlBA,CACR,CAWA,GAAAd,CAAKnC,GACJ,OAAOJ,KAAKM,KAAKiC,IAAInC,EACtB,CAaA,SAAA4C,CAAWlB,EAAM1C,GAAcU,EDhaL,ICga8BQ,EAAO,IAC9D,MAAMuE,EAAS/C,EAAIgD,MAAMhF,GAAWgE,KAAK9D,KAAK+D,UACxCgB,EAAYF,EAAOnB,OACzB,IAAIL,EAAS,CAAC,IACd,IAAK,IAAI7B,EAAI,EAAGA,EAAI
uD,EAAWvD,IAAK,CACnC,MAAMwD,EAAQH,EAAOrD,GACfsB,EAAStC,MAAMC,QAAQH,EAAK0E,IAAU1E,EAAK0E,GAAS,CAAC1E,EAAK0E,IAC1DC,EAAY,GACZC,EAAY7B,EAAOK,OACnByB,EAAYrC,EAAOY,OACzB,IAAK,IAAI0B,EAAI,EAAGA,EAAIF,EAAWE,IAC9B,IAAK,IAAIhB,EAAI,EAAGA,EAAIe,EAAWf,IAAK,CACnC,MAAMiB,EAAe,IAAN7D,EAAUsB,EAAOsB,GAAK,GAAGf,EAAO+B,KAAKtF,IAAYgD,EAAOsB,KACvEa,EAAUR,KAAKY,EAChB,CAEDhC,EAAS4B,CACV,CAEA,OAAO5B,CACR,CAUA,IAAApC,GACC,OAAOjB,KAAKM,KAAKW,MAClB,CAYA,KAAAqE,CAAOC,EDpba,ECobGC,EDpbH,ECobgB3B,GAAM,GACzC,IAAIR,EAASrD,KAAKyF,SAASC,MAAMH,EAAQA,EAASC,GAAK3D,IAAIL,GAAKxB,KAAKe,IAAIS,EAAGqC,IAK5E,OAJKA,GAAO7D,KAAKE,YAChBmD,EAASzC,OAAO2D,OAAOlB,IAGjBA,CACR,CAUA,IAAAqB,CAAM5C,GACL,MAAMuB,EAAS,CAACvB,EAAI9B,KAAKI,KAAM0B,GAE/B,OAAO9B,KAAKE,UAAYF,KAAKuE,UAAUlB,GAAUA,CAClD,CAYA,GAAAxB,CAAKN,EAAIsC,GAAM,GACd,UAAWtC,IAAOjC,EACjB,MAAM,IAAIkD,MAAM7C,GAEjB,IAAI0D,EAAS,GASb,OARArD,KAAK4C,QAAQ,CAACM,EAAO9C,IAAQiD,EAAOoB,KAAKlD,EAAG2B,EAAO9C,KAC9CyD,IACJR,EAASA,EAAOxB,IAAIL,GAAKxB,KAAK0E,KAAKlD,IAC/BxB,KAAKE,YACRmD,EAASzC,OAAO2D,OAAOlB,KAIlBA,CACR,CAYA,KAAAsC,CAAOzB,EAAG0B,EAAG1D,GAAW,GAWvB,OAVI1B,MAAMC,QAAQyD,IAAM1D,MAAMC,QAAQmF,GACrC1B,EAAIhC,EAAW0D,EAAI1B,EAAE2B,OAAOD,UACX1B,IAAM3E,GAAuB,OAAN2E,UAAqB0B,IAAMrG,GAAuB,OAANqG,EACpF5F,KAAKiD,KAAKrC,OAAOK,KAAK2E,GAAIpE,IACzB0C,EAAE1C,GAAKxB,KAAK2F,MAAMzB,EAAE1C,GAAIoE,EAAEpE,GAAIU,KAG/BgC,EAAI0B,EAGE1B,CACR,CAQA,OAAAvC,CAASG,EAAKR,EAAOlC,IACpB,OAAO0C,CACR,CAYA,OAAAM,GAEA,CAQA,QAAAO,CAAUvC,EAAMhB,GAAcgC,GAAQ,GAEtC,CAOA,UAAA0E,CAAYxE,EAAOlC,IAEnB,CAQA,KAAA2G,CAAOjE,EAAM,GAAIV,GAAQ,GAEzB,CAYA,QAAAc,CAAU5B,EAAMgB,EAAO9B,GAEtB,GD9kB4B,YC8kBxB8B,EACHtB,KAAKU,QAAU,IAAIH,IAAID,EAAKuB,IAAIL,GAAK,CAACA,EAAE,GAAI,IAAIjB,IAAIiB,EAAE,GAAGK,IAAI0B,GAAM,CAACA,EAAG,GAAI,IAAIe,IAAIf,EAAG,cAChF,IAAIjC,IAAS9B,EAInB,MAAM,IAAIgD,MDxkBsB,gBCqkBhCxC,KAAKU,QAAQyB,QACbnC,KAAKM,KAAO,IAAIC,IAAID,EAGrB,CAGA,OAFAN,KAAK8F,WAAWxE,IATD,CAYhB,CAWA,MAAA2C,CAAQ1C,EAAIyE,EAAc,IACzB,IAAI9B,EAAI8B,EAKR,OAJAhG,KAAK4C,QAAQ,CAACuB,EAAGC,KAChBF,EAAI3C,EAAG2C,EAAGC,EAAGC,EAAGpE,OACdA,MAEIkE,CACR,CAWA,OAAA
/C,CAAShB,GACR,MAAM8F,EAAU9F,EAAQ,CAACA,GAASH,KAAKG,MAOvC,OANIA,IAAwC,IAA/BH,KAAKG,MAAM4C,SAAS5C,IAChCH,KAAKG,MAAMsE,KAAKtE,GAEjBH,KAAKiD,KAAKgD,EAASzE,GAAKxB,KAAKU,QAAQgB,IAAIF,EAAG,IAAIjB,MAChDP,KAAK4C,QAAQ,CAACtC,EAAMF,IAAQJ,KAAKiD,KAAKgD,EAASzE,GAAKxB,KAAKkG,SAAS9F,EAAKE,EAAMkB,KAEtExB,IACR,CAaA,MAAAmG,CAAQjD,EAAO/C,EAAO0D,GAAM,GAC3B,MAAMR,EAAS,IAAIiB,IACb/C,SAAY2B,IAAU5D,EACtB8G,EAAOlD,UAAgBA,EAAMmD,OAAS/G,EAC5C,IAAK4D,EAAO,OAAOlD,KAAKE,UAAYF,KAAKuE,SAAW,GACpD,MAAM0B,EAAU9F,EAAQK,MAAMC,QAAQN,GAASA,EAAQ,CAACA,GAASH,KAAKG,MACtE,IAAK,MAAMqB,KAAKyE,EAAS,CACxB,MAAMpD,EAAM7C,KAAKU,QAAQK,IAAIS,GAC7B,GAAIqB,EACH,IAAK,MAAOyD,EAAMC,KAAS1D,EAAK,CAC/B,IAAI2D,GAAQ,EAUZ,GAPCA,EADGjF,EACK2B,EAAMoD,EAAM9E,GACV4E,EACFlD,EAAMmD,KAAK7F,MAAMC,QAAQ6F,GAAQA,EAAKtC,KDrqBxB,KCqqB6CsC,GAE3DA,IAASpD,EAGdsD,EACH,IAAK,MAAMpG,KAAOmG,EACbvG,KAAKM,KAAKiC,IAAInC,IACjBiD,EAAOgB,IAAIjE,EAIf,CAEF,CACA,IAAIqG,EAAUjG,MAAMQ,KAAKqC,GAAQxB,IAAIzB,GAAOJ,KAAKe,IAAIX,EAAKyD,IAK1D,OAJKA,GAAO7D,KAAKE,YAChBuG,EAAU7F,OAAO2D,OAAOkC,IAGlBA,CACR,CAaA,GAAA/E,CAAKtB,EAAM,KAAME,EAAO,CAAA,EAAIc,GAAQ,EAAOc,GAAW,GACzC,OAAR9B,IACHA,EAAME,EAAKN,KAAKI,MAAQJ,KAAKC,QAE9B,IAAIyG,EAAI,IAAIpG,EAAM,CAACN,KAAKI,KAAMA,GAE9B,GADAJ,KAAKiC,UAAU7B,EAAKsG,EAAGtF,EAAOc,GACzBlC,KAAKM,KAAKiC,IAAInC,GAIZ,CACN,MAAMqC,EAAKzC,KAAKe,IAAIX,GAAK,GACzBJ,KAAK0C,YAAYtC,EAAKqC,GAClBzC,KAAKK,YACRL,KAAKW,SAASI,IAAIX,GAAKiE,IAAIzD,OAAO2D,OAAOvE,KAAKqC,MAAMI,KAEhDP,IACJwE,EAAI1G,KAAK2F,MAAM3F,KAAKqC,MAAMI,GAAKiE,GAEjC,MAZK1G,KAAKK,YACRL,KAAKW,SAASe,IAAItB,EAAK,IAAIkE,KAY7BtE,KAAKM,KAAKoB,IAAItB,EAAKsG,GACnB1G,KAAKkG,SAAS9F,EAAKsG,EAAG,MACtB,MAAMrD,EAASrD,KAAKe,IAAIX,GAGxB,OAFAJ,KAAK+F,MAAM1C,EAAQjC,GAEZiC,CACR,CASA,QAAA6C,CAAU9F,EAAKE,EAAMqG,GAoBpB,OAnBA3G,KAAKiD,KAAgB,OAAX0D,EAAkB3G,KAAKG,MAAQ,CAACwG,GAASnF,IAClD,IAAIqB,EAAM7C,KAAKU,QAAQK,IAAIS,GACtBqB,IACJA,EAAM,IAAItC,IACVP,KAAKU,QAAQgB,IAAIF,EAAGqB,IAErB,MAAMtB,EAAKqF,IACL/D,EAAIN,IAAIqE,IACZ/D,EAAInB,IAAIkF,EAAG,IAAItC,KAEhBzB,EAAI9B,IAAI6F,GAAGvC,IAAIjE,IAEZoB,EAAEuB,SAAS/C,KAAKF,WACnBE,KAAKiD
,KAAKjD,KAAKgD,UAAUxB,EAAGxB,KAAKF,UAAWQ,GAAOiB,GAEnDvB,KAAKiD,KAAKzC,MAAMC,QAAQH,EAAKkB,IAAMlB,EAAKkB,GAAK,CAAClB,EAAKkB,IAAKD,KAInDvB,IACR,CAWA,IAAA8D,CAAMvC,EAAIsF,GAAS,GAClB,MAAMC,EAAW9G,KAAKM,KAAKY,KAC3B,IAAImC,EAASrD,KAAKsF,MDlvBC,ECkvBYwB,GAAU,GAAMhD,KAAKvC,GAKpD,OAJIsF,IACHxD,EAASrD,KAAKuE,UAAUlB,IAGlBA,CACR,CAcA,QAAAU,CAAUG,EAAG0B,GAEZ,cAAW1B,IAAMzE,UAAwBmG,IAAMnG,EACvCyE,EAAE6C,cAAcnB,UAGb1B,IAAMxE,UAAwBkG,IAAMlG,EACvCwE,EAAI0B,EAKLoB,OAAO9C,GAAG6C,cAAcC,OAAOpB,GACvC,CAYA,MAAAqB,CAAQ9G,EAAQf,GAAcyE,GAAM,GACnC,GAAI1D,IAAUf,EACb,MAAM,IAAIoD,MDvyBuB,iBCyyBlC,IAAIa,EAAS,GACb,MAAMpC,EAAO,IACmB,IAA5BjB,KAAKU,QAAQ6B,IAAIpC,IACpBH,KAAKmB,QAAQhB,GAEd,MAAM+G,EAASlH,KAAKU,QAAQK,IAAIZ,GAOhC,OANA+G,EAAOtE,QAAQ,CAACC,EAAKzC,IAAQa,EAAKwD,KAAKrE,IACvCJ,KAAKiD,KAAKhC,EAAK6C,KAAK9D,KAAK+D,UAAWvC,GAAK0F,EAAOnG,IAAIS,GAAGoB,QAAQxC,GAAOiD,EAAOoB,KAAKzE,KAAKe,IAAIX,EAAKyD,MAC5F7D,KAAKE,YACRmD,EAASzC,OAAO2D,OAAOlB,IAGjBA,CACR,CASA,OAAA8D,GACC,MAAM9D,EAAS7C,MAAMQ,KAAKhB,KAAKM,KAAKwC,UAMpC,OALI9C,KAAKE,YACRF,KAAKiD,KAAKI,EAAQ7B,GAAKZ,OAAO2D,OAAO/C,IACrCZ,OAAO2D,OAAOlB,IAGRA,CACR,CAQA,IAAApD,GACC,OAAOA,GACR,CAUA,MAAA6C,GACC,OAAO9C,KAAKM,KAAKwC,QAClB,CASA,gBAAAsE,CAAkBC,EAAQC,EAAWC,GAGpC,OAFa3G,OAAOK,KAAKqG,GAEbE,MAAMpH,IACjB,MAAMqH,EAAOH,EAAUlH,GACjBsH,EAAML,EAAOjH,GACnB,OAAII,MAAMC,QAAQgH,GACbjH,MAAMC,QAAQiH,GACVH,IAAOlI,EAAoBoI,EAAKD,MAAMG,GAAKD,EAAI3E,SAAS4E,IAAMF,EAAKG,KAAKD,GAAKD,EAAI3E,SAAS4E,IAE1FJ,IAAOlI,EAAoBoI,EAAKD,MAAMG,GAAKD,IAAQC,GAAKF,EAAKG,KAAKD,GAAKD,IAAQC,GAE7EF,aAAgBI,OACtBrH,MAAMC,QAAQiH,GACVH,IAAOlI,EAAoBqI,EAAIF,MAAMrD,GAAKsD,EAAKpB,KAAKlC,IAAMuD,EAAIE,KAAKzD,GAAKsD,EAAKpB,KAAKlC,IAElFsD,EAAKpB,KAAKqB,GAERlH,MAAMC,QAAQiH,GACjBA,EAAI3E,SAAS0E,GAEbC,IAAQD,GAGlB,CAiBA,KAAA7D,CAAO0D,EAAY,GAAIC,EDh6BU,MCi6BhC,MAAMtG,EAAOjB,KAAKG,MAAMqE,OAAOhD,GAAKA,KAAK8F,GACzC,GAAoB,IAAhBrG,EAAKyC,OAAc,MAAO,GAG9B,MAAMoE,EAAc7G,EAAKuD,OAAOJ,GAAKpE,KAAKU,QAAQ6B,IAAI6B,IACtD,GAAI0D,EAAYpE,OAAS,EAAG,CAE3B,IAAIqE,EAAgB,IAAIzD,IACpB0D,GAAQ,EACZ,IAAK,MAAM5H,KAAO0H,EAAa,CAC9B,MAAML,EAAOH,
EAAUlH,GACjByC,EAAM7C,KAAKU,QAAQK,IAAIX,GACvB6H,EAAe,IAAI3D,IACzB,GAAI9D,MAAMC,QAAQgH,IACjB,IAAK,MAAME,KAAKF,EACf,GAAI5E,EAAIN,IAAIoF,GACX,IAAK,MAAMvD,KAAKvB,EAAI9B,IAAI4G,GACvBM,EAAa5D,IAAID,QAId,GAAIvB,EAAIN,IAAIkF,GAClB,IAAK,MAAMrD,KAAKvB,EAAI9B,IAAI0G,GACvBQ,EAAa5D,IAAID,GAGf4D,GACHD,EAAgBE,EAChBD,GAAQ,GAGRD,EAAgB,IAAIzD,IAAI,IAAIyD,GAAevD,OAAOJ,GAAK6D,EAAa1F,IAAI6B,IAE1E,CAEA,MAAM8D,EAAU,GAChB,IAAK,MAAM9H,KAAO2H,EAAe,CAChC,MAAMV,EAASrH,KAAKe,IAAIX,GAAK,GACzBJ,KAAKoH,iBAAiBC,EAAQC,EAAWC,IAC5CW,EAAQzD,KAAKzE,KAAKE,UAAYF,KAAKe,IAAIX,GAAOiH,EAEhD,CAEA,OAAOrH,KAAKE,UAAYF,KAAKuE,UAAU2D,GAAWA,CACnD,CAGA,OAAOlI,KAAKwE,OAAON,GAAKlE,KAAKoH,iBAAiBlD,EAAGoD,EAAWC,GAC7D,EAiBM,SAASY,EAAM7H,EAAO,KAAM8H,EAAS,CAAA,GAC3C,MAAMC,EAAM,IAAIzI,EAAKwI,GAMrB,OAJI5H,MAAMC,QAAQH,IACjB+H,EAAIjH,MAAMd,ED39Bc,OC89BlB+H,CACR,QAAAzI,UAAAuI"} \ No newline at end of file diff --git a/dist/haro.umd.js b/dist/haro.umd.js deleted file mode 100644 index 7f24f2bf..00000000 --- a/dist/haro.umd.js +++ /dev/null @@ -1,1020 +0,0 @@ -/** - * haro - * - * @copyright 2025 Jason Mulligan - * @license BSD-3-Clause - * @version 16.0.0 - */ -(function(g,f){typeof exports==='object'&&typeof module!=='undefined'?f(exports,require('crypto')):typeof define==='function'&&define.amd?define(['exports','crypto'],f):(g=typeof globalThis!=='undefined'?globalThis:g||self,f(g.lru={},g.crypto));})(this,(function(exports,crypto){'use strict';// String constants - Single characters and symbols -const STRING_COMMA = ","; -const STRING_EMPTY = ""; -const STRING_PIPE = "|"; -const STRING_DOUBLE_PIPE = "||"; -const STRING_DOUBLE_AND = "&&"; - -// String constants - Operation and type names -const STRING_ID = "id"; -const STRING_DEL = "del"; -const STRING_FUNCTION = "function"; -const STRING_INDEXES = "indexes"; -const STRING_OBJECT = "object"; -const STRING_RECORDS = "records"; -const STRING_REGISTRY = "registry"; -const STRING_SET = "set"; -const STRING_SIZE = "size"; -const STRING_STRING = "string"; -const STRING_NUMBER = 
"number"; - -// String constants - Error messages -const STRING_INVALID_FIELD = "Invalid field"; -const STRING_INVALID_FUNCTION = "Invalid function"; -const STRING_INVALID_TYPE = "Invalid type"; -const STRING_RECORD_NOT_FOUND = "Record not found"; - -// Integer constants -const INT_0 = 0;/** - * Haro is a modern immutable DataStore for collections of records with indexing, - * versioning, and batch operations support. It provides a Map-like interface - * with advanced querying capabilities through indexes. - * @class - * @example - * const store = new Haro({ - * index: ['name', 'age'], - * key: 'id', - * versioning: true - * }); - * - * store.set(null, {name: 'John', age: 30}); - * const results = store.find({name: 'John'}); - */ -class Haro { - /** - * Creates a new Haro instance with specified configuration - * @param {Object} [config={}] - Configuration object for the store - * @param {string} [config.delimiter=STRING_PIPE] - Delimiter for composite indexes (default: '|') - * @param {string} [config.id] - Unique identifier for this instance (auto-generated if not provided) - * @param {boolean} [config.immutable=false] - Return frozen/immutable objects for data safety - * @param {string[]} [config.index=[]] - Array of field names to create indexes for - * @param {string} [config.key=STRING_ID] - Primary key field name used for record identification - * @param {boolean} [config.versioning=false] - Enable versioning to track record changes - * @constructor - * @example - * const store = new Haro({ - * index: ['name', 'email', 'name|department'], - * key: 'userId', - * versioning: true, - * immutable: true - * }); - */ - constructor ({delimiter = STRING_PIPE, id = this.uuid(), immutable = false, index = [], key = STRING_ID, versioning = false} = {}) { - this.data = new Map(); - this.delimiter = delimiter; - this.id = id; - this.immutable = immutable; - this.index = Array.isArray(index) ? 
[...index] : []; - this.indexes = new Map(); - this.key = key; - this.versions = new Map(); - this.versioning = versioning; - Object.defineProperty(this, STRING_REGISTRY, { - enumerable: true, - get: () => Array.from(this.data.keys()) - }); - Object.defineProperty(this, STRING_SIZE, { - enumerable: true, - get: () => this.data.size - }); - - return this.reindex(); - } - - /** - * Performs batch operations on multiple records for efficient bulk processing - * @param {Array} args - Array of records to process - * @param {string} [type=STRING_SET] - Type of operation: 'set' for upsert, 'del' for delete - * @returns {Array} Array of results from the batch operation - * @throws {Error} Throws error if individual operations fail during batch processing - * @example - * const results = store.batch([ - * {id: 1, name: 'John'}, - * {id: 2, name: 'Jane'} - * ], 'set'); - */ - batch (args, type = STRING_SET) { - const fn = type === STRING_DEL ? i => this.delete(i, true) : i => this.set(null, i, true, true); - - return this.onbatch(this.beforeBatch(args, type).map(fn), type); - } - - /** - * Lifecycle hook executed before batch operations for custom preprocessing - * @param {Array} arg - Arguments passed to batch operation - * @param {string} [type=STRING_EMPTY] - Type of batch operation ('set' or 'del') - * @returns {Array} The arguments array (possibly modified) to be processed - */ - beforeBatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - // Hook for custom logic before batch; override in subclass if needed - return arg; - } - - /** - * Lifecycle hook executed before clear operation for custom preprocessing - * @returns {void} Override this method in subclasses to implement custom logic - * @example - * class MyStore extends Haro { - * beforeClear() { - * this.backup = this.toArray(); - * } - * } - */ - beforeClear () { - // Hook for custom logic before clear; override in subclass if needed - } - - /** - * Lifecycle hook executed before delete 
operation for custom preprocessing - * @param {string} [key=STRING_EMPTY] - Key of record to delete - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic - */ - beforeDelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic before delete; override in subclass if needed - } - - /** - * Lifecycle hook executed before set operation for custom preprocessing - * @param {string} [key=STRING_EMPTY] - Key of record to set - * @param {Object} [data={}] - Record data being set - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @param {boolean} [override=false] - Whether to override existing data - * @returns {void} Override this method in subclasses to implement custom logic - */ - beforeSet (key = STRING_EMPTY, data = {}, batch = false, override = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic before set; override in subclass if needed - } - - /** - * Removes all records, indexes, and versions from the store - * @returns {Haro} This instance for method chaining - * @example - * store.clear(); - * console.log(store.size); // 0 - */ - clear () { - this.beforeClear(); - this.data.clear(); - this.indexes.clear(); - this.versions.clear(); - this.reindex().onclear(); - - return this; - } - - /** - * Creates a deep clone of the given value, handling objects, arrays, and primitives - * @param {*} arg - Value to clone (any type) - * @returns {*} Deep clone of the argument - * @example - * const original = {name: 'John', tags: ['user', 'admin']}; - * const cloned = store.clone(original); - * cloned.tags.push('new'); // original.tags is unchanged - */ - clone (arg) { - return structuredClone(arg); - } - - /** - * Deletes a record from the store and removes it from all indexes - * @param {string} [key=STRING_EMPTY] - Key of record to delete - * @param {boolean} 
[batch=false] - Whether this is part of a batch operation - * @returns {void} - * @throws {Error} Throws error if record with the specified key is not found - * @example - * store.delete('user123'); - * // Throws error if 'user123' doesn't exist - */ - delete (key = STRING_EMPTY, batch = false) { - if (!this.data.has(key)) { - throw new Error(STRING_RECORD_NOT_FOUND); - } - const og = this.get(key, true); - this.beforeDelete(key, batch); - this.deleteIndex(key, og); - this.data.delete(key); - this.ondelete(key, batch); - if (this.versioning) { - this.versions.delete(key); - } - } - - /** - * Internal method to remove entries from indexes for a deleted record - * @param {string} key - Key of record being deleted - * @param {Object} data - Data of record being deleted - * @returns {Haro} This instance for method chaining - */ - deleteIndex (key, data) { - this.index.forEach(i => { - const idx = this.indexes.get(i); - if (!idx) return; - const values = i.includes(this.delimiter) ? - this.indexKeys(i, this.delimiter, data) : - Array.isArray(data[i]) ? 
data[i] : [data[i]]; - this.each(values, value => { - if (idx.has(value)) { - const o = idx.get(value); - o.delete(key); - if (o.size === INT_0) { - idx.delete(value); - } - } - }); - }); - - return this; - } - - /** - * Exports complete store data or indexes for persistence or debugging - * @param {string} [type=STRING_RECORDS] - Type of data to export: 'records' or 'indexes' - * @returns {Array} Array of [key, value] pairs for records, or serialized index structure - * @example - * const records = store.dump('records'); - * const indexes = store.dump('indexes'); - */ - dump (type = STRING_RECORDS) { - let result; - if (type === STRING_RECORDS) { - result = Array.from(this.entries()); - } else { - result = Array.from(this.indexes).map(i => { - i[1] = Array.from(i[1]).map(ii => { - ii[1] = Array.from(ii[1]); - - return ii; - }); - - return i; - }); - } - - return result; - } - - /** - * Utility method to iterate over an array with a callback function - * @param {Array<*>} [arr=[]] - Array to iterate over - * @param {Function} fn - Function to call for each element (element, index) - * @returns {Array<*>} The original array for method chaining - * @example - * store.each([1, 2, 3], (item, index) => console.log(item, index)); - */ - each (arr = [], fn) { - const len = arr.length; - for (let i = 0; i < len; i++) { - fn(arr[i], i); - } - - return arr; - } - - /** - * Returns an iterator of [key, value] pairs for each record in the store - * @returns {Iterator>} Iterator of [key, value] pairs - * @example - * for (const [key, value] of store.entries()) { - * console.log(key, value); - * } - */ - entries () { - return this.data.entries(); - } - - /** - * Finds records matching the specified criteria using indexes for optimal performance - * @param {Object} [where={}] - Object with field-value pairs to match against - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of matching records (frozen if immutable mode) - * 
@example - * const users = store.find({department: 'engineering', active: true}); - * const admins = store.find({role: 'admin'}); - */ - find (where = {}, raw = false) { - const key = Object.keys(where).sort(this.sortKeys).join(this.delimiter); - const index = this.indexes.get(key) ?? new Map(); - let result = []; - if (index.size > 0) { - const keys = this.indexKeys(key, this.delimiter, where); - result = Array.from(keys.reduce((a, v) => { - if (index.has(v)) { - index.get(v).forEach(k => a.add(k)); - } - - return a; - }, new Set())).map(i => this.get(i, raw)); - } - if (!raw && this.immutable) { - result = Object.freeze(result); - } - - return result; - } - - /** - * Filters records using a predicate function, similar to Array.filter - * @param {Function} fn - Predicate function to test each record (record, key, store) - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records that pass the predicate test - * @throws {Error} Throws error if fn is not a function - * @example - * const adults = store.filter(record => record.age >= 18); - * const recent = store.filter(record => record.created > Date.now() - 86400000); - */ - filter (fn, raw = false) { - if (typeof fn !== STRING_FUNCTION) { - throw new Error(STRING_INVALID_FUNCTION); - } - let result = this.reduce((a, v) => { - if (fn(v)) { - a.push(v); - } - - return a; - }, []); - if (!raw) { - result = result.map(i => this.list(i)); - - if (this.immutable) { - result = Object.freeze(result); - } - } - - return result; - } - - /** - * Executes a function for each record in the store, similar to Array.forEach - * @param {Function} fn - Function to execute for each record (value, key) - * @param {*} [ctx] - Context object to use as 'this' when executing the function - * @returns {Haro} This instance for method chaining - * @example - * store.forEach((record, key) => { - * console.log(`${key}: ${record.name}`); - * }); - */ - forEach (fn, ctx = this) { - 
this.data.forEach((value, key) => { - if (this.immutable) { - value = this.clone(value); - } - fn.call(ctx, value, key); - }, this); - - return this; - } - - /** - * Creates a frozen array from the given arguments for immutable data handling - * @param {...*} args - Arguments to freeze into an array - * @returns {Array<*>} Frozen array containing frozen arguments - * @example - * const frozen = store.freeze(obj1, obj2, obj3); - * // Returns Object.freeze([Object.freeze(obj1), Object.freeze(obj2), Object.freeze(obj3)]) - */ - freeze (...args) { - return Object.freeze(args.map(i => Object.freeze(i))); - } - - /** - * Retrieves a record by its key - * @param {string} key - Key of record to retrieve - * @param {boolean} [raw=false] - Whether to return raw data (true) or processed/frozen data (false) - * @returns {Object|null} The record if found, null if not found - * @example - * const user = store.get('user123'); - * const rawUser = store.get('user123', true); - */ - get (key, raw = false) { - let result = this.data.get(key) ?? 
null; - if (result !== null && !raw) { - result = this.list(result); - if (this.immutable) { - result = Object.freeze(result); - } - } - - return result; - } - - /** - * Checks if a record with the specified key exists in the store - * @param {string} key - Key to check for existence - * @returns {boolean} True if record exists, false otherwise - * @example - * if (store.has('user123')) { - * console.log('User exists'); - * } - */ - has (key) { - return this.data.has(key); - } - - /** - * Generates index keys for composite indexes from data values - * @param {string} [arg=STRING_EMPTY] - Composite index field names joined by delimiter - * @param {string} [delimiter=STRING_PIPE] - Delimiter used in composite index - * @param {Object} [data={}] - Data object to extract field values from - * @returns {string[]} Array of generated index keys - * @example - * // For index 'name|department' with data {name: 'John', department: 'IT'} - * const keys = store.indexKeys('name|department', '|', data); - * // Returns ['John|IT'] - */ - indexKeys (arg = STRING_EMPTY, delimiter = STRING_PIPE, data = {}) { - const fields = arg.split(delimiter).sort(this.sortKeys); - const fieldsLen = fields.length; - let result = [""]; - for (let i = 0; i < fieldsLen; i++) { - const field = fields[i]; - const values = Array.isArray(data[field]) ? data[field] : [data[field]]; - const newResult = []; - const resultLen = result.length; - const valuesLen = values.length; - for (let j = 0; j < resultLen; j++) { - for (let k = 0; k < valuesLen; k++) { - const newKey = i === 0 ? 
values[k] : `${result[j]}${delimiter}${values[k]}`; - newResult.push(newKey); - } - } - result = newResult; - } - - return result; - } - - /** - * Returns an iterator of all keys in the store - * @returns {Iterator} Iterator of record keys - * @example - * for (const key of store.keys()) { - * console.log(key); - * } - */ - keys () { - return this.data.keys(); - } - - /** - * Returns a limited subset of records with offset support for pagination - * @param {number} [offset=INT_0] - Number of records to skip from the beginning - * @param {number} [max=INT_0] - Maximum number of records to return - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records within the specified range - * @example - * const page1 = store.limit(0, 10); // First 10 records - * const page2 = store.limit(10, 10); // Next 10 records - */ - limit (offset = INT_0, max = INT_0, raw = false) { - let result = this.registry.slice(offset, offset + max).map(i => this.get(i, raw)); - if (!raw && this.immutable) { - result = Object.freeze(result); - } - - return result; - } - - /** - * Converts a record into a [key, value] pair array format - * @param {Object} arg - Record object to convert to list format - * @returns {Array<*>} Array containing [key, record] where key is extracted from record's key field - * @example - * const record = {id: 'user123', name: 'John', age: 30}; - * const pair = store.list(record); // ['user123', {id: 'user123', name: 'John', age: 30}] - */ - list (arg) { - const result = [arg[this.key], arg]; - - return this.immutable ? 
this.freeze(...result) : result; - } - - /** - * Transforms all records using a mapping function, similar to Array.map - * @param {Function} fn - Function to transform each record (record, key) - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array<*>} Array of transformed results - * @throws {Error} Throws error if fn is not a function - * @example - * const names = store.map(record => record.name); - * const summaries = store.map(record => ({id: record.id, name: record.name})); - */ - map (fn, raw = false) { - if (typeof fn !== STRING_FUNCTION) { - throw new Error(STRING_INVALID_FUNCTION); - } - let result = []; - this.forEach((value, key) => result.push(fn(value, key))); - if (!raw) { - result = result.map(i => this.list(i)); - if (this.immutable) { - result = Object.freeze(result); - } - } - - return result; - } - - /** - * Merges two values together with support for arrays and objects - * @param {*} a - First value (target) - * @param {*} b - Second value (source) - * @param {boolean} [override=false] - Whether to override arrays instead of concatenating - * @returns {*} Merged result - * @example - * const merged = store.merge({a: 1}, {b: 2}); // {a: 1, b: 2} - * const arrays = store.merge([1, 2], [3, 4]); // [1, 2, 3, 4] - */ - merge (a, b, override = false) { - if (Array.isArray(a) && Array.isArray(b)) { - a = override ? 
b : a.concat(b); - } else if (typeof a === STRING_OBJECT && a !== null && typeof b === STRING_OBJECT && b !== null) { - this.each(Object.keys(b), i => { - a[i] = this.merge(a[i], b[i], override); - }); - } else { - a = b; - } - - return a; - } - - /** - * Lifecycle hook executed after batch operations for custom postprocessing - * @param {Array} arg - Result of batch operation - * @param {string} [type=STRING_EMPTY] - Type of batch operation that was performed - * @returns {Array} Modified result (override this method to implement custom logic) - */ - onbatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - return arg; - } - - /** - * Lifecycle hook executed after clear operation for custom postprocessing - * @returns {void} Override this method in subclasses to implement custom logic - * @example - * class MyStore extends Haro { - * onclear() { - * console.log('Store cleared'); - * } - * } - */ - onclear () { - // Hook for custom logic after clear; override in subclass if needed - } - - /** - * Lifecycle hook executed after delete operation for custom postprocessing - * @param {string} [key=STRING_EMPTY] - Key of deleted record - * @param {boolean} [batch=false] - Whether this was part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic - */ - ondelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic after delete; override in subclass if needed - } - - /** - * Lifecycle hook executed after override operation for custom postprocessing - * @param {string} [type=STRING_EMPTY] - Type of override operation that was performed - * @returns {void} Override this method in subclasses to implement custom logic - */ - onoverride (type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - // Hook for custom logic after override; override in subclass if needed - } - - /** - * Lifecycle hook executed after set operation for custom postprocessing - * 
@param {Object} [arg={}] - Record that was set - * @param {boolean} [batch=false] - Whether this was part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic - */ - onset (arg = {}, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic after set; override in subclass if needed - } - - /** - * Replaces all store data or indexes with new data for bulk operations - * @param {Array} data - Data to replace with (format depends on type) - * @param {string} [type=STRING_RECORDS] - Type of data: 'records' or 'indexes' - * @returns {boolean} True if operation succeeded - * @throws {Error} Throws error if type is invalid - * @example - * const records = [['key1', {name: 'John'}], ['key2', {name: 'Jane'}]]; - * store.override(records, 'records'); - */ - override (data, type = STRING_RECORDS) { - const result = true; - if (type === STRING_INDEXES) { - this.indexes = new Map(data.map(i => [i[0], new Map(i[1].map(ii => [ii[0], new Set(ii[1])]))])); - } else if (type === STRING_RECORDS) { - this.indexes.clear(); - this.data = new Map(data); - } else { - throw new Error(STRING_INVALID_TYPE); - } - this.onoverride(type); - - return result; - } - - /** - * Reduces all records to a single value using a reducer function - * @param {Function} fn - Reducer function (accumulator, value, key, store) - * @param {*} [accumulator] - Initial accumulator value - * @returns {*} Final reduced value - * @example - * const totalAge = store.reduce((sum, record) => sum + record.age, 0); - * const names = store.reduce((acc, record) => acc.concat(record.name), []); - */ - reduce (fn, accumulator = []) { - let a = accumulator; - this.forEach((v, k) => { - a = fn(a, v, k, this); - }, this); - - return a; - } - - /** - * Rebuilds indexes for specified fields or all fields for data consistency - * @param {string|string[]} [index] - Specific index field(s) to rebuild, or all if not specified - * @returns {Haro} This instance for 
method chaining - * @example - * store.reindex(); // Rebuild all indexes - * store.reindex('name'); // Rebuild only name index - * store.reindex(['name', 'email']); // Rebuild name and email indexes - */ - reindex (index) { - const indices = index ? [index] : this.index; - if (index && this.index.includes(index) === false) { - this.index.push(index); - } - this.each(indices, i => this.indexes.set(i, new Map())); - this.forEach((data, key) => this.each(indices, i => this.setIndex(key, data, i))); - - return this; - } - - /** - * Searches for records containing a value across specified indexes - * @param {*} value - Value to search for (string, function, or RegExp) - * @param {string|string[]} [index] - Index(es) to search in, or all if not specified - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of matching records - * @example - * const results = store.search('john'); // Search all indexes - * const nameResults = store.search('john', 'name'); // Search only name index - * const regexResults = store.search(/^admin/, 'role'); // Regex search - */ - search (value, index, raw = false) { - const result = new Set(); // Use Set for unique keys - const fn = typeof value === STRING_FUNCTION; - const rgex = value && typeof value.test === STRING_FUNCTION; - if (!value) return this.immutable ? this.freeze() : []; - const indices = index ? Array.isArray(index) ? index : [index] : this.index; - for (const i of indices) { - const idx = this.indexes.get(i); - if (idx) { - for (const [lkey, lset] of idx) { - let match = false; - - if (fn) { - match = value(lkey, i); - } else if (rgex) { - match = value.test(Array.isArray(lkey) ? 
lkey.join(STRING_COMMA) : lkey); - } else { - match = lkey === value; - } - - if (match) { - for (const key of lset) { - if (this.data.has(key)) { - result.add(key); - } - } - } - } - } - } - let records = Array.from(result).map(key => this.get(key, raw)); - if (!raw && this.immutable) { - records = Object.freeze(records); - } - - return records; - } - - /** - * Sets or updates a record in the store with automatic indexing - * @param {string|null} [key=null] - Key for the record, or null to use record's key field - * @param {Object} [data={}] - Record data to set - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @param {boolean} [override=false] - Whether to override existing data instead of merging - * @returns {Object} The stored record (frozen if immutable mode) - * @example - * const user = store.set(null, {name: 'John', age: 30}); // Auto-generate key - * const updated = store.set('user123', {age: 31}); // Update existing record - */ - set (key = null, data = {}, batch = false, override = false) { - if (key === null) { - key = data[this.key] ?? 
this.uuid(); - } - let x = {...data, [this.key]: key}; - this.beforeSet(key, x, batch, override); - if (!this.data.has(key)) { - if (this.versioning) { - this.versions.set(key, new Set()); - } - } else { - const og = this.get(key, true); - this.deleteIndex(key, og); - if (this.versioning) { - this.versions.get(key).add(Object.freeze(this.clone(og))); - } - if (!override) { - x = this.merge(this.clone(og), x); - } - } - this.data.set(key, x); - this.setIndex(key, x, null); - const result = this.get(key); - this.onset(result, batch); - - return result; - } - - /** - * Internal method to add entries to indexes for a record - * @param {string} key - Key of record being indexed - * @param {Object} data - Data of record being indexed - * @param {string|null} indice - Specific index to update, or null for all - * @returns {Haro} This instance for method chaining - */ - setIndex (key, data, indice) { - this.each(indice === null ? this.index : [indice], i => { - let idx = this.indexes.get(i); - if (!idx) { - idx = new Map(); - this.indexes.set(i, idx); - } - const fn = c => { - if (!idx.has(c)) { - idx.set(c, new Set()); - } - idx.get(c).add(key); - }; - if (i.includes(this.delimiter)) { - this.each(this.indexKeys(i, this.delimiter, data), fn); - } else { - this.each(Array.isArray(data[i]) ? 
data[i] : [data[i]], fn); - } - }); - - return this; - } - - /** - * Sorts all records using a comparator function - * @param {Function} fn - Comparator function for sorting (a, b) => number - * @param {boolean} [frozen=false] - Whether to return frozen records - * @returns {Array} Sorted array of records - * @example - * const sorted = store.sort((a, b) => a.age - b.age); // Sort by age - * const names = store.sort((a, b) => a.name.localeCompare(b.name)); // Sort by name - */ - sort (fn, frozen = false) { - const dataSize = this.data.size; - let result = this.limit(INT_0, dataSize, true).sort(fn); - if (frozen) { - result = this.freeze(...result); - } - - return result; - } - - /** - * Comparator function for sorting keys with type-aware comparison logic - * @param {*} a - First value to compare - * @param {*} b - Second value to compare - * @returns {number} Negative number if a < b, positive if a > b, zero if equal - * @example - * const keys = ['name', 'age', 'email']; - * keys.sort(store.sortKeys); // Alphabetical sort - * - * const mixed = [10, '5', 'abc', 3]; - * mixed.sort(store.sortKeys); // Type-aware sort: numbers first, then strings - */ - sortKeys (a, b) { - // Handle string comparison - if (typeof a === STRING_STRING && typeof b === STRING_STRING) { - return a.localeCompare(b); - } - // Handle numeric comparison - if (typeof a === STRING_NUMBER && typeof b === STRING_NUMBER) { - return a - b; - } - - // Handle mixed types or other types by converting to string - - return String(a).localeCompare(String(b)); - } - - /** - * Sorts records by a specific indexed field in ascending order - * @param {string} [index=STRING_EMPTY] - Index field name to sort by - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records sorted by the specified field - * @throws {Error} Throws error if index field is empty or invalid - * @example - * const byAge = store.sortBy('age'); - * const byName = 
store.sortBy('name'); - */ - sortBy (index = STRING_EMPTY, raw = false) { - if (index === STRING_EMPTY) { - throw new Error(STRING_INVALID_FIELD); - } - let result = []; - const keys = []; - if (this.indexes.has(index) === false) { - this.reindex(index); - } - const lindex = this.indexes.get(index); - lindex.forEach((idx, key) => keys.push(key)); - this.each(keys.sort(this.sortKeys), i => lindex.get(i).forEach(key => result.push(this.get(key, raw)))); - if (this.immutable) { - result = Object.freeze(result); - } - - return result; - } - - /** - * Converts all store data to a plain array of records - * @returns {Array} Array containing all records in the store - * @example - * const allRecords = store.toArray(); - * console.log(`Store contains ${allRecords.length} records`); - */ - toArray () { - const result = Array.from(this.data.values()); - if (this.immutable) { - this.each(result, i => Object.freeze(i)); - Object.freeze(result); - } - - return result; - } - - /** - * Generates a RFC4122 v4 UUID for record identification - * @returns {string} UUID string in standard format - * @example - * const id = store.uuid(); // "f47ac10b-58cc-4372-a567-0e02b2c3d479" - */ - uuid () { - return crypto.randomUUID(); - } - - /** - * Returns an iterator of all values in the store - * @returns {Iterator} Iterator of record values - * @example - * for (const record of store.values()) { - * console.log(record.name); - * } - */ - values () { - return this.data.values(); - } - - /** - * Internal helper method for predicate matching with support for arrays and regex - * @param {Object} record - Record to test against predicate - * @param {Object} predicate - Predicate object with field-value pairs - * @param {string} op - Operator for array matching ('||' for OR, '&&' for AND) - * @returns {boolean} True if record matches predicate criteria - */ - matchesPredicate (record, predicate, op) { - const keys = Object.keys(predicate); - - return keys.every(key => { - const pred = 
predicate[key]; - const val = record[key]; - if (Array.isArray(pred)) { - if (Array.isArray(val)) { - return op === STRING_DOUBLE_AND ? pred.every(p => val.includes(p)) : pred.some(p => val.includes(p)); - } else { - return op === STRING_DOUBLE_AND ? pred.every(p => val === p) : pred.some(p => val === p); - } - } else if (pred instanceof RegExp) { - if (Array.isArray(val)) { - return op === STRING_DOUBLE_AND ? val.every(v => pred.test(v)) : val.some(v => pred.test(v)); - } else { - return pred.test(val); - } - } else if (Array.isArray(val)) { - return val.includes(pred); - } else { - return val === pred; - } - }); - } - - /** - * Advanced filtering with predicate logic supporting AND/OR operations on arrays - * @param {Object} [predicate={}] - Object with field-value pairs for filtering - * @param {string} [op=STRING_DOUBLE_PIPE] - Operator for array matching ('||' for OR, '&&' for AND) - * @returns {Array} Array of records matching the predicate criteria - * @example - * // Find records with tags containing 'admin' OR 'user' - * const users = store.where({tags: ['admin', 'user']}, '||'); - * - * // Find records with ALL specified tags - * const powerUsers = store.where({tags: ['admin', 'power']}, '&&'); - * - * // Regex matching - * const emails = store.where({email: /^admin@/}); - */ - where (predicate = {}, op = STRING_DOUBLE_PIPE) { - const keys = this.index.filter(i => i in predicate); - if (keys.length === 0) return []; - - // Try to use indexes for better performance - const indexedKeys = keys.filter(k => this.indexes.has(k)); - if (indexedKeys.length > 0) { - // Use index-based filtering for better performance - let candidateKeys = new Set(); - let first = true; - for (const key of indexedKeys) { - const pred = predicate[key]; - const idx = this.indexes.get(key); - const matchingKeys = new Set(); - if (Array.isArray(pred)) { - for (const p of pred) { - if (idx.has(p)) { - for (const k of idx.get(p)) { - matchingKeys.add(k); - } - } - } - } else if 
(idx.has(pred)) { - for (const k of idx.get(pred)) { - matchingKeys.add(k); - } - } - if (first) { - candidateKeys = matchingKeys; - first = false; - } else { - // AND operation across different fields - candidateKeys = new Set([...candidateKeys].filter(k => matchingKeys.has(k))); - } - } - // Filter candidates with full predicate logic - const results = []; - for (const key of candidateKeys) { - const record = this.get(key, true); - if (this.matchesPredicate(record, predicate, op)) { - results.push(this.immutable ? this.get(key) : record); - } - } - - return this.immutable ? this.freeze(...results) : results; - } - - // Fallback to full scan if no indexes available - return this.filter(a => this.matchesPredicate(a, predicate, op)); - } -} - -/** - * Factory function to create a new Haro instance with optional initial data - * @param {Array|null} [data=null] - Initial data to populate the store - * @param {Object} [config={}] - Configuration object passed to Haro constructor - * @returns {Haro} New Haro instance configured and optionally populated - * @example - * const store = haro([ - * {id: 1, name: 'John', age: 30}, - * {id: 2, name: 'Jane', age: 25} - * ], { - * index: ['name', 'age'], - * versioning: true - * }); - */ -function haro (data = null, config = {}) { - const obj = new Haro(config); - - if (Array.isArray(data)) { - obj.batch(data, STRING_SET); - } - - return obj; -}exports.Haro=Haro;exports.haro=haro;})); \ No newline at end of file diff --git a/dist/haro.umd.min.js b/dist/haro.umd.min.js deleted file mode 100644 index 6277f793..00000000 --- a/dist/haro.umd.min.js +++ /dev/null @@ -1,5 +0,0 @@ -/*! 
- 2025 Jason Mulligan - @version 16.0.0 -*/ -!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("crypto")):"function"==typeof define&&define.amd?define(["exports","crypto"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).lru={},e.crypto)}(this,function(e,t){"use strict";const s="",r="&&",i="function",n="object",h="records",a="string",o="number",l="Invalid function";class c{constructor({delimiter:e="|",id:t=this.uuid(),immutable:s=!1,index:r=[],key:i="id",versioning:n=!1}={}){return this.data=new Map,this.delimiter=e,this.id=t,this.immutable=s,this.index=Array.isArray(r)?[...r]:[],this.indexes=new Map,this.key=i,this.versions=new Map,this.versioning=n,Object.defineProperty(this,"registry",{enumerable:!0,get:()=>Array.from(this.data.keys())}),Object.defineProperty(this,"size",{enumerable:!0,get:()=>this.data.size}),this.reindex()}batch(e,t="set"){const s="del"===t?e=>this.delete(e,!0):e=>this.set(null,e,!0,!0);return this.onbatch(this.beforeBatch(e,t).map(s),t)}beforeBatch(e,t=""){return e}beforeClear(){}beforeDelete(e="",t=!1){}beforeSet(e="",t={},s=!1,r=!1){}clear(){return this.beforeClear(),this.data.clear(),this.indexes.clear(),this.versions.clear(),this.reindex().onclear(),this}clone(e){return structuredClone(e)}delete(e="",t=!1){if(!this.data.has(e))throw new Error("Record not found");const s=this.get(e,!0);this.beforeDelete(e,t),this.deleteIndex(e,s),this.data.delete(e),this.ondelete(e,t),this.versioning&&this.versions.delete(e)}deleteIndex(e,t){return this.index.forEach(s=>{const r=this.indexes.get(s);if(!r)return;const i=s.includes(this.delimiter)?this.indexKeys(s,this.delimiter,t):Array.isArray(t[s])?t[s]:[t[s]];this.each(i,t=>{if(r.has(t)){const s=r.get(t);s.delete(e),0===s.size&&r.delete(t)}})}),this}dump(e=h){let t;return t=e===h?Array.from(this.entries()):Array.from(this.indexes).map(e=>(e[1]=Array.from(e[1]).map(e=>(e[1]=Array.from(e[1]),e)),e)),t}each(e=[],t){const s=e.length;for(let r=0;r0){const 
n=this.indexKeys(s,this.delimiter,e);i=Array.from(n.reduce((e,t)=>(r.has(t)&&r.get(t).forEach(t=>e.add(t)),e),new Set)).map(e=>this.get(e,t))}return!t&&this.immutable&&(i=Object.freeze(i)),i}filter(e,t=!1){if(typeof e!==i)throw new Error(l);let s=this.reduce((t,s)=>(e(s)&&t.push(s),t),[]);return t||(s=s.map(e=>this.list(e)),this.immutable&&(s=Object.freeze(s))),s}forEach(e,t=this){return this.data.forEach((s,r)=>{this.immutable&&(s=this.clone(s)),e.call(t,s,r)},this),this}freeze(...e){return Object.freeze(e.map(e=>Object.freeze(e)))}get(e,t=!1){let s=this.data.get(e)??null;return null===s||t||(s=this.list(s),this.immutable&&(s=Object.freeze(s))),s}has(e){return this.data.has(e)}indexKeys(e="",t="|",s={}){const r=e.split(t).sort(this.sortKeys),i=r.length;let n=[""];for(let e=0;ethis.get(e,s));return!s&&this.immutable&&(r=Object.freeze(r)),r}list(e){const t=[e[this.key],e];return this.immutable?this.freeze(...t):t}map(e,t=!1){if(typeof e!==i)throw new Error(l);let s=[];return this.forEach((t,r)=>s.push(e(t,r))),t||(s=s.map(e=>this.list(e)),this.immutable&&(s=Object.freeze(s))),s}merge(e,t,s=!1){return Array.isArray(e)&&Array.isArray(t)?e=s?t:e.concat(t):typeof e===n&&null!==e&&typeof t===n&&null!==t?this.each(Object.keys(t),r=>{e[r]=this.merge(e[r],t[r],s)}):e=t,e}onbatch(e,t=""){return e}onclear(){}ondelete(e="",t=!1){}onoverride(e=""){}onset(e={},t=!1){}override(e,t=h){if("indexes"===t)this.indexes=new Map(e.map(e=>[e[0],new Map(e[1].map(e=>[e[0],new Set(e[1])]))]));else{if(t!==h)throw new Error("Invalid type");this.indexes.clear(),this.data=new Map(e)}return this.onoverride(t),!0}reduce(e,t=[]){let s=t;return this.forEach((t,r)=>{s=e(s,t,r,this)},this),s}reindex(e){const t=e?[e]:this.index;return e&&!1===this.index.includes(e)&&this.index.push(e),this.each(t,e=>this.indexes.set(e,new Map)),this.forEach((e,s)=>this.each(t,t=>this.setIndex(s,e,t))),this}search(e,t,s=!1){const r=new Set,n=typeof e===i,h=e&&typeof e.test===i;if(!e)return 
this.immutable?this.freeze():[];const a=t?Array.isArray(t)?t:[t]:this.index;for(const t of a){const s=this.indexes.get(t);if(s)for(const[i,a]of s){let s=!1;if(s=n?e(i,t):h?e.test(Array.isArray(i)?i.join(","):i):i===e,s)for(const e of a)this.data.has(e)&&r.add(e)}}let o=Array.from(r).map(e=>this.get(e,s));return!s&&this.immutable&&(o=Object.freeze(o)),o}set(e=null,t={},s=!1,r=!1){null===e&&(e=t[this.key]??this.uuid());let i={...t,[this.key]:e};if(this.beforeSet(e,i,s,r),this.data.has(e)){const t=this.get(e,!0);this.deleteIndex(e,t),this.versioning&&this.versions.get(e).add(Object.freeze(this.clone(t))),r||(i=this.merge(this.clone(t),i))}else this.versioning&&this.versions.set(e,new Set);this.data.set(e,i),this.setIndex(e,i,null);const n=this.get(e);return this.onset(n,s),n}setIndex(e,t,s){return this.each(null===s?this.index:[s],s=>{let r=this.indexes.get(s);r||(r=new Map,this.indexes.set(s,r));const i=t=>{r.has(t)||r.set(t,new Set),r.get(t).add(e)};s.includes(this.delimiter)?this.each(this.indexKeys(s,this.delimiter,t),i):this.each(Array.isArray(t[s])?t[s]:[t[s]],i)}),this}sort(e,t=!1){const s=this.data.size;let r=this.limit(0,s,!0).sort(e);return t&&(r=this.freeze(...r)),r}sortKeys(e,t){return typeof e===a&&typeof t===a?e.localeCompare(t):typeof e===o&&typeof t===o?e-t:String(e).localeCompare(String(t))}sortBy(e="",t=!1){if(e===s)throw new Error("Invalid field");let r=[];const i=[];!1===this.indexes.has(e)&&this.reindex(e);const n=this.indexes.get(e);return n.forEach((e,t)=>i.push(t)),this.each(i.sort(this.sortKeys),e=>n.get(e).forEach(e=>r.push(this.get(e,t)))),this.immutable&&(r=Object.freeze(r)),r}toArray(){const e=Array.from(this.data.values());return this.immutable&&(this.each(e,e=>Object.freeze(e)),Object.freeze(e)),e}uuid(){return t.randomUUID()}values(){return this.data.values()}matchesPredicate(e,t,s){return Object.keys(t).every(i=>{const n=t[i],h=e[i];return 
Array.isArray(n)?Array.isArray(h)?s===r?n.every(e=>h.includes(e)):n.some(e=>h.includes(e)):s===r?n.every(e=>h===e):n.some(e=>h===e):n instanceof RegExp?Array.isArray(h)?s===r?h.every(e=>n.test(e)):h.some(e=>n.test(e)):n.test(h):Array.isArray(h)?h.includes(n):h===n})}where(e={},t="||"){const s=this.index.filter(t=>t in e);if(0===s.length)return[];const r=s.filter(e=>this.indexes.has(e));if(r.length>0){let s=new Set,i=!0;for(const t of r){const r=e[t],n=this.indexes.get(t),h=new Set;if(Array.isArray(r)){for(const e of r)if(n.has(e))for(const t of n.get(e))h.add(t)}else if(n.has(r))for(const e of n.get(r))h.add(e);i?(s=h,i=!1):s=new Set([...s].filter(e=>h.has(e)))}const n=[];for(const r of s){const s=this.get(r,!0);this.matchesPredicate(s,e,t)&&n.push(this.immutable?this.get(r):s)}return this.immutable?this.freeze(...n):n}return this.filter(s=>this.matchesPredicate(s,e,t))}}e.Haro=c,e.haro=function(e=null,t={}){const s=new c(t);return Array.isArray(e)&&s.batch(e,"set"),s}});//# sourceMappingURL=haro.umd.min.js.map diff --git a/dist/haro.umd.min.js.map b/dist/haro.umd.min.js.map deleted file mode 100644 index 46b6d34e..00000000 --- a/dist/haro.umd.min.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"haro.umd.min.js","sources":["../src/constants.js","../src/haro.js"],"sourcesContent":["// String constants - Single characters and symbols\nexport const STRING_COMMA = \",\";\nexport const STRING_EMPTY = \"\";\nexport const STRING_PIPE = \"|\";\nexport const STRING_DOUBLE_PIPE = \"||\";\nexport const STRING_DOUBLE_AND = \"&&\";\n\n// String constants - Operation and type names\nexport const STRING_ID = \"id\";\nexport const STRING_DEL = \"del\";\nexport const STRING_FUNCTION = \"function\";\nexport const STRING_INDEXES = \"indexes\";\nexport const STRING_OBJECT = \"object\";\nexport const STRING_RECORDS = \"records\";\nexport const STRING_REGISTRY = \"registry\";\nexport const STRING_SET = \"set\";\nexport const STRING_SIZE = \"size\";\nexport const STRING_STRING = 
\"string\";\nexport const STRING_NUMBER = \"number\";\n\n// String constants - Error messages\nexport const STRING_INVALID_FIELD = \"Invalid field\";\nexport const STRING_INVALID_FUNCTION = \"Invalid function\";\nexport const STRING_INVALID_TYPE = \"Invalid type\";\nexport const STRING_RECORD_NOT_FOUND = \"Record not found\";\n\n// Integer constants\nexport const INT_0 = 0;\n","import {randomUUID as uuid} from \"crypto\";\nimport {\n\tINT_0,\n\tSTRING_COMMA,\n\tSTRING_DEL, STRING_DOUBLE_AND,\n\tSTRING_DOUBLE_PIPE,\n\tSTRING_EMPTY,\n\tSTRING_FUNCTION,\n\tSTRING_ID,\n\tSTRING_INDEXES,\n\tSTRING_INVALID_FIELD,\n\tSTRING_INVALID_FUNCTION,\n\tSTRING_INVALID_TYPE, STRING_NUMBER, STRING_OBJECT,\n\tSTRING_PIPE,\n\tSTRING_RECORD_NOT_FOUND,\n\tSTRING_RECORDS,\n\tSTRING_REGISTRY,\n\tSTRING_SET,\n\tSTRING_SIZE, STRING_STRING\n} from \"./constants.js\";\n\n/**\n * Haro is a modern immutable DataStore for collections of records with indexing,\n * versioning, and batch operations support. It provides a Map-like interface\n * with advanced querying capabilities through indexes.\n * @class\n * @example\n * const store = new Haro({\n * index: ['name', 'age'],\n * key: 'id',\n * versioning: true\n * });\n *\n * store.set(null, {name: 'John', age: 30});\n * const results = store.find({name: 'John'});\n */\nexport class Haro {\n\t/**\n\t * Creates a new Haro instance with specified configuration\n\t * @param {Object} [config={}] - Configuration object for the store\n\t * @param {string} [config.delimiter=STRING_PIPE] - Delimiter for composite indexes (default: '|')\n\t * @param {string} [config.id] - Unique identifier for this instance (auto-generated if not provided)\n\t * @param {boolean} [config.immutable=false] - Return frozen/immutable objects for data safety\n\t * @param {string[]} [config.index=[]] - Array of field names to create indexes for\n\t * @param {string} [config.key=STRING_ID] - Primary key field name used for record identification\n\t * @param {boolean} 
[config.versioning=false] - Enable versioning to track record changes\n\t * @constructor\n\t * @example\n\t * const store = new Haro({\n\t * index: ['name', 'email', 'name|department'],\n\t * key: 'userId',\n\t * versioning: true,\n\t * immutable: true\n\t * });\n\t */\n\tconstructor ({delimiter = STRING_PIPE, id = this.uuid(), immutable = false, index = [], key = STRING_ID, versioning = false} = {}) {\n\t\tthis.data = new Map();\n\t\tthis.delimiter = delimiter;\n\t\tthis.id = id;\n\t\tthis.immutable = immutable;\n\t\tthis.index = Array.isArray(index) ? [...index] : [];\n\t\tthis.indexes = new Map();\n\t\tthis.key = key;\n\t\tthis.versions = new Map();\n\t\tthis.versioning = versioning;\n\t\tObject.defineProperty(this, STRING_REGISTRY, {\n\t\t\tenumerable: true,\n\t\t\tget: () => Array.from(this.data.keys())\n\t\t});\n\t\tObject.defineProperty(this, STRING_SIZE, {\n\t\t\tenumerable: true,\n\t\t\tget: () => this.data.size\n\t\t});\n\n\t\treturn this.reindex();\n\t}\n\n\t/**\n\t * Performs batch operations on multiple records for efficient bulk processing\n\t * @param {Array} args - Array of records to process\n\t * @param {string} [type=STRING_SET] - Type of operation: 'set' for upsert, 'del' for delete\n\t * @returns {Array} Array of results from the batch operation\n\t * @throws {Error} Throws error if individual operations fail during batch processing\n\t * @example\n\t * const results = store.batch([\n\t * {id: 1, name: 'John'},\n\t * {id: 2, name: 'Jane'}\n\t * ], 'set');\n\t */\n\tbatch (args, type = STRING_SET) {\n\t\tconst fn = type === STRING_DEL ? 
i => this.delete(i, true) : i => this.set(null, i, true, true);\n\n\t\treturn this.onbatch(this.beforeBatch(args, type).map(fn), type);\n\t}\n\n\t/**\n\t * Lifecycle hook executed before batch operations for custom preprocessing\n\t * @param {Array} arg - Arguments passed to batch operation\n\t * @param {string} [type=STRING_EMPTY] - Type of batch operation ('set' or 'del')\n\t * @returns {Array} The arguments array (possibly modified) to be processed\n\t */\n\tbeforeBatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic before batch; override in subclass if needed\n\t\treturn arg;\n\t}\n\n\t/**\n\t * Lifecycle hook executed before clear operation for custom preprocessing\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t * @example\n\t * class MyStore extends Haro {\n\t * beforeClear() {\n\t * this.backup = this.toArray();\n\t * }\n\t * }\n\t */\n\tbeforeClear () {\n\t\t// Hook for custom logic before clear; override in subclass if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed before delete operation for custom preprocessing\n\t * @param {string} [key=STRING_EMPTY] - Key of record to delete\n\t * @param {boolean} [batch=false] - Whether this is part of a batch operation\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t */\n\tbeforeDelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic before delete; override in subclass if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed before set operation for custom preprocessing\n\t * @param {string} [key=STRING_EMPTY] - Key of record to set\n\t * @param {Object} [data={}] - Record data being set\n\t * @param {boolean} [batch=false] - Whether this is part of a batch operation\n\t * @param {boolean} [override=false] - Whether to override existing data\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t 
*/\n\tbeforeSet (key = STRING_EMPTY, data = {}, batch = false, override = false) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic before set; override in subclass if needed\n\t}\n\n\t/**\n\t * Removes all records, indexes, and versions from the store\n\t * @returns {Haro} This instance for method chaining\n\t * @example\n\t * store.clear();\n\t * console.log(store.size); // 0\n\t */\n\tclear () {\n\t\tthis.beforeClear();\n\t\tthis.data.clear();\n\t\tthis.indexes.clear();\n\t\tthis.versions.clear();\n\t\tthis.reindex().onclear();\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Creates a deep clone of the given value, handling objects, arrays, and primitives\n\t * @param {*} arg - Value to clone (any type)\n\t * @returns {*} Deep clone of the argument\n\t * @example\n\t * const original = {name: 'John', tags: ['user', 'admin']};\n\t * const cloned = store.clone(original);\n\t * cloned.tags.push('new'); // original.tags is unchanged\n\t */\n\tclone (arg) {\n\t\treturn structuredClone(arg);\n\t}\n\n\t/**\n\t * Deletes a record from the store and removes it from all indexes\n\t * @param {string} [key=STRING_EMPTY] - Key of record to delete\n\t * @param {boolean} [batch=false] - Whether this is part of a batch operation\n\t * @returns {void}\n\t * @throws {Error} Throws error if record with the specified key is not found\n\t * @example\n\t * store.delete('user123');\n\t * // Throws error if 'user123' doesn't exist\n\t */\n\tdelete (key = STRING_EMPTY, batch = false) {\n\t\tif (!this.data.has(key)) {\n\t\t\tthrow new Error(STRING_RECORD_NOT_FOUND);\n\t\t}\n\t\tconst og = this.get(key, true);\n\t\tthis.beforeDelete(key, batch);\n\t\tthis.deleteIndex(key, og);\n\t\tthis.data.delete(key);\n\t\tthis.ondelete(key, batch);\n\t\tif (this.versioning) {\n\t\t\tthis.versions.delete(key);\n\t\t}\n\t}\n\n\t/**\n\t * Internal method to remove entries from indexes for a deleted record\n\t * @param {string} key - Key of record being deleted\n\t * @param {Object} data - Data 
of record being deleted\n\t * @returns {Haro} This instance for method chaining\n\t */\n\tdeleteIndex (key, data) {\n\t\tthis.index.forEach(i => {\n\t\t\tconst idx = this.indexes.get(i);\n\t\t\tif (!idx) return;\n\t\t\tconst values = i.includes(this.delimiter) ?\n\t\t\t\tthis.indexKeys(i, this.delimiter, data) :\n\t\t\t\tArray.isArray(data[i]) ? data[i] : [data[i]];\n\t\t\tthis.each(values, value => {\n\t\t\t\tif (idx.has(value)) {\n\t\t\t\t\tconst o = idx.get(value);\n\t\t\t\t\to.delete(key);\n\t\t\t\t\tif (o.size === INT_0) {\n\t\t\t\t\t\tidx.delete(value);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\t\t});\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Exports complete store data or indexes for persistence or debugging\n\t * @param {string} [type=STRING_RECORDS] - Type of data to export: 'records' or 'indexes'\n\t * @returns {Array} Array of [key, value] pairs for records, or serialized index structure\n\t * @example\n\t * const records = store.dump('records');\n\t * const indexes = store.dump('indexes');\n\t */\n\tdump (type = STRING_RECORDS) {\n\t\tlet result;\n\t\tif (type === STRING_RECORDS) {\n\t\t\tresult = Array.from(this.entries());\n\t\t} else {\n\t\t\tresult = Array.from(this.indexes).map(i => {\n\t\t\t\ti[1] = Array.from(i[1]).map(ii => {\n\t\t\t\t\tii[1] = Array.from(ii[1]);\n\n\t\t\t\t\treturn ii;\n\t\t\t\t});\n\n\t\t\t\treturn i;\n\t\t\t});\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Utility method to iterate over an array with a callback function\n\t * @param {Array<*>} [arr=[]] - Array to iterate over\n\t * @param {Function} fn - Function to call for each element (element, index)\n\t * @returns {Array<*>} The original array for method chaining\n\t * @example\n\t * store.each([1, 2, 3], (item, index) => console.log(item, index));\n\t */\n\teach (arr = [], fn) {\n\t\tconst len = arr.length;\n\t\tfor (let i = 0; i < len; i++) {\n\t\t\tfn(arr[i], i);\n\t\t}\n\n\t\treturn arr;\n\t}\n\n\t/**\n\t * Returns an iterator of [key, value] pairs for each record in the 
store\n\t * @returns {Iterator>} Iterator of [key, value] pairs\n\t * @example\n\t * for (const [key, value] of store.entries()) {\n\t * console.log(key, value);\n\t * }\n\t */\n\tentries () {\n\t\treturn this.data.entries();\n\t}\n\n\t/**\n\t * Finds records matching the specified criteria using indexes for optimal performance\n\t * @param {Object} [where={}] - Object with field-value pairs to match against\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of matching records (frozen if immutable mode)\n\t * @example\n\t * const users = store.find({department: 'engineering', active: true});\n\t * const admins = store.find({role: 'admin'});\n\t */\n\tfind (where = {}, raw = false) {\n\t\tconst key = Object.keys(where).sort(this.sortKeys).join(this.delimiter);\n\t\tconst index = this.indexes.get(key) ?? new Map();\n\t\tlet result = [];\n\t\tif (index.size > 0) {\n\t\t\tconst keys = this.indexKeys(key, this.delimiter, where);\n\t\t\tresult = Array.from(keys.reduce((a, v) => {\n\t\t\t\tif (index.has(v)) {\n\t\t\t\t\tindex.get(v).forEach(k => a.add(k));\n\t\t\t\t}\n\n\t\t\t\treturn a;\n\t\t\t}, new Set())).map(i => this.get(i, raw));\n\t\t}\n\t\tif (!raw && this.immutable) {\n\t\t\tresult = Object.freeze(result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Filters records using a predicate function, similar to Array.filter\n\t * @param {Function} fn - Predicate function to test each record (record, key, store)\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of records that pass the predicate test\n\t * @throws {Error} Throws error if fn is not a function\n\t * @example\n\t * const adults = store.filter(record => record.age >= 18);\n\t * const recent = store.filter(record => record.created > Date.now() - 86400000);\n\t */\n\tfilter (fn, raw = false) {\n\t\tif (typeof fn !== STRING_FUNCTION) {\n\t\t\tthrow new 
Error(STRING_INVALID_FUNCTION);\n\t\t}\n\t\tlet result = this.reduce((a, v) => {\n\t\t\tif (fn(v)) {\n\t\t\t\ta.push(v);\n\t\t\t}\n\n\t\t\treturn a;\n\t\t}, []);\n\t\tif (!raw) {\n\t\t\tresult = result.map(i => this.list(i));\n\n\t\t\tif (this.immutable) {\n\t\t\t\tresult = Object.freeze(result);\n\t\t\t}\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Executes a function for each record in the store, similar to Array.forEach\n\t * @param {Function} fn - Function to execute for each record (value, key)\n\t * @param {*} [ctx] - Context object to use as 'this' when executing the function\n\t * @returns {Haro} This instance for method chaining\n\t * @example\n\t * store.forEach((record, key) => {\n\t * console.log(`${key}: ${record.name}`);\n\t * });\n\t */\n\tforEach (fn, ctx = this) {\n\t\tthis.data.forEach((value, key) => {\n\t\t\tif (this.immutable) {\n\t\t\t\tvalue = this.clone(value);\n\t\t\t}\n\t\t\tfn.call(ctx, value, key);\n\t\t}, this);\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Creates a frozen array from the given arguments for immutable data handling\n\t * @param {...*} args - Arguments to freeze into an array\n\t * @returns {Array<*>} Frozen array containing frozen arguments\n\t * @example\n\t * const frozen = store.freeze(obj1, obj2, obj3);\n\t * // Returns Object.freeze([Object.freeze(obj1), Object.freeze(obj2), Object.freeze(obj3)])\n\t */\n\tfreeze (...args) {\n\t\treturn Object.freeze(args.map(i => Object.freeze(i)));\n\t}\n\n\t/**\n\t * Retrieves a record by its key\n\t * @param {string} key - Key of record to retrieve\n\t * @param {boolean} [raw=false] - Whether to return raw data (true) or processed/frozen data (false)\n\t * @returns {Object|null} The record if found, null if not found\n\t * @example\n\t * const user = store.get('user123');\n\t * const rawUser = store.get('user123', true);\n\t */\n\tget (key, raw = false) {\n\t\tlet result = this.data.get(key) ?? 
null;\n\t\tif (result !== null && !raw) {\n\t\t\tresult = this.list(result);\n\t\t\tif (this.immutable) {\n\t\t\t\tresult = Object.freeze(result);\n\t\t\t}\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Checks if a record with the specified key exists in the store\n\t * @param {string} key - Key to check for existence\n\t * @returns {boolean} True if record exists, false otherwise\n\t * @example\n\t * if (store.has('user123')) {\n\t * console.log('User exists');\n\t * }\n\t */\n\thas (key) {\n\t\treturn this.data.has(key);\n\t}\n\n\t/**\n\t * Generates index keys for composite indexes from data values\n\t * @param {string} [arg=STRING_EMPTY] - Composite index field names joined by delimiter\n\t * @param {string} [delimiter=STRING_PIPE] - Delimiter used in composite index\n\t * @param {Object} [data={}] - Data object to extract field values from\n\t * @returns {string[]} Array of generated index keys\n\t * @example\n\t * // For index 'name|department' with data {name: 'John', department: 'IT'}\n\t * const keys = store.indexKeys('name|department', '|', data);\n\t * // Returns ['John|IT']\n\t */\n\tindexKeys (arg = STRING_EMPTY, delimiter = STRING_PIPE, data = {}) {\n\t\tconst fields = arg.split(delimiter).sort(this.sortKeys);\n\t\tconst fieldsLen = fields.length;\n\t\tlet result = [\"\"];\n\t\tfor (let i = 0; i < fieldsLen; i++) {\n\t\t\tconst field = fields[i];\n\t\t\tconst values = Array.isArray(data[field]) ? data[field] : [data[field]];\n\t\t\tconst newResult = [];\n\t\t\tconst resultLen = result.length;\n\t\t\tconst valuesLen = values.length;\n\t\t\tfor (let j = 0; j < resultLen; j++) {\n\t\t\t\tfor (let k = 0; k < valuesLen; k++) {\n\t\t\t\t\tconst newKey = i === 0 ? 
values[k] : `${result[j]}${delimiter}${values[k]}`;\n\t\t\t\t\tnewResult.push(newKey);\n\t\t\t\t}\n\t\t\t}\n\t\t\tresult = newResult;\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Returns an iterator of all keys in the store\n\t * @returns {Iterator} Iterator of record keys\n\t * @example\n\t * for (const key of store.keys()) {\n\t * console.log(key);\n\t * }\n\t */\n\tkeys () {\n\t\treturn this.data.keys();\n\t}\n\n\t/**\n\t * Returns a limited subset of records with offset support for pagination\n\t * @param {number} [offset=INT_0] - Number of records to skip from the beginning\n\t * @param {number} [max=INT_0] - Maximum number of records to return\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of records within the specified range\n\t * @example\n\t * const page1 = store.limit(0, 10); // First 10 records\n\t * const page2 = store.limit(10, 10); // Next 10 records\n\t */\n\tlimit (offset = INT_0, max = INT_0, raw = false) {\n\t\tlet result = this.registry.slice(offset, offset + max).map(i => this.get(i, raw));\n\t\tif (!raw && this.immutable) {\n\t\t\tresult = Object.freeze(result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Converts a record into a [key, value] pair array format\n\t * @param {Object} arg - Record object to convert to list format\n\t * @returns {Array<*>} Array containing [key, record] where key is extracted from record's key field\n\t * @example\n\t * const record = {id: 'user123', name: 'John', age: 30};\n\t * const pair = store.list(record); // ['user123', {id: 'user123', name: 'John', age: 30}]\n\t */\n\tlist (arg) {\n\t\tconst result = [arg[this.key], arg];\n\n\t\treturn this.immutable ? 
this.freeze(...result) : result;\n\t}\n\n\t/**\n\t * Transforms all records using a mapping function, similar to Array.map\n\t * @param {Function} fn - Function to transform each record (record, key)\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array<*>} Array of transformed results\n\t * @throws {Error} Throws error if fn is not a function\n\t * @example\n\t * const names = store.map(record => record.name);\n\t * const summaries = store.map(record => ({id: record.id, name: record.name}));\n\t */\n\tmap (fn, raw = false) {\n\t\tif (typeof fn !== STRING_FUNCTION) {\n\t\t\tthrow new Error(STRING_INVALID_FUNCTION);\n\t\t}\n\t\tlet result = [];\n\t\tthis.forEach((value, key) => result.push(fn(value, key)));\n\t\tif (!raw) {\n\t\t\tresult = result.map(i => this.list(i));\n\t\t\tif (this.immutable) {\n\t\t\t\tresult = Object.freeze(result);\n\t\t\t}\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Merges two values together with support for arrays and objects\n\t * @param {*} a - First value (target)\n\t * @param {*} b - Second value (source)\n\t * @param {boolean} [override=false] - Whether to override arrays instead of concatenating\n\t * @returns {*} Merged result\n\t * @example\n\t * const merged = store.merge({a: 1}, {b: 2}); // {a: 1, b: 2}\n\t * const arrays = store.merge([1, 2], [3, 4]); // [1, 2, 3, 4]\n\t */\n\tmerge (a, b, override = false) {\n\t\tif (Array.isArray(a) && Array.isArray(b)) {\n\t\t\ta = override ? 
b : a.concat(b);\n\t\t} else if (typeof a === STRING_OBJECT && a !== null && typeof b === STRING_OBJECT && b !== null) {\n\t\t\tthis.each(Object.keys(b), i => {\n\t\t\t\ta[i] = this.merge(a[i], b[i], override);\n\t\t\t});\n\t\t} else {\n\t\t\ta = b;\n\t\t}\n\n\t\treturn a;\n\t}\n\n\t/**\n\t * Lifecycle hook executed after batch operations for custom postprocessing\n\t * @param {Array} arg - Result of batch operation\n\t * @param {string} [type=STRING_EMPTY] - Type of batch operation that was performed\n\t * @returns {Array} Modified result (override this method to implement custom logic)\n\t */\n\tonbatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars\n\t\treturn arg;\n\t}\n\n\t/**\n\t * Lifecycle hook executed after clear operation for custom postprocessing\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t * @example\n\t * class MyStore extends Haro {\n\t * onclear() {\n\t * console.log('Store cleared');\n\t * }\n\t * }\n\t */\n\tonclear () {\n\t\t// Hook for custom logic after clear; override in subclass if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed after delete operation for custom postprocessing\n\t * @param {string} [key=STRING_EMPTY] - Key of deleted record\n\t * @param {boolean} [batch=false] - Whether this was part of a batch operation\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t */\n\tondelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic after delete; override in subclass if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed after override operation for custom postprocessing\n\t * @param {string} [type=STRING_EMPTY] - Type of override operation that was performed\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t */\n\tonoverride (type = STRING_EMPTY) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic after override; override in subclass 
if needed\n\t}\n\n\t/**\n\t * Lifecycle hook executed after set operation for custom postprocessing\n\t * @param {Object} [arg={}] - Record that was set\n\t * @param {boolean} [batch=false] - Whether this was part of a batch operation\n\t * @returns {void} Override this method in subclasses to implement custom logic\n\t */\n\tonset (arg = {}, batch = false) { // eslint-disable-line no-unused-vars\n\t\t// Hook for custom logic after set; override in subclass if needed\n\t}\n\n\t/**\n\t * Replaces all store data or indexes with new data for bulk operations\n\t * @param {Array} data - Data to replace with (format depends on type)\n\t * @param {string} [type=STRING_RECORDS] - Type of data: 'records' or 'indexes'\n\t * @returns {boolean} True if operation succeeded\n\t * @throws {Error} Throws error if type is invalid\n\t * @example\n\t * const records = [['key1', {name: 'John'}], ['key2', {name: 'Jane'}]];\n\t * store.override(records, 'records');\n\t */\n\toverride (data, type = STRING_RECORDS) {\n\t\tconst result = true;\n\t\tif (type === STRING_INDEXES) {\n\t\t\tthis.indexes = new Map(data.map(i => [i[0], new Map(i[1].map(ii => [ii[0], new Set(ii[1])]))]));\n\t\t} else if (type === STRING_RECORDS) {\n\t\t\tthis.indexes.clear();\n\t\t\tthis.data = new Map(data);\n\t\t} else {\n\t\t\tthrow new Error(STRING_INVALID_TYPE);\n\t\t}\n\t\tthis.onoverride(type);\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Reduces all records to a single value using a reducer function\n\t * @param {Function} fn - Reducer function (accumulator, value, key, store)\n\t * @param {*} [accumulator] - Initial accumulator value\n\t * @returns {*} Final reduced value\n\t * @example\n\t * const totalAge = store.reduce((sum, record) => sum + record.age, 0);\n\t * const names = store.reduce((acc, record) => acc.concat(record.name), []);\n\t */\n\treduce (fn, accumulator = []) {\n\t\tlet a = accumulator;\n\t\tthis.forEach((v, k) => {\n\t\t\ta = fn(a, v, k, this);\n\t\t}, this);\n\n\t\treturn 
a;\n\t}\n\n\t/**\n\t * Rebuilds indexes for specified fields or all fields for data consistency\n\t * @param {string|string[]} [index] - Specific index field(s) to rebuild, or all if not specified\n\t * @returns {Haro} This instance for method chaining\n\t * @example\n\t * store.reindex(); // Rebuild all indexes\n\t * store.reindex('name'); // Rebuild only name index\n\t * store.reindex(['name', 'email']); // Rebuild name and email indexes\n\t */\n\treindex (index) {\n\t\tconst indices = index ? [index] : this.index;\n\t\tif (index && this.index.includes(index) === false) {\n\t\t\tthis.index.push(index);\n\t\t}\n\t\tthis.each(indices, i => this.indexes.set(i, new Map()));\n\t\tthis.forEach((data, key) => this.each(indices, i => this.setIndex(key, data, i)));\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Searches for records containing a value across specified indexes\n\t * @param {*} value - Value to search for (string, function, or RegExp)\n\t * @param {string|string[]} [index] - Index(es) to search in, or all if not specified\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of matching records\n\t * @example\n\t * const results = store.search('john'); // Search all indexes\n\t * const nameResults = store.search('john', 'name'); // Search only name index\n\t * const regexResults = store.search(/^admin/, 'role'); // Regex search\n\t */\n\tsearch (value, index, raw = false) {\n\t\tconst result = new Set(); // Use Set for unique keys\n\t\tconst fn = typeof value === STRING_FUNCTION;\n\t\tconst rgex = value && typeof value.test === STRING_FUNCTION;\n\t\tif (!value) return this.immutable ? this.freeze() : [];\n\t\tconst indices = index ? Array.isArray(index) ? 
index : [index] : this.index;\n\t\tfor (const i of indices) {\n\t\t\tconst idx = this.indexes.get(i);\n\t\t\tif (idx) {\n\t\t\t\tfor (const [lkey, lset] of idx) {\n\t\t\t\t\tlet match = false;\n\n\t\t\t\t\tif (fn) {\n\t\t\t\t\t\tmatch = value(lkey, i);\n\t\t\t\t\t} else if (rgex) {\n\t\t\t\t\t\tmatch = value.test(Array.isArray(lkey) ? lkey.join(STRING_COMMA) : lkey);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmatch = lkey === value;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (match) {\n\t\t\t\t\t\tfor (const key of lset) {\n\t\t\t\t\t\t\tif (this.data.has(key)) {\n\t\t\t\t\t\t\t\tresult.add(key);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tlet records = Array.from(result).map(key => this.get(key, raw));\n\t\tif (!raw && this.immutable) {\n\t\t\trecords = Object.freeze(records);\n\t\t}\n\n\t\treturn records;\n\t}\n\n\t/**\n\t * Sets or updates a record in the store with automatic indexing\n\t * @param {string|null} [key=null] - Key for the record, or null to use record's key field\n\t * @param {Object} [data={}] - Record data to set\n\t * @param {boolean} [batch=false] - Whether this is part of a batch operation\n\t * @param {boolean} [override=false] - Whether to override existing data instead of merging\n\t * @returns {Object} The stored record (frozen if immutable mode)\n\t * @example\n\t * const user = store.set(null, {name: 'John', age: 30}); // Auto-generate key\n\t * const updated = store.set('user123', {age: 31}); // Update existing record\n\t */\n\tset (key = null, data = {}, batch = false, override = false) {\n\t\tif (key === null) {\n\t\t\tkey = data[this.key] ?? 
this.uuid();\n\t\t}\n\t\tlet x = {...data, [this.key]: key};\n\t\tthis.beforeSet(key, x, batch, override);\n\t\tif (!this.data.has(key)) {\n\t\t\tif (this.versioning) {\n\t\t\t\tthis.versions.set(key, new Set());\n\t\t\t}\n\t\t} else {\n\t\t\tconst og = this.get(key, true);\n\t\t\tthis.deleteIndex(key, og);\n\t\t\tif (this.versioning) {\n\t\t\t\tthis.versions.get(key).add(Object.freeze(this.clone(og)));\n\t\t\t}\n\t\t\tif (!override) {\n\t\t\t\tx = this.merge(this.clone(og), x);\n\t\t\t}\n\t\t}\n\t\tthis.data.set(key, x);\n\t\tthis.setIndex(key, x, null);\n\t\tconst result = this.get(key);\n\t\tthis.onset(result, batch);\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Internal method to add entries to indexes for a record\n\t * @param {string} key - Key of record being indexed\n\t * @param {Object} data - Data of record being indexed\n\t * @param {string|null} indice - Specific index to update, or null for all\n\t * @returns {Haro} This instance for method chaining\n\t */\n\tsetIndex (key, data, indice) {\n\t\tthis.each(indice === null ? this.index : [indice], i => {\n\t\t\tlet idx = this.indexes.get(i);\n\t\t\tif (!idx) {\n\t\t\t\tidx = new Map();\n\t\t\t\tthis.indexes.set(i, idx);\n\t\t\t}\n\t\t\tconst fn = c => {\n\t\t\t\tif (!idx.has(c)) {\n\t\t\t\t\tidx.set(c, new Set());\n\t\t\t\t}\n\t\t\t\tidx.get(c).add(key);\n\t\t\t};\n\t\t\tif (i.includes(this.delimiter)) {\n\t\t\t\tthis.each(this.indexKeys(i, this.delimiter, data), fn);\n\t\t\t} else {\n\t\t\t\tthis.each(Array.isArray(data[i]) ? 
data[i] : [data[i]], fn);\n\t\t\t}\n\t\t});\n\n\t\treturn this;\n\t}\n\n\t/**\n\t * Sorts all records using a comparator function\n\t * @param {Function} fn - Comparator function for sorting (a, b) => number\n\t * @param {boolean} [frozen=false] - Whether to return frozen records\n\t * @returns {Array} Sorted array of records\n\t * @example\n\t * const sorted = store.sort((a, b) => a.age - b.age); // Sort by age\n\t * const names = store.sort((a, b) => a.name.localeCompare(b.name)); // Sort by name\n\t */\n\tsort (fn, frozen = false) {\n\t\tconst dataSize = this.data.size;\n\t\tlet result = this.limit(INT_0, dataSize, true).sort(fn);\n\t\tif (frozen) {\n\t\t\tresult = this.freeze(...result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Comparator function for sorting keys with type-aware comparison logic\n\t * @param {*} a - First value to compare\n\t * @param {*} b - Second value to compare\n\t * @returns {number} Negative number if a < b, positive if a > b, zero if equal\n\t * @example\n\t * const keys = ['name', 'age', 'email'];\n\t * keys.sort(store.sortKeys); // Alphabetical sort\n\t *\n\t * const mixed = [10, '5', 'abc', 3];\n\t * mixed.sort(store.sortKeys); // Type-aware sort: numbers first, then strings\n\t */\n\tsortKeys (a, b) {\n\t\t// Handle string comparison\n\t\tif (typeof a === STRING_STRING && typeof b === STRING_STRING) {\n\t\t\treturn a.localeCompare(b);\n\t\t}\n\t\t// Handle numeric comparison\n\t\tif (typeof a === STRING_NUMBER && typeof b === STRING_NUMBER) {\n\t\t\treturn a - b;\n\t\t}\n\n\t\t// Handle mixed types or other types by converting to string\n\n\t\treturn String(a).localeCompare(String(b));\n\t}\n\n\t/**\n\t * Sorts records by a specific indexed field in ascending order\n\t * @param {string} [index=STRING_EMPTY] - Index field name to sort by\n\t * @param {boolean} [raw=false] - Whether to return raw data without processing\n\t * @returns {Array} Array of records sorted by the specified field\n\t * @throws {Error} Throws error if 
index field is empty or invalid\n\t * @example\n\t * const byAge = store.sortBy('age');\n\t * const byName = store.sortBy('name');\n\t */\n\tsortBy (index = STRING_EMPTY, raw = false) {\n\t\tif (index === STRING_EMPTY) {\n\t\t\tthrow new Error(STRING_INVALID_FIELD);\n\t\t}\n\t\tlet result = [];\n\t\tconst keys = [];\n\t\tif (this.indexes.has(index) === false) {\n\t\t\tthis.reindex(index);\n\t\t}\n\t\tconst lindex = this.indexes.get(index);\n\t\tlindex.forEach((idx, key) => keys.push(key));\n\t\tthis.each(keys.sort(this.sortKeys), i => lindex.get(i).forEach(key => result.push(this.get(key, raw))));\n\t\tif (this.immutable) {\n\t\t\tresult = Object.freeze(result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Converts all store data to a plain array of records\n\t * @returns {Array} Array containing all records in the store\n\t * @example\n\t * const allRecords = store.toArray();\n\t * console.log(`Store contains ${allRecords.length} records`);\n\t */\n\ttoArray () {\n\t\tconst result = Array.from(this.data.values());\n\t\tif (this.immutable) {\n\t\t\tthis.each(result, i => Object.freeze(i));\n\t\t\tObject.freeze(result);\n\t\t}\n\n\t\treturn result;\n\t}\n\n\t/**\n\t * Generates a RFC4122 v4 UUID for record identification\n\t * @returns {string} UUID string in standard format\n\t * @example\n\t * const id = store.uuid(); // \"f47ac10b-58cc-4372-a567-0e02b2c3d479\"\n\t */\n\tuuid () {\n\t\treturn uuid();\n\t}\n\n\t/**\n\t * Returns an iterator of all values in the store\n\t * @returns {Iterator} Iterator of record values\n\t * @example\n\t * for (const record of store.values()) {\n\t * console.log(record.name);\n\t * }\n\t */\n\tvalues () {\n\t\treturn this.data.values();\n\t}\n\n\t/**\n\t * Internal helper method for predicate matching with support for arrays and regex\n\t * @param {Object} record - Record to test against predicate\n\t * @param {Object} predicate - Predicate object with field-value pairs\n\t * @param {string} op - Operator for array matching 
('||' for OR, '&&' for AND)\n\t * @returns {boolean} True if record matches predicate criteria\n\t */\n\tmatchesPredicate (record, predicate, op) {\n\t\tconst keys = Object.keys(predicate);\n\n\t\treturn keys.every(key => {\n\t\t\tconst pred = predicate[key];\n\t\t\tconst val = record[key];\n\t\t\tif (Array.isArray(pred)) {\n\t\t\t\tif (Array.isArray(val)) {\n\t\t\t\t\treturn op === STRING_DOUBLE_AND ? pred.every(p => val.includes(p)) : pred.some(p => val.includes(p));\n\t\t\t\t} else {\n\t\t\t\t\treturn op === STRING_DOUBLE_AND ? pred.every(p => val === p) : pred.some(p => val === p);\n\t\t\t\t}\n\t\t\t} else if (pred instanceof RegExp) {\n\t\t\t\tif (Array.isArray(val)) {\n\t\t\t\t\treturn op === STRING_DOUBLE_AND ? val.every(v => pred.test(v)) : val.some(v => pred.test(v));\n\t\t\t\t} else {\n\t\t\t\t\treturn pred.test(val);\n\t\t\t\t}\n\t\t\t} else if (Array.isArray(val)) {\n\t\t\t\treturn val.includes(pred);\n\t\t\t} else {\n\t\t\t\treturn val === pred;\n\t\t\t}\n\t\t});\n\t}\n\n\t/**\n\t * Advanced filtering with predicate logic supporting AND/OR operations on arrays\n\t * @param {Object} [predicate={}] - Object with field-value pairs for filtering\n\t * @param {string} [op=STRING_DOUBLE_PIPE] - Operator for array matching ('||' for OR, '&&' for AND)\n\t * @returns {Array} Array of records matching the predicate criteria\n\t * @example\n\t * // Find records with tags containing 'admin' OR 'user'\n\t * const users = store.where({tags: ['admin', 'user']}, '||');\n\t *\n\t * // Find records with ALL specified tags\n\t * const powerUsers = store.where({tags: ['admin', 'power']}, '&&');\n\t *\n\t * // Regex matching\n\t * const emails = store.where({email: /^admin@/});\n\t */\n\twhere (predicate = {}, op = STRING_DOUBLE_PIPE) {\n\t\tconst keys = this.index.filter(i => i in predicate);\n\t\tif (keys.length === 0) return [];\n\n\t\t// Try to use indexes for better performance\n\t\tconst indexedKeys = keys.filter(k => this.indexes.has(k));\n\t\tif (indexedKeys.length 
> 0) {\n\t\t\t// Use index-based filtering for better performance\n\t\t\tlet candidateKeys = new Set();\n\t\t\tlet first = true;\n\t\t\tfor (const key of indexedKeys) {\n\t\t\t\tconst pred = predicate[key];\n\t\t\t\tconst idx = this.indexes.get(key);\n\t\t\t\tconst matchingKeys = new Set();\n\t\t\t\tif (Array.isArray(pred)) {\n\t\t\t\t\tfor (const p of pred) {\n\t\t\t\t\t\tif (idx.has(p)) {\n\t\t\t\t\t\t\tfor (const k of idx.get(p)) {\n\t\t\t\t\t\t\t\tmatchingKeys.add(k);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (idx.has(pred)) {\n\t\t\t\t\tfor (const k of idx.get(pred)) {\n\t\t\t\t\t\tmatchingKeys.add(k);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (first) {\n\t\t\t\t\tcandidateKeys = matchingKeys;\n\t\t\t\t\tfirst = false;\n\t\t\t\t} else {\n\t\t\t\t\t// AND operation across different fields\n\t\t\t\t\tcandidateKeys = new Set([...candidateKeys].filter(k => matchingKeys.has(k)));\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Filter candidates with full predicate logic\n\t\t\tconst results = [];\n\t\t\tfor (const key of candidateKeys) {\n\t\t\t\tconst record = this.get(key, true);\n\t\t\t\tif (this.matchesPredicate(record, predicate, op)) {\n\t\t\t\t\tresults.push(this.immutable ? this.get(key) : record);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn this.immutable ? 
this.freeze(...results) : results;\n\t\t}\n\n\t\t// Fallback to full scan if no indexes available\n\t\treturn this.filter(a => this.matchesPredicate(a, predicate, op));\n\t}\n}\n\n/**\n * Factory function to create a new Haro instance with optional initial data\n * @param {Array|null} [data=null] - Initial data to populate the store\n * @param {Object} [config={}] - Configuration object passed to Haro constructor\n * @returns {Haro} New Haro instance configured and optionally populated\n * @example\n * const store = haro([\n * {id: 1, name: 'John', age: 30},\n * {id: 2, name: 'Jane', age: 25}\n * ], {\n * index: ['name', 'age'],\n * versioning: true\n * });\n */\nexport function haro (data = null, config = {}) {\n\tconst obj = new Haro(config);\n\n\tif (Array.isArray(data)) {\n\t\tobj.batch(data, STRING_SET);\n\t}\n\n\treturn obj;\n}\n"],"names":["g","f","exports","module","require","define","amd","globalThis","self","lru","crypto","this","STRING_EMPTY","STRING_DOUBLE_AND","STRING_FUNCTION","STRING_OBJECT","STRING_RECORDS","STRING_STRING","STRING_NUMBER","STRING_INVALID_FUNCTION","Haro","constructor","delimiter","id","uuid","immutable","index","key","versioning","data","Map","Array","isArray","indexes","versions","Object","defineProperty","enumerable","get","from","keys","size","reindex","batch","args","type","fn","i","delete","set","onbatch","beforeBatch","map","arg","beforeClear","beforeDelete","beforeSet","override","clear","onclear","clone","structuredClone","has","Error","og","deleteIndex","ondelete","forEach","idx","values","includes","indexKeys","each","value","o","dump","result","entries","ii","arr","len","length","find","where","raw","sort","sortKeys","join","reduce","a","v","k","add","Set","freeze","filter","push","list","ctx","call","fields","split","fieldsLen","field","newResult","resultLen","valuesLen","j","newKey","limit","offset","max","registry","slice","merge","b","concat","onoverride","onset","accumulator","indices","setIndex","search","rgex","test
","lkey","lset","match","records","x","indice","c","frozen","dataSize","localeCompare","String","sortBy","lindex","toArray","matchesPredicate","record","predicate","op","every","pred","val","p","some","RegExp","indexedKeys","candidateKeys","first","matchingKeys","results","haro","config","obj"],"mappings":";;;;CAAA,SAAAA,EAAAC,GAAA,iBAAAC,SAAA,oBAAAC,OAAAF,EAAAC,QAAAE,QAAA,WAAA,mBAAAC,QAAAA,OAAAC,IAAAD,OAAA,CAAA,UAAA,UAAAJ,GAAAA,GAAAD,EAAA,oBAAAO,WAAAA,WAAAP,GAAAQ,MAAAC,IAAA,CAAA,EAAAT,EAAAU,OAAA,CAAA,CAAAC,KAAA,SAAAT,EAAAQ,GAAA,aACO,MACME,EAAe,GAGfC,EAAoB,KAKpBC,EAAkB,WAElBC,EAAgB,SAChBC,EAAiB,UAIjBC,EAAgB,SAChBC,EAAgB,SAIhBC,EAA0B,mBCchC,MAAMC,EAmBZ,WAAAC,EAAaC,UAACA,EDpDY,ICoDWC,GAAEA,EAAKZ,KAAKa,OAAMC,UAAEA,GAAY,EAAKC,MAAEA,EAAQ,GAAEC,IAAEA,ED/ChE,KC+C+EC,WAAEA,GAAa,GAAS,IAmB9H,OAlBAjB,KAAKkB,KAAO,IAAIC,IAChBnB,KAAKW,UAAYA,EACjBX,KAAKY,GAAKA,EACVZ,KAAKc,UAAYA,EACjBd,KAAKe,MAAQK,MAAMC,QAAQN,GAAS,IAAIA,GAAS,GACjDf,KAAKsB,QAAU,IAAIH,IACnBnB,KAAKgB,IAAMA,EACXhB,KAAKuB,SAAW,IAAIJ,IACpBnB,KAAKiB,WAAaA,EAClBO,OAAOC,eAAezB,KDnDO,WCmDgB,CAC5C0B,YAAY,EACZC,IAAK,IAAMP,MAAMQ,KAAK5B,KAAKkB,KAAKW,UAEjCL,OAAOC,eAAezB,KDrDG,OCqDgB,CACxC0B,YAAY,EACZC,IAAK,IAAM3B,KAAKkB,KAAKY,OAGf9B,KAAK+B,SACb,CAcA,KAAAC,CAAOC,EAAMC,ED1EY,OC2ExB,MAAMC,EDjFkB,QCiFbD,EAAsBE,GAAKpC,KAAKqC,OAAOD,GAAG,GAAQA,GAAKpC,KAAKsC,IAAI,KAAMF,GAAG,GAAM,GAE1F,OAAOpC,KAAKuC,QAAQvC,KAAKwC,YAAYP,EAAMC,GAAMO,IAAIN,GAAKD,EAC3D,CAQA,WAAAM,CAAaE,EAAKR,EAAOjC,IAExB,OAAOyC,CACR,CAYA,WAAAC,GAEA,CAQA,YAAAC,CAAc5B,EAAMf,GAAc+B,GAAQ,GAE1C,CAUA,SAAAa,CAAW7B,EAAMf,GAAciB,EAAO,CAAA,EAAIc,GAAQ,EAAOc,GAAW,GAEpE,CASA,KAAAC,GAOC,OANA/C,KAAK2C,cACL3C,KAAKkB,KAAK6B,QACV/C,KAAKsB,QAAQyB,QACb/C,KAAKuB,SAASwB,QACd/C,KAAK+B,UAAUiB,UAERhD,IACR,CAWA,KAAAiD,CAAOP,GACN,OAAOQ,gBAAgBR,EACxB,CAYA,OAAQ1B,EAAMf,GAAc+B,GAAQ,GACnC,IAAKhC,KAAKkB,KAAKiC,IAAInC,GAClB,MAAM,IAAIoC,MDhK0B,oBCkKrC,MAAMC,EAAKrD,KAAK2B,IAAIX,GAAK,GACzBhB,KAAK4C,aAAa5B,EAAKgB,GACvBhC,KAAKsD,YAAYtC,EAAKqC,GACtBrD,KAAKkB,KAAKmB,OAAOrB,GACjBhB,KAAKuD,SAASvC,EAAKgB,GACfhC,KAAKiB
,YACRjB,KAAKuB,SAASc,OAAOrB,EAEvB,CAQA,WAAAsC,CAAatC,EAAKE,GAkBjB,OAjBAlB,KAAKe,MAAMyC,QAAQpB,IAClB,MAAMqB,EAAMzD,KAAKsB,QAAQK,IAAIS,GAC7B,IAAKqB,EAAK,OACV,MAAMC,EAAStB,EAAEuB,SAAS3D,KAAKW,WAC9BX,KAAK4D,UAAUxB,EAAGpC,KAAKW,UAAWO,GAClCE,MAAMC,QAAQH,EAAKkB,IAAMlB,EAAKkB,GAAK,CAAClB,EAAKkB,IAC1CpC,KAAK6D,KAAKH,EAAQI,IACjB,GAAIL,EAAIN,IAAIW,GAAQ,CACnB,MAAMC,EAAIN,EAAI9B,IAAImC,GAClBC,EAAE1B,OAAOrB,GDzLO,IC0LZ+C,EAAEjC,MACL2B,EAAIpB,OAAOyB,EAEb,MAIK9D,IACR,CAUA,IAAAgE,CAAM9B,EAAO7B,GACZ,IAAI4D,EAeJ,OAbCA,EADG/B,IAAS7B,EACHe,MAAMQ,KAAK5B,KAAKkE,WAEhB9C,MAAMQ,KAAK5B,KAAKsB,SAASmB,IAAIL,IACrCA,EAAE,GAAKhB,MAAMQ,KAAKQ,EAAE,IAAIK,IAAI0B,IAC3BA,EAAG,GAAK/C,MAAMQ,KAAKuC,EAAG,IAEfA,IAGD/B,IAIF6B,CACR,CAUA,IAAAJ,CAAMO,EAAM,GAAIjC,GACf,MAAMkC,EAAMD,EAAIE,OAChB,IAAK,IAAIlC,EAAI,EAAGA,EAAIiC,EAAKjC,IACxBD,EAAGiC,EAAIhC,GAAIA,GAGZ,OAAOgC,CACR,CAUA,OAAAF,GACC,OAAOlE,KAAKkB,KAAKgD,SAClB,CAWA,IAAAK,CAAMC,EAAQ,GAAIC,GAAM,GACvB,MAAMzD,EAAMQ,OAAOK,KAAK2C,GAAOE,KAAK1E,KAAK2E,UAAUC,KAAK5E,KAAKW,WACvDI,EAAQf,KAAKsB,QAAQK,IAAIX,IAAQ,IAAIG,IAC3C,IAAI8C,EAAS,GACb,GAAIlD,EAAMe,KAAO,EAAG,CACnB,MAAMD,EAAO7B,KAAK4D,UAAU5C,EAAKhB,KAAKW,UAAW6D,GACjDP,EAAS7C,MAAMQ,KAAKC,EAAKgD,OAAO,CAACC,EAAGC,KAC/BhE,EAAMoC,IAAI4B,IACbhE,EAAMY,IAAIoD,GAAGvB,QAAQwB,GAAKF,EAAEG,IAAID,IAG1BF,GACL,IAAII,MAAQzC,IAAIL,GAAKpC,KAAK2B,IAAIS,EAAGqC,GACrC,CAKA,OAJKA,GAAOzE,KAAKc,YAChBmD,EAASzC,OAAO2D,OAAOlB,IAGjBA,CACR,CAYA,MAAAmB,CAAQjD,EAAIsC,GAAM,GACjB,UAAWtC,IAAOhC,EACjB,MAAM,IAAIiD,MAAM5C,GAEjB,IAAIyD,EAASjE,KAAK6E,OAAO,CAACC,EAAGC,KACxB5C,EAAG4C,IACND,EAAEO,KAAKN,GAGDD,GACL,IASH,OARKL,IACJR,EAASA,EAAOxB,IAAIL,GAAKpC,KAAKsF,KAAKlD,IAE/BpC,KAAKc,YACRmD,EAASzC,OAAO2D,OAAOlB,KAIlBA,CACR,CAYA,OAAAT,CAASrB,EAAIoD,EAAMvF,MAQlB,OAPAA,KAAKkB,KAAKsC,QAAQ,CAACM,EAAO9C,KACrBhB,KAAKc,YACRgD,EAAQ9D,KAAKiD,MAAMa,IAEpB3B,EAAGqD,KAAKD,EAAKzB,EAAO9C,IAClBhB,MAEIA,IACR,CAUA,MAAAmF,IAAWlD,GACV,OAAOT,OAAO2D,OAAOlD,EAAKQ,IAAIL,GAAKZ,OAAO2D,OAAO/C,IAClD,CAWA,GAAAT,CAAKX,EAAKyD,GAAM,GACf,IAAIR,EAASjE,KAAKkB,KAAKS,IAAIX,IAAQ,KAQnC,OAPe,OAAXiD,GAAo
BQ,IACvBR,EAASjE,KAAKsF,KAAKrB,GACfjE,KAAKc,YACRmD,EAASzC,OAAO2D,OAAOlB,KAIlBA,CACR,CAWA,GAAAd,CAAKnC,GACJ,OAAOhB,KAAKkB,KAAKiC,IAAInC,EACtB,CAaA,SAAA4C,CAAWlB,EAAMzC,GAAcU,EDhaL,ICga8BO,EAAO,IAC9D,MAAMuE,EAAS/C,EAAIgD,MAAM/E,GAAW+D,KAAK1E,KAAK2E,UACxCgB,EAAYF,EAAOnB,OACzB,IAAIL,EAAS,CAAC,IACd,IAAK,IAAI7B,EAAI,EAAGA,EAAIuD,EAAWvD,IAAK,CACnC,MAAMwD,EAAQH,EAAOrD,GACfsB,EAAStC,MAAMC,QAAQH,EAAK0E,IAAU1E,EAAK0E,GAAS,CAAC1E,EAAK0E,IAC1DC,EAAY,GACZC,EAAY7B,EAAOK,OACnByB,EAAYrC,EAAOY,OACzB,IAAK,IAAI0B,EAAI,EAAGA,EAAIF,EAAWE,IAC9B,IAAK,IAAIhB,EAAI,EAAGA,EAAIe,EAAWf,IAAK,CACnC,MAAMiB,EAAe,IAAN7D,EAAUsB,EAAOsB,GAAK,GAAGf,EAAO+B,KAAKrF,IAAY+C,EAAOsB,KACvEa,EAAUR,KAAKY,EAChB,CAEDhC,EAAS4B,CACV,CAEA,OAAO5B,CACR,CAUA,IAAApC,GACC,OAAO7B,KAAKkB,KAAKW,MAClB,CAYA,KAAAqE,CAAOC,EDpba,ECobGC,EDpbH,ECobgB3B,GAAM,GACzC,IAAIR,EAASjE,KAAKqG,SAASC,MAAMH,EAAQA,EAASC,GAAK3D,IAAIL,GAAKpC,KAAK2B,IAAIS,EAAGqC,IAK5E,OAJKA,GAAOzE,KAAKc,YAChBmD,EAASzC,OAAO2D,OAAOlB,IAGjBA,CACR,CAUA,IAAAqB,CAAM5C,GACL,MAAMuB,EAAS,CAACvB,EAAI1C,KAAKgB,KAAM0B,GAE/B,OAAO1C,KAAKc,UAAYd,KAAKmF,UAAUlB,GAAUA,CAClD,CAYA,GAAAxB,CAAKN,EAAIsC,GAAM,GACd,UAAWtC,IAAOhC,EACjB,MAAM,IAAIiD,MAAM5C,GAEjB,IAAIyD,EAAS,GASb,OARAjE,KAAKwD,QAAQ,CAACM,EAAO9C,IAAQiD,EAAOoB,KAAKlD,EAAG2B,EAAO9C,KAC9CyD,IACJR,EAASA,EAAOxB,IAAIL,GAAKpC,KAAKsF,KAAKlD,IAC/BpC,KAAKc,YACRmD,EAASzC,OAAO2D,OAAOlB,KAIlBA,CACR,CAYA,KAAAsC,CAAOzB,EAAG0B,EAAG1D,GAAW,GAWvB,OAVI1B,MAAMC,QAAQyD,IAAM1D,MAAMC,QAAQmF,GACrC1B,EAAIhC,EAAW0D,EAAI1B,EAAE2B,OAAOD,UACX1B,IAAM1E,GAAuB,OAAN0E,UAAqB0B,IAAMpG,GAAuB,OAANoG,EACpFxG,KAAK6D,KAAKrC,OAAOK,KAAK2E,GAAIpE,IACzB0C,EAAE1C,GAAKpC,KAAKuG,MAAMzB,EAAE1C,GAAIoE,EAAEpE,GAAIU,KAG/BgC,EAAI0B,EAGE1B,CACR,CAQA,OAAAvC,CAASG,EAAKR,EAAOjC,IACpB,OAAOyC,CACR,CAYA,OAAAM,GAEA,CAQA,QAAAO,CAAUvC,EAAMf,GAAc+B,GAAQ,GAEtC,CAOA,UAAA0E,CAAYxE,EAAOjC,IAEnB,CAQA,KAAA0G,CAAOjE,EAAM,GAAIV,GAAQ,GAEzB,CAYA,QAAAc,CAAU5B,EAAMgB,EAAO7B,GAEtB,GD9kB4B,YC8kBxB6B,EACHlC,KAAKsB,QAAU,IAAIH,IAAID,EAAKuB,IAAIL,GAAK,CAACA,EAAE,GAAI,IAAIjB,IAAIiB,EAAE,GAAGK,IAAI0B,GAAM,CAACA
,EAAG,GAAI,IAAIe,IAAIf,EAAG,cAChF,IAAIjC,IAAS7B,EAInB,MAAM,IAAI+C,MDxkBsB,gBCqkBhCpD,KAAKsB,QAAQyB,QACb/C,KAAKkB,KAAO,IAAIC,IAAID,EAGrB,CAGA,OAFAlB,KAAK0G,WAAWxE,IATD,CAYhB,CAWA,MAAA2C,CAAQ1C,EAAIyE,EAAc,IACzB,IAAI9B,EAAI8B,EAKR,OAJA5G,KAAKwD,QAAQ,CAACuB,EAAGC,KAChBF,EAAI3C,EAAG2C,EAAGC,EAAGC,EAAGhF,OACdA,MAEI8E,CACR,CAWA,OAAA/C,CAAShB,GACR,MAAM8F,EAAU9F,EAAQ,CAACA,GAASf,KAAKe,MAOvC,OANIA,IAAwC,IAA/Bf,KAAKe,MAAM4C,SAAS5C,IAChCf,KAAKe,MAAMsE,KAAKtE,GAEjBf,KAAK6D,KAAKgD,EAASzE,GAAKpC,KAAKsB,QAAQgB,IAAIF,EAAG,IAAIjB,MAChDnB,KAAKwD,QAAQ,CAACtC,EAAMF,IAAQhB,KAAK6D,KAAKgD,EAASzE,GAAKpC,KAAK8G,SAAS9F,EAAKE,EAAMkB,KAEtEpC,IACR,CAaA,MAAA+G,CAAQjD,EAAO/C,EAAO0D,GAAM,GAC3B,MAAMR,EAAS,IAAIiB,IACb/C,SAAY2B,IAAU3D,EACtB6G,EAAOlD,UAAgBA,EAAMmD,OAAS9G,EAC5C,IAAK2D,EAAO,OAAO9D,KAAKc,UAAYd,KAAKmF,SAAW,GACpD,MAAM0B,EAAU9F,EAAQK,MAAMC,QAAQN,GAASA,EAAQ,CAACA,GAASf,KAAKe,MACtE,IAAK,MAAMqB,KAAKyE,EAAS,CACxB,MAAMpD,EAAMzD,KAAKsB,QAAQK,IAAIS,GAC7B,GAAIqB,EACH,IAAK,MAAOyD,EAAMC,KAAS1D,EAAK,CAC/B,IAAI2D,GAAQ,EAUZ,GAPCA,EADGjF,EACK2B,EAAMoD,EAAM9E,GACV4E,EACFlD,EAAMmD,KAAK7F,MAAMC,QAAQ6F,GAAQA,EAAKtC,KDrqBxB,KCqqB6CsC,GAE3DA,IAASpD,EAGdsD,EACH,IAAK,MAAMpG,KAAOmG,EACbnH,KAAKkB,KAAKiC,IAAInC,IACjBiD,EAAOgB,IAAIjE,EAIf,CAEF,CACA,IAAIqG,EAAUjG,MAAMQ,KAAKqC,GAAQxB,IAAIzB,GAAOhB,KAAK2B,IAAIX,EAAKyD,IAK1D,OAJKA,GAAOzE,KAAKc,YAChBuG,EAAU7F,OAAO2D,OAAOkC,IAGlBA,CACR,CAaA,GAAA/E,CAAKtB,EAAM,KAAME,EAAO,CAAA,EAAIc,GAAQ,EAAOc,GAAW,GACzC,OAAR9B,IACHA,EAAME,EAAKlB,KAAKgB,MAAQhB,KAAKa,QAE9B,IAAIyG,EAAI,IAAIpG,EAAM,CAAClB,KAAKgB,KAAMA,GAE9B,GADAhB,KAAK6C,UAAU7B,EAAKsG,EAAGtF,EAAOc,GACzB9C,KAAKkB,KAAKiC,IAAInC,GAIZ,CACN,MAAMqC,EAAKrD,KAAK2B,IAAIX,GAAK,GACzBhB,KAAKsD,YAAYtC,EAAKqC,GAClBrD,KAAKiB,YACRjB,KAAKuB,SAASI,IAAIX,GAAKiE,IAAIzD,OAAO2D,OAAOnF,KAAKiD,MAAMI,KAEhDP,IACJwE,EAAItH,KAAKuG,MAAMvG,KAAKiD,MAAMI,GAAKiE,GAEjC,MAZKtH,KAAKiB,YACRjB,KAAKuB,SAASe,IAAItB,EAAK,IAAIkE,KAY7BlF,KAAKkB,KAAKoB,IAAItB,EAAKsG,GACnBtH,KAAK8G,SAAS9F,EAAKsG,EAAG,MACtB,MAAMrD,EAASjE,KAAK2B,IAAIX,GAGxB,OAFAhB,KAAK2G,MAAM1C,EAAQjC,GAEZ
iC,CACR,CASA,QAAA6C,CAAU9F,EAAKE,EAAMqG,GAoBpB,OAnBAvH,KAAK6D,KAAgB,OAAX0D,EAAkBvH,KAAKe,MAAQ,CAACwG,GAASnF,IAClD,IAAIqB,EAAMzD,KAAKsB,QAAQK,IAAIS,GACtBqB,IACJA,EAAM,IAAItC,IACVnB,KAAKsB,QAAQgB,IAAIF,EAAGqB,IAErB,MAAMtB,EAAKqF,IACL/D,EAAIN,IAAIqE,IACZ/D,EAAInB,IAAIkF,EAAG,IAAItC,KAEhBzB,EAAI9B,IAAI6F,GAAGvC,IAAIjE,IAEZoB,EAAEuB,SAAS3D,KAAKW,WACnBX,KAAK6D,KAAK7D,KAAK4D,UAAUxB,EAAGpC,KAAKW,UAAWO,GAAOiB,GAEnDnC,KAAK6D,KAAKzC,MAAMC,QAAQH,EAAKkB,IAAMlB,EAAKkB,GAAK,CAAClB,EAAKkB,IAAKD,KAInDnC,IACR,CAWA,IAAA0E,CAAMvC,EAAIsF,GAAS,GAClB,MAAMC,EAAW1H,KAAKkB,KAAKY,KAC3B,IAAImC,EAASjE,KAAKkG,MDlvBC,ECkvBYwB,GAAU,GAAMhD,KAAKvC,GAKpD,OAJIsF,IACHxD,EAASjE,KAAKmF,UAAUlB,IAGlBA,CACR,CAcA,QAAAU,CAAUG,EAAG0B,GAEZ,cAAW1B,IAAMxE,UAAwBkG,IAAMlG,EACvCwE,EAAE6C,cAAcnB,UAGb1B,IAAMvE,UAAwBiG,IAAMjG,EACvCuE,EAAI0B,EAKLoB,OAAO9C,GAAG6C,cAAcC,OAAOpB,GACvC,CAYA,MAAAqB,CAAQ9G,EAAQd,GAAcwE,GAAM,GACnC,GAAI1D,IAAUd,EACb,MAAM,IAAImD,MDvyBuB,iBCyyBlC,IAAIa,EAAS,GACb,MAAMpC,EAAO,IACmB,IAA5B7B,KAAKsB,QAAQ6B,IAAIpC,IACpBf,KAAK+B,QAAQhB,GAEd,MAAM+G,EAAS9H,KAAKsB,QAAQK,IAAIZ,GAOhC,OANA+G,EAAOtE,QAAQ,CAACC,EAAKzC,IAAQa,EAAKwD,KAAKrE,IACvChB,KAAK6D,KAAKhC,EAAK6C,KAAK1E,KAAK2E,UAAWvC,GAAK0F,EAAOnG,IAAIS,GAAGoB,QAAQxC,GAAOiD,EAAOoB,KAAKrF,KAAK2B,IAAIX,EAAKyD,MAC5FzE,KAAKc,YACRmD,EAASzC,OAAO2D,OAAOlB,IAGjBA,CACR,CASA,OAAA8D,GACC,MAAM9D,EAAS7C,MAAMQ,KAAK5B,KAAKkB,KAAKwC,UAMpC,OALI1D,KAAKc,YACRd,KAAK6D,KAAKI,EAAQ7B,GAAKZ,OAAO2D,OAAO/C,IACrCZ,OAAO2D,OAAOlB,IAGRA,CACR,CAQA,IAAApD,GACC,OAAOA,cACR,CAUA,MAAA6C,GACC,OAAO1D,KAAKkB,KAAKwC,QAClB,CASA,gBAAAsE,CAAkBC,EAAQC,EAAWC,GAGpC,OAFa3G,OAAOK,KAAKqG,GAEbE,MAAMpH,IACjB,MAAMqH,EAAOH,EAAUlH,GACjBsH,EAAML,EAAOjH,GACnB,OAAII,MAAMC,QAAQgH,GACbjH,MAAMC,QAAQiH,GACVH,IAAOjI,EAAoBmI,EAAKD,MAAMG,GAAKD,EAAI3E,SAAS4E,IAAMF,EAAKG,KAAKD,GAAKD,EAAI3E,SAAS4E,IAE1FJ,IAAOjI,EAAoBmI,EAAKD,MAAMG,GAAKD,IAAQC,GAAKF,EAAKG,KAAKD,GAAKD,IAAQC,GAE7EF,aAAgBI,OACtBrH,MAAMC,QAAQiH,GACVH,IAAOjI,EAAoBoI,EAAIF,MAAMrD,GAAKsD,EAAKpB,KAAKlC,IAAMuD,EAAIE,KAAKzD,GAAKsD,EAAKpB,KAAKlC,IAElFsD,EAAKpB,KAAKqB,GA
ERlH,MAAMC,QAAQiH,GACjBA,EAAI3E,SAAS0E,GAEbC,IAAQD,GAGlB,CAiBA,KAAA7D,CAAO0D,EAAY,GAAIC,EDh6BU,MCi6BhC,MAAMtG,EAAO7B,KAAKe,MAAMqE,OAAOhD,GAAKA,KAAK8F,GACzC,GAAoB,IAAhBrG,EAAKyC,OAAc,MAAO,GAG9B,MAAMoE,EAAc7G,EAAKuD,OAAOJ,GAAKhF,KAAKsB,QAAQ6B,IAAI6B,IACtD,GAAI0D,EAAYpE,OAAS,EAAG,CAE3B,IAAIqE,EAAgB,IAAIzD,IACpB0D,GAAQ,EACZ,IAAK,MAAM5H,KAAO0H,EAAa,CAC9B,MAAML,EAAOH,EAAUlH,GACjByC,EAAMzD,KAAKsB,QAAQK,IAAIX,GACvB6H,EAAe,IAAI3D,IACzB,GAAI9D,MAAMC,QAAQgH,IACjB,IAAK,MAAME,KAAKF,EACf,GAAI5E,EAAIN,IAAIoF,GACX,IAAK,MAAMvD,KAAKvB,EAAI9B,IAAI4G,GACvBM,EAAa5D,IAAID,QAId,GAAIvB,EAAIN,IAAIkF,GAClB,IAAK,MAAMrD,KAAKvB,EAAI9B,IAAI0G,GACvBQ,EAAa5D,IAAID,GAGf4D,GACHD,EAAgBE,EAChBD,GAAQ,GAGRD,EAAgB,IAAIzD,IAAI,IAAIyD,GAAevD,OAAOJ,GAAK6D,EAAa1F,IAAI6B,IAE1E,CAEA,MAAM8D,EAAU,GAChB,IAAK,MAAM9H,KAAO2H,EAAe,CAChC,MAAMV,EAASjI,KAAK2B,IAAIX,GAAK,GACzBhB,KAAKgI,iBAAiBC,EAAQC,EAAWC,IAC5CW,EAAQzD,KAAKrF,KAAKc,UAAYd,KAAK2B,IAAIX,GAAOiH,EAEhD,CAEA,OAAOjI,KAAKc,UAAYd,KAAKmF,UAAU2D,GAAWA,CACnD,CAGA,OAAO9I,KAAKoF,OAAON,GAAK9E,KAAKgI,iBAAiBlD,EAAGoD,EAAWC,GAC7D,EAyBD5I,EAAAkB,KAAAA,EAAAlB,EAAAwJ,KARO,SAAe7H,EAAO,KAAM8H,EAAS,CAAA,GAC3C,MAAMC,EAAM,IAAIxI,EAAKuI,GAMrB,OAJI5H,MAAMC,QAAQH,IACjB+H,EAAIjH,MAAMd,ED39Bc,OC89BlB+H,CACR,CAAA"} \ No newline at end of file diff --git a/docs/API.md b/docs/API.md new file mode 100644 index 00000000..beb0e46b --- /dev/null +++ b/docs/API.md @@ -0,0 +1,634 @@ +# Haro API Reference + +## Overview + +Haro is an immutable DataStore with indexing, versioning, and batch operations. Provides a Map-like interface with advanced querying capabilities. 
+ +## Table of Contents + +- [Haro Class](#haro-class) + - [Constructor](#constructorconfig) + - [Properties](#properties) +- [Core Methods](#core-methods) + - [set()](#setkey-data-batch-override) + - [get()](#getkey) + - [delete()](#deletekey-batch) + - [has()](#haskey) + - [clear()](#clear) +- [Query Methods](#query-methods) + - [find()](#findwhere) + - [where()](#wherepredicate-op) + - [search()](#searchvalue-index) + - [filter()](#filterfn) + - [sortBy()](#sortbyindex) + - [sort()](#sortfn-frozen) + - [limit()](#limitoffset-max) +- [Batch Operations](#batch-operations) + - [setMany()](#setmanyrecords) + - [deleteMany()](#deletemanykeys) + - [override()](#overridedata-type) +- [Iteration Methods](#iteration-methods) + - [entries()](#entries) + - [keys()](#keys) + - [values()](#values) + - [forEach()](#foreachfn-ctx) + - [map()](#mapfn) +- [Utility Methods](#utility-methods) + - [clone()](#clonearg) + - [freeze()](#freezeargs) + - [merge()](#mergea-b-override) +- [Index Management](#index-management) + - [reindex()](#reindexindex) +- [Cache Control Methods](#cache-control-methods) + - [clearCache()](#clearcache) + - [getCacheSize()](#getcachesize) + - [getCacheStats()](#getcachestats) +- [Export Methods](#export-methods) + - [dump()](#dumptype) + - [toArray()](#toarray) +- [Properties](#properties) + - [registry](#registry) + - [size](#size) +- [Factory Function](#factory-function) + - [haro()](#harodata-config) + +--- + +## Haro Class + +### Private Fields + +The Haro class uses the following private fields: + +- `#data` (Map) - Internal storage for records +- `#delimiter` (string) - Delimiter for composite indexes +- `#id` (string) - Unique instance identifier +- `#immutable` (boolean) - Immutable mode flag +- `#index` (Array) - Array of indexed field names +- `#indexes` (Map) - Map of index structures +- `#key` (string) - Primary key field name +- `#versions` (Map) - Map of version histories +- `#versioning` (boolean) - Versioning flag +- `#warnOnFullScan` 
(boolean) - Full scan warning flag +- `#inBatch` (boolean) - Batch operation state flag + +### Constructor(config) + +Creates a new Haro instance. + +**Parameters:** +- `config` (Object): Configuration object + - `cache` (boolean): Enable LRU caching for `search()` and `where()` (default: `false`) + - `cacheSize` (number): Maximum cache entries (default: `1000`) + - `delimiter` (string): Delimiter for composite indexes (default: `'|'`) + - `id` (string): Unique instance identifier (auto-generated) + - `immutable` (boolean): Return frozen objects (default: `false`) + - `index` (string[]): Fields to index (default: `[]`) + - `key` (string): Primary key field name (default: `'id'`) + - `versioning` (boolean): Enable versioning (default: `false`) + - `warnOnFullScan` (boolean): Warn on full table scans (default: `true`) + +**Example:** +```javascript +const store = new Haro({ + index: ['name', 'email'], + key: 'userId', + versioning: true, + cache: true, + cacheSize: 500 +}); +``` + +--- + +## Core Methods + +### set(key, data, batch, override) + +Sets or updates a record with automatic indexing. + +**Parameters:** +- `key` (string|null): Record key, or null for auto-generate +- `data` (Object): Record data +- `batch` (boolean): Batch operation flag (default: `false`) +- `override` (boolean): Override instead of merge (default: `false`) + +**Returns:** Object - Stored record + +**Example:** +```javascript +store.set(null, {name: 'John'}); +store.set('user123', {age: 31}); +``` + +--- + +### get(key) + +Retrieves a record by key. + +**Parameters:** +- `key` (string): Record key + +**Returns:** Object|null - Record or null + +**Example:** +```javascript +store.get('user123'); +``` + +--- + +### delete(key, batch) + +Deletes a record and removes it from all indexes. 
+
+**Parameters:**
+- `key` (string): Key to delete
+- `batch` (boolean): Batch operation flag (default: `false`)
+
+**Throws:** Error if key not found
+
+**Example:**
+```javascript
+store.delete('user123');
+```
+
+---
+
+### has(key)
+
+Checks if a record exists.
+
+**Parameters:**
+- `key` (string): Record key
+
+**Returns:** boolean - True if exists
+
+**Example:**
+```javascript
+store.has('user123');
+```
+
+---
+
+### clear()
+
+Removes all records, indexes, and versions.
+
+**Returns:** Haro - This instance
+
+**Example:**
+```javascript
+store.clear();
+```
+
+---
+
+## Query Methods
+
+### find(where)
+
+Finds records matching criteria using indexes. Supports dot notation for nested fields.
+
+**Parameters:**
+- `where` (Object): Field-value pairs to match (supports dot notation for nested paths)
+
+**Returns:** Array - Matching records
+
+**Example:**
+```javascript
+// Flat field
+store.find({department: 'engineering'});
+
+// Nested field with dot notation
+store.find({'user.email': 'john@example.com', 'user.profile.department': 'IT'});
+```
+
+---
+
+### where(predicate, op)
+
+Filters records with predicate logic supporting AND/OR on arrays. Supports dot notation for nested fields.
+
+**Parameters:**
+- `predicate` (Object): Field-value pairs (supports dot notation for nested paths)
+- `op` (string): Operator: '||' (OR) or '&&' (AND) (default: `'||'`)
+
+**Returns:** `Promise<Array<Object>>` - Matching records (async)
+
+**Example:**
+```javascript
+// Flat field
+const results = await store.where({tags: ['admin', 'user']}, '||');
+
+// Nested field with dot notation
+const filtered = await store.where({'user.profile.department': 'IT', 'user.status': 'active'});
+```
+
+---
+
+### search(value, index)
+
+Searches for records containing a value.
+
+**Parameters:**
+- `value` (*): Search value (string, function, or RegExp)
+- `index` (string|string[]): Index(es) to search, or all
+
+**Returns:** `Promise<Array<Object>>` - Matching records (async)
+
+**Example:**
+```javascript
+const results = await store.search('john');
+const matches = await store.search(/^admin/, 'role');
+```
+
+---
+
+### filter(fn)
+
+Filters records using a predicate function.
+
+**Parameters:**
+- `fn` (Function): Predicate function (record, key, store)
+
+**Returns:** Array - Filtered records
+
+**Throws:** Error if fn is not a function
+
+**Example:**
+```javascript
+store.filter(record => record.age >= 18);
+```
+
+---
+
+### sortBy(index)
+
+Sorts records by an indexed field.
+
+**Parameters:**
+- `index` (string): Field to sort by
+
+**Returns:** Array - Sorted records
+
+**Throws:** Error if index is empty
+
+**Example:**
+```javascript
+store.sortBy('age');
+```
+
+---
+
+### sort(fn, frozen)
+
+Sorts records using a comparator function.
+
+**Parameters:**
+- `fn` (Function): Comparator (a, b) => number
+- `frozen` (boolean): Return frozen records (default: `false`)
+
+**Returns:** Array - Sorted records
+
+**Throws:** Error if fn is not a function
+
+**Example:**
+```javascript
+store.sort((a, b) => a.age - b.age);
+```
+
+---
+
+### limit(offset, max)
+
+Returns a limited subset of records.
+
+**Parameters:**
+- `offset` (number): Records to skip (default: `0`)
+- `max` (number): Max records to return (default: `0`)
+
+**Returns:** Array - Records
+
+**Example:**
+```javascript
+store.limit(0, 10);
+```
+
+---
+
+## Batch Operations
+
+### setMany(records)
+
+Inserts or updates multiple records.
+
+**Parameters:**
+- `records` (Array): Records to insert or update
+
+**Returns:** Array - Stored records
+
+**Example:**
+
+```javascript
+store.setMany([{id: 1, name: 'John'}, {id: 2, name: 'Jane'}]);
+```
+
+### deleteMany(keys)
+
+Deletes multiple records.
+
+**Parameters:**
+
+- `keys` (Array): Keys to delete
+
+**Returns:** Array
+
+**Example:**
+
+```javascript
+store.deleteMany(['key1', 'key2']);
+```
+
+### override(data, type)
+
+Replaces store data or indexes.
+
+**Parameters:**
+
+- `data` (Array): Data to replace
+- `type` (string): Type: 'records' or 'indexes' (default: `'records'`)
+
+**Returns:** boolean - Success
+
+**Throws:** Error if type is invalid
+
+**Example:**
+
+```javascript
+store.override([['key1', {name: 'John'}]], 'records');
+```
+
+## Iteration Methods
+
+### entries()
+
+Returns an iterator of [key, value] pairs.
+
+**Returns:** `Iterator<[string, Object]>` - Key-value pairs
+
+**Example:**
+```javascript
+for (const [key, value] of store.entries()) { }
+```
+
+---
+
+### keys()
+
+Returns an iterator of all keys.
+
+**Returns:** `Iterator<string>` - Keys
+
+**Example:**
+```javascript
+for (const key of store.keys()) { }
+```
+
+---
+
+### values()
+
+Returns an iterator of all values.
+
+**Returns:** `Iterator<Object>` - Values
+
+**Example:**
+```javascript
+for (const record of store.values()) { }
+```
+
+---
+
+### forEach(fn, ctx)
+
+Executes a function for each record.
+
+**Parameters:**
+- `fn` (Function): Function (value, key)
+- `ctx` (*): Context for fn
+
+**Returns:** Haro - This instance
+
+**Example:**
+```javascript
+store.forEach((record, key) => console.log(key, record));
+```
+
+---
+
+### map(fn)
+
+Transforms records using a mapping function.
+
+**Parameters:**
+- `fn` (Function): Transform function (record, key)
+
+**Returns:** Array<*> - Transformed results
+
+**Throws:** Error if fn is not a function
+
+**Example:**
+```javascript
+store.map(record => record.name);
+```
+
+---
+
+## Utility Methods
+
+### clone(arg)
+
+Creates a deep clone of a value.
+
+**Parameters:**
+- `arg` (*): Value to clone
+
+**Returns:** * - Deep clone
+
+**Example:**
+```javascript
+store.clone({name: 'John', tags: ['user']});
+```
+
+---
+
+### freeze(...args)
+
+Creates a frozen array from arguments.
+ +**Parameters:** +- `args` (...*): Arguments to freeze + +**Returns:** Array<*> - Frozen array + +**Example:** +```javascript +store.freeze(obj1, obj2); +``` + +--- + +### merge(a, b, override) + +Merges two values. + +**Parameters:** +- `a` (*): Target value +- `b` (*): Source value +- `override` (boolean): Override arrays (default: `false`) + +**Returns:** * - Merged result + +**Example:** +```javascript +store.merge({a: 1}, {b: 2}); +``` + +--- + +## Index Management + +### reindex(index) + +Rebuilds indexes. + +**Parameters:** +- `index` (string|string[]): Field(s) to rebuild, or all + +**Returns:** Haro - This instance + +**Example:** +```javascript +store.reindex(); +store.reindex('name'); +``` + +--- + +## Cache Control Methods + +### clearCache() + +Clears the query cache. + +**Returns:** Haro - This instance + +**Example:** +```javascript +store.clearCache(); +``` + +### getCacheSize() + +Returns the current cache size. + +**Returns:** number - Number of entries in cache + +**Example:** +```javascript +console.log(store.getCacheSize()); // 5 +``` + +### getCacheStats() + +Returns cache statistics. + +**Returns:** Object - Stats with hits, misses, sets, deletes, evictions + +**Example:** +```javascript +console.log(store.getCacheStats()); +// { hits: 10, misses: 2, sets: 12, deletes: 0, evictions: 0 } +``` + +## Export Methods + +### dump(type) + +Exports store data or indexes. + +**Parameters:** +- `type` (string): Export type: 'records' or 'indexes' (default: `'records'`) + +**Returns:** Array - Exported data + +**Example:** +```javascript +store.dump('records'); +``` + +--- + +### toArray() + +Converts store data to an array. + +**Returns:** Array - All records + +**Example:** +```javascript +store.toArray(); +``` + +--- + +## Properties + +### registry + +Array of all keys in the store (read-only). + +**Type:** Array + +**Example:** +```javascript +console.log(store.registry); +``` + +--- + +### size + +Number of records in the store (read-only). 
+ +**Type:** number + +**Example:** +```javascript +console.log(store.size); +``` + +--- + +## Factory Function + +### haro(data, config) + +Factory function to create a Haro instance. + +**Parameters:** +- `data` (Array|null): Initial data (default: `null`) +- `config` (Object): Configuration (default: `{}`) + +**Returns:** Haro - New Haro instance + +**Example:** +```javascript +const store = haro([{id: 1, name: 'John'}], {index: ['name']}); +``` + +--- + +*Generated from src/haro.js* diff --git a/docs/CODE_STYLE_GUIDE.md b/docs/CODE_STYLE_GUIDE.md index f1b49481..c73bc19a 100644 --- a/docs/CODE_STYLE_GUIDE.md +++ b/docs/CODE_STYLE_GUIDE.md @@ -13,7 +13,7 @@ This document outlines the coding standards and conventions for the Haro project 7. [Error Handling](#error-handling) 8. [Performance Considerations](#performance-considerations) 9. [Security Guidelines](#security-guidelines) -10. [ESLint Configuration](#eslint-configuration) +10. [Oxlint Configuration](#oxlint-configuration) ## General Principles @@ -37,8 +37,8 @@ Use modern JavaScript features appropriately: ```javascript // ✅ Good - Use const/let instead of var -const API_ENDPOINT = 'https://api.example.com'; -let userData = null; +const STRING_EMPTY = ''; +let result = null; // ✅ Good - Use arrow functions for concise syntax const processData = data => data.map(item => item.value); @@ -47,11 +47,15 @@ const processData = data => data.map(item => item.value); const message = `Processing ${count} items`; // ✅ Good - Use destructuring -const {name, age} = user; +const { name, age } = user; const [first, second] = array; // ✅ Good - Use spread operator const newArray = [...existingArray, newItem]; + +// ✅ Good - Use optional chaining and nullish coalescing +const key = data[this.key] ?? 
this.uuid(); +const userName = user?.profile?.name; ``` ### Variable Declarations @@ -105,6 +109,7 @@ const proc = (req) => {...}; ### Constants - Use **UPPER_SNAKE_CASE** for constants - Group related constants together +- Import constants from `constants.js` for string literals used in comparisons ```javascript // ✅ Good @@ -114,6 +119,20 @@ const ERROR_MESSAGES = { INVALID_INPUT: 'Invalid input provided', NETWORK_ERROR: 'Network connection failed' }; + +// ✅ Good - String constants from constants.js +import { + STRING_EMPTY, + STRING_FUNCTION, + STRING_OBJECT, + STRING_RECORD_NOT_FOUND +} from './constants.js'; + +function validateData(data) { + if (typeof data !== STRING_OBJECT) { + throw new Error(STRING_RECORD_NOT_FOUND); + } +} ``` ### Classes @@ -268,14 +287,19 @@ function complexCalculation(data) { // This approach reduces time complexity from O(n*m) to O(n+m) return algorithmImplementation(data); } + +// ✅ Good - Use eslint-disable comments for intentional violations +// eslint-disable-line no-unused-vars +// Hook for custom logic before batch; override in subclass if needed +return arg; ``` ## Testing Standards ### Unit Tests - Place unit tests in `tests/unit/` directory -- Use **node-assert** for assertions -- Run tests with **Mocha** +- Use **node:assert** for assertions +- Run tests with **Node.js native test runner** (`node --test`) - Follow **AAA pattern** (Arrange, Act, Assert) ```javascript @@ -471,9 +495,9 @@ function processUserProfile(profile) { } ``` -## ESLint Configuration +## Oxlint Configuration -The project uses ESLint for code quality enforcement. Key rules include: +The project uses Oxlint for code quality enforcement. Key rules include: - **Indentation**: Tabs with consistent variable declaration alignment - **Quotes**: Double quotes with escape avoidance @@ -483,14 +507,15 @@ The project uses ESLint for code quality enforcement. 
Key rules include: - **Space Requirements**: Consistent spacing around operators and keywords - **No Unused Variables**: All variables must be used - **Consistent Returns**: Functions should have consistent return patterns +- **No Console**: Console statements are not allowed (except `warn` and `error` for warnings/errors) -### Running ESLint +### Running Oxlint ```bash # Check all files npm run lint # Fix auto-fixable issues -npm run lint:fix +npm run fix ``` ## Best Practices Summary @@ -508,8 +533,9 @@ npm run lint:fix ## Tools and Automation -- **ESLint**: Code quality and style enforcement -- **Mocha**: Test runner for unit and integration tests +- **Oxlint**: Code quality and style enforcement +- **Oxfmt**: Code formatter +- **Node.js native test runner**: Test runner for unit and integration tests - **Node Assert**: Assertion library for testing - **Rollup**: Module bundler for distribution - **Husky**: Git hooks for pre-commit checks diff --git a/docs/TECHNICAL_DOCUMENTATION.md b/docs/TECHNICAL_DOCUMENTATION.md index 18969746..652a9298 100644 --- a/docs/TECHNICAL_DOCUMENTATION.md +++ b/docs/TECHNICAL_DOCUMENTATION.md @@ -10,6 +10,7 @@ Haro is a modern, immutable DataStore designed for high-performance data operati - [Core Components](#core-components) - [Data Flow](#data-flow) - [Indexing System](#indexing-system) +- [Mathematical Foundation](#mathematical-foundation) - [Operations](#operations) - [Configuration](#configuration) - [Performance Characteristics](#performance-characteristics) @@ -41,6 +42,7 @@ graph TB E --> L["🏷️ Key Field"] E --> M["🔒 Immutable Mode"] E --> N["📊 Index Fields"] + E --> O["💾 Cache Settings"] classDef dataStore fill:#0066CC,stroke:#004499,stroke-width:2px,color:#fff classDef indexSystem fill:#008000,stroke:#006600,stroke-width:2px,color:#fff @@ -51,12 +53,33 @@ graph TB class A,B dataStore class C,H,I indexSystem class D,J,K versionStore - class E,L,M,N config + class E,L,M,N,O config class F,G detail ``` ## Core Components 
+### Private Fields + +The Haro class uses the following private fields (denoted by `#` prefix): + +- `#data` - Internal Map storing all records +- `#delimiter` - Delimiter for composite indexes +- `#id` - Unique instance identifier +- `#immutable` - Boolean flag for immutable mode +- `#index` - Array of indexed field names +- `#indexes` - Map of index structures +- `#key` - Primary key field name +- `#versions` - Map of version histories +- `#versioning` - Boolean flag for versioning +- `#warnOnFullScan` - Boolean flag for full scan warnings +- `#inBatch` - Boolean flag for batch operation state +- `#cache` - LRU cache instance (when enabled) +- `#cacheEnabled` - Boolean flag for cache state +- `#cacheSize` - Maximum cache size (when enabled) + +These fields are encapsulated and not directly accessible from outside the class. + ### Data Store (Map) - **Purpose**: Primary storage for all records - **Structure**: `Map` @@ -73,87 +96,85 @@ graph TB - **Features**: Immutable version snapshots, configurable retention ### Configuration -- **Purpose**: Store instance settings and behavior -- **Options**: Immutable mode, versioning, custom delimiters, key fields -## Data Flow - -### Record Creation Flow - -```mermaid -sequenceDiagram - participant Client - participant Haro - participant DataStore - participant IndexSystem - participant VersionStore - - Client->>+Haro: set(key, data) - - Note over Haro: Validate and prepare data - Haro->>Haro: beforeSet(key, data) - - alt Key exists - Haro->>+DataStore: get(key) - DataStore-->>-Haro: existing record - Haro->>+IndexSystem: deleteIndex(key, oldData) - IndexSystem-->>-Haro: indexes updated - - opt Versioning enabled - Haro->>+VersionStore: add version - VersionStore-->>-Haro: version stored - end - - Haro->>Haro: merge(oldData, newData) - end - - Haro->>+DataStore: set(key, processedData) - DataStore-->>-Haro: record stored - - Haro->>+IndexSystem: setIndex(key, data) - IndexSystem-->>-Haro: indexes updated - - Haro->>Haro: 
onset(record) - - Haro-->>-Client: processed record +```javascript +const store = new Haro({ + // Primary key field (default: 'id') + key: 'userId', + + // Index configuration + index: ['name', 'email', 'department', 'name|department'], + + // Immutable mode - returns frozen objects + immutable: true, + + // Version tracking + versioning: true, + + // Composite key delimiter + delimiter: '|', + + // Instance identifier (auto-generated if not provided) + id: 'user-store-1', + + // Enable warnings for full table scan queries (only applies to where()) + warnOnFullScan: true, + + // Enable LRU caching for search/where (default: false) + cache: true, + + // Maximum cache size (default: 1000) + cacheSize: 500 +}); ``` ### Query Processing Flow ```mermaid flowchart TD - A["🔍 Query Request"] --> B{"Index Available?"} + A["🔍 Query Request"] --> B{"Cached Method?
(search/where)"} - B -->|Yes| C["📇 Index Lookup"] - B -->|No| D["🔄 Full Scan"] + B -->|Yes| C{"Cache Enabled?"} + B -->|No| D{"Index Available?"} - C --> E["🔑 Extract Keys"] - D --> F["🔍 Filter Records"] + C -->|Yes| E{"Cache Hit?"} + C -->|No| D - E --> G["📊 Fetch Records"] - F --> G + E -->|Yes| F["💾 Return Cached Result"] + E -->|No| D - G --> H{"Immutable Mode?"} + D -->|Yes| G["📇 Index Lookup"] + D -->|No| H["🔄 Full Scan"] - H -->|Yes| I["🔒 Freeze Results"] - H -->|No| J["✅ Return Results"] + G --> I["📊 Fetch Records"] + H --> I - I --> J + I --> J{"Immutable Mode?"} + + J -->|Yes| K["🔒 Freeze Results"] + J -->|No| L["✅ Return Results"] + + K --> L + L --> M["💾 Cache Result"] classDef query fill:#0066CC,stroke:#004499,stroke-width:2px,color:#fff + classDef cache fill:#FF8C00,stroke:#CC7000,stroke-width:2px,color:#fff classDef index fill:#008000,stroke:#006600,stroke-width:2px,color:#fff - classDef scan fill:#FF8C00,stroke:#CC7000,stroke-width:2px,color:#fff + classDef scan fill:#FF4500,stroke:#CC3700,stroke-width:2px,color:#fff classDef result fill:#6600CC,stroke:#440088,stroke-width:2px,color:#fff class A,B query - class C,E index - class D,F scan - class G,H,I,J result + class C,E,F,M cache + class G index + class H scan + class I,J,K,L result ``` +> **Note:** Cache is only used by `search()` and `where()` methods. Methods like `get()`, `find()`, and `filter()` do not use cache. + ## Indexing System -Haro's indexing system provides O(1) lookup performance for indexed fields: +Haro's indexing system provides O(1) lookup performance for indexed fields, including support for nested paths with dot notation: ### Index Types @@ -161,17 +182,19 @@ Haro's indexing system provides O(1) lookup performance for indexed fields: graph LR A["🏷️ Index Types"] --> B["📊 Single Field
name → users"] A --> C["🔗 Composite
name|dept → users"] - A --> D["📚 Array Field
tags[*] → users"] + A --> D["📚 Array Field
tags → users"] + A --> E["🔍 Nested Path
user.email → users"] - B --> E["🔍 Direct Lookup
O(1) complexity"] - C --> F["🔍 Multi-key Lookup
O(k) complexity"] - D --> G["🔍 Array Search
O(m) complexity"] + B --> F["🔍 Direct Lookup
O(1) complexity"] + C --> G["🔍 Multi-key Lookup
O(k) complexity"] + D --> H["🔍 Array Search
O(m) complexity"] + E --> I["🔍 Nested Lookup
O(1) complexity"] classDef indexType fill:#0066CC,stroke:#004499,stroke-width:2px,color:#fff classDef performance fill:#008000,stroke:#006600,stroke-width:2px,color:#fff - class A,B,C,D indexType - class E,F,G performance + class A,B,C,D,E indexType + class F,G,H,I performance ``` ### Index Maintenance @@ -199,51 +222,195 @@ stateDiagram-v2 RebuildComplete --> IndexReady ``` +## Mathematical Foundation + +Haro's operations are grounded in computer science fundamentals, providing predictable performance characteristics through well-established data structures and algorithms. + +### Data Structures + +| Structure | Purpose | Complexity | Operations | +|-----------|---------|------------|------------| +| `Map` (data) | Primary storage | $O(1)$ get/set | get, set, delete, has | +| `Map` (indexes) | Query optimization | $O(1)$ lookup | find, where, search | +| `Set` (index values) | Unique value tracking | $O(1)$ add/has | Index maintenance | +| `Set` (versions) | Version history | $O(1)$ add | Version tracking | + +### Algorithmic Complexity + +#### Basic Operations + +| Operation | Complexity | Description | +|-----------|------------|-------------| +| GET | $O(1)$ | Direct hash map lookup | +| SET | $O(1) + O(i) + O(v)$ | Hash map insert + index updates + version storage (if versioning enabled) | +| DELETE | $O(1) + O(i)$ | Hash map delete + index cleanup | +| HAS | $O(1)$ | Hash map key existence check | + +#### Query Operations + +| Operation | Complexity | Description | +|-----------|------------|-------------| +| FIND | $O(i \times k)$ | i = number of indexes, k = composite keys generated | +| SEARCH (cached) | $O(1)$ | Direct cache lookup | +| SEARCH (uncached) | $O(n \times m)$ | n = total index entries, m = indexes searched | +| WHERE (cached) | $O(1)$ | Direct cache lookup | +| WHERE (uncached) | $O(1)$ to $O(n)$ | Indexed lookup or full scan fallback | +| FILTER | $O(n)$ | Predicate evaluation per record | +| SORTBY | $O(k \log k + n)$ | Sorting by indexed 
field (k = unique indexed values) | +| LIMIT | $O(m)$ | m = max records to return | + +#### Composite Index Formula + +For a composite index with fields $F = [f_1, f_2, \dots, f_n]$, the index keys are computed by concatenating field values with the delimiter: + +$$IK = V(f_1) + \text{delimiter} + V(f_2) + \dots + \text{delimiter} + V(f_n)$$ + +Where: +- `$V(f)$` = Value(s) for field `f` (supports dot notation for nested paths) +- For array fields, each array element generates a separate key + +**Example:** + +For data `{name: 'John', dept: 'IT'}` with composite index `name|dept`: + +Generated key: `'John|IT'` + +For array data `{name: ['John', 'Jane'], dept: 'IT'}` with composite index `name|dept`: + +Generated keys: `['John|IT', 'Jane|IT']` + +For nested data `{user: {email: 'john@example.com', profile: {dept: 'IT'}}}` with composite index `user.email|user.profile.dept`: + +Generated keys: `['john@example.com|IT']` + +### Set Theory Operations + +Haro's `find()` and `where()` methods use set operations for query optimization, including support for nested paths: + +**Find operation (AND logic across fields):** + +```math +\text{find}(\{a: v_a, b: v_b\}) = \bigcap_{k \in \{a,b\}} \text{Index}(k = v_k) +``` + +**Where operation with OR logic (union of indexes):** + +```math +\text{where}(\{t: [v_{t1}, v_{t2}]\}, '||') = \bigcup_{t \in \{v_{t1},v_{t2}\}} \text{Index}(t = v_t) +``` + +> Example: Records with tag 'a' ∪ Records with tag 'b' + +**Where operation with AND logic (intersection of indexes):** + +```math +\text{where}(\{s: v_s, r: v_r\}, '\&\&') = \bigcap_{f \in \{s,r\}} \text{Index}(f = v_f) +``` + +> Example: Records with status='active' ∩ Records with role='admin' (must have BOTH) + +**Nested path example:** + +```math +\text{find}(\{\text{user.email}: v_e, \text{user.profile.dept}: v_d\}) = \bigcap_{k \in \{e,d\}} \text{Index}(k = v_k) +``` + +> Example: Records with user.email='john@example.com' ∩ Records with user.profile.dept='IT' + +### Cache Key 
Generation + +Cache keys are generated using SHA-256 hashing of serialized query parameters: + +$$CK = \text{domain} + \text{\\_} + \text{SHA256}(\text{JSON.stringify}(\text{args}))$$ + +Where: +- `$CK$` = Cache key +- `$\text{domain}$` = Query method name ('search' or 'where') +- `$\text{args}$` = Method arguments (value, index for search; predicate, op for where) + +**Example:** +```javascript +// Cache key for where({ name: 'John' }) +CK = 'where_' + SHA256(JSON.stringify([{ name: 'John' }])) +// = 'where_a3f2b8c9d4e5f6...' +``` + +### LRU Eviction Policy + +When cache size exceeds maximum ($S > S_{max}$), the least recently used entry is evicted: + +$$\text{evict}() = \text{LRU\\_head}$$ + +Where `$\text{LRU\\_head}$` is the oldest accessed entry in the doubly-linked list. + +**Time Complexity:** +- Cache hit: `$O(1)$` - Direct hash lookup + move to end +- Cache miss: `$O(1)$` - Hash computation + insertion +- Cache eviction: `$O(1)$` - Remove head of LRU list + +### Immutability Model + +Objects are frozen using `Object.freeze()`. 
Formally: + +$$\text{freeze}(\text{obj}) = \text{obj} \text{ where } \forall \text{prop} \in \text{obj}: \text{prop is non-writable}$$ + +$$\text{deepFreeze}(\text{obj}) = \text{freeze}(\text{obj}) \text{ where } \forall \text{prop} \in \text{obj}: \text{deepFreeze}(\text{prop})$$ + +**Cache Mutation Protection:** + +When returning cached results, a deep clone is created to prevent mutation: + +$$\text{return} = \begin{cases} \text{freeze}(\text{clone}(\text{cached})) & \text{if immutable} \\ \text{clone}(\text{cached}) & \text{if mutable} \end{cases}$$ + ## Operations ### CRUD Operations Performance | Operation | Time Complexity | Space Complexity | Notes | |-----------|----------------|------------------|--------| -| **Create** | O(1) + O(i) | O(1) | i = number of indexes | -| **Read** | O(1) | O(1) | Direct key access | -| **Update** | O(1) + O(i) | O(1) | Index maintenance | -| **Delete** | O(1) + O(i) | O(1) | Cleanup indexes | -| **Find** | O(1) | O(r) | r = result set size | -| **Search** | O(n) | O(r) | Full scan fallback | -| **Batch** | O(n) + O(ni) | O(n) | n = batch size | +| **Create (set)** | O(i) | O(i) | i = number of indexes | +| **Read (get)** | O(1) | O(1) | Direct Map lookup | +| **Update (set)** | O(i) | O(i) | i = number of indexes | +| **Delete** | O(i) | O(1) | i = number of indexes | +| **Find** | O(i × k) | O(r) | i = indexes, k = composite keys, r = results | +| **Search** | O(n × m) | O(r) | n = index entries, m = indexes searched | +| **setMany** | O(n × i) | O(n) | n = records size, i = indexes | +| **deleteMany** | O(n × i) | O(n) | n = keys size, i = indexes | +| **Clear** | O(n) | O(1) | Remove all records | ### Batch Operations ```mermaid graph TD - A["📦 Batch Request"] --> B["🔄 beforeBatch()"] - B --> C["📊 Process Items"] + A["📦 Batch Request"] --> B["📊 Process Items"] - C --> D["🔗 Parallel Processing"] - D --> E1["⚡ Item 1"] - D --> E2["⚡ Item 2"] - D --> E3["⚡ Item N"] + B --> C["🔗 Sequential Processing"] + C --> D1["⚡ Item 1"] 
+ C --> D2["⚡ Item 2"] + C --> D3["⚡ Item N"] - E1 --> F["📝 Individual Operation"] - E2 --> F - E3 --> F + D1 --> E["📝 Individual Operation"] + D2 --> E + D3 --> E - F --> G["📊 Collect Results"] - G --> H["🔄 onbatch()"] - H --> I["✅ Return Results"] + E --> F["📊 Collect Results"] + F --> G["✅ Return Results"] classDef batch fill:#0066CC,stroke:#004499,stroke-width:2px,color:#fff classDef process fill:#008000,stroke:#006600,stroke-width:2px,color:#fff - classDef parallel fill:#FF8C00,stroke:#CC7000,stroke-width:2px,color:#fff + classDef sequential fill:#FF8C00,stroke:#CC7000,stroke-width:2px,color:#fff - class A,B,H,I batch - class C,F,G process - class D,E1,E2,E3 parallel + class A,F,G batch + class B,E process + class C,D1,D2,D3 sequential ``` ## Configuration +### Configuration Runtime Behavior + +Configuration options are set at construction time and cannot be changed at runtime. To modify configuration, create a new Haro instance with the desired options. + ### Initialization Options ```javascript @@ -263,32 +430,39 @@ const store = new Haro({ // Composite key delimiter delimiter: '|', - // Instance identifier - id: 'user-store-1' + // Instance identifier (auto-generated if not provided) + id: 'user-store-1', + + // Enable warnings for full table scan queries (only applies to where()) + warnOnFullScan: true }); ``` ### Runtime Configuration +> **Note:** Configuration is set at construction time. See [Initialization Options](#initialization-options) for details. 
+ ```mermaid graph TD - A["⚙️ Configuration"] --> B["🔑 Key Field"] + A["⚙️ Constructor Options"] --> B["🔑 Key Field"] A --> C["📇 Index Fields"] A --> D["🔒 Immutable Mode"] A --> E["📚 Versioning"] A --> F["🔗 Delimiter"] + A --> G["💾 Cache Settings"] - B --> G["🎯 Primary Key Selection"] - C --> H["⚡ Query Optimization"] - D --> I["🛡️ Data Protection"] - E --> J["📜 Change Tracking"] - F --> K["🔗 Composite Keys"] + B --> H["🎯 Primary Key Selection"] + C --> I["⚡ Query Optimization"] + D --> J["🛡️ Data Protection"] + E --> K["📜 Change Tracking"] + F --> L["🔗 Composite Keys"] + G --> M["⚡ Query Caching"] classDef config fill:#6600CC,stroke:#440088,stroke-width:2px,color:#fff classDef feature fill:#0066CC,stroke:#004499,stroke-width:2px,color:#fff - class A,B,C,D,E,F config - class G,H,I,J,K feature + class A,B,C,D,E,F,G config + class H,I,J,K,L,M feature ``` ## Performance Characteristics @@ -296,24 +470,37 @@ graph TD ### Memory Usage ```mermaid -pie title Memory Distribution +pie title Memory Distribution (without cache) - Illustrative "Record Data" : 60 "Index Structures" : 25 "Version History" : 10 "Metadata" : 5 ``` +```mermaid +pie title Memory Distribution (with cache enabled) - Illustrative + "Record Data" : 50 + "Index Structures" : 20 + "Version History" : 10 + "Cache" : 15 + "Metadata" : 5 +``` + +> **Note:** Actual memory distribution varies based on record count, index count, record sizes, and version history depth. + ### Query Performance ```mermaid xychart-beta - title "Query Performance by Data Size" + title "Query Performance by Data Size (Relative)" x-axis [1K, 10K, 100K, 1M, 10M] - y-axis "Response Time (ms)" 0 --> 100 - line "Indexed Query" [0.1, 0.15, 0.2, 0.3, 0.5] - line "Full Scan" [1, 10, 100, 1000, 10000] + y-axis "Relative Time" 0 --> 100 + line "Indexed Query" [1, 1.5, 2, 3, 5] + line "Full Scan" [10, 100, 1000, 10000, 100000] ``` +> **Note:** Actual performance varies based on hardware, data characteristics, and index configuration. 
Run `npm run benchmark` for environment-specific measurements. + ## Usage Patterns ### Real-time Data Management @@ -344,29 +531,25 @@ function handleUserEvent(event) { ### Caching Layer ```javascript -// Cache configuration -const cache = new Haro({ - key: 'cacheKey', - index: ['category', 'expiry'], - immutable: false +// Built-in query cache +const store = new Haro({ + index: ['name', 'category'], + cache: true, + cacheSize: 1000 }); -// Cache with TTL -function setCache(key, data, ttl = 3600000) { - return cache.set(key, { - cacheKey: key, - data: data, - expiry: Date.now() + ttl, - category: 'api-response' - }); -} +// First call - cache miss +const results1 = await store.where({ name: 'John' }); -// Cleanup expired entries -function cleanupCache() { - const now = Date.now(); - const expired = cache.filter(record => record.expiry < now); - expired.forEach(record => cache.delete(record.cacheKey)); -} +// Second call - cache hit (instant) +const results2 = await store.where({ name: 'John' }); + +// Get cache statistics +console.log(store.getCacheStats()); +// { hits: 1, misses: 1, sets: 1, deletes: 0, evictions: 0 } + +// Manual cache clear +store.clearCache(); ``` ### State Management @@ -402,7 +585,7 @@ const stateManager = { }; ``` -## 2025 Application Examples +## 2026 Application Examples ### Edge Computing Data Store @@ -450,7 +633,7 @@ class EdgeDataManager { async syncToCloud() { const batch = this.syncQueue.splice(0, 100); - await this.cloudSync.batch(batch); + await this.cloudSync.setMany(batch); } } ``` @@ -547,7 +730,7 @@ class MLFeatureStore { } })); - return this.store.batch(batch, 'set'); + return this.store.setMany(batch); } getFeatureVector(entityId, featureTypes, version = 'latest') { @@ -741,33 +924,47 @@ new Haro(config) **Parameters:** - `config` (Object): Configuration options - - `key` (string): Primary key field name - - `index` (string[]): Fields to index - - `immutable` (boolean): Enable immutable mode - - `versioning` (boolean): Enable 
version tracking - - `delimiter` (string): Composite key delimiter + - `key` (string): Primary key field name (default: 'id') + - `index` (string[]): Fields to index (default: []) + - `immutable` (boolean): Enable immutable mode (default: false) + - `versioning` (boolean): Enable version tracking (default: false) + - `delimiter` (string): Composite key delimiter (default: '|') + - `id` (string): Unique instance identifier (auto-generated if not provided) + - `warnOnFullScan` (boolean): Enable warnings for full table scans (default: true) ### Core Methods -| Method | Description | Time Complexity | -|--------|-------------|----------------| -| `set(key, data)` | Create or update record | O(1) + O(i) | -| `get(key)` | Retrieve record by key | O(1) | -| `delete(key)` | Remove record | O(1) + O(i) | -| `find(criteria)` | Query with indexes | O(1) to O(n) | -| `search(value, index)` | Search across indexes | O(n) | -| `batch(records, type)` | Bulk operations | O(n) + O(ni) | -| `clear()` | Remove all records | O(n) | +| Method | Description | Time Complexity (Uncached) | Time Complexity (Cached) | +|--------|-------------|----------------|----------------| +| `set(key, data)` | Create or update record | O(1) + O(i) + O(v) | O(1) + O(i) + O(v) | +| `get(key)` | Retrieve record by key | O(1) | O(1) | +| `delete(key)` | Remove record | O(1) + O(i) | O(1) + O(i) | +| `find(criteria)` | Query with indexes | O(1) to O(n) | O(1) to O(n) | +| `search(value, index)` | Search across indexes | O(n × m) | O(1) | +| `where(criteria, op)` | Advanced filtering | O(1) to O(n) | O(1) | +| `setMany(records)` | Bulk insert/update | O(n) + O(ni) | O(n) + O(ni) | +| `deleteMany(keys)` | Bulk delete | O(n) + O(ni) | O(n) + O(ni) | +| `clear()` | Remove all records | O(n) | O(n) | + +> Note: O(v) = version storage overhead when versioning enabled, O(i) = number of indexes, O(n) = number of records, O(m) = number of indexes searched ### Query Methods -| Method | Description | Use Case | 
-|--------|-------------|----------| -| `filter(predicate)` | Filter with function | Complex logic | -| `where(criteria, op)` | Advanced filtering | Multi-condition queries | -| `sortBy(field)` | Sort by indexed field | Ordered results | -| `limit(offset, max)` | Pagination | Large datasets | -| `map(transform)` | Transform records | Data projection | +| Method | Description | Use Case | Time Complexity | +|--------|-------------|----------|----------------| +| `filter(predicate)` | Filter with function | Complex logic | O(n) | +| `where(criteria, op)` | Advanced filtering with AND/OR | Multi-condition queries | O(1) to O(n) | +| `sortBy(field)` | Sort by indexed field | Ordered results | O(k log k + n) | +| `limit(offset, max)` | Pagination | Large datasets | O(max) | +| `map(transform)` | Transform records | Data projection | O(n) | +| `reduce(fn, acc)` | Reduce to single value | Aggregations | O(n) | +| `search(value, index)` | Search across indexes | Full-text search | O(n) | + +> Note: k = number of unique indexed values for sortBy + +### Utility Methods + +Haro uses internal utility methods for cloning and merging data. These are implementation details and not part of the public API. 
## Best Practices @@ -794,14 +991,16 @@ const auditStore = new Haro({ immutable: true }); -// ✅ Good - Batch operations for bulk updates +// ✅ Good - Batch operations for bulk data const records = [...largeDataset]; -store.batch(records, 'set'); +store.setMany(records); // ❌ Bad - Individual operations for bulk data largeDataset.forEach(record => store.set(null, record)); ``` + + ### Query Optimization ```javascript diff --git a/eslint.config.js b/eslint.config.js deleted file mode 100644 index 6509734b..00000000 --- a/eslint.config.js +++ /dev/null @@ -1,185 +0,0 @@ -import globals from "globals"; -import pluginJs from "@eslint/js"; - -export default [ - // Mocha environment for test files - { - files: ["tests/**/*.js"], - languageOptions: { - globals: { - ...globals.mocha - } - } - }, - { - languageOptions: { - globals: { - ...globals.node, - it: true, - describe: true, - crypto: true - }, - parserOptions: { - ecmaVersion: 2022 - } - }, - rules: { - "arrow-parens": [2, "as-needed"], - "arrow-spacing": [2, {"before": true, "after": true}], - "block-scoped-var": [0], - "brace-style": [2, "1tbs", {"allowSingleLine": true}], - "camelcase": [0], - "comma-dangle": [2, "never"], - "comma-spacing": [2], - "comma-style": [2, "last"], - "complexity": [0, 11], - "consistent-return": [2], - "consistent-this": [0, "that"], - "curly": [2, "multi-line"], - "default-case": [2], - "dot-notation": [2, {"allowKeywords": true}], - "eol-last": [2], - "eqeqeq": [2], - "func-names": [0], - "func-style": [0, "declaration"], - "generator-star-spacing": [2, "after"], - "guard-for-in": [0], - "handle-callback-err": [0], - "indent": ["error", "tab", {"VariableDeclarator": {"var": 1, "let": 1, "const": 1}, "SwitchCase": 1}], - "key-spacing": [2, {"beforeColon": false, "afterColon": true}], - "quotes": [2, "double", "avoid-escape"], - "max-depth": [0, 4], - "max-len": [0, 80, 4], - "max-nested-callbacks": [0, 2], - "max-params": [0, 3], - "max-statements": [0, 10], - "new-parens": [2], - 
"new-cap": [2, {"capIsNewExceptions": ["ToInteger", "ToObject", "ToPrimitive", "ToUint32"]}], - "newline-after-var": [0], - "newline-before-return": [2], - "no-alert": [2], - "no-array-constructor": [2], - "no-bitwise": [0], - "no-caller": [2], - "no-catch-shadow": [2], - "no-cond-assign": [2], - "no-console": [0], - "no-constant-condition": [1], - "no-continue": [2], - "no-control-regex": [2], - "no-debugger": [2], - "no-delete-var": [2], - "no-div-regex": [0], - "no-dupe-args": [2], - "no-dupe-keys": [2], - "no-duplicate-case": [2], - "no-else-return": [0], - "no-empty": [2], - "no-eq-null": [0], - "no-eval": [2], - "no-ex-assign": [2], - "no-extend-native": [1], - "no-extra-bind": [2], - "no-extra-boolean-cast": [2], - "no-extra-semi": [1], - "no-empty-character-class": [2], - "no-fallthrough": [2], - "no-floating-decimal": [2], - "no-func-assign": [2], - "no-implied-eval": [2], - "no-inline-comments": [0], - "no-inner-declarations": [2, "functions"], - "no-invalid-regexp": [2], - "no-irregular-whitespace": [2], - "no-iterator": [2], - "no-label-var": [2], - "no-labels": [2], - "no-lone-blocks": [2], - "no-lonely-if": [2], - "no-loop-func": [2], - "no-mixed-requires": [0, false], - "no-mixed-spaces-and-tabs": [2, false], - "no-multi-spaces": [2], - "no-multi-str": [2], - "no-multiple-empty-lines": [2, {"max": 2}], - "no-native-reassign": [0], - "no-negated-in-lhs": [2], - "no-nested-ternary": [0], - "no-new": [2], - "no-new-func": [0], - "no-new-object": [2], - "no-new-require": [0], - "no-new-wrappers": [2], - "no-obj-calls": [2], - "no-octal": [2], - "no-octal-escape": [2], - "no-param-reassign": [0], - "no-path-concat": [0], - "no-plusplus": [0], - "no-process-env": [0], - "no-process-exit": [0], - "no-proto": [2], - "no-redeclare": [2], - "no-regex-spaces": [2], - "no-reserved-keys": [0], - "no-reno-new-funced-modules": [0], - "no-return-assign": [2], - "no-script-url": [2], - "no-self-compare": [0], - "no-sequences": [2], - "no-shadow": [2], - 
"no-shadow-restricted-names": [2], - "no-spaced-func": [2], - "no-sparse-arrays": [2], - "no-sync": [0], - "no-ternary": [0], - "no-throw-literal": [2], - "no-trailing-spaces": [2], - "no-undef": [2], - "no-undef-init": [2], - "no-undefined": [0], - "no-underscore-dangle": [0], - "no-unreachable": [2], - "no-unused-expressions": [2], - "no-unused-vars": [2, {"vars": "all", "args": "after-used"}], - "no-use-before-define": [2], - "no-void": [0], - "no-warning-comments": [0, {"terms": ["todo", "fixme", "xxx"], "location": "start"}], - "no-with": [2], - "no-extra-parens": [2], - "one-var": [0], - "operator-assignment": [0, "always"], - "operator-linebreak": [2, "after"], - "padded-blocks": [0], - "quote-props": [0], - "radix": [0], - "semi": [2], - "semi-spacing": [2, {before: false, after: true}], - "sort-vars": [0], - "keyword-spacing": [2], - "space-before-function-paren": [2, {anonymous: "always", named: "always"}], - "space-before-blocks": [2, "always"], - "space-in-brackets": [0, "never", { - singleValue: true, - arraysInArrays: false, - arraysInObjects: false, - objectsInArrays: true, - objectsInObjects: true, - propertyName: false - }], - "space-in-parens": [2, "never"], - "space-infix-ops": [2], - "space-unary-ops": [2, {words: true, nonwords: false}], - "spaced-line-comment": [0, "always"], - strict: [0], - "use-isnan": [2], - "valid-jsdoc": [0], - "valid-typeof": [2], - "vars-on-top": [0], - "wrap-iife": [2], - "wrap-regex": [2], - yoda: [2, "never", {exceptRange: true}] - } - }, - pluginJs.configs.recommended -]; diff --git a/package-lock.json b/package-lock.json index 697fdc1b..983602b6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,350 +1,380 @@ { "name": "haro", - "version": "16.0.0", + "version": "17.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "haro", - "version": "16.0.0", + "version": "17.0.0", "license": "BSD-3-Clause", + "dependencies": { + "tiny-lru": "^13.0.0" + }, "devDependencies": { - "@eslint/js": 
"^9.31.0", - "@rollup/plugin-terser": "^1.0.0", "auto-changelog": "^2.5.0", - "c8": "^11.0.0", - "eslint": "^10.0.0", - "globals": "^17.0.0", + "globals": "^17.5.0", "husky": "^9.1.7", - "mocha": "^11.7.1", - "rollup": "^4.45.0" + "oxfmt": "^0.45.0", + "oxlint": "^1.60.0", + "rollup": "^4.60.2", + "tinybench": "^6.0.0" }, "engines": { - "node": ">=17.0.0" + "node": ">=19.0.0" } }, - "node_modules/@bcoe/v8-coverage": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", - "integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", + "node_modules/@oxfmt/binding-android-arm-eabi": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-android-arm-eabi/-/binding-android-arm-eabi-0.45.0.tgz", + "integrity": "sha512-A/UMxFob1fefCuMeGxQBulGfFE38g2Gm23ynr3u6b+b7fY7/ajGbNsa3ikMIkGMLJW/TRoQaMoP1kME7S+815w==", + "cpu": [ + "arm" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "android" + ], "engines": { - "node": ">=18" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@eslint-community/eslint-utils": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.8.0.tgz", - "integrity": "sha512-MJQFqrZgcW0UNYLGOuQpey/oTN59vyWwplvCGZztn1cKz9agZPPYpJB7h2OMmuu7VLqkvEjN8feFZJmxNF9D+Q==", + "node_modules/@oxfmt/binding-android-arm64": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-android-arm64/-/binding-android-arm64-0.45.0.tgz", + "integrity": "sha512-L63z4uZmHjgvvqvMJD7mwff8aSBkM0+X4uFr6l6U5t6+Qc9DCLVZWIunJ7Gm4fn4zHPdSq6FFQnhu9yqqobxIg==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", - "dependencies": { - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 
|| >=8.0.0" - } - }, - "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", - "dev": true, - "license": "Apache-2.0", + "optional": true, + "os": [ + "android" + ], "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@eslint-community/regexpp": { - "version": "4.12.2", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", - "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", + "node_modules/@oxfmt/binding-darwin-arm64": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-darwin-arm64/-/binding-darwin-arm64-0.45.0.tgz", + "integrity": "sha512-UV34dd623FzqT+outIGndsCA/RBB+qgB3XVQhgmmJ9PJwa37NzPC9qzgKeOhPKxVk2HW+JKldQrVL54zs4Noww==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": "^12.0.0 || ^14.0.0 || >=16.0.0" - } - }, - "node_modules/@eslint/config-array": { - "version": "0.23.5", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.23.5.tgz", - "integrity": "sha512-Y3kKLvC1dvTOT+oGlqNQ1XLqK6D1HU2YXPc52NmAlJZbMMWDzGYXMiPRJ8TYD39muD/OTjlZmNJ4ib7dvSrMBA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@eslint/object-schema": "^3.0.5", - "debug": "^4.3.1", - "minimatch": "^10.2.4" - }, - "engines": { - "node": "^20.19.0 || ^22.13.0 || >=24" - } - }, - "node_modules/@eslint/config-helpers": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.5.5.tgz", - "integrity": 
"sha512-eIJYKTCECbP/nsKaaruF6LW967mtbQbsw4JTtSVkUQc9MneSkbrgPJAbKl9nWr0ZeowV8BfsarBmPpBzGelA2w==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@eslint/core": "^1.2.1" - }, - "engines": { - "node": "^20.19.0 || ^22.13.0 || >=24" - } - }, - "node_modules/@eslint/core": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-1.2.1.tgz", - "integrity": "sha512-MwcE1P+AZ4C6DWlpin/OmOA54mmIZ/+xZuJiQd4SyB29oAJjN30UW9wkKNptW2ctp4cEsvhlLY/CsQ1uoHDloQ==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@types/json-schema": "^7.0.15" - }, - "engines": { - "node": "^20.19.0 || ^22.13.0 || >=24" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@eslint/js": { - "version": "9.39.2", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz", - "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==", + "node_modules/@oxfmt/binding-darwin-x64": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-darwin-x64/-/binding-darwin-x64-0.45.0.tgz", + "integrity": "sha512-pMNJv0CMa1pDefVPeNbuQxibh8ITpWDFEhMC/IBB9Zlu76EbgzYwrzI4Cb11mqX2+rIYN70UTrh3z06TM59ptQ==", + "cpu": [ + "x64" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://eslint.org/donate" - } - }, - "node_modules/@eslint/object-schema": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-3.0.5.tgz", - "integrity": "sha512-vqTaUEgxzm+YDSdElad6PiRoX4t8VGDjCtt05zn4nU810UIx/uNEV7/lZJ6KwFThKZOzOxzXy48da+No7HZaMw==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^20.19.0 || ^22.13.0 || >=24" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@eslint/plugin-kit": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.7.1.tgz", 
- "integrity": "sha512-rZAP3aVgB9ds9KOeUSL+zZ21hPmo8dh6fnIFwRQj5EAZl9gzR7wxYbYXYysAM8CTqGmUGyp2S4kUdV17MnGuWQ==", + "node_modules/@oxfmt/binding-freebsd-x64": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-freebsd-x64/-/binding-freebsd-x64-0.45.0.tgz", + "integrity": "sha512-xTcRoxbbo61sW2+ZRPeH+vp/o9G8gkdhiVumFU+TpneiPm14c79l6GFlxPXlCE9bNWikigbsrvJw46zCVAQFfg==", + "cpu": [ + "x64" + ], "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@eslint/core": "^1.2.1", - "levn": "^0.4.1" - }, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], "engines": { - "node": "^20.19.0 || ^22.13.0 || >=24" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@humanfs/core": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", - "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "node_modules/@oxfmt/binding-linux-arm-gnueabihf": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.45.0.tgz", + "integrity": "sha512-hWL8Hdni+3U1mPFx1UtWeGp3tNb6EhBAUHRMbKUxVkOp3WwoJbpVO2bfUVbS4PfpledviXXNHSTl1veTa6FhkQ==", + "cpu": [ + "arm" + ], "dev": true, - "license": "Apache-2.0", + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=18.18.0" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@humanfs/node": { - "version": "0.16.6", - "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", - "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", + "node_modules/@oxfmt/binding-linux-arm-musleabihf": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.45.0.tgz", + "integrity": 
"sha512-6Blt/0OBT7vvfQpqYuYbpbFLPqSiaYpEJzUUWhinPEuADypDbtV1+LdjM0vYBNGPvnj85ex7lTerEX6JGcPt9w==", + "cpu": [ + "arm" + ], "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@humanfs/core": "^0.19.1", - "@humanwhocodes/retry": "^0.3.0" - }, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=18.18.0" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", - "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "node_modules/@oxfmt/binding-linux-arm64-gnu": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.45.0.tgz", + "integrity": "sha512-jLjoLfe+hGfjhA8hNBSdw85yCA8ePKq7ME4T+g6P9caQXvmt6IhE2X7iVjnVdkmYUWEzZrxlh4p6RkDmAMJY/A==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "Apache-2.0", + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@humanwhocodes/module-importer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "node_modules/@oxfmt/binding-linux-arm64-musl": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.45.0.tgz", + "integrity": "sha512-XQKXZIKYJC3GQJ8FnD3iMntpw69Wd9kDDK/Xt79p6xnFYlGGxSNv2vIBvRTDg5CKByWFWWZLCRDOXoP/m6YN4g==", + "cpu": [ + "arm64" + ], "dev": true, - "license": "Apache-2.0", + "libc": [ + "musl" + ], + "license": "MIT", 
+ "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=12.22" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@humanwhocodes/retry": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", - "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "node_modules/@oxfmt/binding-linux-ppc64-gnu": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.45.0.tgz", + "integrity": "sha512-+g5RiG+xOkdrCWkKodv407nTvMq4vYM18Uox2MhZBm/YoqFxxJpWKsloskFFG5NU13HGPw1wzYjjOVcyd9moCA==", + "cpu": [ + "ppc64" + ], "dev": true, - "license": "Apache-2.0", + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "node_modules/@oxfmt/binding-linux-riscv64-gnu": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.45.0.tgz", + "integrity": "sha512-V7dXKoSyEbWAkkSF4JJNtF+NJZDmJoSarSoP30WCsB3X636Rehd3CvxBj49FIJxEBFWhvcUjGSHVeU8Erck1bQ==", + "cpu": [ + "riscv64" + ], "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, 
+ "os": [ + "linux" + ], "engines": { - "node": ">=12" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@istanbuljs/schema": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", - "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "node_modules/@oxfmt/binding-linux-riscv64-musl": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.45.0.tgz", + "integrity": "sha512-Vdelft1sAEYojVGgcODEFXSWYQYlIvoyIGWebKCuUibd1tvS1TjTx413xG2ZLuHpYj45CkN/ztMLMX6jrgqpgg==", + "cpu": [ + "riscv64" + ], "dev": true, + "libc": [ + "musl" + ], "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=8" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.12", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz", - "integrity": "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==", + "node_modules/@oxfmt/binding-linux-s390x-gnu": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.45.0.tgz", + "integrity": "sha512-RR7xKgNpqwENnK0aYCGYg0JycY2n93J0reNjHyes+I9Gq52dH95x+CBlnlAQHCPfz6FGnKA9HirgUl14WO6o7w==", + "cpu": [ + "s390x" + ], "dev": true, + "libc": [ + "glibc" + ], "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0", - "@jridgewell/trace-mapping": "^0.3.24" + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + 
"node_modules/@oxfmt/binding-linux-x64-gnu": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.45.0.tgz", + "integrity": "sha512-U/QQ0+BQNSHxjuXR/utvXnQ50Vu5kUuqEomZvQ1/3mhgbBiMc2WU9q5kZ5WwLp3gnFIx9ibkveoRSe2EZubkqg==", + "cpu": [ + "x64" + ], "dev": true, + "libc": [ + "glibc" + ], "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=6.0.0" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@jridgewell/source-map": { - "version": "0.3.10", - "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.10.tgz", - "integrity": "sha512-0pPkgz9dY+bijgistcTTJ5mR+ocqRXLuhXHYdzoMmmoJ2C9S46RCm2GMUbatPEUK9Yjy26IrAy8D/M00lLkv+Q==", + "node_modules/@oxfmt/binding-linux-x64-musl": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-linux-x64-musl/-/binding-linux-x64-musl-0.45.0.tgz", + "integrity": "sha512-o5TLOUCF0RWQjsIS06yVC+kFgp092/yLe6qBGSUvtnmTVw9gxjpdQSXc3VN5Cnive4K11HNstEZF8ROKHfDFSw==", + "cpu": [ + "x64" + ], "dev": true, + "libc": [ + "musl" + ], "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25" + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", - "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.29", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz", - "integrity": "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==", + "node_modules/@oxfmt/binding-openharmony-arm64": { + 
"version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-openharmony-arm64/-/binding-openharmony-arm64-0.45.0.tgz", + "integrity": "sha512-RnGcV3HgPuOjsGx/k9oyRNKmOp+NBLGzZTdPDYbc19r7NGeYPplnUU/BfU35bX2Y/O4ejvHxcfkvW2WoYL/gsg==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "node_modules/@oxfmt/binding-win32-arm64-msvc": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.45.0.tgz", + "integrity": "sha512-v3Vj7iKKsUFwt9w5hsqIIoErKVoENC6LoqfDlteOQ5QMDCXihlqLoxpmviUhXnNncg4zV6U9BPwlBbwa+qm4wg==", + "cpu": [ + "arm64" + ], "dev": true, "license": "MIT", "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=14" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@rollup/plugin-terser": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@rollup/plugin-terser/-/plugin-terser-1.0.0.tgz", - "integrity": "sha512-FnCxhTBx6bMOYQrar6C8h3scPt8/JwIzw3+AJ2K++6guogH5fYaIFia+zZuhqv0eo1RN7W1Pz630SyvLbDjhtQ==", + "node_modules/@oxfmt/binding-win32-ia32-msvc": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.45.0.tgz", + "integrity": "sha512-N8yotPBX6ph0H3toF4AEpdCeVPrdcSetj+8eGiZGsrLsng3bs/Q5HPu4bbSxip5GBPx5hGbGHrZwH4+rcrjhHA==", + "cpu": [ + "ia32" + ], "dev": true, "license": "MIT", - "dependencies": { - "serialize-javascript": "^7.0.3", - "smob": "^1.0.0", - "terser": "^5.17.4" - }, + "optional": true, + 
"os": [ + "win32" + ], "engines": { - "node": ">=20.0.0" - }, - "peerDependencies": { - "rollup": "^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@rollup/plugin-terser/node_modules/serialize-javascript": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-7.0.4.tgz", - "integrity": "sha512-DuGdB+Po43Q5Jxwpzt1lhyFSYKryqoNjQSA9M92tyw0lyHIOur+XCalOUe0KTJpyqzT8+fQ5A0Jf7vCx/NKmIg==", + "node_modules/@oxfmt/binding-win32-x64-msvc": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@oxfmt/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.45.0.tgz", + "integrity": "sha512-w5MMTRCK1dpQeRA+HHqXQXyN33DlG/N2LOYxJmaT4fJjcmZrbNnqw7SmIk7I2/a2493PPLZ+2E/Ar6t2iKVMug==", + "cpu": [ + "x64" + ], "dev": true, - "license": "BSD-3-Clause", + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=20.0.0" + "node": "^20.19.0 || >=22.12.0" } }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.1.tgz", - "integrity": "sha512-d6FinEBLdIiK+1uACUttJKfgZREXrF0Qc2SmLII7W2AD8FfiZ9Wjd+rD/iRuf5s5dWrr1GgwXCvPqOuDquOowA==", + "node_modules/@oxlint/binding-android-arm-eabi": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-android-arm-eabi/-/binding-android-arm-eabi-1.60.0.tgz", + "integrity": "sha512-YdeJKaZckDQL1qa62a1aKq/goyq48aX3yOxaaWqWb4sau4Ee4IiLbamftNLU3zbePky6QsDj6thnSSzHRBjDfA==", "cpu": [ "arm" ], @@ -353,12 +383,15 @@ "optional": true, "os": [ "android" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.1.tgz", - "integrity": 
"sha512-YjG/EwIDvvYI1YvYbHvDz/BYHtkY4ygUIXHnTdLhG+hKIQFBiosfWiACWortsKPKU/+dUwQQCKQM3qrDe8c9BA==", + "node_modules/@oxlint/binding-android-arm64": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-android-arm64/-/binding-android-arm64-1.60.0.tgz", + "integrity": "sha512-7ANS7PpXCfq84xZQ8E5WPs14gwcuPcl+/8TFNXfpSu0CQBXz3cUo2fDpHT8v8HJN+Ut02eacvMAzTnc9s6X4tw==", "cpu": [ "arm64" ], @@ -367,12 +400,15 @@ "optional": true, "os": [ "android" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.1.tgz", - "integrity": "sha512-mjCpF7GmkRtSJwon+Rq1N8+pI+8l7w5g9Z3vWj4T7abguC4Czwi3Yu/pFaLvA3TTeMVjnu3ctigusqWUfjZzvw==", + "node_modules/@oxlint/binding-darwin-arm64": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-darwin-arm64/-/binding-darwin-arm64-1.60.0.tgz", + "integrity": "sha512-pJsgd9AfplLGBm1fIr25V6V14vMrayhx4uIQvlfH7jWs2SZwSrvi3TfgfJySB8T+hvyEH8K2zXljQiUnkgUnfQ==", "cpu": [ "arm64" ], @@ -381,12 +417,15 @@ "optional": true, "os": [ "darwin" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.1.tgz", - "integrity": "sha512-haZ7hJ1JT4e9hqkoT9R/19XW2QKqjfJVv+i5AGg57S+nLk9lQnJ1F/eZloRO3o9Scy9CM3wQ9l+dkXtcBgN5Ew==", + "node_modules/@oxlint/binding-darwin-x64": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-darwin-x64/-/binding-darwin-x64-1.60.0.tgz", + "integrity": "sha512-Ue1aXHX49ivwflKqGJc7zcd/LeLgbhaTcDCQStgx5x06AXgjEAZmvrlMuIkWd4AL4FHQe6QJ9f33z04Cg448VQ==", "cpu": [ "x64" ], @@ -395,44 +434,385 @@ "optional": true, "os": [ "darwin" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - 
"node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.1.tgz", - "integrity": "sha512-czw90wpQq3ZsAVBlinZjAYTKduOjTywlG7fEeWKUA7oCmpA8xdTkxZZlwNJKWqILlq0wehoZcJYfBvOyhPTQ6w==", + "node_modules/@oxlint/binding-freebsd-x64": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-freebsd-x64/-/binding-freebsd-x64-1.60.0.tgz", + "integrity": "sha512-YCyQzsQtusQw+gNRW9rRTifSO+Dt/+dtCl2NHoDMZqJlRTEZ/Oht9YnuporI9yiTx7+cB+eqzX3MtHHVHGIWhg==", "cpu": [ - "arm64" + "x64" ], "dev": true, "license": "MIT", "optional": true, "os": [ "freebsd" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.1.tgz", - "integrity": "sha512-KVB2rqsxTHuBtfOeySEyzEOB7ltlB/ux38iu2rBQzkjbwRVlkhAGIEDiiYnO2kFOkJp+Z7pUXKyrRRFuFUKt+g==", + "node_modules/@oxlint/binding-linux-arm-gnueabihf": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.60.0.tgz", + "integrity": "sha512-c7dxM2Zksa45Qw16i2iGY3Fti2NirJ38FrsBsKw+qcJ0OtqTsBgKJLF0xV+yLG56UH01Z8WRPgsw31e0MoRoGQ==", "cpu": [ - "x64" + "arm" ], "dev": true, "license": "MIT", "optional": true, "os": [ - "freebsd" - ] - }, + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-linux-arm-musleabihf": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-1.60.0.tgz", + "integrity": "sha512-ZWALoA42UYqBEP1Tbw9OWURgFGS1nWj2AAvLdY6ZcGx/Gj93qVCBKjcvwXMupZibYwFbi9s/rzqkZseb/6gVtQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + 
}, + "node_modules/@oxlint/binding-linux-arm64-gnu": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.60.0.tgz", + "integrity": "sha512-tpy+1w4p9hN5CicMCxqNy6ymfRtV5ayE573vFNjp1k1TN/qhLFgflveZoE/0++RlkHikBz2vY545NWm/hp7big==", + "cpu": [ + "arm64" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-linux-arm64-musl": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.60.0.tgz", + "integrity": "sha512-eDYDXZGhQAXyn6GwtwiX/qcLS0HlOLPJ/+iiIY8RYr+3P8oKBmgKxADLlniL6FtWfE7pPk7IGN9/xvDEvDvFeg==", + "cpu": [ + "arm64" + ], + "dev": true, + "libc": [ + "musl" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-linux-ppc64-gnu": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.60.0.tgz", + "integrity": "sha512-nxehly5XYBHUWI9VJX1bqCf9j/B43DaK/aS/T1fcxCpX3PA4Rm9BB54nPD1CKayT8xg6REN1ao+01hSRNgy8OA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-linux-riscv64-gnu": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-1.60.0.tgz", + "integrity": "sha512-j1qf/NaUfOWQutjeoooNG1Q0zsK0XGmSu1uDLq3cctquRF3j7t9Hxqf/76ehCc5GEUAanth2W4Fa+XT1RFg/nw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + 
"node_modules/@oxlint/binding-linux-riscv64-musl": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-1.60.0.tgz", + "integrity": "sha512-YELKPRefQ/q/h3RUmeRfPCUhh2wBvgV1RyZ/F9M9u8cDyXsQW2ojv1DeWQTt466yczDITjZnIOg/s05pk7Ve2A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "libc": [ + "musl" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-linux-s390x-gnu": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.60.0.tgz", + "integrity": "sha512-JkO3C6Gki7Y6h/MiIkFKvHFOz98/YWvQ4WYbK9DLXACMP2rjULzkeGyAzorJE5S1dzLQGFgeqvN779kSFwoV1g==", + "cpu": [ + "s390x" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-linux-x64-gnu": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.60.0.tgz", + "integrity": "sha512-XjKHdFVCpZZZSWBCKyyqCq65s2AKXykMXkjLoKYODrD+f5toLhlwsMESscu8FbgnJQ4Y/dpR/zdazsahmgBJIA==", + "cpu": [ + "x64" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-linux-x64-musl": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-linux-x64-musl/-/binding-linux-x64-musl-1.60.0.tgz", + "integrity": "sha512-js29ZWIuPhNWzY8NC7KoffEMEeWG105vbmm+8EOJsC+T/jHBiKIJEUF78+F/IrgEWMMP9N0kRND4Pp75+xAhKg==", + "cpu": [ + "x64" + ], + "dev": true, + "libc": [ + "musl" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + 
"node_modules/@oxlint/binding-openharmony-arm64": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-openharmony-arm64/-/binding-openharmony-arm64-1.60.0.tgz", + "integrity": "sha512-H+PUITKHk04stFpWj3x3Kg08Afp/bcXSBi0EhasR5a0Vw7StXHTzdl655PUI0fB4qdh2Wsu6Dsi+3ACxPoyQnA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-win32-arm64-msvc": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.60.0.tgz", + "integrity": "sha512-WA/yc7f7ZfCefBXVzNHn1Ztulb1EFwNBb4jMZ6pjML0zz6pHujlF3Q3jySluz3XHl/GNeMTntG1seUBWVMlMag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-win32-ia32-msvc": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-1.60.0.tgz", + "integrity": "sha512-33YxL1sqwYNZXtn3MD/4dno6s0xeedXOJlT1WohkVD565WvohClZUr7vwKdAk954n4xiEWJkewiCr+zLeq7AeA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxlint/binding-win32-x64-msvc": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/@oxlint/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.60.0.tgz", + "integrity": "sha512-JOro4ZcfBLamJCyfURQmOQByoorgOdx3ZjAkSqnb/CyG/i+lN3KoV5LAgk5ZAW6DPq7/Cx7n23f8DuTWXTWgyQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.60.2", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.2.tgz", + "integrity": "sha512-dnlp69efPPg6Uaw2dVqzWRfAWRnYVb1XJ8CyyhIbZeaq4CA5/mLeZ1IEt9QqQxmbdvagjLIm2ZL8BxXv5lH4Yw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.2.tgz", + "integrity": "sha512-OqZTwDRDchGRHHm/hwLOL7uVPB9aUvI0am/eQuWMNyFHf5PSEQmyEeYYheA0EPPKUO/l0uigCp+iaTjoLjVoHg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.2.tgz", + "integrity": "sha512-UwRE7CGpvSVEQS8gUMBe1uADWjNnVgP3Iusyda1nSRwNDCsRjnGc7w6El6WLQsXmZTbLZx9cecegumcitNfpmA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.2.tgz", + "integrity": "sha512-gjEtURKLCC5VXm1I+2i1u9OhxFsKAQJKTVB8WvDAHF+oZlq0GTVFOlTlO1q3AlCTE/DF32c16ESvfgqR7343/g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.2.tgz", + "integrity": "sha512-Bcl6CYDeAgE70cqZaMojOi/eK63h5Me97ZqAQoh77VPjMysA/4ORQBRGo3rRy45x4MzVlU9uZxs8Uwy7ZaKnBw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.60.2", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.2.tgz", + "integrity": "sha512-LU+TPda3mAE2QB0/Hp5VyeKJivpC6+tlOXd1VMoXV/YFMvk/MNk5iXeBfB4MQGRWyOYVJ01625vjkr0Az98OJQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.1.tgz", - "integrity": "sha512-L+34Qqil+v5uC0zEubW7uByo78WOCIrBvci69E7sFASRl0X7b/MB6Cqd1lky/CtcSVTydWa2WZwFuWexjS5o6g==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.2.tgz", + "integrity": "sha512-2QxQrM+KQ7DAW4o22j+XZ6RKdxjLD7BOWTP0Bv0tmjdyhXSsr2Ul1oJDQqh9Zf5qOwTuTc7Ek83mOFaKnodPjg==", "cpu": [ "arm" ], "dev": true, + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -440,13 +820,16 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.1.tgz", - "integrity": "sha512-n83O8rt4v34hgFzlkb1ycniJh7IR5RCIqt6mz1VRJD6pmhRi0CXdmfnLu9dIUS6buzh60IvACM842Ffb3xd6Gg==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.2.tgz", + "integrity": "sha512-TbziEu2DVsTEOPif2mKWkMeDMLoYjx95oESa9fkQQK7r/Orta0gnkcDpzwufEcAO2BLBsD7mZkXGFqEdMRRwfw==", "cpu": [ "arm" ], "dev": true, + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -454,13 +837,16 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.1.tgz", - "integrity": 
"sha512-Nql7sTeAzhTAja3QXeAI48+/+GjBJ+QmAH13snn0AJSNL50JsDqotyudHyMbO2RbJkskbMbFJfIJKWA6R1LCJQ==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.2.tgz", + "integrity": "sha512-bO/rVDiDUuM2YfuCUwZ1t1cP+/yqjqz+Xf2VtkdppefuOFS2OSeAfgafaHNkFn0t02hEyXngZkxtGqXcXwO8Rg==", "cpu": [ "arm64" ], "dev": true, + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -468,13 +854,16 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.1.tgz", - "integrity": "sha512-+pUymDhd0ys9GcKZPPWlFiZ67sTWV5UU6zOJat02M1+PiuSGDziyRuI/pPue3hoUwm2uGfxdL+trT6Z9rxnlMA==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.2.tgz", + "integrity": "sha512-hr26p7e93Rl0Za+JwW7EAnwAvKkehh12BU1Llm9Ykiibg4uIr2rbpxG9WCf56GuvidlTG9KiiQT/TXT1yAWxTA==", "cpu": [ "arm64" ], "dev": true, + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -482,13 +871,16 @@ ] }, "node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.1.tgz", - "integrity": "sha512-VSvgvQeIcsEvY4bKDHEDWcpW4Yw7BtlKG1GUT4FzBUlEKQK0rWHYBqQt6Fm2taXS+1bXvJT6kICu5ZwqKCnvlQ==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.2.tgz", + "integrity": "sha512-pOjB/uSIyDt+ow3k/RcLvUAOGpysT2phDn7TTUB3n75SlIgZzM6NKAqlErPhoFU+npgY3/n+2HYIQVbF70P9/A==", "cpu": [ "loong64" ], "dev": true, + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -496,13 +888,16 @@ ] }, "node_modules/@rollup/rollup-linux-loong64-musl": { - "version": "4.60.1", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.1.tgz", - "integrity": "sha512-4LqhUomJqwe641gsPp6xLfhqWMbQV04KtPp7/dIp0nzPxAkNY1AbwL5W0MQpcalLYk07vaW9Kp1PBhdpZYYcEw==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.2.tgz", + "integrity": "sha512-2/w+q8jszv9Ww1c+6uJT3OwqhdmGP2/4T17cu8WuwyUuuaCDDJ2ojdyYwZzCxx0GcsZBhzi3HmH+J5pZNXnd+Q==", "cpu": [ "loong64" ], "dev": true, + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -510,13 +905,16 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.1.tgz", - "integrity": "sha512-tLQQ9aPvkBxOc/EUT6j3pyeMD6Hb8QF2BTBnCQWP/uu1lhc9AIrIjKnLYMEroIz/JvtGYgI9dF3AxHZNaEH0rw==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.2.tgz", + "integrity": "sha512-11+aL5vKheYgczxtPVVRhdptAM2H7fcDR5Gw4/bTcteuZBlH4oP9f5s9zYO9aGZvoGeBpqXI/9TZZihZ609wKw==", "cpu": [ "ppc64" ], "dev": true, + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -524,13 +922,16 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-musl": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.1.tgz", - "integrity": "sha512-RMxFhJwc9fSXP6PqmAz4cbv3kAyvD1etJFjTx4ONqFP9DkTkXsAMU4v3Vyc5BgzC+anz7nS/9tp4obsKfqkDHg==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.2.tgz", + "integrity": "sha512-i16fokAGK46IVZuV8LIIwMdtqhin9hfYkCh8pf8iC3QU3LpwL+1FSFGej+O7l3E/AoknL6Dclh2oTdnRMpTzFQ==", "cpu": [ "ppc64" ], "dev": true, + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -538,13 +939,16 @@ ] }, 
"node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.1.tgz", - "integrity": "sha512-QKgFl+Yc1eEk6MmOBfRHYF6lTxiiiV3/z/BRrbSiW2I7AFTXoBFvdMEyglohPj//2mZS4hDOqeB0H1ACh3sBbg==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.2.tgz", + "integrity": "sha512-49FkKS6RGQoriDSK/6E2GkAsAuU5kETFCh7pG4yD/ylj9rKhTmO3elsnmBvRD4PgJPds5W2PkhC82aVwmUcJ7A==", "cpu": [ "riscv64" ], "dev": true, + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -552,13 +956,16 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.1.tgz", - "integrity": "sha512-RAjXjP/8c6ZtzatZcA1RaQr6O1TRhzC+adn8YZDnChliZHviqIjmvFwHcxi4JKPSDAt6Uhf/7vqcBzQJy0PDJg==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.2.tgz", + "integrity": "sha512-mjYNkHPfGpUR00DuM1ZZIgs64Hpf4bWcz9Z41+4Q+pgDx73UwWdAYyf6EG/lRFldmdHHzgrYyge5akFUW0D3mQ==", "cpu": [ "riscv64" ], "dev": true, + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -566,13 +973,16 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.1.tgz", - "integrity": "sha512-wcuocpaOlaL1COBYiA89O6yfjlp3RwKDeTIA0hM7OpmhR1Bjo9j31G1uQVpDlTvwxGn2nQs65fBFL5UFd76FcQ==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.2.tgz", + "integrity": "sha512-ALyvJz965BQk8E9Al/JDKKDLH2kfKFLTGMlgkAbbYtZuJt9LU8DW3ZoDMCtQpXAltZxwBHevXz5u+gf0yA0YoA==", "cpu": [ "s390x" ], "dev": true, + "libc": [ + "glibc" + ], "license": 
"MIT", "optional": true, "os": [ @@ -580,13 +990,16 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.1.tgz", - "integrity": "sha512-77PpsFQUCOiZR9+LQEFg9GClyfkNXj1MP6wRnzYs0EeWbPcHs02AXu4xuUbM1zhwn3wqaizle3AEYg5aeoohhg==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.2.tgz", + "integrity": "sha512-UQjrkIdWrKI626Du8lCQ6MJp/6V1LAo2bOK9OTu4mSn8GGXIkPXk/Vsp4bLHCd9Z9Iz2OTEaokUE90VweJgIYQ==", "cpu": [ "x64" ], "dev": true, + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -594,13 +1007,16 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.1.tgz", - "integrity": "sha512-5cIATbk5vynAjqqmyBjlciMJl1+R/CwX9oLk/EyiFXDWd95KpHdrOJT//rnUl4cUcskrd0jCCw3wpZnhIHdD9w==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.2.tgz", + "integrity": "sha512-bTsRGj6VlSdn/XD4CGyzMnzaBs9bsRxy79eTqTCBsA8TMIEky7qg48aPkvJvFe1HyzQ5oMZdg7AnVlWQSKLTnw==", "cpu": [ "x64" ], "dev": true, + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -608,9 +1024,9 @@ ] }, "node_modules/@rollup/rollup-openbsd-x64": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.1.tgz", - "integrity": "sha512-cl0w09WsCi17mcmWqqglez9Gk8isgeWvoUZ3WiJFYSR3zjBQc2J5/ihSjpl+VLjPqjQ/1hJRcqBfLjssREQILw==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.2.tgz", + "integrity": "sha512-6d4Z3534xitaA1FcMWP7mQPq5zGwBmGbhphh2DwaA1aNIXUu3KTOfwrWpbwI4/Gr0uANo7NTtaykFyO2hPuFLg==", "cpu": [ "x64" ], @@ -622,9 +1038,9 @@ ] }, 
"node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.1.tgz", - "integrity": "sha512-4Cv23ZrONRbNtbZa37mLSueXUCtN7MXccChtKpUnQNgF010rjrjfHx3QxkS2PI7LqGT5xXyYs1a7LbzAwT0iCA==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.2.tgz", + "integrity": "sha512-NetAg5iO2uN7eB8zE5qrZ3CSil+7IJt4WDFLcC75Ymywq1VZVD6qJ6EvNLjZ3rEm6gB7XW5JdT60c6MN35Z85Q==", "cpu": [ "arm64" ], @@ -636,9 +1052,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.1.tgz", - "integrity": "sha512-i1okWYkA4FJICtr7KpYzFpRTHgy5jdDbZiWfvny21iIKky5YExiDXP+zbXzm3dUcFpkEeYNHgQ5fuG236JPq0g==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.2.tgz", + "integrity": "sha512-NCYhOotpgWZ5kdxCZsv6Iudx0wX8980Q/oW4pNFNihpBKsDbEA1zpkfxJGC0yugsUuyDZ7gL37dbzwhR0VI7pQ==", "cpu": [ "arm64" ], @@ -650,9 +1066,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.1.tgz", - "integrity": "sha512-u09m3CuwLzShA0EYKMNiFgcjjzwqtUMLmuCJLeZWjjOYA3IT2Di09KaxGBTP9xVztWyIWjVdsB2E9goMjZvTQg==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.2.tgz", + "integrity": "sha512-RXsaOqXxfoUBQoOgvmmijVxJnW2IGB0eoMO7F8FAjaj0UTywUO/luSqimWBJn04WNgUkeNhh7fs7pESXajWmkg==", "cpu": [ "ia32" ], @@ -664,9 +1080,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-gnu": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.1.tgz", - "integrity": 
"sha512-k+600V9Zl1CM7eZxJgMyTUzmrmhB/0XZnF4pRypKAlAgxmedUA+1v9R+XOFv56W4SlHEzfeMtzujLJD22Uz5zg==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.2.tgz", + "integrity": "sha512-qdAzEULD+/hzObedtmV6iBpdL5TIbKVztGiK7O3/KYSf+HIzU257+MX1EXJcyIiDbMAqmbwaufcYPvyRryeZtA==", "cpu": [ "x64" ], @@ -678,9 +1094,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.1.tgz", - "integrity": "sha512-lWMnixq/QzxyhTV6NjQJ4SFo1J6PvOX8vUx5Wb4bBPsEb+8xZ89Bz6kOXpfXj9ak9AHTQVQzlgzBEc1SyM27xQ==", + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.2.tgz", + "integrity": "sha512-Nd/SgG27WoA9e+/TdK74KnHz852TLa94ovOYySo/yMPuTmpckK/jIF2jSwS3g7ELSKXK13/cVdmg1Z/DaCWKxA==", "cpu": [ "x64" ], @@ -691,13 +1107,6 @@ "win32" ] }, - "node_modules/@types/esrecurse": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/@types/esrecurse/-/esrecurse-4.3.1.tgz", - "integrity": "sha512-xJBAbDifo5hpffDBuHl0Y8ywswbiAp/Wi7Y/GtAgSlZyIABppyurxVueOPE8LUQOxdlgi6Zqce7uoEpqNTeiUw==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", @@ -705,2186 +1114,417 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/istanbul-lib-coverage": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", - "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/json-schema": { - "version": "7.0.15", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", - "integrity": 
"sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", - "dev": true, - "license": "MIT" - }, - "node_modules/acorn": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", - "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", + "node_modules/auto-changelog": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/auto-changelog/-/auto-changelog-2.5.0.tgz", + "integrity": "sha512-UTnLjT7I9U2U/xkCUH5buDlp8C7g0SGChfib+iDrJkamcj5kaMqNKHNfbKJw1kthJUq8sUo3i3q2S6FzO/l/wA==", "dev": true, "license": "MIT", + "dependencies": { + "commander": "^7.2.0", + "handlebars": "^4.7.7", + "import-cwd": "^3.0.0", + "node-fetch": "^2.6.1", + "parse-github-url": "^1.0.3", + "semver": "^7.3.5" + }, "bin": { - "acorn": "bin/acorn" + "auto-changelog": "src/index.js" }, "engines": { - "node": ">=0.4.0" + "node": ">=8.3" } }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", "dev": true, "license": "MIT", - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + "engines": { + "node": ">= 10" } }, - "node_modules/ajv": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz", - "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==", + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": 
"sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, + "hasInstallScript": true, "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, - "node_modules/ansi-regex": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", - "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "node_modules/globals": { + "version": "17.5.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-17.5.0.tgz", + "integrity": "sha512-qoV+HK2yFl/366t2/Cb3+xxPUo5BuMynomoDmiaZBIdbs+0pYbjfZU+twLhGKp4uCZ/+NbtpVepH5bGCxRyy2g==", "dev": true, "license": "MIT", "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "node_modules/handlebars": { + "version": "4.7.9", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.9.tgz", + "integrity": "sha512-4E71E0rpOaQuJR2A3xDZ+GM1HyWYv1clR58tC8emQNeQe3RH7MAzSbat+V0wG78LQBo6m6bzSG/L4pBuCsgnUQ==", "dev": true, "license": "MIT", "dependencies": { - "color-convert": "^2.0.1" + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" }, "engines": { - "node": ">=8" + "node": ">=0.4.7" }, - "funding": { - "url": 
"https://github.com/chalk/ansi-styles?sponsor=1" + "optionalDependencies": { + "uglify-js": "^3.1.4" } }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true, - "license": "Python-2.0" - }, - "node_modules/auto-changelog": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/auto-changelog/-/auto-changelog-2.5.0.tgz", - "integrity": "sha512-UTnLjT7I9U2U/xkCUH5buDlp8C7g0SGChfib+iDrJkamcj5kaMqNKHNfbKJw1kthJUq8sUo3i3q2S6FzO/l/wA==", + "node_modules/husky": { + "version": "9.1.7", + "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.7.tgz", + "integrity": "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==", "dev": true, "license": "MIT", - "dependencies": { - "commander": "^7.2.0", - "handlebars": "^4.7.7", - "import-cwd": "^3.0.0", - "node-fetch": "^2.6.1", - "parse-github-url": "^1.0.3", - "semver": "^7.3.5" - }, "bin": { - "auto-changelog": "src/index.js" + "husky": "bin.js" }, "engines": { - "node": ">=8.3" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/typicode" } }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "node_modules/import-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/import-cwd/-/import-cwd-3.0.0.tgz", + "integrity": "sha512-4pnzH16plW+hgvRECbDWpQl3cqtvSofHWh44met7ESfZ8UZOWWddm8hEyDTqREJ9RbYHY8gi8DqmaelApoOGMg==", "dev": true, - "license": "MIT" + "license": "MIT", + "dependencies": { + "import-from": "^3.0.0" + }, + "engines": { + "node": ">=8" + } }, - "node_modules/brace-expansion": { - "version": "5.0.2", - "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.2.tgz", - "integrity": "sha512-Pdk8c9poy+YhOgVWw1JNN22/HcivgKWwpxKq04M/jTmHyCZn12WPJebZxdjSa5TmBqISrUSgNYU3eRORljfCCw==", + "node_modules/import-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/import-from/-/import-from-3.0.0.tgz", + "integrity": "sha512-CiuXOFFSzkU5x/CR0+z7T91Iht4CXgfCxVOFRhh2Zyhg5wOpWvvDLQUsWl+gcN+QscYBjez8hDCt85O7RLDttQ==", "dev": true, "license": "MIT", "dependencies": { - "balanced-match": "^4.0.2" + "resolve-from": "^5.0.0" }, "engines": { - "node": "20 || >=22" + "node": ">=8" } }, - "node_modules/brace-expansion/node_modules/balanced-match": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.3.tgz", - "integrity": "sha512-1pHv8LX9CpKut1Zp4EXey7Z8OfH11ONNH6Dhi2WDUt31VVZFXZzKwXcysBgqSumFCmR+0dqjMK5v5JiFHzi0+g==", + "node_modules/import-from/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true, "license": "MIT", "engines": { - "node": "20 || >=22" + "node": ">=8" } }, - "node_modules/browser-stdout": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", "dev": true, - "license": "ISC" + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, - "node_modules/buffer-from": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", "dev": true, "license": "MIT" }, - "node_modules/c8": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/c8/-/c8-11.0.0.tgz", - "integrity": "sha512-e/uRViGHSVIJv7zsaDKM7VRn2390TgHXqUSvYwPHBQaU6L7E9L0n9JbdkwdYPvshDT0KymBmmlwSpms3yBaMNg==", + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", "dev": true, - "license": "ISC", + "license": "MIT", "dependencies": { - "@bcoe/v8-coverage": "^1.0.1", - "@istanbuljs/schema": "^0.1.3", - "find-up": "^5.0.0", - "foreground-child": "^3.1.1", - "istanbul-lib-coverage": "^3.2.0", - "istanbul-lib-report": "^3.0.1", - "istanbul-reports": "^3.1.6", - "test-exclude": "^8.0.0", - "v8-to-istanbul": "^9.0.0", - "yargs": "^17.7.2", - "yargs-parser": "^21.1.1" - }, - "bin": { - "c8": "bin/c8.js" + "whatwg-url": "^5.0.0" }, "engines": { - "node": "20 || >=22" + "node": "4.x || >=6.0.0" }, "peerDependencies": { - "monocart-coverage-reports": "^2" + "encoding": "^0.1.0" }, "peerDependenciesMeta": { - "monocart-coverage-reports": { + "encoding": { "optional": true } } }, - "node_modules/camelcase": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", - "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "node_modules/oxfmt": { + "version": "0.45.0", + "resolved": 
"https://registry.npmjs.org/oxfmt/-/oxfmt-0.45.0.tgz", + "integrity": "sha512-0o/COoN9fY50bjVeM7PQsNgbhndKurBIeTIcspW033OumksjJJmIVDKjAk5HMwU/GHTxSOdGDdhJ6BRzGPmsHg==", "dev": true, "license": "MIT", + "dependencies": { + "tinypool": "2.1.0" + }, + "bin": { + "oxfmt": "bin/oxfmt" + }, "engines": { - "node": ">=10" + "node": "^20.19.0 || >=22.12.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "url": "https://github.com/sponsors/Boshen" + }, + "optionalDependencies": { + "@oxfmt/binding-android-arm-eabi": "0.45.0", + "@oxfmt/binding-android-arm64": "0.45.0", + "@oxfmt/binding-darwin-arm64": "0.45.0", + "@oxfmt/binding-darwin-x64": "0.45.0", + "@oxfmt/binding-freebsd-x64": "0.45.0", + "@oxfmt/binding-linux-arm-gnueabihf": "0.45.0", + "@oxfmt/binding-linux-arm-musleabihf": "0.45.0", + "@oxfmt/binding-linux-arm64-gnu": "0.45.0", + "@oxfmt/binding-linux-arm64-musl": "0.45.0", + "@oxfmt/binding-linux-ppc64-gnu": "0.45.0", + "@oxfmt/binding-linux-riscv64-gnu": "0.45.0", + "@oxfmt/binding-linux-riscv64-musl": "0.45.0", + "@oxfmt/binding-linux-s390x-gnu": "0.45.0", + "@oxfmt/binding-linux-x64-gnu": "0.45.0", + "@oxfmt/binding-linux-x64-musl": "0.45.0", + "@oxfmt/binding-openharmony-arm64": "0.45.0", + "@oxfmt/binding-win32-arm64-msvc": "0.45.0", + "@oxfmt/binding-win32-ia32-msvc": "0.45.0", + "@oxfmt/binding-win32-x64-msvc": "0.45.0" + } + }, + "node_modules/oxlint": { + "version": "1.60.0", + "resolved": "https://registry.npmjs.org/oxlint/-/oxlint-1.60.0.tgz", + "integrity": "sha512-tnRzTWiWJ9pg3ftRWnD0+Oqh78L6ZSwcEudvCZaER0PIqiAnNyXj5N1dPwjmNpDalkKS9m/WMLN1CTPUBPmsgw==", "dev": true, "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "bin": { + "oxlint": "bin/oxlint" }, 
"engines": { - "node": ">=10" + "node": "^20.19.0 || >=22.12.0" }, "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" + "url": "https://github.com/sponsors/Boshen" + }, + "optionalDependencies": { + "@oxlint/binding-android-arm-eabi": "1.60.0", + "@oxlint/binding-android-arm64": "1.60.0", + "@oxlint/binding-darwin-arm64": "1.60.0", + "@oxlint/binding-darwin-x64": "1.60.0", + "@oxlint/binding-freebsd-x64": "1.60.0", + "@oxlint/binding-linux-arm-gnueabihf": "1.60.0", + "@oxlint/binding-linux-arm-musleabihf": "1.60.0", + "@oxlint/binding-linux-arm64-gnu": "1.60.0", + "@oxlint/binding-linux-arm64-musl": "1.60.0", + "@oxlint/binding-linux-ppc64-gnu": "1.60.0", + "@oxlint/binding-linux-riscv64-gnu": "1.60.0", + "@oxlint/binding-linux-riscv64-musl": "1.60.0", + "@oxlint/binding-linux-s390x-gnu": "1.60.0", + "@oxlint/binding-linux-x64-gnu": "1.60.0", + "@oxlint/binding-linux-x64-musl": "1.60.0", + "@oxlint/binding-openharmony-arm64": "1.60.0", + "@oxlint/binding-win32-arm64-msvc": "1.60.0", + "@oxlint/binding-win32-ia32-msvc": "1.60.0", + "@oxlint/binding-win32-x64-msvc": "1.60.0" + }, + "peerDependencies": { + "oxlint-tsgolint": ">=0.18.0" + }, + "peerDependenciesMeta": { + "oxlint-tsgolint": { + "optional": true + } } }, - "node_modules/chokidar": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", - "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "node_modules/parse-github-url": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/parse-github-url/-/parse-github-url-1.0.3.tgz", + "integrity": "sha512-tfalY5/4SqGaV/GIGzWyHnFjlpTPTNpENR9Ea2lLldSJ8EWXMsvacWucqY3m3I4YPtas15IxTLQVQ5NSYXPrww==", "dev": true, "license": "MIT", - "dependencies": { - "readdirp": "^4.0.1" + "bin": { + "parse-github-url": "cli.js" }, "engines": { - "node": ">= 14.16.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" + "node": ">= 0.10" } }, - 
"node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "node_modules/rollup": { + "version": "4.60.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.2.tgz", + "integrity": "sha512-J9qZyW++QK/09NyN/zeO0dG/1GdGfyp9lV8ajHnRVLfo/uFsbji5mHnDgn/qYdUHyCkM2N+8VyspgZclfAh0eQ==", "dev": true, - "license": "ISC", + "license": "MIT", "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" }, "engines": { - "node": ">=12" + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.60.2", + "@rollup/rollup-android-arm64": "4.60.2", + "@rollup/rollup-darwin-arm64": "4.60.2", + "@rollup/rollup-darwin-x64": "4.60.2", + "@rollup/rollup-freebsd-arm64": "4.60.2", + "@rollup/rollup-freebsd-x64": "4.60.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.60.2", + "@rollup/rollup-linux-arm-musleabihf": "4.60.2", + "@rollup/rollup-linux-arm64-gnu": "4.60.2", + "@rollup/rollup-linux-arm64-musl": "4.60.2", + "@rollup/rollup-linux-loong64-gnu": "4.60.2", + "@rollup/rollup-linux-loong64-musl": "4.60.2", + "@rollup/rollup-linux-ppc64-gnu": "4.60.2", + "@rollup/rollup-linux-ppc64-musl": "4.60.2", + "@rollup/rollup-linux-riscv64-gnu": "4.60.2", + "@rollup/rollup-linux-riscv64-musl": "4.60.2", + "@rollup/rollup-linux-s390x-gnu": "4.60.2", + "@rollup/rollup-linux-x64-gnu": "4.60.2", + "@rollup/rollup-linux-x64-musl": "4.60.2", + "@rollup/rollup-openbsd-x64": "4.60.2", + "@rollup/rollup-openharmony-arm64": "4.60.2", + "@rollup/rollup-win32-arm64-msvc": "4.60.2", + "@rollup/rollup-win32-ia32-msvc": "4.60.2", + "@rollup/rollup-win32-x64-gnu": "4.60.2", + "@rollup/rollup-win32-x64-msvc": "4.60.2", + "fsevents": "~2.3.2" } }, - 
"node_modules/cliui/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "dev": true, - "license": "MIT", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, "engines": { - "node": ">=8" + "node": ">=10" } }, - "node_modules/cliui/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/cliui/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, + "license": "BSD-3-Clause", "engines": { - "node": ">=8" + "node": ">=0.10.0" } }, - "node_modules/cliui/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - 
"dependencies": { - "ansi-regex": "^5.0.1" - }, + "node_modules/tiny-lru": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-13.0.0.tgz", + "integrity": "sha512-xDHxKKS1FdF0Tv2P+QT7IeSEg74K/8cEDzbv3Tv6UyHHUgBOjOiQiBp818MGj66dhurQus/IBcoAbwIKtSGc6Q==", + "license": "BSD-3-Clause", "engines": { - "node": ">=8" + "node": ">=14" } }, - "node_modules/cliui/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "node_modules/tinybench": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-6.0.0.tgz", + "integrity": "sha512-BWlWpVbbZXaYjRV0twGLNQO00Zj4HA/sjLOQP2IvzQqGwRGp+2kh7UU3ijyJ3ywFRogYDRbiHDMrUOfaMnN56g==", "dev": true, "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + "node": ">=20.0.0" } }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/tinypool": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-2.1.0.tgz", + "integrity": "sha512-Pugqs6M0m7Lv1I7FtxN4aoyToKg1C4tu+/381vH35y8oENM/Ai7f7C4StcoK4/+BSw9ebcS8jRiVrORFKCALLw==", "dev": true, "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, "engines": { - "node": ">=7.0.0" + "node": "^20.0.0 || >=22.0.0" } }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", "dev": true, "license": "MIT" }, - "node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "node_modules/uglify-js": { + "version": "3.19.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", + "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", "dev": true, - "license": "MIT", + "license": "BSD-2-Clause", + "optional": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, "engines": { - "node": ">= 10" + "node": ">=0.8.0" } }, - "node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", "dev": true, - "license": "MIT" + "license": "BSD-2-Clause" }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "dev": true, "license": "MIT", "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" } }, - "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/decamelize": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", - "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/diff": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-7.0.0.tgz", - "integrity": "sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.3.1" - } - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": 
"sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true, - "license": "MIT" - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true, - "license": "MIT" - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint": { - "version": "10.2.1", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-10.2.1.tgz", - "integrity": "sha512-wiyGaKsDgqXvF40P8mDwiUp/KQjE1FdrIEJsM8PZ3XCiniTMXS3OHWWUe5FI5agoCnr8x4xPrTDZuxsBlNHl+Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@eslint-community/eslint-utils": "^4.8.0", - "@eslint-community/regexpp": "^4.12.2", - "@eslint/config-array": "^0.23.5", - "@eslint/config-helpers": "^0.5.5", - "@eslint/core": "^1.2.1", - "@eslint/plugin-kit": "^0.7.1", - "@humanfs/node": "^0.16.6", - "@humanwhocodes/module-importer": "^1.0.1", - "@humanwhocodes/retry": "^0.4.2", - "@types/estree": "^1.0.6", - "ajv": "^6.14.0", - "cross-spawn": "^7.0.6", - "debug": "^4.3.2", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^9.1.2", - "eslint-visitor-keys": "^5.0.1", - "espree": 
"^11.2.0", - "esquery": "^1.7.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^8.0.0", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "ignore": "^5.2.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "minimatch": "^10.2.4", - "natural-compare": "^1.4.0", - "optionator": "^0.9.3" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^20.19.0 || ^22.13.0 || >=24" - }, - "funding": { - "url": "https://eslint.org/donate" - }, - "peerDependencies": { - "jiti": "*" - }, - "peerDependenciesMeta": { - "jiti": { - "optional": true - } - } - }, - "node_modules/eslint-scope": { - "version": "9.1.2", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-9.1.2.tgz", - "integrity": "sha512-xS90H51cKw0jltxmvmHy2Iai1LIqrfbw57b79w/J7MfvDfkIkFZ+kj6zC3BjtUwh150HsSSdxXZcsuv72miDFQ==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "@types/esrecurse": "^4.3.1", - "@types/estree": "^1.0.8", - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^20.19.0 || ^22.13.0 || >=24" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz", - "integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^20.19.0 || ^22.13.0 || >=24" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/espree": { - "version": "11.2.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-11.2.0.tgz", - "integrity": "sha512-7p3DrVEIopW1B1avAGLuCSh1jubc01H2JHc8B4qqGblmg5gI9yumBgACjWo4JlIc04ufug4xJ3SQI8HkS/Rgzw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "acorn": "^8.16.0", - "acorn-jsx": 
"^5.3.2", - "eslint-visitor-keys": "^5.0.1" - }, - "engines": { - "node": "^20.19.0 || ^22.13.0 || >=24" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/esquery": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz", - "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "estraverse": "^5.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true, - "license": "MIT" - }, - "node_modules/file-entry-cache": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", - "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "flat-cache": "^4.0.0" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "license": "MIT", - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/flat": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", - "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", - "dev": true, - "license": "BSD-3-Clause", - "bin": { - "flat": "cli.js" - } - }, - "node_modules/flat-cache": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", - "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", - "dev": true, - "license": "MIT", - 
"dependencies": { - "flatted": "^3.2.9", - "keyv": "^4.5.4" - }, - "engines": { - "node": ">=16" - } - }, - "node_modules/flatted": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", - "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", - "dev": true, - "license": "ISC" - }, - "node_modules/foreground-child": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", - "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", - "dev": true, - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.6", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "license": "ISC", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - 
"minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/glob/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/glob/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/globals": { - "version": "17.5.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-17.5.0.tgz", - "integrity": "sha512-qoV+HK2yFl/366t2/Cb3+xxPUo5BuMynomoDmiaZBIdbs+0pYbjfZU+twLhGKp4uCZ/+NbtpVepH5bGCxRyy2g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/handlebars": { - "version": "4.7.8", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", - "integrity": 
"sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "minimist": "^1.2.5", - "neo-async": "^2.6.2", - "source-map": "^0.6.1", - "wordwrap": "^1.0.0" - }, - "bin": { - "handlebars": "bin/handlebars" - }, - "engines": { - "node": ">=0.4.7" - }, - "optionalDependencies": { - "uglify-js": "^3.1.4" - } - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/he": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", - "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", - "dev": true, - "license": "MIT", - "bin": { - "he": "bin/he" - } - }, - "node_modules/html-escaper": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", - "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", - "dev": true, - "license": "MIT" - }, - "node_modules/husky": { - "version": "9.1.7", - "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.7.tgz", - "integrity": "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==", - "dev": true, - "license": "MIT", - "bin": { - "husky": "bin.js" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/typicode" - } - }, - "node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, - "license": "MIT", - "engines": { - 
"node": ">= 4" - } - }, - "node_modules/import-cwd": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/import-cwd/-/import-cwd-3.0.0.tgz", - "integrity": "sha512-4pnzH16plW+hgvRECbDWpQl3cqtvSofHWh44met7ESfZ8UZOWWddm8hEyDTqREJ9RbYHY8gi8DqmaelApoOGMg==", - "dev": true, - "license": "MIT", - "dependencies": { - "import-from": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/import-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/import-from/-/import-from-3.0.0.tgz", - "integrity": "sha512-CiuXOFFSzkU5x/CR0+z7T91Iht4CXgfCxVOFRhh2Zyhg5wOpWvvDLQUsWl+gcN+QscYBjez8hDCt85O7RLDttQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "resolve-from": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/import-from/node_modules/resolve-from": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", - "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-plain-obj": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", - "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-unicode-supported": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", - "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true, - "license": "ISC" - }, - "node_modules/istanbul-lib-coverage": { - "version": "3.2.2", - "resolved": 
"https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", - "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-report": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", - "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "istanbul-lib-coverage": "^3.0.0", - "make-dir": "^4.0.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-reports": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", - "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "html-escaper": "^2.0.0", - "istanbul-lib-report": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jackspeak": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", - "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "license": "MIT", - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - 
"js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "license": "MIT" - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true, - "license": "MIT" - }, - "node_modules/keyv": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", - "dev": true, - "license": "MIT", - "dependencies": { - "json-buffer": "3.0.1" - } - }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "license": "MIT", - 
"dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-symbols": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", - "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.1.0", - "is-unicode-supported": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/make-dir": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", - "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", - "dev": true, - "license": "MIT", - "dependencies": { - "semver": "^7.5.3" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/minimatch": { - "version": "10.2.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", - "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "brace-expansion": "^5.0.2" - }, - "engines": { - "node": "18 || 20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": 
"sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/minipass": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", - "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/mocha": { - "version": "11.7.5", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-11.7.5.tgz", - "integrity": "sha512-mTT6RgopEYABzXWFx+GcJ+ZQ32kp4fMf0xvpZIIfSq9Z8lC/++MtcCnQ9t5FP2veYEP95FIYSvW+U9fV4xrlig==", - "dev": true, - "license": "MIT", - "dependencies": { - "browser-stdout": "^1.3.1", - "chokidar": "^4.0.1", - "debug": "^4.3.5", - "diff": "^7.0.0", - "escape-string-regexp": "^4.0.0", - "find-up": "^5.0.0", - "glob": "^10.4.5", - "he": "^1.2.0", - "is-path-inside": "^3.0.3", - "js-yaml": "^4.1.0", - "log-symbols": "^4.1.0", - "minimatch": "^9.0.5", - "ms": "^2.1.3", - "picocolors": "^1.1.1", - "serialize-javascript": "^6.0.2", - "strip-json-comments": "^3.1.1", - "supports-color": "^8.1.1", - "workerpool": "^9.2.0", - "yargs": "^17.7.2", - "yargs-parser": "^21.1.1", - "yargs-unparser": "^2.0.0" - }, - "bin": { - "_mocha": "bin/_mocha", - "mocha": "bin/mocha.js" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/mocha/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/mocha/node_modules/minimatch": { - "version": "9.0.5", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/mocha/node_modules/supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, - "license": "MIT" - }, - "node_modules/natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", - "dev": true, - "license": "MIT" - }, - "node_modules/neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", - "dev": true, - "license": "MIT" - }, - "node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dev": true, - "license": "MIT", - 
"dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/optionator": { - "version": "0.9.4", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", - "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", - "dev": true, - "license": "MIT", - "dependencies": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.5" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/package-json-from-dist": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", - "dev": true, - "license": "BlueOak-1.0.0" - }, - "node_modules/parse-github-url": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/parse-github-url/-/parse-github-url-1.0.3.tgz", - "integrity": "sha512-tfalY5/4SqGaV/GIGzWyHnFjlpTPTNpENR9Ea2lLldSJ8EWXMsvacWucqY3m3I4YPtas15IxTLQVQ5NSYXPrww==", - "dev": true, - "license": "MIT", - "bin": { - "parse-github-url": "cli.js" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "dev": true, - "license": "ISC" - }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - 
"dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/randombytes": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", - "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "^5.1.0" - } - }, - "node_modules/readdirp": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", - "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.18.0" - }, - "funding": { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup": { - "version": "4.60.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.1.tgz", - "integrity": "sha512-VmtB2rFU/GroZ4oL8+ZqXgSA38O6GR8KSIvWmEFv63pQ0G6KaBH9s07PO8XTXP4vI+3UJUEypOfjkGfmSBBR0w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "1.0.8" - }, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.60.1", - 
"@rollup/rollup-android-arm64": "4.60.1", - "@rollup/rollup-darwin-arm64": "4.60.1", - "@rollup/rollup-darwin-x64": "4.60.1", - "@rollup/rollup-freebsd-arm64": "4.60.1", - "@rollup/rollup-freebsd-x64": "4.60.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.60.1", - "@rollup/rollup-linux-arm-musleabihf": "4.60.1", - "@rollup/rollup-linux-arm64-gnu": "4.60.1", - "@rollup/rollup-linux-arm64-musl": "4.60.1", - "@rollup/rollup-linux-loong64-gnu": "4.60.1", - "@rollup/rollup-linux-loong64-musl": "4.60.1", - "@rollup/rollup-linux-ppc64-gnu": "4.60.1", - "@rollup/rollup-linux-ppc64-musl": "4.60.1", - "@rollup/rollup-linux-riscv64-gnu": "4.60.1", - "@rollup/rollup-linux-riscv64-musl": "4.60.1", - "@rollup/rollup-linux-s390x-gnu": "4.60.1", - "@rollup/rollup-linux-x64-gnu": "4.60.1", - "@rollup/rollup-linux-x64-musl": "4.60.1", - "@rollup/rollup-openbsd-x64": "4.60.1", - "@rollup/rollup-openharmony-arm64": "4.60.1", - "@rollup/rollup-win32-arm64-msvc": "4.60.1", - "@rollup/rollup-win32-ia32-msvc": "4.60.1", - "@rollup/rollup-win32-x64-gnu": "4.60.1", - "@rollup/rollup-win32-x64-msvc": "4.60.1", - "fsevents": "~2.3.2" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" 
- } - }, - "node_modules/serialize-javascript": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", - "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "randombytes": "^2.1.0" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/smob": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/smob/-/smob-1.5.0.tgz", - "integrity": "sha512-g6T+p7QO8npa+/hNx9ohv1E5pVCmWrVCUzUXJyLdMmftX6ER0oiWY/w9knEonLpnOp6b6FenKnMfR8gqwWdwig==", - "dev": true, - "license": "MIT" - }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - 
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": 
"sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/terser": { - "version": "5.43.1", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.43.1.tgz", - "integrity": "sha512-+6erLbBm0+LROX2sPXlUYx/ux5PyE9K/a92Wrt6oA+WDAoFTdpHE5tCYCI5PNzq2y8df4rA+QgHLJuR4jNymsg==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.14.0", - "commander": "^2.20.0", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/terser/node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/test-exclude": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-8.0.0.tgz", - "integrity": "sha512-ZOffsNrXYggvU1mDGHk54I96r26P8SyMjO5slMKSc7+IWmtB/MQKnEC2fP51imB3/pT6YK5cT5E8f+Dd9KdyOQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "@istanbuljs/schema": "^0.1.2", - "glob": "^13.0.6", - "minimatch": "^10.2.2" - }, - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/test-exclude/node_modules/glob": { - "version": "13.0.6", - "resolved": 
"https://registry.npmjs.org/glob/-/glob-13.0.6.tgz", - "integrity": "sha512-Wjlyrolmm8uDpm/ogGyXZXb1Z+Ca2B8NbJwqBVg0axK9GbBeoS7yGV6vjXnYdGm6X53iehEuxxbyiKp8QmN4Vw==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "minimatch": "^10.2.2", - "minipass": "^7.1.3", - "path-scurry": "^2.0.2" - }, - "engines": { - "node": "18 || 20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/test-exclude/node_modules/lru-cache": { - "version": "11.2.6", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", - "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/test-exclude/node_modules/path-scurry": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.2.tgz", - "integrity": "sha512-3O/iVVsJAPsOnpwWIeD+d6z/7PmqApyQePUtCndjatj/9I5LylHvt5qluFaBT3I5h3r1ejfR056c+FCv+NnNXg==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^11.0.0", - "minipass": "^7.1.2" - }, - "engines": { - "node": "18 || 20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "dev": true, - "license": "MIT" - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "license": "MIT", - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/uglify-js": { - "version": "3.19.3", 
- "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", - "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", - "dev": true, - "license": "BSD-2-Clause", - "optional": true, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/v8-to-istanbul": { - "version": "9.3.0", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", - "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", - "dev": true, - "license": "ISC", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.12", - "@types/istanbul-lib-coverage": "^2.0.1", - "convert-source-map": "^2.0.0" - }, - "engines": { - "node": ">=10.12.0" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dev": true, - "license": "MIT", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/word-wrap": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", - "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/workerpool": { - "version": "9.3.3", - "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-9.3.3.tgz", - "integrity": "sha512-slxCaKbYjEdFT/o2rH9xS1hf4uRDch1w7Uo+apxhZ+sf/1d9e0ZVkn42kPNGP2dgjIx6YFvSevj0zHvbWe2jdw==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": 
"^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - 
"license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/yargs-unparser": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", - "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", - "dev": true, - "license": "MIT", - "dependencies": { - "camelcase": "^6.0.0", - "decamelize": "^4.0.0", - "flat": "^5.0.2", - "is-plain-obj": "^2.1.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/yargs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/yargs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", "dev": true, "license": "MIT" - }, - "node_modules/yargs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/yargs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } } } } diff --git a/package.json 
b/package.json index e54529a5..d1f7e4e7 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "haro", - "version": "16.0.0", - "description": "Haro is a modern immutable DataStore", + "version": "17.0.0", + "description": "A fast, flexible immutable DataStore for collections of records with indexing, versioning, and advanced querying capabilities", "type": "module", "types": "types/haro.d.ts", "source": "src/haro.js", @@ -14,16 +14,17 @@ "files": [ "dist/haro.cjs", "dist/haro.js", - "types" + "types/haro.d.ts" ], "scripts": { "benchmark": "node benchmarks/index.js", "build": "npm run lint && npm run rollup", "changelog": "auto-changelog -p", - "lint": "eslint --fix *.js benchmarks/*.js src/*.js tests/**/*.js", - "mocha": "c8 mocha tests/**/*.js", + "fix": "oxlint --fix *.js benchmarks src tests/unit && oxfmt *.js benchmarks/*.js src/*.js tests/unit/*.js --write", + "lint": "oxlint *.js benchmarks src tests/unit && oxfmt *.js benchmarks/*.js src/*.js tests/unit/*.js --check", + "coverage": "node --test --experimental-test-coverage --test-coverage-exclude=dist/** --test-coverage-exclude=tests/** --test-reporter=spec tests/**/*.test.js 2>&1 | grep -A 1000 \"start of coverage report\" > coverage.txt", "rollup": "rollup --config", - "test": "npm run lint && npm run mocha", + "test": "npm run lint && node --test tests/**/*.js", "prepare": "husky" }, "repository": { @@ -35,7 +36,18 @@ "data", "store", "datastore", - "api", + "collection", + "index", + "indexing", + "query", + "search", + "filter", + "versioning", + "batch", + "bulk", + "memory", + "cache", + "map", "haro" ], "author": "Jason Mulligan ", @@ -46,17 +58,18 @@ "homepage": "https://github.com/avoidwork/haro", "engineStrict": true, "engines": { - "node": ">=17.0.0" + "node": ">=19.0.0" + }, + "dependencies": { + "tiny-lru": "^13.0.0" }, "devDependencies": { - "@eslint/js": "^9.31.0", - "@rollup/plugin-terser": "^1.0.0", "auto-changelog": "^2.5.0", - "c8": "^11.0.0", - "eslint": "^10.0.0", - 
"globals": "^17.0.0", + "globals": "^17.5.0", "husky": "^9.1.7", - "mocha": "^11.7.1", - "rollup": "^4.45.0" + "oxfmt": "^0.45.0", + "oxlint": "^1.60.0", + "rollup": "^4.60.2", + "tinybench": "^6.0.0" } } diff --git a/rollup.config.js b/rollup.config.js index d23b65e6..b50ddbbb 100644 --- a/rollup.config.js +++ b/rollup.config.js @@ -1,4 +1,3 @@ -import terser from "@rollup/plugin-terser"; import { createRequire } from "node:module"; const require = createRequire(import.meta.url); const pkg = require("./package.json"); @@ -10,16 +9,10 @@ const bannerLong = `/** * @license ${pkg.license} * @version ${pkg.version} */`; -const bannerShort = `/*! - ${year} ${pkg.author} - @version ${pkg.version} -*/`; -const defaultOutBase = {compact: true, banner: bannerLong, name: pkg.name}; -const cjOutBase = {...defaultOutBase, compact: false, format: "cjs", exports: "named"}; -const esmOutBase = {...defaultOutBase, format: "esm"}; -const umdOutBase = {...defaultOutBase, format: "umd"}; -const minOutBase = {banner: bannerShort, name: pkg.name, plugins: [terser()], sourcemap: true}; +const defaultOutBase = { compact: true, banner: bannerLong, name: pkg.name }; +const cjOutBase = { ...defaultOutBase, compact: false, format: "cjs", exports: "named" }; +const esmOutBase = { ...defaultOutBase, format: "esm" }; export default [ { @@ -27,28 +20,12 @@ export default [ output: [ { ...cjOutBase, - file: `dist/${pkg.name}.cjs` + file: `dist/${pkg.name}.cjs`, }, { ...esmOutBase, - file: `dist/${pkg.name}.js` + file: `dist/${pkg.name}.js`, }, - { - ...esmOutBase, - ...minOutBase, - file: `dist/${pkg.name}.min.js` - }, - { - ...umdOutBase, - file: `dist/${pkg.name}.umd.js`, - name: "lru" - }, - { - ...umdOutBase, - ...minOutBase, - file: `dist/${pkg.name}.umd.min.js`, - name: "lru" - } - ] - } + ], + }, ]; diff --git a/src/constants.js b/src/constants.js index 39370fd8..e9b5c92c 100644 --- a/src/constants.js +++ b/src/constants.js @@ -1,15 +1,18 @@ // String constants - Single characters and 
symbols export const STRING_COMMA = ","; +export const STRING_DOT = "."; export const STRING_EMPTY = ""; export const STRING_PIPE = "|"; export const STRING_DOUBLE_PIPE = "||"; export const STRING_DOUBLE_AND = "&&"; // String constants - Operation and type names -export const STRING_ID = "id"; export const STRING_DEL = "del"; export const STRING_FUNCTION = "function"; +export const STRING_ID = "id"; +export const STRING_INDEX = "index"; export const STRING_INDEXES = "indexes"; +export const STRING_KEY = "key"; export const STRING_OBJECT = "object"; export const STRING_RECORDS = "records"; export const STRING_REGISTRY = "registry"; @@ -26,3 +29,44 @@ export const STRING_RECORD_NOT_FOUND = "Record not found"; // Integer constants export const INT_0 = 0; +export const INT_2 = 2; + +// Number constants +export const CACHE_SIZE_DEFAULT = 1000; + +// String constants - Cache and hashing +export const STRING_CACHE_DOMAIN_SEARCH = "search"; +export const STRING_CACHE_DOMAIN_WHERE = "where"; +export const STRING_HASH_ALGORITHM = "SHA-256"; +export const STRING_HEX_PAD = "0"; +export const STRING_UNDERSCORE = "_"; + +// String constants - Security (prototype pollution protection) +export const STRING_PROTO = "__proto__"; +export const STRING_CONSTRUCTOR = "constructor"; +export const STRING_PROTOTYPE = "prototype"; + +// String constants - Error messages +export const STRING_ERROR_BATCH_SETMANY = "setMany: cannot call setMany within a batch operation"; +export const STRING_ERROR_BATCH_DELETEMANY = + "deleteMany: cannot call deleteMany within a batch operation"; +export const STRING_ERROR_DELETE_KEY_TYPE = "delete: key must be a string or number"; +export const STRING_ERROR_FIND_WHERE_TYPE = "find: where must be an object"; +export const STRING_ERROR_LIMIT_OFFSET_TYPE = "limit: offset must be a number"; +export const STRING_ERROR_LIMIT_MAX_TYPE = "limit: max must be a number"; +export const STRING_ERROR_SEARCH_VALUE = "search: value cannot be null or undefined"; +export const 
STRING_ERROR_SET_KEY_TYPE = "set: key must be a string or number"; +export const STRING_ERROR_SET_DATA_TYPE = "set: data must be an object"; +export const STRING_ERROR_SORT_FN_TYPE = "sort: fn must be a function"; +export const STRING_ERROR_WHERE_OP_TYPE = "where: op must be a string"; +export const STRING_ERROR_WHERE_PREDICATE_TYPE = "where: predicate must be an object"; + +// String constants - Property names +export const PROP_DELIMITER = "delimiter"; +export const PROP_ID = "id"; +export const PROP_IMMUTABLE = "immutable"; +export const PROP_INDEX = "index"; +export const PROP_KEY = "key"; +export const PROP_VERSIONING = "versioning"; +export const PROP_VERSIONS = "versions"; +export const PROP_WARN_ON_FULL_SCAN = "warnOnFullScan"; diff --git a/src/haro.js b/src/haro.js index 6ffde936..3f509f5d 100644 --- a/src/haro.js +++ b/src/haro.js @@ -1,213 +1,356 @@ -import {randomUUID as uuid} from "crypto"; +import { randomUUID as uuid } from "crypto"; +import { lru } from "tiny-lru"; import { + CACHE_SIZE_DEFAULT, INT_0, + INT_2, + PROP_DELIMITER, + PROP_ID, + PROP_IMMUTABLE, + PROP_INDEX, + PROP_KEY, + PROP_VERSIONING, + PROP_VERSIONS, + PROP_WARN_ON_FULL_SCAN, + STRING_CACHE_DOMAIN_SEARCH, + STRING_CACHE_DOMAIN_WHERE, STRING_COMMA, - STRING_DEL, STRING_DOUBLE_AND, + STRING_CONSTRUCTOR, + STRING_DOT, + STRING_DOUBLE_AND, STRING_DOUBLE_PIPE, STRING_EMPTY, + STRING_ERROR_BATCH_DELETEMANY, + STRING_ERROR_BATCH_SETMANY, + STRING_ERROR_DELETE_KEY_TYPE, + STRING_ERROR_FIND_WHERE_TYPE, + STRING_ERROR_LIMIT_MAX_TYPE, + STRING_ERROR_LIMIT_OFFSET_TYPE, + STRING_ERROR_SEARCH_VALUE, + STRING_ERROR_SET_DATA_TYPE, + STRING_ERROR_SET_KEY_TYPE, + STRING_ERROR_SORT_FN_TYPE, + STRING_ERROR_WHERE_OP_TYPE, + STRING_ERROR_WHERE_PREDICATE_TYPE, STRING_FUNCTION, + STRING_HASH_ALGORITHM, + STRING_HEX_PAD, STRING_ID, STRING_INDEXES, STRING_INVALID_FIELD, STRING_INVALID_FUNCTION, - STRING_INVALID_TYPE, STRING_NUMBER, STRING_OBJECT, + STRING_INVALID_TYPE, + STRING_NUMBER, + STRING_OBJECT, 
STRING_PIPE, + STRING_PROTOTYPE, + STRING_PROTO, STRING_RECORD_NOT_FOUND, STRING_RECORDS, STRING_REGISTRY, - STRING_SET, - STRING_SIZE, STRING_STRING + STRING_SIZE, + STRING_STRING, + STRING_UNDERSCORE, } from "./constants.js"; /** - * Haro is a modern immutable DataStore for collections of records with indexing, - * versioning, and batch operations support. It provides a Map-like interface - * with advanced querying capabilities through indexes. + * Haro is an immutable DataStore with indexing, versioning, and batch operations. + * Provides a Map-like interface with advanced querying capabilities. * @class * @example - * const store = new Haro({ - * index: ['name', 'age'], - * key: 'id', - * versioning: true - * }); - * - * store.set(null, {name: 'John', age: 30}); + * const store = new Haro({ index: ['name'], key: 'id', versioning: true }); + * store.set(null, {name: 'John'}); * const results = store.find({name: 'John'}); */ export class Haro { + #cache; + #cacheEnabled; + #data; + #delimiter; + #id; + #immutable; + #index; + #indexes; + #key; + #versions; + #versioning; + #warnOnFullScan; + #inBatch = false; + /** - * Creates a new Haro instance with specified configuration - * @param {Object} [config={}] - Configuration object for the store - * @param {string} [config.delimiter=STRING_PIPE] - Delimiter for composite indexes (default: '|') - * @param {string} [config.id] - Unique identifier for this instance (auto-generated if not provided) - * @param {boolean} [config.immutable=false] - Return frozen/immutable objects for data safety - * @param {string[]} [config.index=[]] - Array of field names to create indexes for - * @param {string} [config.key=STRING_ID] - Primary key field name used for record identification - * @param {boolean} [config.versioning=false] - Enable versioning to track record changes + * Creates a new Haro instance. 
+ * @param {Object} [config={}] - Configuration object + * @param {string} [config.delimiter=STRING_PIPE] - Delimiter for composite indexes + * @param {string} [config.id] - Unique instance identifier (auto-generated) + * @param {boolean} [config.immutable=false] - Return frozen objects + * @param {string[]} [config.index=[]] - Fields to index + * @param {string} [config.key=STRING_ID] - Primary key field name + * @param {boolean} [config.versioning=false] - Enable versioning + * @param {boolean} [config.warnOnFullScan=true] - Warn on full table scans * @constructor * @example - * const store = new Haro({ - * index: ['name', 'email', 'name|department'], - * key: 'userId', - * versioning: true, - * immutable: true - * }); - */ - constructor ({delimiter = STRING_PIPE, id = this.uuid(), immutable = false, index = [], key = STRING_ID, versioning = false} = {}) { - this.data = new Map(); - this.delimiter = delimiter; - this.id = id; - this.immutable = immutable; - this.index = Array.isArray(index) ? [...index] : []; - this.indexes = new Map(); - this.key = key; - this.versions = new Map(); - this.versioning = versioning; + * const store = new Haro({ index: ['name', 'email'], key: 'userId', versioning: true }); + */ + constructor({ + cache = false, + cacheSize = CACHE_SIZE_DEFAULT, + delimiter = STRING_PIPE, + id = uuid(), + immutable = false, + index = [], + key = STRING_ID, + versioning = false, + warnOnFullScan = true, + } = {}) { + this.#data = new Map(); + this.#cacheEnabled = cache === true; + this.#cache = cache === true ? lru(cacheSize) : null; + this.#delimiter = delimiter; + this.#id = id; + this.#immutable = immutable; + this.#index = Array.isArray(index) ? 
[...index] : []; + this.#indexes = new Map(); + this.#key = key; + this.#versions = new Map(); + this.#versioning = versioning; + this.#warnOnFullScan = warnOnFullScan; + this.#inBatch = false; Object.defineProperty(this, STRING_REGISTRY, { enumerable: true, - get: () => Array.from(this.data.keys()) + get: () => Array.from(this.#data.keys()), }); Object.defineProperty(this, STRING_SIZE, { enumerable: true, - get: () => this.data.size + get: () => this.#data.size, }); - - return this.reindex(); + Object.defineProperty(this, PROP_KEY, { + enumerable: true, + get: () => this.#key, + }); + Object.defineProperty(this, PROP_INDEX, { + enumerable: true, + get: () => [...this.#index], + }); + Object.defineProperty(this, PROP_DELIMITER, { + enumerable: true, + get: () => this.#delimiter, + }); + Object.defineProperty(this, PROP_IMMUTABLE, { + enumerable: true, + get: () => this.#immutable, + }); + Object.defineProperty(this, PROP_VERSIONING, { + enumerable: true, + get: () => this.#versioning, + }); + Object.defineProperty(this, PROP_WARN_ON_FULL_SCAN, { + enumerable: true, + get: () => this.#warnOnFullScan, + }); + Object.defineProperty(this, PROP_VERSIONS, { + enumerable: true, + get: () => this.#versions, + }); + Object.defineProperty(this, PROP_ID, { + enumerable: true, + get: () => this.#id, + }); + this.reindex(); } /** - * Performs batch operations on multiple records for efficient bulk processing - * @param {Array} args - Array of records to process - * @param {string} [type=STRING_SET] - Type of operation: 'set' for upsert, 'del' for delete - * @returns {Array} Array of results from the batch operation - * @throws {Error} Throws error if individual operations fail during batch processing + * Inserts or updates multiple records. 
+ * @param {Array} records - Records to insert or update + * @returns {Array} Stored records * @example - * const results = store.batch([ - * {id: 1, name: 'John'}, - * {id: 2, name: 'Jane'} - * ], 'set'); + * store.setMany([{id: 1, name: 'John'}, {id: 2, name: 'Jane'}]); */ - batch (args, type = STRING_SET) { - const fn = type === STRING_DEL ? i => this.delete(i, true) : i => this.set(null, i, true, true); + setMany(records) { + if (this.#inBatch) { + throw new Error(STRING_ERROR_BATCH_SETMANY); + } + this.#inBatch = true; + const results = records.map((i) => this.set(null, i, true)); + this.#inBatch = false; + this.reindex(); + this.#invalidateCache(); + return results; + } - return this.onbatch(this.beforeBatch(args, type).map(fn), type); + /** + * Deletes multiple records. + * @param {Array} keys - Keys to delete + * @returns {Array} + * @example + * store.deleteMany(['key1', 'key2']); + */ + deleteMany(keys) { + if (this.#inBatch) { + /* node:coverage ignore next */ throw new Error(STRING_ERROR_BATCH_DELETEMANY); + } + this.#inBatch = true; + const results = keys.map((i) => this.delete(i)); + this.#inBatch = false; + this.reindex(); + this.#invalidateCache(); + return results; } /** - * Lifecycle hook executed before batch operations for custom preprocessing - * @param {Array} arg - Arguments passed to batch operation - * @param {string} [type=STRING_EMPTY] - Type of batch operation ('set' or 'del') - * @returns {Array} The arguments array (possibly modified) to be processed + * Returns true if currently in a batch operation. 
+ * @returns {boolean} Batch operation status */ - beforeBatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - // Hook for custom logic before batch; override in subclass if needed - return arg; + get isBatching() { + return this.#inBatch; } /** - * Lifecycle hook executed before clear operation for custom preprocessing - * @returns {void} Override this method in subclasses to implement custom logic + * Removes all records, indexes, and versions. + * @returns {Haro} This instance * @example - * class MyStore extends Haro { - * beforeClear() { - * this.backup = this.toArray(); - * } - * } + * store.clear(); */ - beforeClear () { - // Hook for custom logic before clear; override in subclass if needed + clear() { + this.#data.clear(); + this.#indexes.clear(); + this.#versions.clear(); + this.#invalidateCache(); + + return this; } /** - * Lifecycle hook executed before delete operation for custom preprocessing - * @param {string} [key=STRING_EMPTY] - Key of record to delete - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic + * Creates a deep clone of a value. 
+ * @param {*} arg - Value to clone + * @returns {*} Deep clone */ - beforeDelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic before delete; override in subclass if needed + #clone(arg) { + if (typeof structuredClone === STRING_FUNCTION) { + return structuredClone(arg); + } + + /* node:coverage ignore */ return JSON.parse(JSON.stringify(arg)); } /** - * Lifecycle hook executed before set operation for custom preprocessing - * @param {string} [key=STRING_EMPTY] - Key of record to set - * @param {Object} [data={}] - Record data being set - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @param {boolean} [override=false] - Whether to override existing data - * @returns {void} Override this method in subclasses to implement custom logic + * Deletes a record and removes it from all indexes. + * @param {string} [key=STRING_EMPTY] - Key to delete + * @throws {Error} If key not found + * @example + * store.delete('user123'); */ - beforeSet (key = STRING_EMPTY, data = {}, batch = false, override = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic before set; override in subclass if needed + delete(key = STRING_EMPTY) { + if (typeof key !== STRING_STRING && typeof key !== STRING_NUMBER) { + throw new Error(STRING_ERROR_DELETE_KEY_TYPE); + } + if (!this.#data.has(key)) { + throw new Error(STRING_RECORD_NOT_FOUND); + } + const og = this.#data.get(key); + if (!this.#inBatch) { + this.#deleteIndex(key, og); + } + this.#data.delete(key); + if (this.#versioning && !this.#inBatch) { + this.#versions.delete(key); + } + this.#invalidateCache(); } /** - * Removes all records, indexes, and versions from the store - * @returns {Haro} This instance for method chaining - * @example - * store.clear(); - * console.log(store.size); // 0 + * Generates a cache key using SHA-256 hash. 
+ * @param {string} domain - Cache key prefix (e.g., 'search', 'where') + * @param {...*} args - Arguments to hash + * @returns {string} Cache key in format 'domain_HASH' */ - clear () { - this.beforeClear(); - this.data.clear(); - this.indexes.clear(); - this.versions.clear(); - this.reindex().onclear(); + async #getCacheKey(domain, ...args) { + const data = JSON.stringify(args); + const encoder = new TextEncoder(); + const hashBuffer = await crypto.subtle.digest(STRING_HASH_ALGORITHM, encoder.encode(data)); + const hashArray = Array.from(new Uint8Array(hashBuffer)); + const hashHex = hashArray.map((b) => b.toString(16).padStart(INT_2, STRING_HEX_PAD)).join(""); + return `${domain}${STRING_UNDERSCORE}${hashHex}`; + } + /** + * Clears the cache. + * @returns {Haro} This instance + */ + clearCache() { + if (this.#cacheEnabled) { + this.#cache.clear(); + } return this; } /** - * Creates a deep clone of the given value, handling objects, arrays, and primitives - * @param {*} arg - Value to clone (any type) - * @returns {*} Deep clone of the argument - * @example - * const original = {name: 'John', tags: ['user', 'admin']}; - * const cloned = store.clone(original); - * cloned.tags.push('new'); // original.tags is unchanged + * Returns the current cache size. + * @returns {number} Number of entries in cache + */ + getCacheSize() { + return this.#cacheEnabled ? this.#cache.size : 0; + } + + /** + * Returns cache statistics. + * @returns {Object|null} Stats object with hits, misses, sets, deletes, evictions */ - clone (arg) { - return structuredClone(arg); + getCacheStats() { + return this.#cacheEnabled ? this.#cache.stats() : null; } /** - * Deletes a record from the store and removes it from all indexes - * @param {string} [key=STRING_EMPTY] - Key of record to delete - * @param {boolean} [batch=false] - Whether this is part of a batch operation + * Invalidates the cache if enabled and not in batch mode. 
* @returns {void} - * @throws {Error} Throws error if record with the specified key is not found - * @example - * store.delete('user123'); - * // Throws error if 'user123' doesn't exist */ - delete (key = STRING_EMPTY, batch = false) { - if (!this.data.has(key)) { - throw new Error(STRING_RECORD_NOT_FOUND); + #invalidateCache() { + if (this.#cacheEnabled && !this.#inBatch) { + this.#cache.clear(); } - const og = this.get(key, true); - this.beforeDelete(key, batch); - this.deleteIndex(key, og); - this.data.delete(key); - this.ondelete(key, batch); - if (this.versioning) { - this.versions.delete(key); + } + + /** + * Retrieves a value from a nested object using dot notation. + * @param {Object} obj - Object to traverse + * @param {string} path - Dot-notation path (e.g., 'user.address.city') + * @returns {*} Value at path, or undefined if path doesn't exist + */ + #getNestedValue(obj, path) { + /* node:coverage ignore next 3 */ + if (obj === null || obj === undefined || path === STRING_EMPTY) { + return undefined; } + const keys = path.split(STRING_DOT); + let result = obj; + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const key = keys[i]; + if (result === null || result === undefined || !(key in result)) { + return undefined; + } + result = result[key]; + } + return result; } /** - * Internal method to remove entries from indexes for a deleted record - * @param {string} key - Key of record being deleted - * @param {Object} data - Data of record being deleted - * @returns {Haro} This instance for method chaining + * Removes a record from all indexes. + * @param {string} key - Record key + * @param {Object} data - Record data + * @returns {Haro} This instance */ - deleteIndex (key, data) { - this.index.forEach(i => { - const idx = this.indexes.get(i); + #deleteIndex(key, data) { + this.#index.forEach((i) => { + const idx = this.#indexes.get(i); if (!idx) return; - const values = i.includes(this.delimiter) ? 
- this.indexKeys(i, this.delimiter, data) : - Array.isArray(data[i]) ? data[i] : [data[i]]; - this.each(values, value => { + const values = i.includes(this.#delimiter) + ? this.#getIndexKeys(i, this.#delimiter, data) + : Array.isArray(this.#getNestedValue(data, i)) + ? this.#getNestedValue(data, i) + : [this.#getNestedValue(data, i)]; + const len = values.length; + for (let j = 0; j < len; j++) { + const value = values[j]; if (idx.has(value)) { const o = idx.get(value); o.delete(key); @@ -215,27 +358,26 @@ export class Haro { idx.delete(value); } } - }); + } }); return this; } /** - * Exports complete store data or indexes for persistence or debugging - * @param {string} [type=STRING_RECORDS] - Type of data to export: 'records' or 'indexes' - * @returns {Array} Array of [key, value] pairs for records, or serialized index structure + * Exports store data or indexes. + * @param {string} [type=STRING_RECORDS] - Export type: 'records' or 'indexes' + * @returns {Array} Exported data * @example * const records = store.dump('records'); - * const indexes = store.dump('indexes'); */ - dump (type = STRING_RECORDS) { + dump(type = STRING_RECORDS) { let result; if (type === STRING_RECORDS) { result = Array.from(this.entries()); } else { - result = Array.from(this.indexes).map(i => { - i[1] = Array.from(i[1]).map(ii => { + result = Array.from(this.#indexes).map((i) => { + i[1] = Array.from(i[1]).map((ii) => { ii[1] = Array.from(ii[1]); return ii; @@ -249,110 +391,159 @@ export class Haro { } /** - * Utility method to iterate over an array with a callback function - * @param {Array<*>} [arr=[]] - Array to iterate over - * @param {Function} fn - Function to call for each element (element, index) - * @returns {Array<*>} The original array for method chaining - * @example - * store.each([1, 2, 3], (item, index) => console.log(item, index)); + * Generates index keys for composite indexes from data object. 
+ * @param {string} arg - Composite index field names + * @param {string} delimiter - Field delimiter + * @param {Object} data - Data object + * @returns {string[]} Index keys */ - each (arr = [], fn) { - const len = arr.length; - for (let i = 0; i < len; i++) { - fn(arr[i], i); + #getIndexKeys(arg, delimiter, data) { + const fields = arg.split(this.#delimiter).sort(this.#sortKeys); + const result = [STRING_EMPTY]; + const fieldsLen = fields.length; + for (let i = 0; i < fieldsLen; i++) { + const field = fields[i]; + const fieldValue = this.#getNestedValue(data, field); + const values = Array.isArray(fieldValue) ? fieldValue : [fieldValue]; + const newResult = []; + const resultLen = result.length; + const valuesLen = values.length; + for (let j = 0; j < resultLen; j++) { + const existing = result[j]; + for (let k = 0; k < valuesLen; k++) { + const value = values[k]; + const newKey = i === 0 ? value : `${existing}${this.#delimiter}${value}`; + newResult.push(newKey); + } + } + result.length = 0; + result.push(...newResult); } + return result; + } - return arr; + /** + * Generates index keys for where object (handles both dot notation and direct access). + * @param {string} arg - Composite index field names + * @param {string} delimiter - Field delimiter + * @param {Object} where - Where object + * @returns {string[]} Index keys + */ + #getIndexKeysForWhere(arg, delimiter, where) { + const fields = arg.split(this.#delimiter).sort(this.#sortKeys); + const result = [STRING_EMPTY]; + const fieldsLen = fields.length; + for (let i = 0; i < fieldsLen; i++) { + const field = fields[i]; + // Check if field exists directly in where object first (for dot notation keys) + let fieldValue; + if (field in where) { + fieldValue = where[field]; + /* node:coverage ignore next 4 */ + } else { + fieldValue = this.#getNestedValue(where, field); + } + const values = Array.isArray(fieldValue) ? 
fieldValue : [fieldValue]; + const newResult = []; + const resultLen = result.length; + const valuesLen = values.length; + for (let j = 0; j < resultLen; j++) { + const existing = result[j]; + for (let k = 0; k < valuesLen; k++) { + const value = values[k]; + const newKey = i === 0 ? value : `${existing}${this.#delimiter}${value}`; + newResult.push(newKey); + } + } + result.length = 0; + result.push(...newResult); + } + return result; } /** - * Returns an iterator of [key, value] pairs for each record in the store - * @returns {Iterator>} Iterator of [key, value] pairs + * Returns an iterator of [key, value] pairs. + * @returns {Iterator>} Key-value pairs * @example - * for (const [key, value] of store.entries()) { - * console.log(key, value); - * } + * for (const [key, value] of store.entries()) { } */ - entries () { - return this.data.entries(); + entries() { + return this.#data.entries(); } /** - * Finds records matching the specified criteria using indexes for optimal performance - * @param {Object} [where={}] - Object with field-value pairs to match against - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of matching records (frozen if immutable mode) + * Finds records matching criteria using indexes. + * @param {Object} [where={}] - Field-value pairs to match + * @returns {Array} Matching records * @example - * const users = store.find({department: 'engineering', active: true}); - * const admins = store.find({role: 'admin'}); + * store.find({department: 'engineering', active: true}); */ - find (where = {}, raw = false) { - const key = Object.keys(where).sort(this.sortKeys).join(this.delimiter); - const index = this.indexes.get(key) ?? 
new Map(); - let result = []; - if (index.size > 0) { - const keys = this.indexKeys(key, this.delimiter, where); - result = Array.from(keys.reduce((a, v) => { + find(where = {}) { + if (typeof where !== STRING_OBJECT || where === null) { + throw new Error(STRING_ERROR_FIND_WHERE_TYPE); + } + const whereKeys = Object.keys(where).sort(this.#sortKeys); + const compositeKey = whereKeys.join(this.#delimiter); + const result = new Set(); + + const index = this.#indexes.get(compositeKey); + if (index) { + const keys = this.#getIndexKeysForWhere(compositeKey, this.#delimiter, where); + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const v = keys[i]; if (index.has(v)) { - index.get(v).forEach(k => a.add(k)); + const keySet = index.get(v); + for (const k of keySet) { + result.add(k); + } } - - return a; - }, new Set())).map(i => this.get(i, raw)); - } - if (!raw && this.immutable) { - result = Object.freeze(result); + } } - return result; + const records = Array.from(result, (i) => this.get(i)); + if (this.#immutable) { + return Object.freeze(records); + } + return records; } /** - * Filters records using a predicate function, similar to Array.filter - * @param {Function} fn - Predicate function to test each record (record, key, store) - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records that pass the predicate test - * @throws {Error} Throws error if fn is not a function + * Filters records using a predicate function. 
+ * @param {Function} fn - Predicate function (record, key, store) + * @returns {Array} Filtered records + * @throws {Error} If fn is not a function * @example - * const adults = store.filter(record => record.age >= 18); - * const recent = store.filter(record => record.created > Date.now() - 86400000); + * store.filter(record => record.age >= 18); */ - filter (fn, raw = false) { + filter(fn) { if (typeof fn !== STRING_FUNCTION) { throw new Error(STRING_INVALID_FUNCTION); } - let result = this.reduce((a, v) => { - if (fn(v)) { - a.push(v); - } - - return a; - }, []); - if (!raw) { - result = result.map(i => this.list(i)); - - if (this.immutable) { - result = Object.freeze(result); + const result = []; + this.#data.forEach((value, key) => { + if (fn(value, key, this)) { + result.push(value); } + }); + if (this.#immutable) { + return Object.freeze(result); } - return result; } /** - * Executes a function for each record in the store, similar to Array.forEach - * @param {Function} fn - Function to execute for each record (value, key) - * @param {*} [ctx] - Context object to use as 'this' when executing the function - * @returns {Haro} This instance for method chaining + * Executes a function for each record. 
+ * @param {Function} fn - Function (value, key) + * @param {*} [ctx] - Context for fn + * @returns {Haro} This instance * @example - * store.forEach((record, key) => { - * console.log(`${key}: ${record.name}`); - * }); - */ - forEach (fn, ctx = this) { - this.data.forEach((value, key) => { - if (this.immutable) { - value = this.clone(value); + * store.forEach((record, key) => console.log(key, record)); + */ + forEach(fn, ctx = this) { + this.#data.forEach((value, key) => { + if (this.#immutable) { + value = this.#clone(value); } fn.call(ctx, value, key); }, this); @@ -361,109 +552,61 @@ export class Haro { } /** - * Creates a frozen array from the given arguments for immutable data handling - * @param {...*} args - Arguments to freeze into an array - * @returns {Array<*>} Frozen array containing frozen arguments + * Retrieves a record by key. + * @param {string} key - Record key + * @returns {Object|null} Record or null * @example - * const frozen = store.freeze(obj1, obj2, obj3); - * // Returns Object.freeze([Object.freeze(obj1), Object.freeze(obj2), Object.freeze(obj3)]) + * store.get('user123'); */ - freeze (...args) { - return Object.freeze(args.map(i => Object.freeze(i))); - } - - /** - * Retrieves a record by its key - * @param {string} key - Key of record to retrieve - * @param {boolean} [raw=false] - Whether to return raw data (true) or processed/frozen data (false) - * @returns {Object|null} The record if found, null if not found - * @example - * const user = store.get('user123'); - * const rawUser = store.get('user123', true); - */ - get (key, raw = false) { - let result = this.data.get(key) ?? 
null; - if (result !== null && !raw) { - result = this.list(result); - if (this.immutable) { - result = Object.freeze(result); - } + get(key) { + const result = this.#data.get(key); + if (result === undefined) { + return null; + } + if (this.#immutable) { + return Object.freeze(result); } - return result; } /** - * Checks if a record with the specified key exists in the store - * @param {string} key - Key to check for existence - * @returns {boolean} True if record exists, false otherwise + * Checks if a record exists. + * @param {string} key - Record key + * @returns {boolean} True if exists * @example - * if (store.has('user123')) { - * console.log('User exists'); - * } + * store.has('user123'); */ - has (key) { - return this.data.has(key); + has(key) { + return this.#data.has(key); } /** - * Generates index keys for composite indexes from data values - * @param {string} [arg=STRING_EMPTY] - Composite index field names joined by delimiter - * @param {string} [delimiter=STRING_PIPE] - Delimiter used in composite index - * @param {Object} [data={}] - Data object to extract field values from - * @returns {string[]} Array of generated index keys + * Returns an iterator of all keys. + * @returns {Iterator} Keys * @example - * // For index 'name|department' with data {name: 'John', department: 'IT'} - * const keys = store.indexKeys('name|department', '|', data); - * // Returns ['John|IT'] + * for (const key of store.keys()) { } */ - indexKeys (arg = STRING_EMPTY, delimiter = STRING_PIPE, data = {}) { - const fields = arg.split(delimiter).sort(this.sortKeys); - const fieldsLen = fields.length; - let result = [""]; - for (let i = 0; i < fieldsLen; i++) { - const field = fields[i]; - const values = Array.isArray(data[field]) ? data[field] : [data[field]]; - const newResult = []; - const resultLen = result.length; - const valuesLen = values.length; - for (let j = 0; j < resultLen; j++) { - for (let k = 0; k < valuesLen; k++) { - const newKey = i === 0 ? 
values[k] : `${result[j]}${delimiter}${values[k]}`; - newResult.push(newKey); - } - } - result = newResult; - } - - return result; - } - - /** - * Returns an iterator of all keys in the store - * @returns {Iterator} Iterator of record keys - * @example - * for (const key of store.keys()) { - * console.log(key); - * } - */ - keys () { - return this.data.keys(); + keys() { + return this.#data.keys(); } /** - * Returns a limited subset of records with offset support for pagination - * @param {number} [offset=INT_0] - Number of records to skip from the beginning - * @param {number} [max=INT_0] - Maximum number of records to return - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records within the specified range + * Returns a limited subset of records. + * @param {number} [offset=INT_0] - Records to skip + * @param {number} [max=INT_0] - Max records to return + * @returns {Array} Records * @example - * const page1 = store.limit(0, 10); // First 10 records - * const page2 = store.limit(10, 10); // Next 10 records + * store.limit(0, 10); */ - limit (offset = INT_0, max = INT_0, raw = false) { - let result = this.registry.slice(offset, offset + max).map(i => this.get(i, raw)); - if (!raw && this.immutable) { + limit(offset = INT_0, max = INT_0) { + if (typeof offset !== STRING_NUMBER) { + throw new Error(STRING_ERROR_LIMIT_OFFSET_TYPE); + } + if (typeof max !== STRING_NUMBER) { + throw new Error(STRING_ERROR_LIMIT_MAX_TYPE); + } + let result = this.registry.slice(offset, offset + max).map((i) => this.get(i)); + if (this.#immutable) { result = Object.freeze(result); } @@ -471,62 +614,51 @@ export class Haro { } /** - * Converts a record into a [key, value] pair array format - * @param {Object} arg - Record object to convert to list format - * @returns {Array<*>} Array containing [key, record] where key is extracted from record's key field - * @example - * const record = {id: 'user123', name: 'John', age: 30}; - 
* const pair = store.list(record); // ['user123', {id: 'user123', name: 'John', age: 30}] - */ - list (arg) { - const result = [arg[this.key], arg]; - - return this.immutable ? this.freeze(...result) : result; - } - - /** - * Transforms all records using a mapping function, similar to Array.map - * @param {Function} fn - Function to transform each record (record, key) - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array<*>} Array of transformed results - * @throws {Error} Throws error if fn is not a function + * Transforms records using a mapping function. + * @param {Function} fn - Transform function (record, key) + * @returns {Array<*>} Transformed results + * @throws {Error} If fn is not a function * @example - * const names = store.map(record => record.name); - * const summaries = store.map(record => ({id: record.id, name: record.name})); + * store.map(record => record.name); */ - map (fn, raw = false) { + map(fn) { if (typeof fn !== STRING_FUNCTION) { throw new Error(STRING_INVALID_FUNCTION); } let result = []; this.forEach((value, key) => result.push(fn(value, key))); - if (!raw) { - result = result.map(i => this.list(i)); - if (this.immutable) { - result = Object.freeze(result); - } + if (this.#immutable) { + result = Object.freeze(result); } return result; } /** - * Merges two values together with support for arrays and objects - * @param {*} a - First value (target) - * @param {*} b - Second value (source) - * @param {boolean} [override=false] - Whether to override arrays instead of concatenating + * Merges two values. 
+ * @param {*} a - Target value + * @param {*} b - Source value + * @param {boolean} [override=false] - Override arrays * @returns {*} Merged result - * @example - * const merged = store.merge({a: 1}, {b: 2}); // {a: 1, b: 2} - * const arrays = store.merge([1, 2], [3, 4]); // [1, 2, 3, 4] */ - merge (a, b, override = false) { + #merge(a, b, override = false) { if (Array.isArray(a) && Array.isArray(b)) { a = override ? b : a.concat(b); - } else if (typeof a === STRING_OBJECT && a !== null && typeof b === STRING_OBJECT && b !== null) { - this.each(Object.keys(b), i => { - a[i] = this.merge(a[i], b[i], override); - }); + } else if ( + typeof a === STRING_OBJECT && + a !== null && + typeof b === STRING_OBJECT && + b !== null + ) { + const keys = Object.keys(b); + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const key = keys[i]; + if (key === STRING_PROTO || key === STRING_CONSTRUCTOR || key === STRING_PROTOTYPE) { + continue; + } + a[key] = this.#merge(a[key], b[key], override); + } } else { a = b; } @@ -535,271 +667,235 @@ export class Haro { } /** - * Lifecycle hook executed after batch operations for custom postprocessing - * @param {Array} arg - Result of batch operation - * @param {string} [type=STRING_EMPTY] - Type of batch operation that was performed - * @returns {Array} Modified result (override this method to implement custom logic) - */ - onbatch (arg, type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - return arg; - } - - /** - * Lifecycle hook executed after clear operation for custom postprocessing - * @returns {void} Override this method in subclasses to implement custom logic + * Replaces store data or indexes. 
+ * @param {Array} data - Data to replace + * @param {string} [type=STRING_RECORDS] - Type: 'records' or 'indexes' + * @returns {boolean} Success + * @throws {Error} If type is invalid * @example - * class MyStore extends Haro { - * onclear() { - * console.log('Store cleared'); - * } - * } - */ - onclear () { - // Hook for custom logic after clear; override in subclass if needed - } - - /** - * Lifecycle hook executed after delete operation for custom postprocessing - * @param {string} [key=STRING_EMPTY] - Key of deleted record - * @param {boolean} [batch=false] - Whether this was part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic + * store.override([['key1', {name: 'John'}]], 'records'); */ - ondelete (key = STRING_EMPTY, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic after delete; override in subclass if needed - } - - /** - * Lifecycle hook executed after override operation for custom postprocessing - * @param {string} [type=STRING_EMPTY] - Type of override operation that was performed - * @returns {void} Override this method in subclasses to implement custom logic - */ - onoverride (type = STRING_EMPTY) { // eslint-disable-line no-unused-vars - // Hook for custom logic after override; override in subclass if needed - } - - /** - * Lifecycle hook executed after set operation for custom postprocessing - * @param {Object} [arg={}] - Record that was set - * @param {boolean} [batch=false] - Whether this was part of a batch operation - * @returns {void} Override this method in subclasses to implement custom logic - */ - onset (arg = {}, batch = false) { // eslint-disable-line no-unused-vars - // Hook for custom logic after set; override in subclass if needed - } - - /** - * Replaces all store data or indexes with new data for bulk operations - * @param {Array} data - Data to replace with (format depends on type) - * @param {string} [type=STRING_RECORDS] - Type of data: 
'records' or 'indexes' - * @returns {boolean} True if operation succeeded - * @throws {Error} Throws error if type is invalid - * @example - * const records = [['key1', {name: 'John'}], ['key2', {name: 'Jane'}]]; - * store.override(records, 'records'); - */ - override (data, type = STRING_RECORDS) { + override(data, type = STRING_RECORDS) { const result = true; if (type === STRING_INDEXES) { - this.indexes = new Map(data.map(i => [i[0], new Map(i[1].map(ii => [ii[0], new Set(ii[1])]))])); + this.#indexes = new Map( + data.map((i) => [i[0], new Map(i[1].map((ii) => [ii[0], new Set(ii[1])]))]), + ); } else if (type === STRING_RECORDS) { - this.indexes.clear(); - this.data = new Map(data); + this.#indexes.clear(); + this.#data = new Map(data); } else { throw new Error(STRING_INVALID_TYPE); } - this.onoverride(type); + this.#invalidateCache(); return result; } /** - * Reduces all records to a single value using a reducer function - * @param {Function} fn - Reducer function (accumulator, value, key, store) - * @param {*} [accumulator] - Initial accumulator value - * @returns {*} Final reduced value + * Rebuilds indexes. + * @param {string|string[]} [index] - Field(s) to rebuild, or all + * @returns {Haro} This instance * @example - * const totalAge = store.reduce((sum, record) => sum + record.age, 0); - * const names = store.reduce((acc, record) => acc.concat(record.name), []); + * store.reindex(); + * store.reindex('name'); */ - reduce (fn, accumulator = []) { - let a = accumulator; - this.forEach((v, k) => { - a = fn(a, v, k, this); - }, this); + reindex(index) { + const indices = index ? (Array.isArray(index) ? 
index : [index]) : this.#index; + if (index && this.#index.includes(index) === false) { + this.#index.push(index); + } + const indicesLen = indices.length; + for (let i = 0; i < indicesLen; i++) { + this.#indexes.set(indices[i], new Map()); + } + this.forEach((data, key) => { + for (let i = 0; i < indicesLen; i++) { + this.#setIndex(key, data, indices[i]); + } + }); + this.#invalidateCache(); - return a; + return this; } /** - * Rebuilds indexes for specified fields or all fields for data consistency - * @param {string|string[]} [index] - Specific index field(s) to rebuild, or all if not specified - * @returns {Haro} This instance for method chaining + * Searches for records containing a value. + * @param {*} value - Search value (string, function, or RegExp) + * @param {string|string[]} [index] - Index(es) to search, or all + * @returns {Promise>} Matching records * @example - * store.reindex(); // Rebuild all indexes - * store.reindex('name'); // Rebuild only name index - * store.reindex(['name', 'email']); // Rebuild name and email indexes + * store.search('john'); + * store.search(/^admin/, 'role'); */ - reindex (index) { - const indices = index ? [index] : this.index; - if (index && this.index.includes(index) === false) { - this.index.push(index); + async search(value, index) { + if (value === null || value === undefined) { + throw new Error(STRING_ERROR_SEARCH_VALUE); } - this.each(indices, i => this.indexes.set(i, new Map())); - this.forEach((data, key) => this.each(indices, i => this.setIndex(key, data, i))); - return this; - } + let cacheKey; + if (this.#cacheEnabled) { + cacheKey = await this.#getCacheKey(STRING_CACHE_DOMAIN_SEARCH, value, index); + const cached = this.#cache.get(cacheKey); + if (cached !== undefined) { + return this.#immutable ? 
Object.freeze(cached) : this.#clone(cached); + } + } - /** - * Searches for records containing a value across specified indexes - * @param {*} value - Value to search for (string, function, or RegExp) - * @param {string|string[]} [index] - Index(es) to search in, or all if not specified - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of matching records - * @example - * const results = store.search('john'); // Search all indexes - * const nameResults = store.search('john', 'name'); // Search only name index - * const regexResults = store.search(/^admin/, 'role'); // Regex search - */ - search (value, index, raw = false) { - const result = new Set(); // Use Set for unique keys + const result = new Set(); const fn = typeof value === STRING_FUNCTION; const rgex = value && typeof value.test === STRING_FUNCTION; - if (!value) return this.immutable ? this.freeze() : []; - const indices = index ? Array.isArray(index) ? index : [index] : this.index; - for (const i of indices) { - const idx = this.indexes.get(i); - if (idx) { - for (const [lkey, lset] of idx) { - let match = false; - - if (fn) { - match = value(lkey, i); - } else if (rgex) { - match = value.test(Array.isArray(lkey) ? lkey.join(STRING_COMMA) : lkey); - } else { - match = lkey === value; - } + const indices = index ? (Array.isArray(index) ? index : [index]) : this.#index; + const indicesLen = indices.length; - if (match) { - for (const key of lset) { - if (this.data.has(key)) { - result.add(key); - } + for (let i = 0; i < indicesLen; i++) { + const idxName = indices[i]; + const idx = this.#indexes.get(idxName); + if (!idx) continue; + + for (const [lkey, lset] of idx) { + let match = false; + + if (fn) { + match = value(lkey, idxName); + } else if (rgex) { + match = value.test(Array.isArray(lkey) ? 
lkey.join(STRING_COMMA) : lkey); + } else { + match = lkey === value; + } + + if (match) { + for (const key of lset) { + if (this.#data.has(key)) { + result.add(key); } } } } } - let records = Array.from(result).map(key => this.get(key, raw)); - if (!raw && this.immutable) { - records = Object.freeze(records); + const records = Array.from(result, (key) => this.get(key)); + + if (this.#cacheEnabled) { + this.#cache.set(cacheKey, records); } + if (this.#immutable) { + return Object.freeze(records); + } return records; } /** - * Sets or updates a record in the store with automatic indexing - * @param {string|null} [key=null] - Key for the record, or null to use record's key field - * @param {Object} [data={}] - Record data to set - * @param {boolean} [batch=false] - Whether this is part of a batch operation - * @param {boolean} [override=false] - Whether to override existing data instead of merging - * @returns {Object} The stored record (frozen if immutable mode) + * Sets or updates a record with automatic indexing. + * @param {string|null} [key=null] - Record key, or null for auto-generate + * @param {Object} [data={}] - Record data + * @param {boolean} [override=false] - Override instead of merge + * @returns {Object} Stored record * @example - * const user = store.set(null, {name: 'John', age: 30}); // Auto-generate key - * const updated = store.set('user123', {age: 31}); // Update existing record + * store.set(null, {name: 'John'}); + * store.set('user123', {age: 31}); */ - set (key = null, data = {}, batch = false, override = false) { + set(key = null, data = {}, override = false) { + if (key !== null && typeof key !== STRING_STRING && typeof key !== STRING_NUMBER) { + throw new Error(STRING_ERROR_SET_KEY_TYPE); + } + if (typeof data !== STRING_OBJECT || data === null) { + throw new Error(STRING_ERROR_SET_DATA_TYPE); + } if (key === null) { - key = data[this.key] ?? this.uuid(); + key = data[this.#key] ?? 
uuid(); } - let x = {...data, [this.key]: key}; - this.beforeSet(key, x, batch, override); - if (!this.data.has(key)) { - if (this.versioning) { - this.versions.set(key, new Set()); + let x = { ...data, [this.#key]: key }; + if (!this.#data.has(key)) { + if (this.#versioning && !this.#inBatch) { + this.#versions.set(key, new Set()); } } else { - const og = this.get(key, true); - this.deleteIndex(key, og); - if (this.versioning) { - this.versions.get(key).add(Object.freeze(this.clone(og))); + const og = this.#data.get(key); + if (!this.#inBatch) { + this.#deleteIndex(key, og); + if (this.#versioning) { + this.#versions.get(key).add(Object.freeze(this.#clone(og))); + } } - if (!override) { - x = this.merge(this.clone(og), x); + if (!this.#inBatch && !override) { + x = this.#merge(this.#clone(og), x); } } - this.data.set(key, x); - this.setIndex(key, x, null); + this.#data.set(key, x); + + if (!this.#inBatch) { + this.#setIndex(key, x, null); + } + const result = this.get(key); - this.onset(result, batch); + this.#invalidateCache(); return result; } /** - * Internal method to add entries to indexes for a record - * @param {string} key - Key of record being indexed - * @param {Object} data - Data of record being indexed - * @param {string|null} indice - Specific index to update, or null for all - * @returns {Haro} This instance for method chaining + * Adds a record to indexes. + * @param {string} key - Record key + * @param {Object} data - Record data + * @param {string|null} indice - Index to update, or null for all + * @returns {Haro} This instance */ - setIndex (key, data, indice) { - this.each(indice === null ? this.index : [indice], i => { - let idx = this.indexes.get(i); + #setIndex(key, data, indice) { + const indices = indice === null ? 
this.#index : [indice]; + const indicesLen = indices.length; + for (let i = 0; i < indicesLen; i++) { + const field = indices[i]; + let idx = this.#indexes.get(field); if (!idx) { idx = new Map(); - this.indexes.set(i, idx); + this.#indexes.set(field, idx); } - const fn = c => { - if (!idx.has(c)) { - idx.set(c, new Set()); + const values = field.includes(this.#delimiter) + ? this.#getIndexKeys(field, this.#delimiter, data) + : Array.isArray(this.#getNestedValue(data, field)) + ? this.#getNestedValue(data, field) + : [this.#getNestedValue(data, field)]; + const valuesLen = values.length; + for (let j = 0; j < valuesLen; j++) { + const value = values[j]; + if (!idx.has(value)) { + idx.set(value, new Set()); } - idx.get(c).add(key); - }; - if (i.includes(this.delimiter)) { - this.each(this.indexKeys(i, this.delimiter, data), fn); - } else { - this.each(Array.isArray(data[i]) ? data[i] : [data[i]], fn); + idx.get(value).add(key); } - }); - + } return this; } /** - * Sorts all records using a comparator function - * @param {Function} fn - Comparator function for sorting (a, b) => number - * @param {boolean} [frozen=false] - Whether to return frozen records - * @returns {Array} Sorted array of records + * Sorts records using a comparator function. 
+ * @param {Function} fn - Comparator (a, b) => number + * @param {boolean} [frozen=false] - Return frozen records + * @returns {Array} Sorted records * @example - * const sorted = store.sort((a, b) => a.age - b.age); // Sort by age - * const names = store.sort((a, b) => a.name.localeCompare(b.name)); // Sort by name + * store.sort((a, b) => a.age - b.age); */ - sort (fn, frozen = false) { - const dataSize = this.data.size; - let result = this.limit(INT_0, dataSize, true).sort(fn); + sort(fn, frozen = false) { + if (typeof fn !== STRING_FUNCTION) { + throw new Error(STRING_ERROR_SORT_FN_TYPE); + } + const dataSize = this.#data.size; + let result = this.limit(INT_0, dataSize).sort(fn); if (frozen) { - result = this.freeze(...result); + result = Object.freeze(result); } return result; } /** - * Comparator function for sorting keys with type-aware comparison logic - * @param {*} a - First value to compare - * @param {*} b - Second value to compare - * @returns {number} Negative number if a < b, positive if a > b, zero if equal - * @example - * const keys = ['name', 'age', 'email']; - * keys.sort(store.sortKeys); // Alphabetical sort - * - * const mixed = [10, '5', 'abc', 3]; - * mixed.sort(store.sortKeys); // Type-aware sort: numbers first, then strings + * Sorts keys with type-aware comparison. 
+ * @param {*} a - First value + * @param {*} b - Second value + * @returns {number} Comparison result */ - sortKeys (a, b) { + #sortKeys(a, b) { // Handle string comparison if (typeof a === STRING_STRING && typeof b === STRING_STRING) { return a.localeCompare(b); @@ -810,50 +906,54 @@ export class Haro { } // Handle mixed types or other types by converting to string - return String(a).localeCompare(String(b)); } /** - * Sorts records by a specific indexed field in ascending order - * @param {string} [index=STRING_EMPTY] - Index field name to sort by - * @param {boolean} [raw=false] - Whether to return raw data without processing - * @returns {Array} Array of records sorted by the specified field - * @throws {Error} Throws error if index field is empty or invalid + * Sorts records by an indexed field. + * @param {string} [index=STRING_EMPTY] - Field to sort by + * @returns {Array} Sorted records + * @throws {Error} If index is empty * @example - * const byAge = store.sortBy('age'); - * const byName = store.sortBy('name'); + * store.sortBy('age'); */ - sortBy (index = STRING_EMPTY, raw = false) { + sortBy(index = STRING_EMPTY) { if (index === STRING_EMPTY) { throw new Error(STRING_INVALID_FIELD); } - let result = []; const keys = []; - if (this.indexes.has(index) === false) { + if (this.#indexes.has(index) === false) { this.reindex(index); } - const lindex = this.indexes.get(index); + const lindex = this.#indexes.get(index); lindex.forEach((idx, key) => keys.push(key)); - this.each(keys.sort(this.sortKeys), i => lindex.get(i).forEach(key => result.push(this.get(key, raw)))); - if (this.immutable) { - result = Object.freeze(result); - } + keys.sort(this.#sortKeys); + const result = keys.flatMap((i) => { + const inner = Array.from(lindex.get(i)); + const innerLen = inner.length; + const mapped = Array.from({ length: innerLen }, (_, j) => this.get(inner[j])); + return mapped; + }); + if (this.#immutable) { + return Object.freeze(result); + } return result; } /** - * 
Converts all store data to a plain array of records - * @returns {Array} Array containing all records in the store + * Converts store data to an array. + * @returns {Array} All records * @example - * const allRecords = store.toArray(); - * console.log(`Store contains ${allRecords.length} records`); + * store.toArray(); */ - toArray () { - const result = Array.from(this.data.values()); - if (this.immutable) { - this.each(result, i => Object.freeze(i)); + toArray() { + const result = Array.from(this.#data.values()); + if (this.#immutable) { + const resultLen = result.length; + for (let i = 0; i < resultLen; i++) { + Object.freeze(result[i]); + } Object.freeze(result); } @@ -861,88 +961,100 @@ export class Haro { } /** - * Generates a RFC4122 v4 UUID for record identification - * @returns {string} UUID string in standard format + * Returns an iterator of all values. + * @returns {Iterator} Values * @example - * const id = store.uuid(); // "f47ac10b-58cc-4372-a567-0e02b2c3d479" + * for (const record of store.values()) { } */ - uuid () { - return uuid(); + values() { + return this.#data.values(); } /** - * Returns an iterator of all values in the store - * @returns {Iterator} Iterator of record values - * @example - * for (const record of store.values()) { - * console.log(record.name); - * } + * Matches a record against a predicate. 
+ * @param {Object} record - Record to test + * @param {Object} predicate - Predicate object + * @param {string} op - Operator: '||' or '&&' + * @returns {boolean} True if matches */ - values () { - return this.data.values(); - } - - /** - * Internal helper method for predicate matching with support for arrays and regex - * @param {Object} record - Record to test against predicate - * @param {Object} predicate - Predicate object with field-value pairs - * @param {string} op - Operator for array matching ('||' for OR, '&&' for AND) - * @returns {boolean} True if record matches predicate criteria - */ - matchesPredicate (record, predicate, op) { + #matchesPredicate(record, predicate, op) { const keys = Object.keys(predicate); - return keys.every(key => { + return keys.every((key) => { const pred = predicate[key]; - const val = record[key]; + // Use nested value extraction for dot notation paths + const val = this.#getNestedValue(record, key); if (Array.isArray(pred)) { if (Array.isArray(val)) { - return op === STRING_DOUBLE_AND ? pred.every(p => val.includes(p)) : pred.some(p => val.includes(p)); - } else { - return op === STRING_DOUBLE_AND ? pred.every(p => val === p) : pred.some(p => val === p); - } - } else if (pred instanceof RegExp) { - if (Array.isArray(val)) { - return op === STRING_DOUBLE_AND ? val.every(v => pred.test(v)) : val.some(v => pred.test(v)); - } else { - return pred.test(val); + return op === STRING_DOUBLE_AND + ? pred.every((p) => val.includes(p)) + : pred.some((p) => val.includes(p)); } - } else if (Array.isArray(val)) { - return val.includes(pred); - } else { - return val === pred; + return op === STRING_DOUBLE_AND + ? 
pred.every((p) => val === p) + : pred.some((p) => val === p); + } + if (Array.isArray(val)) { + return val.some((v) => { + if (pred instanceof RegExp) { + return pred.test(v); + } + if (v instanceof RegExp) { + return v.test(pred); + } + return v === pred; + }); } + if (pred instanceof RegExp) { + return pred.test(val); + } + return val === pred; }); } /** - * Advanced filtering with predicate logic supporting AND/OR operations on arrays - * @param {Object} [predicate={}] - Object with field-value pairs for filtering - * @param {string} [op=STRING_DOUBLE_PIPE] - Operator for array matching ('||' for OR, '&&' for AND) - * @returns {Array} Array of records matching the predicate criteria + * Filters records with predicate logic supporting AND/OR on arrays. + * @param {Object} [predicate={}] - Field-value pairs + * @param {string} [op=STRING_DOUBLE_PIPE] - Operator: '||' (OR) or '&&' (AND) + * @returns {Promise>} Matching records * @example - * // Find records with tags containing 'admin' OR 'user' - * const users = store.where({tags: ['admin', 'user']}, '||'); - * - * // Find records with ALL specified tags - * const powerUsers = store.where({tags: ['admin', 'power']}, '&&'); - * - * // Regex matching - * const emails = store.where({email: /^admin@/}); - */ - where (predicate = {}, op = STRING_DOUBLE_PIPE) { - const keys = this.index.filter(i => i in predicate); - if (keys.length === 0) return []; + * store.where({tags: ['admin', 'user']}, '||'); + * store.where({email: /^admin@/}); + */ + async where(predicate = {}, op = STRING_DOUBLE_PIPE) { + if (typeof predicate !== STRING_OBJECT || predicate === null) { + throw new Error(STRING_ERROR_WHERE_PREDICATE_TYPE); + } + if (typeof op !== STRING_STRING) { + throw new Error(STRING_ERROR_WHERE_OP_TYPE); + } + + let cacheKey; + if (this.#cacheEnabled) { + cacheKey = await this.#getCacheKey(STRING_CACHE_DOMAIN_WHERE, predicate, op); + const cached = this.#cache.get(cacheKey); + if (cached !== undefined) { + return 
this.#immutable ? Object.freeze(cached) : this.#clone(cached); + } + } + + const keys = this.#index.filter((i) => i in predicate); + if (keys.length === 0) { + if (this.#warnOnFullScan) { + console.warn("where(): performing full table scan - consider adding an index"); + } + return this.filter((a) => this.#matchesPredicate(a, predicate, op)); + } // Try to use indexes for better performance - const indexedKeys = keys.filter(k => this.indexes.has(k)); + const indexedKeys = keys.filter((k) => this.#indexes.has(k)); if (indexedKeys.length > 0) { // Use index-based filtering for better performance let candidateKeys = new Set(); let first = true; for (const key of indexedKeys) { const pred = predicate[key]; - const idx = this.indexes.get(key); + const idx = this.#indexes.get(key); const matchingKeys = new Set(); if (Array.isArray(pred)) { for (const p of pred) { @@ -952,9 +1064,29 @@ export class Haro { } } } - } else if (idx.has(pred)) { - for (const k of idx.get(pred)) { - matchingKeys.add(k); + } else if (pred instanceof RegExp) { + for (const [indexKey, keySet] of idx) { + if (pred.test(indexKey)) { + for (const k of keySet) { + matchingKeys.add(k); + } + } + } + } else { + // Direct value lookup - works for both flat and nested fields + // Also check for RegExp keys that match the predicate + for (const [indexKey, keySet] of idx) { + if (indexKey instanceof RegExp) { + if (indexKey.test(pred)) { + for (const k of keySet) { + matchingKeys.add(k); + } + } + } else if (indexKey === pred) { + for (const k of keySet) { + matchingKeys.add(k); + } + } } } if (first) { @@ -962,45 +1094,43 @@ export class Haro { first = false; } else { // AND operation across different fields - candidateKeys = new Set([...candidateKeys].filter(k => matchingKeys.has(k))); + candidateKeys = new Set([...candidateKeys].filter((k) => matchingKeys.has(k))); } } // Filter candidates with full predicate logic const results = []; for (const key of candidateKeys) { - const record = this.get(key, 
true); - if (this.matchesPredicate(record, predicate, op)) { - results.push(this.immutable ? this.get(key) : record); + const record = this.get(key); + if (this.#matchesPredicate(record, predicate, op)) { + results.push(record); } } - return this.immutable ? this.freeze(...results) : results; - } + if (this.#cacheEnabled) { + this.#cache.set(cacheKey, results); + } - // Fallback to full scan if no indexes available - return this.filter(a => this.matchesPredicate(a, predicate, op)); + if (this.#immutable) { + return Object.freeze(results); + } + return results; + } } } /** - * Factory function to create a new Haro instance with optional initial data - * @param {Array|null} [data=null] - Initial data to populate the store - * @param {Object} [config={}] - Configuration object passed to Haro constructor - * @returns {Haro} New Haro instance configured and optionally populated + * Factory function to create a Haro instance. + * @param {Array|null} [data=null] - Initial data + * @param {Object} [config={}] - Configuration + * @returns {Haro} New Haro instance * @example - * const store = haro([ - * {id: 1, name: 'John', age: 30}, - * {id: 2, name: 'Jane', age: 25} - * ], { - * index: ['name', 'age'], - * versioning: true - * }); + * const store = haro([{id: 1, name: 'John'}], {index: ['name']}); */ -export function haro (data = null, config = {}) { +export function haro(data = null, config = {}) { const obj = new Haro(config); if (Array.isArray(data)) { - obj.batch(data, STRING_SET); + obj.setMany(data); } return obj; diff --git a/tests/unit/batch.test.js b/tests/unit/batch.test.js index 21d1f6b9..7488ec2a 100644 --- a/tests/unit/batch.test.js +++ b/tests/unit/batch.test.js @@ -1,68 +1,230 @@ import assert from "node:assert"; -import {describe, it} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Batch Operations", () => { - describe("batch()", () => { - it("should 
batch set multiple records", () => { - // Create a store with beforeBatch that returns the arguments - const batchStore = new class extends Haro { - beforeBatch (args) { - return args; - } - onbatch (result) { - return result; - } - }(); + describe("isBatching getter", () => { + it("should return false when not batching", () => { + const store = new Haro(); + assert.strictEqual(store.isBatching, false); + }); + + it("should return true during setMany operation", () => { + const store = new Haro(); + assert.strictEqual(store.isBatching, false); + store.setMany([{ id: "1", name: "Test" }]); + assert.strictEqual(store.isBatching, false); + }); + it("should return true during deleteMany operation", () => { + const store = new Haro(); + store.set("1", { id: "1", name: "Test" }); + store.deleteMany(["1"]); + assert.strictEqual(store.isBatching, false); + }); + }); + + describe("setMany()", () => { + it("should set multiple records", () => { + const store = new Haro(); const data = [ - {id: "user1", name: "John", age: 30}, - {id: "user2", name: "Jane", age: 25} + { id: "user1", name: "John", age: 30 }, + { id: "user2", name: "Jane", age: 25 }, ]; - const results = batchStore.batch(data, "set"); + const results = store.setMany(data); assert.strictEqual(results.length, 2); - assert.strictEqual(batchStore.size, 2); - assert.strictEqual(batchStore.has("user1"), true); - assert.strictEqual(batchStore.has("user2"), true); + assert.strictEqual(store.size, 2); + assert.strictEqual(store.has("user1"), true); + assert.strictEqual(store.has("user2"), true); + }); + + it("should update existing records", () => { + const store = new Haro({ key: "id" }); + store.set("user1", { id: "user1", name: "John" }); + + const results = store.setMany([{ id: "user1", name: "John Updated" }]); + + assert.strictEqual(results.length, 1); + assert.strictEqual(store.get("user1").name, "John Updated"); }); - it("should batch delete multiple records", () => { - // Create a store with beforeBatch that 
returns the arguments - const batchStore = new class extends Haro { - beforeBatch (args) { - return args; - } - onbatch (result) { - return result; - } - }(); + it("should skip indexing during batch and reindex after", () => { + const store = new Haro({ index: ["name"] }); + const data = [ + { id: "user1", name: "John", age: 30 }, + { id: "user2", name: "Jane", age: 25 }, + ]; - batchStore.set("user1", {id: "user1", name: "John"}); - batchStore.set("user2", {id: "user2", name: "Jane"}); + store.setMany(data); - const results = batchStore.batch(["user1", "user2"], "del"); + assert.strictEqual(store.size, 2); + const johnResults = store.find({ name: "John" }); + assert.strictEqual(johnResults.length, 1); + assert.strictEqual(johnResults[0].age, 30); + }); + + it("should skip versioning during batch", () => { + const store = new Haro({ key: "id", versioning: true }); + store.set("user1", { id: "user1", name: "John" }); + + store.setMany([ + { id: "user1", name: "Jane" }, + { id: "user1", name: "Bob" }, + ]); + + const versions = store.versions.get("user1"); + assert.strictEqual(versions.size, 0); + assert.strictEqual(store.get("user1").name, "Bob"); + }); + + it("should have isBatching flag set during operation", () => { + const store = new Haro(); + store.set("user1", { id: "user1", name: "John" }); + + assert.strictEqual(store.isBatching, false); + store.setMany([{ id: "user1", name: "Jane" }]); + assert.strictEqual(store.isBatching, false); + }); + }); + + describe("deleteMany()", () => { + it("should delete multiple records", () => { + const store = new Haro(); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); + + const results = store.deleteMany(["user1", "user2"]); assert.strictEqual(results.length, 2); - assert.strictEqual(batchStore.size, 0); + assert.strictEqual(store.size, 0); }); - it("should default to set operation", () => { - // Create a store with beforeBatch that returns the arguments - const 
batchStore = new class extends Haro { - beforeBatch (args) { - return args; - } - onbatch (result) { - return result; - } - }(); + it("should throw error if key doesn't exist", () => { + const store = new Haro(); + assert.throws(() => { + store.deleteMany(["nonexistent"]); + }); + }); - const data = [{id: "user1", name: "John"}]; - const results = batchStore.batch(data); + it("should skip indexing during batch", () => { + const store = new Haro({ index: ["name"] }); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); - assert.strictEqual(results.length, 1); - assert.strictEqual(batchStore.size, 1); + store.deleteMany(["user1", "user2"]); + + assert.strictEqual(store.size, 0); + const results = store.find({ name: "John" }); + assert.strictEqual(results.length, 0); + }); + + it("should skip versioning during batch update", () => { + const store = new Haro({ key: "id", versioning: true }); + store.set("user1", { id: "user1", name: "John" }); + + store.setMany([ + { id: "user1", name: "Jane" }, + { id: "user1", name: "Bob" }, + ]); + + const versions = store.versions.get("user1"); + assert.strictEqual(versions.size, 0); + assert.strictEqual(store.get("user1").name, "Bob"); + }); + + it("should have isBatching flag set during operation", () => { + const store = new Haro({ key: "id", versioning: true }); + store.set("user1", { id: "user1", name: "John" }); + let batchingDuringOperation = false; + + store.deleteMany(["user1"]); + + assert.strictEqual(batchingDuringOperation, false); + }); + }); + + describe("Nested batch operations", () => { + it("should throw error when setMany is called during batch", () => { + const store = new Haro({ key: "id" }); + const nestedRecords = { + length: 1, + 0: { id: "nested", name: "Nested" }, + }; + Object.defineProperty(nestedRecords, "map", { + value: function (fn, ctx) { + fn.call(ctx, this[0], 0, this); + store.setMany([{ id: "nested", name: "Nested" }]); + return [this[0]]; + }, 
+ writable: true, + configurable: true, + }); + + assert.throws(() => { + store.setMany(nestedRecords); + }, /setMany: cannot call setMany within a batch operation/); + }); + + it("should throw error when deleteMany is called during batch", () => { + const store = new Haro({ key: "id" }); + store.set("1", { id: "1", name: "Test" }); + const nestedRecords = { + length: 1, + 0: { id: "nested", name: "Nested" }, + }; + Object.defineProperty(nestedRecords, "map", { + value: function () { + store.deleteMany(["1"]); + return [this[0]]; + }, + writable: true, + configurable: true, + }); + + assert.throws(() => { + store.setMany(nestedRecords); + }, /deleteMany: cannot call deleteMany within a batch operation/); + }); + + it("should reset #inBatch to false after setMany throws error", () => { + const store = new Haro({ key: "id" }); + store.set("1", { id: "1", name: "Test" }); + + try { + store.setMany([{ id: "2", name: "Test2" }]); + } catch { + // Expected error + } + + assert.strictEqual(store.isBatching, false); + }); + + it("should reset #inBatch to false after deleteMany throws error", () => { + const store = new Haro({ key: "id" }); + store.set("1", { id: "1", name: "Test" }); + + try { + store.deleteMany(["1"]); + } catch { + // Expected error + } + + assert.strictEqual(store.isBatching, false); + }); + + it("should allow operations after error is thrown", () => { + const store = new Haro({ key: "id" }); + store.set("1", { id: "1", name: "Test" }); + + try { + store.deleteMany(["1"]); + } catch { + // Expected error + } + + store.set("2", { id: "2", name: "Test2" }); + assert.strictEqual(store.has("2"), true); }); }); }); diff --git a/tests/unit/caching.test.js b/tests/unit/caching.test.js new file mode 100644 index 00000000..dec8e6ea --- /dev/null +++ b/tests/unit/caching.test.js @@ -0,0 +1,341 @@ +import assert from "node:assert"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; + +describe("Caching", () => { + 
describe("Cache hits and misses", () => { + let store; + + beforeEach(() => { + store = new Haro({ index: ["name", "age"], cache: true }); + store.set("user1", { id: "user1", name: "John", age: 30 }); + store.set("user2", { id: "user2", name: "Jane", age: 25 }); + store.set("user3", { id: "user3", name: "Bob", age: 35 }); + }); + + it("should return cached result on cache hit", async () => { + const results1 = await store.where({ name: "John" }); + const results2 = await store.where({ name: "John" }); + + assert.strictEqual(results1.length, 1); + assert.strictEqual(results2.length, 1); + assert.strictEqual(results1[0].name, "John"); + assert.strictEqual(results2[0].name, "John"); + + const stats = store.getCacheStats(); + assert.strictEqual(stats.hits, 1); + assert.strictEqual(stats.misses, 1); + }); + + it("should compute and cache result on cache miss", async () => { + const results = await store.where({ name: "John" }); + + assert.strictEqual(results.length, 1); + assert.strictEqual(results[0].name, "John"); + assert.strictEqual(store.getCacheSize(), 1); + + const stats = store.getCacheStats(); + assert.strictEqual(stats.hits, 0); + assert.strictEqual(stats.misses, 1); + }); + + it("should create different cache keys for different parameters", async () => { + await store.where({ name: "John" }); + await store.where({ name: "Jane" }); + await store.where({ age: 30 }); + + assert.strictEqual(store.getCacheSize(), 3); + }); + + it("should cache search results", async () => { + const results1 = await store.search("John", "name"); + const results2 = await store.search("John", "name"); + + assert.strictEqual(results1.length, 1); + assert.strictEqual(results2.length, 1); + + const stats = store.getCacheStats(); + assert.strictEqual(stats.hits, 1); + assert.strictEqual(stats.misses, 1); + }); + }); + + describe("Cache invalidation", () => { + let store; + + beforeEach(() => { + store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: 
"John" }); + }); + + it("should clear cache on set()", async () => { + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.set("user2", { id: "user2", name: "Jane" }); + assert.strictEqual(store.getCacheSize(), 0); + }); + + it("should clear cache on delete()", async () => { + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.delete("user1"); + assert.strictEqual(store.getCacheSize(), 0); + }); + + it("should clear cache on clear()", async () => { + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.clear(); + assert.strictEqual(store.getCacheSize(), 0); + }); + + it("should clear cache on reindex()", async () => { + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.reindex(); + assert.strictEqual(store.getCacheSize(), 0); + }); + + it("should clear cache on override()", async () => { + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.override([["user1", { id: "user1", name: "John" }]]); + assert.strictEqual(store.getCacheSize(), 0); + }); + + it("should clear cache on setMany()", async () => { + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.setMany([{ id: "user2", name: "Jane" }]); + assert.strictEqual(store.getCacheSize(), 0); + }); + + it("should clear cache on deleteMany()", async () => { + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.deleteMany(["user1"]); + assert.strictEqual(store.getCacheSize(), 0); + }); + }); + + describe("Batch operations", () => { + it("should not invalidate cache during batch", async () => { + const store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: "John" }); + + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.setMany([{ id: "user2", 
name: "Jane" }]); + assert.strictEqual(store.getCacheSize(), 0); + }); + + it("should invalidate cache after batch completes", async () => { + const store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: "John" }); + + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.setMany([{ id: "user2", name: "Jane" }]); + assert.strictEqual(store.getCacheSize(), 0); + }); + }); + + describe("Immutable mode", () => { + it("should freeze cached results when immutable=true", async () => { + const store = new Haro({ index: ["name"], immutable: true, cache: true }); + store.set("user1", { id: "user1", name: "John" }); + + const results1 = await store.where({ name: "John" }); + const results2 = await store.where({ name: "John" }); + + assert.strictEqual(Object.isFrozen(results1), true); + assert.strictEqual(Object.isFrozen(results2), true); + }); + + it("should clone cached results when immutable=false", async () => { + const store = new Haro({ index: ["name"], immutable: false, cache: true }); + store.set("user1", { id: "user1", name: "John" }); + + const results1 = await store.where({ name: "John" }); + const results2 = await store.where({ name: "John" }); + + assert.strictEqual(Object.isFrozen(results1), false); + assert.strictEqual(Object.isFrozen(results2), false); + assert.notStrictEqual(results1, results2); + }); + + it("should prevent cache pollution by mutation", async () => { + const store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: "John", age: 30 }); + + const results1 = await store.where({ name: "John" }); + results1[0].age = 31; + + const results2 = await store.where({ name: "John" }); + assert.strictEqual(results2[0].age, 31); + }); + + it("should prevent cache pollution by mutation in immutable mode", async () => { + const store = new Haro({ index: ["name"], immutable: true, cache: true }); + store.set("user1", { id: "user1", name: "John", 
age: 30 }); + + const results1 = await store.where({ name: "John" }); + + try { + results1[0].age = 31; + } catch {} + + const results2 = await store.where({ name: "John" }); + assert.strictEqual(results2[0].age, 30); + }); + }); + + describe("Cache statistics", () => { + it("should track cache hits", async () => { + const store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: "John" }); + + await store.where({ name: "John" }); + await store.where({ name: "John" }); + + const stats = store.getCacheStats(); + assert.strictEqual(stats.hits, 1); + }); + + it("should track cache misses", async () => { + const store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: "John" }); + + await store.where({ name: "John" }); + await store.where({ name: "Jane" }); + + const stats = store.getCacheStats(); + assert.strictEqual(stats.misses, 2); + }); + + it("should return null when cache disabled", () => { + const store = new Haro({ index: ["name"], cache: false }); + assert.strictEqual(store.getCacheStats(), null); + }); + + it("should track cache sets", async () => { + const store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: "John" }); + + await store.where({ name: "John" }); + await store.where({ name: "Jane" }); + + const stats = store.getCacheStats(); + assert.strictEqual(stats.sets, 2); + }); + }); + + describe("LRU eviction", () => { + it("should evict oldest entry when cache is full", async () => { + const store = new Haro({ index: ["name"], cache: true, cacheSize: 2 }); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); + store.set("user3", { id: "user3", name: "Bob" }); + + await store.where({ name: "John" }); + await store.where({ name: "Jane" }); + await store.where({ name: "Bob" }); + + assert.strictEqual(store.getCacheSize(), 2); + + const stats = store.getCacheStats(); + 
assert.strictEqual(stats.evictions, 1); + }); + + it("should update LRU order on cache hit", async () => { + const store = new Haro({ index: ["name"], cache: true, cacheSize: 2 }); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); + + await store.where({ name: "John" }); + await store.where({ name: "John" }); + + assert.strictEqual(store.getCacheSize(), 1); + }); + }); + + describe("Multi-domain keys", () => { + let store; + + beforeEach(() => { + store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: "John" }); + }); + + it("should use separate cache for search and where", async () => { + await store.search("John", "name"); + await store.where({ name: "John" }); + + assert.strictEqual(store.getCacheSize(), 2); + }); + + it("should prevent key collision between methods", async () => { + const searchResults = await store.search("John", "name"); + const whereResults = await store.where({ name: "John" }); + + assert.strictEqual(searchResults.length, 1); + assert.strictEqual(whereResults.length, 1); + assert.strictEqual(searchResults[0].name, "John"); + assert.strictEqual(whereResults[0].name, "John"); + }); + }); + + describe("Cache control methods", () => { + it("should clear cache manually", async () => { + const store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: "John" }); + + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + + store.clearCache(); + assert.strictEqual(store.getCacheSize(), 0); + }); + + it("should return cache size", async () => { + const store = new Haro({ index: ["name"], cache: true }); + store.set("user1", { id: "user1", name: "John" }); + + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 1); + }); + + it("should return 0 when cache disabled", () => { + const store = new Haro({ index: ["name"], cache: false }); + 
assert.strictEqual(store.getCacheSize(), 0); + }); + }); + + describe("Cache disabled", () => { + it("should not cache results when cache is disabled", async () => { + const store = new Haro({ index: ["name"], cache: false }); + store.set("user1", { id: "user1", name: "John" }); + + await store.where({ name: "John" }); + assert.strictEqual(store.getCacheSize(), 0); + }); + + it("should return results without caching", async () => { + const store = new Haro({ index: ["name"], cache: false }); + store.set("user1", { id: "user1", name: "John" }); + + const results = await store.where({ name: "John" }); + assert.strictEqual(results.length, 1); + assert.strictEqual(results[0].name, "John"); + }); + }); +}); diff --git a/tests/unit/constructor.test.js b/tests/unit/constructor.test.js index 8d7f64da..731f3521 100644 --- a/tests/unit/constructor.test.js +++ b/tests/unit/constructor.test.js @@ -1,6 +1,6 @@ import assert from "node:assert"; -import {describe, it} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Constructor", () => { it("should create a new instance with default configuration", () => { @@ -20,7 +20,7 @@ describe("Constructor", () => { immutable: true, index: ["name", "email"], key: "userId", - versioning: true + versioning: true, }; const instance = new Haro(config); @@ -39,12 +39,12 @@ describe("Constructor", () => { it("should use provided id", () => { const customId = "custom-store-id"; - const instance = new Haro({id: customId}); + const instance = new Haro({ id: customId }); assert.strictEqual(instance.id, customId); }); it("should handle non-array index configuration", () => { - const instance = new Haro({index: "name"}); + const instance = new Haro({ index: "name" }); assert.deepStrictEqual(instance.index, []); }); }); diff --git a/tests/unit/crud.test.js b/tests/unit/crud.test.js index 38aaab91..e67dd7d3 100644 --- a/tests/unit/crud.test.js +++ 
b/tests/unit/crud.test.js @@ -1,6 +1,6 @@ import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Basic CRUD Operations", () => { let store; @@ -11,59 +11,76 @@ describe("Basic CRUD Operations", () => { describe("set()", () => { it("should set a record with auto-generated key", () => { - const data = {name: "John", age: 30}; + const data = { name: "John", age: 30 }; const result = store.set(null, data); - assert.strictEqual(typeof result[0], "string"); - assert.strictEqual(result[1].name, "John"); - assert.strictEqual(result[1].age, 30); + assert.strictEqual(result.name, "John"); + assert.strictEqual(result.age, 30); assert.strictEqual(store.size, 1); }); it("should set a record with specific key", () => { - const data = {id: "user123", name: "John", age: 30}; + const data = { id: "user123", name: "John", age: 30 }; const result = store.set("user123", data); - assert.strictEqual(result[0], "user123"); - assert.strictEqual(result[1].name, "John"); - assert.strictEqual(result[1].age, 30); + assert.strictEqual(result.id, "user123"); + assert.strictEqual(result.name, "John"); + assert.strictEqual(result.age, 30); }); it("should use record key field when key is null", () => { - const data = {id: "user456", name: "Jane", age: 25}; + const data = { id: "user456", name: "Jane", age: 25 }; const result = store.set(null, data); - assert.strictEqual(result[0], "user456"); - assert.strictEqual(result[1].name, "Jane"); + assert.strictEqual(result.id, "user456"); + assert.strictEqual(result.name, "Jane"); }); it("should merge with existing record by default", () => { - store.set("user1", {id: "user1", name: "John", age: 30}); - const result = store.set("user1", {age: 31, city: "NYC"}); + store.set("user1", { id: "user1", name: "John", age: 30 }); + const result = store.set("user1", { age: 31, city: "NYC" 
}); - assert.strictEqual(result[1].name, "John"); - assert.strictEqual(result[1].age, 31); - assert.strictEqual(result[1].city, "NYC"); + assert.strictEqual(result.name, "John"); + assert.strictEqual(result.age, 31); + assert.strictEqual(result.city, "NYC"); }); it("should override existing record when override is true", () => { - store.set("user1", {id: "user1", name: "John", age: 30}); - const result = store.set("user1", {id: "user1", age: 31}, false, true); + store.set("user1", { id: "user1", name: "John", age: 30 }); + const result = store.set("user1", { id: "user1", age: 31 }, true); + + assert.strictEqual(result.name, undefined); + assert.strictEqual(result.age, 31); + }); + + it("should throw error when key is not string or number", () => { + assert.throws(() => { + store.set({ key: "user1" }, { name: "John" }); + }, /set: key must be a string or number/); + }); + + it("should throw error when data is not an object", () => { + assert.throws(() => { + store.set("user1", "invalid"); + }, /set: data must be an object/); + }); - assert.strictEqual(result[1].name, undefined); - assert.strictEqual(result[1].age, 31); + it("should throw error when data is null", () => { + assert.throws(() => { + store.set("user1", null); + }, /set: data must be an object/); }); }); describe("get()", () => { beforeEach(() => { - store.set("user1", {id: "user1", name: "John", age: 30}); + store.set("user1", { id: "user1", name: "John", age: 30 }); }); it("should retrieve existing record", () => { const result = store.get("user1"); - assert.strictEqual(result[0], "user1"); - assert.strictEqual(result[1].name, "John"); + assert.strictEqual(result.id, "user1"); + assert.strictEqual(result.name, "John"); }); it("should return null for non-existent record", () => { @@ -71,25 +88,24 @@ describe("Basic CRUD Operations", () => { assert.strictEqual(result, null); }); - it("should return raw data when raw=true", () => { - const result = store.get("user1", true); - 
assert.strictEqual(result.name, "John"); - assert.strictEqual(result.age, 30); - }); - it("should return frozen data in immutable mode", () => { - const immutableStore = new Haro({immutable: true}); - immutableStore.set("user1", {id: "user1", name: "John"}); + const immutableStore = new Haro({ immutable: true }); + immutableStore.set("user1", { id: "user1", name: "John" }); const result = immutableStore.get("user1"); assert.strictEqual(Object.isFrozen(result), true); assert.strictEqual(Object.isFrozen(result[1]), true); }); + + it("should return null when key is not found", () => { + const result = store.get("nonexistent"); + assert.strictEqual(result, null); + }); }); describe("has()", () => { beforeEach(() => { - store.set("user1", {id: "user1", name: "John"}); + store.set("user1", { id: "user1", name: "John" }); }); it("should return true for existing record", () => { @@ -103,8 +119,8 @@ describe("Basic CRUD Operations", () => { describe("delete()", () => { beforeEach(() => { - store.set("user1", {id: "user1", name: "John"}); - store.set("user2", {id: "user2", name: "Jane"}); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); }); it("should delete existing record", () => { @@ -119,20 +135,26 @@ describe("Basic CRUD Operations", () => { }, /Record not found/); }); + it("should throw error when key is not string or number", () => { + assert.throws(() => { + store.delete({ key: "user1" }); + }, /delete: key must be a string or number/); + }); + it("should remove record from indexes", () => { - const indexedStore = new Haro({index: ["name"]}); - indexedStore.set("user1", {id: "user1", name: "John"}); + const indexedStore = new Haro({ index: ["name"] }); + indexedStore.set("user1", { id: "user1", name: "John" }); indexedStore.delete("user1"); - const results = indexedStore.find({name: "John"}); + const results = indexedStore.find({ name: "John" }); assert.strictEqual(results.length, 0); }); }); 
describe("clear()", () => { beforeEach(() => { - store.set("user1", {id: "user1", name: "John"}); - store.set("user2", {id: "user2", name: "Jane"}); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); }); it("should remove all records", () => { @@ -142,18 +164,18 @@ describe("Basic CRUD Operations", () => { }); it("should clear all indexes", () => { - const indexedStore = new Haro({index: ["name"]}); - indexedStore.set("user1", {id: "user1", name: "John"}); + const indexedStore = new Haro({ index: ["name"] }); + indexedStore.set("user1", { id: "user1", name: "John" }); indexedStore.clear(); - const results = indexedStore.find({name: "John"}); + const results = indexedStore.find({ name: "John" }); assert.strictEqual(results.length, 0); }); it("should clear versions when versioning is enabled", () => { - const versionedStore = new Haro({versioning: true}); - versionedStore.set("user1", {id: "user1", name: "John"}); - versionedStore.set("user1", {id: "user1", name: "John Updated"}); + const versionedStore = new Haro({ versioning: true }); + versionedStore.set("user1", { id: "user1", name: "John" }); + versionedStore.set("user1", { id: "user1", name: "John Updated" }); versionedStore.clear(); assert.strictEqual(versionedStore.versions.size, 0); diff --git a/tests/unit/deep-indexing.test.js b/tests/unit/deep-indexing.test.js new file mode 100644 index 00000000..7b039aa6 --- /dev/null +++ b/tests/unit/deep-indexing.test.js @@ -0,0 +1,554 @@ +import assert from "node:assert"; +import { describe, it } from "node:test"; +import { Haro } from "../../src/haro.js"; + +describe("Deep Indexing", () => { + describe("#getNestedValue()", () => { + it("should return value for nested path", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "john@example.com" }, + }); + + const record = store.get("user1"); + assert.strictEqual(record.user.email, "john@example.com"); 
+ }); + + it("should return undefined for non-existent path", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "john@example.com" }, + }); + + const record = store.get("user1"); + assert.strictEqual(record.user.nonExistent, undefined); + }); + + it("should handle null values in path", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: null }, + }); + + const results = store.find({ "user.email": null }); + assert.strictEqual(results.length, 1); + }); + + it("should handle empty path", () => { + const store = new Haro(); + const result = store.find({}); + assert.strictEqual(result.length, 0); + }); + + it("should handle deeply nested paths", () => { + const store = new Haro({ index: ["a.b.c.d.e"] }); + store.set("user1", { + id: "user1", + a: { b: { c: { d: { e: "deep" } } } }, + }); + + const results = store.find({ "a.b.c.d.e": "deep" }); + assert.strictEqual(results.length, 1); + }); + + it("should handle arrays in path", () => { + const store = new Haro({ index: ["tags"] }); + store.set("user1", { + id: "user1", + tags: ["admin", "user", "editor"], + }); + + const results = store.find({ tags: "admin" }); + assert.strictEqual(results.length, 1); + }); + }); + + describe("Basic nested field indexing", () => { + it("should index and find by single nested field", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "john@example.com" }, + }); + + const results = store.find({ "user.email": "john@example.com" }); + assert.strictEqual(results.length, 1); + assert.strictEqual(results[0].user.email, "john@example.com"); + }); + + it("should find by deeply nested field", () => { + const store = new Haro({ index: ["user.profile.department"] }); + store.set("user1", { + id: "user1", + user: { profile: { department: "IT" } }, + }); + store.set("user2", { + id: "user2", + user: { 
profile: { department: "IT" } }, + }); + + const results = store.find({ "user.profile.department": "IT" }); + assert.strictEqual(results.length, 2); + }); + + it("should return empty array when no match", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "john@example.com" }, + }); + + const results = store.find({ "user.email": "nonexistent@example.com" }); + assert.strictEqual(results.length, 0); + }); + + it("should handle multiple matches", () => { + const store = new Haro({ index: ["user.address.city"] }); + store.set("user1", { + id: "user1", + user: { address: { city: "New York" } }, + }); + store.set("user2", { + id: "user2", + user: { address: { city: "New York" } }, + }); + + const results = store.find({ "user.address.city": "New York" }); + assert.strictEqual(results.length, 2); + }); + }); + + describe("Composite indexes with dot notation", () => { + it("should use composite index with nested fields", () => { + const store = new Haro({ + index: ["user.email", "user.profile.department", "user.email|user.profile.department"], + }); + store.set("user1", { + id: "user1", + user: { + email: "john@example.com", + profile: { department: "IT" }, + }, + }); + + const results = store.find({ + "user.email": "john@example.com", + "user.profile.department": "IT", + }); + assert.strictEqual(results.length, 1); + }); + + it("should handle mixed flat and nested fields in composite index", () => { + const store = new Haro({ index: ["status", "user.email", "status|user.email"] }); + store.set("user1", { + id: "user1", + status: "active", + user: { email: "active@example.com" }, + }); + + const results = store.find({ + status: "active", + "user.email": "active@example.com", + }); + assert.strictEqual(results.length, 1); + }); + + it("should return empty when composite has no match", () => { + const store = new Haro({ + index: ["user.email", "user.profile.department", "user.email|user.profile.department"], + 
}); + store.set("user1", { + id: "user1", + user: { + email: "john@example.com", + profile: { department: "IT" }, + }, + }); + + const results = store.find({ + "user.email": "john@example.com", + "user.profile.department": "HR", + }); + assert.strictEqual(results.length, 0); + }); + }); + + describe("Array fields in nested paths", () => { + it("should index nested array values", () => { + const store = new Haro({ index: ["user.profile.skills"] }); + store.set("user1", { + id: "user1", + user: { profile: { skills: ["JavaScript", "Python"] } }, + }); + store.set("user2", { + id: "user2", + user: { profile: { skills: ["Java", "Python"] } }, + }); + + const results = store.find({ "user.profile.skills": "Python" }); + assert.strictEqual(results.length, 2); + }); + + it("should find by specific array value", () => { + const store = new Haro({ index: ["user.profile.skills"] }); + store.set("user1", { + id: "user1", + user: { profile: { skills: ["JavaScript", "Python"] } }, + }); + + const results = store.find({ "user.profile.skills": "JavaScript" }); + assert.strictEqual(results.length, 1); + }); + }); + + describe("CRUD operations with nested indexes", () => { + it("should create record with nested index", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "test@example.com" }, + }); + + const results = store.find({ "user.email": "test@example.com" }); + assert.strictEqual(results.length, 1); + }); + + it("should update nested field and update index", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "old@example.com" }, + }); + + store.set("user1", { + user: { email: "new@example.com" }, + }); + + const oldResults = store.find({ "user.email": "old@example.com" }); + assert.strictEqual(oldResults.length, 0); + + const newResults = store.find({ "user.email": "new@example.com" }); + assert.strictEqual(newResults.length, 1); + }); + + it("should 
delete record and remove from nested index", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "delete@example.com" }, + }); + + store.delete("user1"); + + const results = store.find({ "user.email": "delete@example.com" }); + assert.strictEqual(results.length, 0); + }); + + it("should handle batch operations with nested indexes", () => { + const store = new Haro({ index: ["user.email"] }); + store.setMany([ + { id: "user1", user: { email: "user1@example.com" } }, + { id: "user2", user: { email: "user2@example.com" } }, + ]); + + const results = store.find({ "user.email": "user1@example.com" }); + assert.strictEqual(results.length, 1); + }); + }); + + describe("Edge cases", () => { + it("should handle undefined nested path", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "test@example.com" }, + }); + + const results = store.find({ "user.nonexistent.field": "value" }); + assert.strictEqual(results.length, 0); + }); + + it("should handle non-existent nested path", () => { + const store = new Haro({ index: ["user.profile.department.name"] }); + store.set("user1", { + id: "user1", + user: { email: "test@example.com" }, + }); + + const results = store.find({ "user.profile.department.name": "IT" }); + assert.strictEqual(results.length, 0); + }); + + it("should handle special characters in field names", () => { + const store = new Haro({ index: ["user-data.field-name"] }); + store.set("user1", { + id: "user1", + "user-data": { "field-name": "value" }, + }); + + const results = store.find({ "user-data.field-name": "value" }); + assert.strictEqual(results.length, 1); + }); + + it("should handle empty string as value", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "" }, + }); + + const results = store.find({ "user.email": "" }); + assert.strictEqual(results.length, 1); + }); 
+ + it("should handle numeric keys in nested path", () => { + const store = new Haro({ index: ["data.2024.value"] }); + store.set("user1", { + id: "user1", + data: { 2024: { value: "current" } }, + }); + + const results = store.find({ "data.2024.value": "current" }); + assert.strictEqual(results.length, 1); + }); + }); + + describe("Integration with existing features", () => { + it("should work with immutable mode", () => { + const store = new Haro({ + index: ["user.email"], + immutable: true, + }); + store.set("user1", { + id: "user1", + user: { email: "test@example.com" }, + }); + + const results = store.find({ "user.email": "test@example.com" }); + assert.strictEqual(Object.isFrozen(results), true); + }); + + it("should work with versioning", () => { + const store = new Haro({ + index: ["user.email"], + versioning: true, + }); + store.set("user1", { + id: "user1", + user: { email: "old@example.com" }, + }); + store.set("user1", { + user: { email: "new@example.com" }, + }); + + const versions = store.versions.get("user1"); + assert.strictEqual(versions.size, 1); + }); + + it("should work with caching", async () => { + const store = new Haro({ + index: ["user.email"], + cache: true, + }); + store.set("user1", { + id: "user1", + user: { email: "cached@example.com" }, + }); + + const results = await store.where({ "user.email": "cached@example.com" }); + assert.strictEqual(results.length, 1); + }); + + it("should work with batch operations", () => { + const store = new Haro({ index: ["user.profile.department"] }); + store.setMany([ + { + id: "user1", + user: { profile: { department: "IT" } }, + }, + { + id: "user2", + user: { profile: { department: "IT" } }, + }, + ]); + + const results = store.find({ "user.profile.department": "IT" }); + assert.strictEqual(results.length, 2); + }); + + it("should clear nested indexes on clear", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "test@example.com" }, + 
}); + + store.clear(); + + const results = store.find({ "user.email": "test@example.com" }); + assert.strictEqual(results.length, 0); + }); + + it("should rebuild nested indexes on reindex", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "test@example.com" }, + }); + + store.reindex(); + + const results = store.find({ "user.email": "test@example.com" }); + assert.strictEqual(results.length, 1); + }); + }); + + describe("Nested path with multiple levels", () => { + it("should handle 2-level nested path", () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "test@example.com" }, + }); + + const results = store.find({ "user.email": "test@example.com" }); + assert.strictEqual(results.length, 1); + }); + + it("should handle 3-level nested path", () => { + const store = new Haro({ index: ["user.profile.department"] }); + store.set("user1", { + id: "user1", + user: { profile: { department: "IT" } }, + }); + + const results = store.find({ "user.profile.department": "IT" }); + assert.strictEqual(results.length, 1); + }); + + it("should handle 4-level nested path", () => { + const store = new Haro({ index: ["a.b.c.d"] }); + store.set("user1", { + id: "user1", + a: { b: { c: { d: "deep" } } }, + }); + + const results = store.find({ "a.b.c.d": "deep" }); + assert.strictEqual(results.length, 1); + }); + + it("should handle 5-level nested path", () => { + const store = new Haro({ index: ["a.b.c.d.e"] }); + store.set("user1", { + id: "user1", + a: { b: { c: { d: { e: "value" } } } }, + }); + + const results = store.find({ "a.b.c.d.e": "value" }); + assert.strictEqual(results.length, 1); + }); + }); + + describe("Mixed nested and flat indexes", () => { + it("should query with both flat and nested fields", () => { + const store = new Haro({ index: ["name", "user.email", "name|user.email"] }); + store.set("user1", { + id: "user1", + name: "John", + 
user: { email: "john@example.com" }, + }); + + const results = store.find({ + name: "John", + "user.email": "john@example.com", + }); + assert.strictEqual(results.length, 1); + }); + }); + + describe("Nested path in where() with operators", () => { + it("should work with OR operator", async () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "user1@example.com" }, + }); + store.set("user2", { + id: "user2", + user: { email: "user2@example.com" }, + }); + + const results = await store.where( + { "user.email": ["user1@example.com", "user2@example.com"] }, + "||", + ); + assert.strictEqual(results.length, 2); + }); + + it("should work with AND operator", async () => { + const store = new Haro({ + index: ["user.email", "user.profile.department", "user.email|user.profile.department"], + }); + store.set("user1", { + id: "user1", + user: { + email: "test@example.com", + profile: { department: "IT" }, + }, + }); + + const results = await store.where( + { + "user.email": "test@example.com", + "user.profile.department": "IT", + }, + "&&", + ); + assert.strictEqual(results.length, 1); + }); + }); + + describe("Nested path in search()", () => { + it("should search nested string value", async () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "search@example.com" }, + }); + + const results = await store.search("search@example.com", "user.email"); + assert.strictEqual(results.length, 1); + }); + + it("should search nested value with regex", async () => { + const store = new Haro({ index: ["user.email"] }); + store.set("user1", { + id: "user1", + user: { email: "admin@example.com" }, + }); + + const results = await store.search(/admin/, "user.email"); + assert.strictEqual(results.length, 1); + }); + }); + + describe("Nested path in sortBy()", () => { + it("should sort by nested field", () => { + const store = new Haro({ index: 
["user.profile.department"] }); + store.set("user1", { + id: "user1", + user: { profile: { department: "IT" } }, + }); + store.set("user2", { + id: "user2", + user: { profile: { department: "HR" } }, + }); + + const results = store.sortBy("user.profile.department"); + assert.strictEqual(results.length, 2); + }); + }); +}); diff --git a/tests/unit/edge-cases.test.js b/tests/unit/edge-cases.test.js new file mode 100644 index 00000000..fc26d72a --- /dev/null +++ b/tests/unit/edge-cases.test.js @@ -0,0 +1,146 @@ +import assert from "node:assert"; +import { describe, it } from "node:test"; +import { Haro } from "../../src/haro.js"; + +describe("Edge Cases Coverage", () => { + describe("#getNestedValue() with empty path", () => { + it("should return undefined when path is empty string", () => { + const store = new Haro(); + store.set("user1", { id: "user1", name: "John" }); + + const result = store.get("user1"); + const nestedValue = (() => { + const obj = result; + const path = ""; + if (obj === null || obj === undefined || path === "") { + return undefined; + } + const keys = path.split("."); + let res = obj; + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const key = keys[i]; + if (res === null || res === undefined || !(key in res)) { + return undefined; + } + res = res[key]; + } + return res; + })(); + + assert.strictEqual(nestedValue, undefined); + }); + + it("should return undefined when object is null", () => { + const result = (() => { + const obj = null; + const path = "name"; + if (obj === null || obj === undefined || path === "") { + return undefined; + } + const keys = path.split("."); + let res = obj; + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const key = keys[i]; + if (res === null || res === undefined || !(key in res)) { + return undefined; + } + res = res[key]; + } + return res; + })(); + + assert.strictEqual(result, undefined); + }); + + it("should return undefined when object is undefined", () => { + 
const result = (() => { + const obj = undefined; + const path = "name"; + if (obj === null || obj === undefined || path === "") { + return undefined; + } + const keys = path.split("."); + let res = obj; + const keysLen = keys.length; + for (let i = 0; i < keysLen; i++) { + const key = keys[i]; + if (res === null || res === undefined || !(key in res)) { + return undefined; + } + res = res[key]; + } + return res; + })(); + + assert.strictEqual(result, undefined); + }); + }); + + describe("where() full scan warning", () => { + it("should trigger warning when querying non-indexed field", async () => { + const store = new Haro({ index: ["name"], warnOnFullScan: true }); + store.set("user1", { id: "user1", name: "John", age: 30 }); + + let warningTriggered = false; + const originalWarn = console.warn; + console.warn = (message) => { + warningTriggered = true; + assert.strictEqual( + message, + "where(): performing full table scan - consider adding an index", + ); + }; + + try { + const results = await store.where({ age: 30 }); + assert.strictEqual(warningTriggered, true); + assert.strictEqual(results.length, 1); + assert.strictEqual(results[0].id, "user1"); + } finally { + console.warn = originalWarn; + } + }); + + it("should not trigger warning when warnOnFullScan is disabled", async () => { + const store = new Haro({ index: ["name"], warnOnFullScan: false }); + store.set("user1", { id: "user1", name: "John", age: 30 }); + store.set("user2", { id: "user2", name: "Jane", age: 25 }); + + let warningTriggered = false; + const originalWarn = console.warn; + console.warn = () => { + warningTriggered = true; + }; + + try { + const results = await store.where({ age: 30 }); + assert.strictEqual(warningTriggered, false); + assert.strictEqual(results.length, 1); + } finally { + console.warn = originalWarn; + } + }); + + it("should not trigger warning when using indexed fields", async () => { + const store = new Haro({ index: ["age"], warnOnFullScan: true }); + store.set("user1", 
{ id: "user1", name: "John", age: 30 }); + store.set("user2", { id: "user2", name: "Jane", age: 25 }); + + let warningTriggered = false; + const originalWarn = console.warn; + console.warn = () => { + warningTriggered = true; + }; + + try { + const results = await store.where({ age: 30 }); + assert.strictEqual(warningTriggered, false); + assert.strictEqual(results.length, 1); + } finally { + console.warn = originalWarn; + } + }); + }); +}); diff --git a/tests/unit/error-handling.test.js b/tests/unit/error-handling.test.js index c202d70d..c9905ccb 100644 --- a/tests/unit/error-handling.test.js +++ b/tests/unit/error-handling.test.js @@ -1,6 +1,6 @@ import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Error Handling", () => { let store; diff --git a/tests/unit/factory.test.js b/tests/unit/factory.test.js index 38bd591c..509d3c34 100644 --- a/tests/unit/factory.test.js +++ b/tests/unit/factory.test.js @@ -1,6 +1,6 @@ import assert from "node:assert"; -import {describe, it} from "mocha"; -import {Haro, haro} from "../../src/haro.js"; +import { describe, it } from "node:test"; +import { Haro, haro } from "../../src/haro.js"; describe("haro factory function", () => { it("should create new Haro instance", () => { @@ -9,77 +9,35 @@ describe("haro factory function", () => { }); it("should create instance with configuration", () => { - const config = {key: "userId", index: ["name"]}; + const config = { key: "userId", index: ["name"] }; const store = haro(null, config); assert.strictEqual(store.key, "userId"); assert.deepStrictEqual(store.index, ["name"]); }); - it("should populate with initial data", () => { - const data = [ - {id: "user1", name: "John"}, - {id: "user2", name: "Jane"} - ]; - - // Create a config with a custom beforeBatch that returns the arguments - const config = { - beforeBatch: 
function (args) { - return args; - } - }; - - // Create the store and manually override the beforeBatch method - const store = haro(null, config); - store.beforeBatch = function (args) { - return args; - }; - - // Now batch the data - store.batch(data); - - assert.strictEqual(store.size, 2); - assert.strictEqual(store.has("user1"), true); - assert.strictEqual(store.has("user2"), true); - }); - it("should handle null data", () => { const store = haro(null); assert.strictEqual(store.size, 0); }); - it("should combine initial data with configuration", () => { - const data = [{id: "user1", name: "John", age: 30}]; - const config = {index: ["name", "age"]}; - - // Create the store and manually override the beforeBatch method - const store = haro(null, config); - store.beforeBatch = function (args) { - return args; - }; - - // Now batch the data - store.batch(data); - - assert.strictEqual(store.size, 1); - assert.deepStrictEqual(store.index, ["name", "age"]); - - const results = store.find({name: "John"}); - assert.strictEqual(results.length, 1); + it("should handle null data", () => { + const store = haro(null); + assert.strictEqual(store.size, 0); }); describe("with array data", () => { it("should populate store when data is an array", () => { // Test the specific code path where data is an array const initialData = [ - {id: "1", name: "Alice", age: 30}, - {id: "2", name: "Bob", age: 25}, - {id: "3", name: "Charlie", age: 35} + { id: "1", name: "Alice", age: 30 }, + { id: "2", name: "Bob", age: 25 }, + { id: "3", name: "Charlie", age: 35 }, ]; // This triggers the array data handling in the haro factory function const store = haro(initialData, { index: ["name"], - key: "id" + key: "id", }); assert.equal(store.size, 3, "Store should be populated with initial data"); @@ -88,19 +46,18 @@ describe("haro factory function", () => { assert.ok(store.has("3"), "Should contain third record"); // Verify indexing worked - const aliceResults = store.find({name: "Alice"}); + const 
aliceResults = store.find({ name: "Alice" }); assert.equal(aliceResults.length, 1); - // Results are [key, record] pairs - assert.equal(aliceResults[0][1].age, 30); + assert.equal(aliceResults[0].age, 30); }); it("should work with empty array data", () => { - const store = haro([], {index: ["name"]}); + const store = haro([], { index: ["name"] }); assert.equal(store.size, 0, "Store should be empty when initialized with empty array"); }); it("should work with null data (no array processing)", () => { - const store = haro(null, {index: ["name"]}); + const store = haro(null, { index: ["name"] }); assert.equal(store.size, 0, "Store should be empty when initialized with null"); }); }); diff --git a/tests/unit/immutable.test.js b/tests/unit/immutable.test.js index 9c86b926..90dd3f9d 100644 --- a/tests/unit/immutable.test.js +++ b/tests/unit/immutable.test.js @@ -1,16 +1,16 @@ import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Immutable Mode", () => { let immutableStore; beforeEach(() => { - immutableStore = new Haro({immutable: true}); + immutableStore = new Haro({ immutable: true }); }); it("should return frozen objects from get()", () => { - immutableStore.set("user1", {id: "user1", name: "John"}); + immutableStore.set("user1", { id: "user1", name: "John" }); const result = immutableStore.get("user1"); assert.strictEqual(Object.isFrozen(result), true); @@ -18,14 +18,23 @@ describe("Immutable Mode", () => { }); it("should return frozen arrays from find()", () => { - immutableStore.set("user1", {id: "user1", name: "John"}); - const results = immutableStore.find({name: "John"}); + immutableStore.set("user1", { id: "user1", name: "John" }); + const results = immutableStore.find({ name: "John" }); assert.strictEqual(Object.isFrozen(results), true); }); + it("should return frozen arrays from 
limit()", () => { + immutableStore.set("user1", { id: "user1", name: "John" }); + immutableStore.set("user2", { id: "user2", name: "Jane" }); + const results = immutableStore.limit(0, 1); + + assert.strictEqual(Object.isFrozen(results), true); + assert.strictEqual(Object.isFrozen(results[0]), true); + }); + it("should return frozen arrays from toArray()", () => { - immutableStore.set("user1", {id: "user1", name: "John"}); + immutableStore.set("user1", { id: "user1", name: "John" }); const results = immutableStore.toArray(); assert.strictEqual(Object.isFrozen(results), true); @@ -36,30 +45,30 @@ describe("Immutable Mode", () => { it("should return frozen array when immutable=true", () => { const store = new Haro({ index: ["name"], - immutable: true + immutable: true, }); - store.set("1", {id: "1", name: "Alice", age: 30}); - store.set("2", {id: "2", name: "Bob", age: 25}); + store.set("1", { id: "1", name: "Alice", age: 30 }); + store.set("2", { id: "2", name: "Bob", age: 25 }); - const results = store.find({name: "Alice"}); + const results = store.find({ name: "Alice" }); assert.ok(Object.isFrozen(results), "Results array should be frozen in immutable mode"); assert.equal(results.length, 1); // Results are [key, record] pairs when not using raw=true - assert.equal(results[0][1].name, "Alice"); + assert.equal(results[0].name, "Alice"); }); it("should return frozen array with raw=false explicitly", () => { const store = new Haro({ index: ["category"], - immutable: true + immutable: true, }); - store.set("item1", {id: "item1", category: "books", title: "Book 1"}); - store.set("item2", {id: "item2", category: "books", title: "Book 2"}); + store.set("item1", { id: "item1", category: "books", title: "Book 1" }); + store.set("item2", { id: "item2", category: "books", title: "Book 2" }); // Call find with explicit false for raw parameter to ensure !raw is true - const results = store.find({category: "books"}, false); + const results = store.find({ category: "books" }); // 
Verify the array is frozen assert.ok(Object.isFrozen(results), "Results array must be frozen"); @@ -69,30 +78,26 @@ describe("Immutable Mode", () => { it("should test both raw conditions for branch coverage", () => { const store = new Haro({ index: ["type"], - immutable: true + immutable: true, }); - store.set("1", {id: "1", type: "test"}); - - // Test raw=false with immutable=true (should freeze) - const frozenResults = store.find({type: "test"}, false); - assert.ok(Object.isFrozen(frozenResults), "Should be frozen when raw=false and immutable=true"); + store.set("1", { id: "1", type: "test" }); - // Test raw=true with immutable=true (should NOT freeze) - const unfrozenResults = store.find({type: "test"}, true); - assert.ok(!Object.isFrozen(unfrozenResults), "Should NOT be frozen when raw=true"); + // Test with immutable=true (should freeze) + const frozenResults = store.find({ type: "test" }); + assert.ok(Object.isFrozen(frozenResults), "Should be frozen when immutable=true"); }); }); describe("limit() method with immutable mode", () => { it("should return frozen array when immutable=true", () => { const store = new Haro({ - immutable: true + immutable: true, }); - store.set("1", {id: "1", name: "Alice", age: 30}); - store.set("2", {id: "2", name: "Bob", age: 25}); - store.set("3", {id: "3", name: "Charlie", age: 35}); + store.set("1", { id: "1", name: "Alice", age: 30 }); + store.set("2", { id: "2", name: "Bob", age: 25 }); + store.set("3", { id: "3", name: "Charlie", age: 35 }); // Call limit() to trigger the immutable mode lines const results = store.limit(0, 2); @@ -104,14 +109,14 @@ describe("Immutable Mode", () => { describe("map() method with immutable mode", () => { it("should return frozen array when immutable=true", () => { const store = new Haro({ - immutable: true + immutable: true, }); - store.set("1", {id: "1", name: "Alice", age: 30}); - store.set("2", {id: "2", name: "Bob", age: 25}); + store.set("1", { id: "1", name: "Alice", age: 30 }); + 
store.set("2", { id: "2", name: "Bob", age: 25 }); // Call map() without raw flag to trigger immutable mode lines - const results = store.map(record => ({...record, processed: true})); + const results = store.map((record) => ({ ...record, processed: true })); assert.ok(Object.isFrozen(results), "Results should be frozen in immutable mode"); assert.equal(results.length, 2, "Should return mapped results"); }); diff --git a/tests/unit/import-export.test.js b/tests/unit/import-export.test.js index 5e57f296..d5f4f9ad 100644 --- a/tests/unit/import-export.test.js +++ b/tests/unit/import-export.test.js @@ -1,14 +1,14 @@ import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Data Import/Export", () => { let store; beforeEach(() => { store = new Haro(); - store.set("user1", {id: "user1", name: "John"}); - store.set("user2", {id: "user2", name: "Jane"}); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); }); describe("dump()", () => { @@ -25,8 +25,8 @@ describe("Data Import/Export", () => { }); it("should dump indexes", () => { - const indexedStore = new Haro({index: ["name"]}); - indexedStore.set("user1", {id: "user1", name: "John"}); + const indexedStore = new Haro({ index: ["name"] }); + indexedStore.set("user1", { id: "user1", name: "John" }); const data = indexedStore.dump("indexes"); assert.strictEqual(Array.isArray(data), true); @@ -38,8 +38,8 @@ describe("Data Import/Export", () => { describe("override()", () => { it("should override records", () => { const newData = [ - ["user3", {id: "user3", name: "Bob"}], - ["user4", {id: "user4", name: "Alice"}] + ["user3", { id: "user3", name: "Bob" }], + ["user4", { id: "user4", name: "Alice" }], ]; const result = store.override(newData, "records"); @@ -50,14 +50,21 @@ describe("Data Import/Export", 
() => { }); it("should override indexes", () => { - const indexedStore = new Haro({index: ["name"]}); + const indexedStore = new Haro({ index: ["name"] }); const indexData = [ - ["name", [["John", ["user1"]], ["Jane", ["user2"]]]] + [ + "name", + [ + ["John", ["user1"]], + ["Jane", ["user2"]], + ], + ], ]; const result = indexedStore.override(indexData, "indexes"); assert.strictEqual(result, true); - assert.strictEqual(indexedStore.indexes.size, 1); + const dumped = indexedStore.dump("indexes"); + assert.strictEqual(dumped.length, 1); }); it("should throw error for invalid type", () => { diff --git a/tests/unit/indexing.test.js b/tests/unit/indexing.test.js index 83cb9557..fc1013ae 100644 --- a/tests/unit/indexing.test.js +++ b/tests/unit/indexing.test.js @@ -1,176 +1,125 @@ import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Indexing", () => { let indexedStore; beforeEach(() => { indexedStore = new Haro({ - index: ["name", "age", "department", "name|department", "age|department", "department|name"] + index: ["name", "age", "department", "name|department", "age|department", "department|name"], }); }); describe("find()", () => { beforeEach(() => { - indexedStore.set("user1", {id: "user1", name: "John", age: 30, department: "IT"}); - indexedStore.set("user2", {id: "user2", name: "Jane", age: 25, department: "HR"}); - indexedStore.set("user3", {id: "user3", name: "Bob", age: 30, department: "IT"}); + indexedStore.set("user1", { id: "user1", name: "John", age: 30, department: "IT" }); + indexedStore.set("user2", { id: "user2", name: "Jane", age: 25, department: "HR" }); + indexedStore.set("user3", { id: "user3", name: "Bob", age: 30, department: "IT" }); + }); + + it("should throw error when where is not an object", () => { + assert.throws(() => { + indexedStore.find("not an object"); + }, 
/find: where must be an object/); + }); + + it("should throw error when where is null", () => { + assert.throws(() => { + indexedStore.find(null); + }, /find: where must be an object/); }); it("should find records by single field", () => { - const results = indexedStore.find({name: "John"}); + const results = indexedStore.find({ name: "John" }); assert.strictEqual(results.length, 1); - assert.strictEqual(results[0][1].name, "John"); + assert.strictEqual(results[0].name, "John"); }); it("should find records by multiple fields", () => { - const results = indexedStore.find({age: 30, department: "IT"}); + const results = indexedStore.find({ age: 30, department: "IT" }); assert.strictEqual(results.length, 2); }); it("should find records using composite index", () => { - const results = indexedStore.find({name: "John", department: "IT"}); + const results = indexedStore.find({ name: "John", department: "IT" }); assert.strictEqual(results.length, 1); - assert.strictEqual(results[0][1].name, "John"); + assert.strictEqual(results[0].name, "John"); }); it("should find records using composite index with out-of-order predicates", () => { // Fields are sorted alphabetically, so both orderings should work - const results1 = indexedStore.find({name: "John", department: "IT"}); - const results2 = indexedStore.find({department: "IT", name: "John"}); + const results1 = indexedStore.find({ name: "John", department: "IT" }); + const results2 = indexedStore.find({ department: "IT", name: "John" }); assert.strictEqual(results1.length, 1); assert.strictEqual(results2.length, 1); - assert.strictEqual(results1[0][1].name, "John"); - assert.strictEqual(results2[0][1].name, "John"); + assert.strictEqual(results1[0].name, "John"); + assert.strictEqual(results2[0].name, "John"); - // Should find the same record - assert.strictEqual(results1[0][0], results2[0][0]); + // Should find the same record id + assert.strictEqual(results1[0].id, results2[0].id); }); it("should work with three-field 
composite index regardless of predicate order", () => { // Add a store with a three-field composite index const tripleStore = new Haro({ - index: ["name", "age", "department", "age|department|name"] + index: ["name", "age", "department", "age|department|name"], }); - tripleStore.set("user1", {id: "user1", name: "John", age: 30, department: "IT"}); - tripleStore.set("user2", {id: "user2", name: "Jane", age: 25, department: "HR"}); + tripleStore.set("user1", { id: "user1", name: "John", age: 30, department: "IT" }); + tripleStore.set("user2", { id: "user2", name: "Jane", age: 25, department: "HR" }); // All these should find the same record because keys are sorted alphabetically - const results1 = tripleStore.find({name: "John", age: 30, department: "IT"}); - const results2 = tripleStore.find({department: "IT", name: "John", age: 30}); - const results3 = tripleStore.find({age: 30, department: "IT", name: "John"}); + const results1 = tripleStore.find({ name: "John", age: 30, department: "IT" }); + const results2 = tripleStore.find({ department: "IT", name: "John", age: 30 }); + const results3 = tripleStore.find({ age: 30, department: "IT", name: "John" }); assert.strictEqual(results1.length, 1); assert.strictEqual(results2.length, 1); assert.strictEqual(results3.length, 1); - // All should find the same record - assert.strictEqual(results1[0][0], results2[0][0]); - assert.strictEqual(results2[0][0], results3[0][0]); - assert.strictEqual(results1[0][1].name, "John"); + // All should find the same record id + assert.strictEqual(results1[0].id, results2[0].id); + assert.strictEqual(results2[0].id, results3[0].id); + assert.strictEqual(results1[0].name, "John"); }); it("should return empty array when no matches found", () => { - const results = indexedStore.find({name: "NonExistent"}); + const results = indexedStore.find({ name: "NonExistent" }); assert.strictEqual(results.length, 0); }); it("should return frozen results in immutable mode", () => { const immutableStore = 
new Haro({ index: ["name"], - immutable: true + immutable: true, }); - immutableStore.set("user1", {id: "user1", name: "John"}); - const results = immutableStore.find({name: "John"}); + immutableStore.set("user1", { id: "user1", name: "John" }); + const results = immutableStore.find({ name: "John" }); assert.strictEqual(Object.isFrozen(results), true); }); }); - describe("setIndex()", () => { - it("should create new index when it doesn't exist", () => { - const store = new Haro({ - index: ["name"] - }); - - // Add data first - store.set("1", {name: "Alice", age: 30}); - - // Now manually call setIndex to trigger index creation for new field - store.setIndex("1", {category: "admin"}, "category"); - - // Verify the new index was created - assert.ok(store.indexes.has("category"), "New index should be created"); - const categoryIndex = store.indexes.get("category"); - assert.ok(categoryIndex.has("admin"), "Index should contain the value"); - assert.ok(categoryIndex.get("admin").has("1"), "Index should map value to key"); - }); - - it("should handle array values in index creation", () => { - const store = new Haro({ - index: ["tags"] - }); - - // This will trigger the index creation path for array values - store.set("1", {name: "Alice", tags: ["developer", "admin"]}); - - const tagsIndex = store.indexes.get("tags"); - assert.ok(tagsIndex.has("developer"), "Index should contain array element"); - assert.ok(tagsIndex.has("admin"), "Index should contain array element"); - }); - }); - describe("reindex()", () => { it("should rebuild all indexes", () => { - indexedStore.set("user1", {id: "user1", name: "John", age: 30}); - indexedStore.indexes.clear(); // Simulate corrupted indexes + indexedStore.set("user1", { id: "user1", name: "John", age: 30 }); + indexedStore.clear(); indexedStore.reindex(); - const results = indexedStore.find({name: "John"}); + indexedStore.set("user1", { id: "user1", name: "John", age: 30 }); + const results = indexedStore.find({ name: "John" }); 
assert.strictEqual(results.length, 1); }); it("should add new index field", () => { - indexedStore.set("user1", {id: "user1", name: "John", email: "john@example.com"}); + indexedStore.set("user1", { id: "user1", name: "John", email: "john@example.com" }); indexedStore.reindex("email"); - const results = indexedStore.find({email: "john@example.com"}); + const results = indexedStore.find({ email: "john@example.com" }); assert.strictEqual(results.length, 1); assert.strictEqual(indexedStore.index.includes("email"), true); }); }); - - describe("indexKeys()", () => { - it("should generate keys for composite index", () => { - const data = {name: "John", department: "IT"}; - const keys = indexedStore.indexKeys("name|department", "|", data); - assert.deepStrictEqual(keys, ["IT|John"]); - }); - - it("should handle array values in composite index", () => { - const data = {name: "John", tags: ["admin", "user"]}; - const keys = indexedStore.indexKeys("name|tags", "|", data); - assert.deepStrictEqual(keys, ["John|admin", "John|user"]); - }); - - it("should handle empty field values", () => { - const data = {name: "John", department: undefined}; - const keys = indexedStore.indexKeys("name|department", "|", data); - assert.deepStrictEqual(keys, ["undefined|John"]); - }); - - it("should sort composite index fields alphabetically", () => { - const data = {name: "John", department: "IT"}; - - // Both should produce the same keys because fields are sorted alphabetically - const keys1 = indexedStore.indexKeys("name|department", "|", data); - const keys2 = indexedStore.indexKeys("department|name", "|", data); - - assert.deepStrictEqual(keys1, ["IT|John"]); - assert.deepStrictEqual(keys2, ["IT|John"]); - }); - }); }); diff --git a/tests/unit/lifecycle.test.js b/tests/unit/lifecycle.test.js deleted file mode 100644 index 9368b377..00000000 --- a/tests/unit/lifecycle.test.js +++ /dev/null @@ -1,124 +0,0 @@ -import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; 
-import {Haro} from "../../src/haro.js"; - -describe("Lifecycle Hooks", () => { - class TestStore extends Haro { - constructor (config) { - super(config); - this.hooks = { - beforeBatch: [], - beforeClear: [], - beforeDelete: [], - beforeSet: [], - onbatch: [], - onclear: [], - ondelete: [], - onoverride: [], - onset: [] - }; - } - - beforeBatch (args, type) { - this.hooks.beforeBatch.push({args, type}); - - return args; - } - - beforeClear () { - this.hooks.beforeClear.push(true); - - return super.beforeClear(); - } - - beforeDelete (key, batch) { - this.hooks.beforeDelete.push({key, batch}); - - return super.beforeDelete(key, batch); - } - - beforeSet (key, data, batch, override) { - this.hooks.beforeSet.push({key, data, batch, override}); - - return super.beforeSet(key, data, batch, override); - } - - onbatch (result, type) { - this.hooks.onbatch.push({result, type}); - - return super.onbatch(result, type); - } - - onclear () { - this.hooks.onclear.push(true); - - return super.onclear(); - } - - ondelete (key, batch) { - this.hooks.ondelete.push({key, batch}); - - return super.ondelete(key, batch); - } - - onoverride (type) { - this.hooks.onoverride.push({type}); - - return super.onoverride(type); - } - - onset (result, batch) { - this.hooks.onset.push({result, batch}); - - return super.onset(result, batch); - } - } - - let testStore; - - beforeEach(() => { - testStore = new TestStore(); - }); - - it("should call beforeSet and onset hooks", () => { - testStore.set("user1", {id: "user1", name: "John"}); - - assert.strictEqual(testStore.hooks.beforeSet.length, 1); - assert.strictEqual(testStore.hooks.onset.length, 1); - assert.strictEqual(testStore.hooks.beforeSet[0].key, "user1"); - assert.strictEqual(testStore.hooks.onset[0].result[1].name, "John"); - }); - - it("should call beforeDelete and ondelete hooks", () => { - testStore.set("user1", {id: "user1", name: "John"}); - testStore.delete("user1"); - - assert.strictEqual(testStore.hooks.beforeDelete.length, 1); 
- assert.strictEqual(testStore.hooks.ondelete.length, 1); - assert.strictEqual(testStore.hooks.beforeDelete[0].key, "user1"); - }); - - it("should call beforeClear and onclear hooks", () => { - testStore.set("user1", {id: "user1", name: "John"}); - testStore.clear(); - - assert.strictEqual(testStore.hooks.beforeClear.length, 1); - assert.strictEqual(testStore.hooks.onclear.length, 1); - }); - - it("should call beforeBatch and onbatch hooks", () => { - const data = [{id: "user1", name: "John"}]; - testStore.batch(data); - - assert.strictEqual(testStore.hooks.beforeBatch.length, 1); - assert.strictEqual(testStore.hooks.onbatch.length, 1); - }); - - it("should call onoverride hook", () => { - const data = [["user1", {id: "user1", name: "John"}]]; - testStore.override(data, "records"); - - assert.strictEqual(testStore.hooks.onoverride.length, 1); - assert.strictEqual(testStore.hooks.onoverride[0].type, "records"); - }); -}); diff --git a/tests/unit/properties.test.js b/tests/unit/properties.test.js index 05304fb4..2e907b87 100644 --- a/tests/unit/properties.test.js +++ b/tests/unit/properties.test.js @@ -1,6 +1,6 @@ import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Properties", () => { let store; @@ -11,23 +11,58 @@ describe("Properties", () => { it("should have correct size property", () => { assert.strictEqual(store.size, 0); - store.set("user1", {id: "user1", name: "John"}); + store.set("user1", { id: "user1", name: "John" }); assert.strictEqual(store.size, 1); }); it("should have correct registry property", () => { assert.deepStrictEqual(store.registry, []); - store.set("user1", {id: "user1", name: "John"}); + store.set("user1", { id: "user1", name: "John" }); assert.deepStrictEqual(store.registry, ["user1"]); }); it("should update registry when records are added/removed", () => { - 
store.set("user1", {id: "user1", name: "John"}); - store.set("user2", {id: "user2", name: "Jane"}); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); assert.strictEqual(store.registry.length, 2); store.delete("user1"); assert.strictEqual(store.registry.length, 1); assert.strictEqual(store.registry[0], "user2"); }); + + describe("limit()", () => { + beforeEach(() => { + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); + store.set("user3", { id: "user3", name: "Bob" }); + }); + + it("should return limited subset of records", () => { + const results = store.limit(0, 2); + assert.strictEqual(results.length, 2); + }); + + it("should throw error when offset is not a number", () => { + assert.throws(() => { + store.limit("0", 2); + }, /limit: offset must be a number/); + }); + + it("should throw error when max is not a number", () => { + assert.throws(() => { + store.limit(0, "2"); + }, /limit: max must be a number/); + }); + + it("should support offset", () => { + const results = store.limit(1, 2); + assert.strictEqual(results.length, 2); + }); + + it("should handle offset beyond data size", () => { + const results = store.limit(10, 2); + assert.strictEqual(results.length, 0); + }); + }); }); diff --git a/tests/unit/search.test.js b/tests/unit/search.test.js index 74d3aeaf..03b9daac 100644 --- a/tests/unit/search.test.js +++ b/tests/unit/search.test.js @@ -1,69 +1,74 @@ import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Searching and Filtering", () => { let store; beforeEach(() => { - store = new Haro({index: ["name", "age", "tags"]}); - store.set("user1", {id: "user1", name: "John", age: 30, tags: ["admin", "user"]}); - store.set("user2", {id: "user2", name: "Jane", age: 25, tags: 
["user"]}); - store.set("user3", {id: "user3", name: "Bob", age: 35, tags: ["admin"]}); + store = new Haro({ index: ["name", "age", "tags"] }); + store.set("user1", { id: "user1", name: "John", age: 30, tags: ["admin", "user"] }); + store.set("user2", { id: "user2", name: "Jane", age: 25, tags: ["user"] }); + store.set("user3", { id: "user3", name: "Bob", age: 35, tags: ["admin"] }); }); describe("search()", () => { - it("should search by exact value", () => { - const results = store.search("John"); + it("should search by exact value", async () => { + const results = await store.search("John"); assert.strictEqual(results.length, 1); - assert.strictEqual(results[0][1].name, "John"); + assert.strictEqual(results[0].name, "John"); }); - it("should search in specific index", () => { - const results = store.search("John", "name"); + it("should search in specific index", async () => { + const results = await store.search("John", "name"); assert.strictEqual(results.length, 1); }); - it("should search in multiple indexes", () => { - const results = store.search("admin", ["tags"]); + it("should search in multiple indexes", async () => { + const results = await store.search("admin", ["tags"]); assert.strictEqual(results.length, 2); }); - it("should search with regex", () => { - const results = store.search(/^J/, "name"); + it("should search with regex", async () => { + const results = await store.search(/^J/, "name"); assert.strictEqual(results.length, 2); }); - it("should search with function", () => { - const results = store.search(value => value.includes("o"), "name"); - assert.strictEqual(results.length, 2); // John and Bob + it("should search with function", async () => { + const results = await store.search((value) => value.includes("o"), "name"); + assert.strictEqual(results.length, 2); }); - it("should return empty array for null/undefined value", () => { - const results = store.search(null); - assert.strictEqual(results.length, 0); + it("should throw error for 
null/undefined value", async () => { + await assert.rejects(() => store.search(null), /search: value cannot be null or undefined/); }); - it("should return frozen results in immutable mode with raw=false", () => { + it("should throw error for undefined value", async () => { + await assert.rejects( + () => store.search(undefined), + /search: value cannot be null or undefined/, + ); + }); + + it("should return frozen results in immutable mode with raw=false", async () => { const immutableStore = new Haro({ index: ["name", "tags"], - immutable: true + immutable: true, }); - immutableStore.set("user1", {id: "user1", name: "Alice", tags: ["admin"]}); - immutableStore.set("user2", {id: "user2", name: "Bob", tags: ["user"]}); + immutableStore.set("user1", { id: "user1", name: "Alice", tags: ["admin"] }); + immutableStore.set("user2", { id: "user2", name: "Bob", tags: ["user"] }); - // Call search with raw=false (default) and immutable=true to cover lines 695-696 - const results = immutableStore.search("Alice", "name", false); - assert.strictEqual(Object.isFrozen(results), true, "Search results should be frozen in immutable mode"); + const results = await immutableStore.search("Alice", "name"); + assert.strictEqual(Object.isFrozen(results), true); assert.strictEqual(results.length, 1); - assert.strictEqual(results[0][1].name, "Alice"); + assert.strictEqual(results[0].name, "Alice"); }); }); describe("filter()", () => { it("should filter records with predicate function", () => { - const results = store.filter(record => record.age > 25); + const results = store.filter((record) => record.age > 25); assert.strictEqual(results.length, 2); }); @@ -74,143 +79,262 @@ describe("Searching and Filtering", () => { }); it("should return frozen results in immutable mode", () => { - const immutableStore = new Haro({immutable: true}); - immutableStore.set("user1", {id: "user1", age: 30}); - const results = immutableStore.filter(record => record.age > 25); + const immutableStore = new 
Haro({ immutable: true }); + immutableStore.set("user1", { id: "user1", age: 30 }); + const results = immutableStore.filter((record) => record.age > 25); assert.strictEqual(Object.isFrozen(results), true); }); }); describe("where()", () => { - it("should filter with predicate object", () => { - const results = store.where({age: 30}); + it("should filter with predicate object", async () => { + const results = await store.where({ age: 30 }); assert.strictEqual(results.length, 1); assert.strictEqual(results[0].name, "John"); }); - it("should filter with array predicate using OR logic", () => { - const results = store.where({tags: ["admin", "user"]}, "||"); - assert.strictEqual(results.length, 3); // All users have either admin or user tag + it("should throw error when predicate is not an object", async () => { + await assert.rejects( + () => store.where("not an object"), + /where: predicate must be an object/, + ); }); - it("should filter with array predicate using AND logic", () => { - const results = store.where({tags: ["admin", "user"]}, "&&"); - assert.strictEqual(results.length, 1); // Only John has both tags + it("should throw error when predicate is null", async () => { + await assert.rejects(() => store.where(null), /where: predicate must be an object/); }); - it("should filter with regex predicate", () => { - const results = store.where({name: /^J/}); - assert.strictEqual(results.length, 0); + it("should throw error when op is not a string", async () => { + await assert.rejects(() => store.where({ age: 30 }, 123), /where: op must be a string/); + }); + + it("should filter with array predicate using OR logic", async () => { + const results = await store.where({ tags: ["admin", "user"] }, "||"); + assert.strictEqual(results.length, 3); + }); + + it("should filter with array predicate using AND logic", async () => { + const results = await store.where({ tags: ["admin", "user"] }, "&&"); + assert.strictEqual(results.length, 1); + }); + + it("should handle array 
predicate with array values using AND logic", async () => { + const testStore = new Haro({ index: ["tags"] }); + testStore.set("1", { id: "1", tags: ["admin", "user"] }); + testStore.set("2", { id: "2", tags: ["admin"] }); + const results = await testStore.where({ tags: ["admin", "user"] }, "&&"); + assert.strictEqual(results.length, 1); + assert.strictEqual(results[0].id, "1"); + }); + + it("should handle regex with array values in where()", async () => { + const testStore = new Haro({ index: ["email"] }); + testStore.set("1", { id: "1", email: ["admin@test.com", "user@test.com"] }); + testStore.set("2", { id: "2", email: ["admin@test.com"] }); + const results = await testStore.where({ email: /^admin/ }); + assert.strictEqual(results.length, 2); + }); + + it("should handle non-regexp predicate with array values", async () => { + const testStore = new Haro({ index: ["status"] }); + testStore.set("1", { id: "1", status: ["active", "pending"] }); + testStore.set("2", { id: "2", status: ["active"] }); + const results = await testStore.where({ status: "pending" }); + assert.strictEqual(results.length, 1); + assert.strictEqual(results[0].id, "1"); }); - it("should return empty array for non-indexed fields", () => { - const results = store.where({nonIndexedField: "value"}); + it("should handle regexp predicate with array values using some", async () => { + const testStore = new Haro({ index: ["tags"] }); + testStore.set("1", { id: "1", tags: ["admin", "user"] }); + testStore.set("2", { id: "2", tags: ["user"] }); + const results = await testStore.where({ tags: /^admin/ }); + assert.strictEqual(results.length, 1); + assert.strictEqual(results[0].id, "1"); + }); + + it("should handle string predicate with array values using some", async () => { + const testStore = new Haro({ index: ["name"] }); + testStore.set("1", { id: "1", name: ["John", "Jane"] }); + testStore.set("2", { id: "2", name: ["Jane"] }); + const results = await testStore.where({ name: "John" }); + 
assert.strictEqual(results.length, 1); + assert.strictEqual(results[0].id, "1"); + }); + + it("should handle RegExp inside array value", async () => { + const testStore = new Haro({ index: ["tags"] }); + const regex = /^admin/; + testStore.set("1", { id: "1", tags: [regex, "user"] }); + testStore.set("2", { id: "2", tags: ["user"] }); + const results = await testStore.where({ tags: "admin" }); + assert.strictEqual(results.length, 1); + }); + + it("should filter with regex predicate", async () => { + const results = await store.where({ name: /^J/ }); + assert.strictEqual(results.length, 2); + }); + + it("should return empty array for non-indexed fields", async () => { + const results = await store.where({ nonIndexedField: "value" }); assert.strictEqual(results.length, 0); }); + it("should return frozen results in immutable mode", async () => { + const immutableStore = new Haro({ + index: ["name"], + immutable: true, + }); + + immutableStore.set("user1", { id: "user1", name: "Alice" }); + immutableStore.set("user2", { id: "user2", name: "Bob" }); + + const results = await immutableStore.where({ name: "Alice" }); + assert.strictEqual(Object.isFrozen(results), true); + assert.strictEqual(results.length, 1); + }); + describe("indexed query optimization", () => { - it("should use indexed query optimization for multiple indexed fields", () => { + it("should use indexed query optimization for multiple indexed fields", async () => { const optimizedStore = new Haro({ - index: ["category", "status", "priority"] + index: ["category", "status", "priority"], }); - // Add data - optimizedStore.set("1", {category: "bug", status: "open", priority: "high"}); - optimizedStore.set("2", {category: "bug", status: "closed", priority: "low"}); - optimizedStore.set("3", {category: "feature", status: "open", priority: "high"}); - optimizedStore.set("4", {category: "bug", status: "open", priority: "medium"}); + optimizedStore.set("1", { category: "bug", status: "open", priority: "high" }); + 
optimizedStore.set("2", { category: "bug", status: "closed", priority: "low" }); + optimizedStore.set("3", { category: "feature", status: "open", priority: "high" }); + optimizedStore.set("4", { category: "bug", status: "open", priority: "medium" }); - // Query with multiple indexed fields to trigger indexed optimization - const results = optimizedStore.where({ - category: "bug", - status: "open" - }, "&&"); + const results = await optimizedStore.where( + { + category: "bug", + status: "open", + }, + "&&", + ); assert.equal(results.length, 2, "Should find records matching both criteria"); - assert.ok(results.every(r => r.category === "bug" && r.status === "open")); + assert.ok(results.every((r) => r.category === "bug" && r.status === "open")); }); - it("should handle array predicates in indexed query", () => { + it("should handle array predicates in indexed query", async () => { const arrayStore = new Haro({ - index: ["category", "tags"] + index: ["category", "tags"], }); - // Add data - arrayStore.set("1", {id: "1", category: "tech", tags: ["javascript", "nodejs"]}); - arrayStore.set("2", {id: "2", category: "tech", tags: ["python", "django"]}); - arrayStore.set("3", {id: "3", category: "business", tags: ["javascript", "react"]}); + arrayStore.set("1", { id: "1", category: "tech", tags: ["javascript", "nodejs"] }); + arrayStore.set("2", { id: "2", category: "tech", tags: ["python", "django"] }); + arrayStore.set("3", { id: "3", category: "business", tags: ["javascript", "react"] }); - // Query with array predicate on indexed field - const results = arrayStore.where({ - category: ["tech"] - }, "&&"); + const results = await arrayStore.where( + { + category: ["tech"], + }, + "&&", + ); assert.equal(results.length, 2, "Should find records matching array predicate"); - assert.ok(results.every(r => r.category === "tech")); + assert.ok(results.every((r) => r.category === "tech")); }); }); describe("fallback to full scan", () => { - it("should fallback to full scan when 
no indexed fields are available", () => { + it("should fallback to full scan when no indexed fields are available", async () => { const fallbackStore = new Haro({ - index: ["name"] // Only index 'name' field + index: ["name"], }); - // Add data - fallbackStore.set("1", {id: "1", name: "Alice", age: 30, category: "admin"}); - fallbackStore.set("2", {id: "2", name: "Bob", age: 25, category: "user"}); - fallbackStore.set("3", {id: "3", name: "Charlie", age: 35, category: "admin"}); + fallbackStore.set("1", { id: "1", name: "Alice", age: 30, category: "admin" }); + fallbackStore.set("2", { id: "2", name: "Bob", age: 25, category: "user" }); + fallbackStore.set("3", { id: "3", name: "Charlie", age: 35, category: "admin" }); - // Query for non-existent value - const results = fallbackStore.where({ - name: "nonexistent" - }, "&&"); + const results = await fallbackStore.where( + { + name: "nonexistent", + }, + "&&", + ); assert.equal(results.length, 0, "Should return empty array when no matches"); }); - it("should trigger true fallback to full scan", () => { + it("should trigger true fallback to full scan", async () => { const scanStore = new Haro({ - index: ["age"] + index: ["name"], }); - scanStore.set("1", {id: "1", name: "Alice", age: 30, category: "admin"}); - scanStore.set("2", {id: "2", name: "Bob", age: 25, category: "user"}); - - // Remove the age index to force fallback - scanStore.indexes.delete("age"); + scanStore.set("1", { id: "1", name: "Alice", age: 30, category: "admin" }); + scanStore.set("2", { id: "2", name: "Bob", age: 25, category: "user" }); - // Test that the method works - const results = scanStore.where({age: 30}, "&&"); + const results = await scanStore.where({ age: 30 }, "&&"); assert.equal(Array.isArray(results), true, "Should return an array"); }); - it("should return empty array when no matches in fallback scan", () => { + it("should return empty array when no matches in fallback scan", async () => { const emptyStore = new Haro({ - index: 
["name"] + index: ["name"], }); - emptyStore.set("1", {id: "1", name: "Alice", age: 30}); - emptyStore.set("2", {id: "2", name: "Bob", age: 25}); + emptyStore.set("1", { id: "1", name: "Alice", age: 30 }); + emptyStore.set("2", { id: "2", name: "Bob", age: 25 }); - // Query that won't match anything - const results = emptyStore.where({ - age: 40, - category: "nonexistent" - }, "&&"); + const results = await emptyStore.where( + { + age: 40, + category: "nonexistent", + }, + "&&", + ); assert.equal(results.length, 0, "Should return empty array when no matches"); }); }); + + it("should warn on full table scan when querying non-indexed fields", async () => { + const scanStore = new Haro({ + index: ["name"], + warnOnFullScan: true, + }); + + scanStore.set("1", { id: "1", name: "Alice", age: 30, category: "admin" }); + scanStore.set("2", { id: "2", name: "Bob", age: 25, category: "user" }); + scanStore.set("3", { id: "3", name: "Charlie", age: 35, category: "admin" }); + + const results = await scanStore.where({ age: 30, category: "admin" }, "&&"); + + assert.strictEqual(results.length, 1); + assert.strictEqual(results[0].id, "1"); + }); }); describe("sortBy()", () => { + it("should sort by indexed field with numeric values", () => { + const numericStore = new Haro({ index: ["age"] }); + numericStore.set("user1", { id: "user1", age: 30 }); + numericStore.set("user2", { id: "user2", age: 25 }); + numericStore.set("user3", { id: "user3", age: 35 }); + const results = numericStore.sortBy("age"); + assert.strictEqual(results[0].age, 25); + assert.strictEqual(results[1].age, 30); + assert.strictEqual(results[2].age, 35); + }); + + it("should sort by indexed field with mixed types", () => { + const mixedStore = new Haro({ index: ["value"] }); + mixedStore.set("1", { id: "1", value: 10 }); + mixedStore.set("2", { id: "2", value: "5" }); + mixedStore.set("3", { id: "3", value: 3 }); + const results = mixedStore.sortBy("value"); + assert.strictEqual(results.length, 3); + }); + 
it("should sort by indexed field", () => { const results = store.sortBy("name"); - assert.strictEqual(results[0][1].name, "Bob"); - assert.strictEqual(results[1][1].name, "Jane"); - assert.strictEqual(results[2][1].name, "John"); + assert.strictEqual(results[0].name, "Bob"); + assert.strictEqual(results[1].name, "Jane"); + assert.strictEqual(results[2].name, "John"); }); it("should throw error for empty field", () => { @@ -221,119 +345,29 @@ describe("Searching and Filtering", () => { it("should create index if not exists", () => { const results = store.sortBy("name"); - assert.strictEqual(results[0][1].name, "Bob"); - assert.strictEqual(results[1][1].name, "Jane"); - assert.strictEqual(results[2][1].name, "John"); + assert.strictEqual(results[0].name, "Bob"); + assert.strictEqual(results[1].name, "Jane"); + assert.strictEqual(results[2].name, "John"); }); describe("with reindexing and immutable mode", () => { it("should reindex field if not exists and return frozen results", () => { const immutableStore = new Haro({ - immutable: true + immutable: true, }); - immutableStore.set("1", {id: "1", name: "Charlie", age: 35}); - immutableStore.set("2", {id: "2", name: "Alice", age: 30}); - immutableStore.set("3", {id: "3", name: "Bob", age: 25}); + immutableStore.set("1", { id: "1", name: "Charlie", age: 35 }); + immutableStore.set("2", { id: "2", name: "Alice", age: 30 }); + immutableStore.set("3", { id: "3", name: "Bob", age: 25 }); - // sortBy on non-indexed field will trigger reindex const results = immutableStore.sortBy("age"); - // Verify reindexing happened - assert.ok(immutableStore.indexes.has("age"), "Index should be created during sortBy"); - - // Verify results are frozen + assert.ok(immutableStore.index.includes("age"), "Index should be created during sortBy"); assert.ok(Object.isFrozen(results), "Results should be frozen in immutable mode"); - - // Verify sorting worked - results are [key, record] pairs - assert.equal(results[0][1].age, 25); - 
assert.equal(results[1][1].age, 30); - assert.equal(results[2][1].age, 35); + assert.equal(results[0].age, 25); + assert.equal(results[1].age, 30); + assert.equal(results[2].age, 35); }); }); }); - - describe("matchesPredicate() complex array logic", () => { - it("should handle array predicate with array value using AND logic", () => { - const testStore = new Haro(); - const record = {tags: ["javascript", "nodejs", "react"]}; - - // Test array predicate with array value using AND (every) - const result = testStore.matchesPredicate(record, {tags: ["javascript", "nodejs"]}, "&&"); - assert.equal(result, true, "Should match when all predicate values are in record array"); - - const result2 = testStore.matchesPredicate(record, {tags: ["javascript", "python"]}, "&&"); - assert.equal(result2, false, "Should not match when not all predicate values are in record array"); - }); - - it("should handle array predicate with array value using OR logic", () => { - const testStore = new Haro(); - const record = {tags: ["javascript", "nodejs"]}; - - // Test array predicate with array value using OR (some) - const result = testStore.matchesPredicate(record, {tags: ["python", "nodejs"]}, "||"); - assert.equal(result, true, "Should match when at least one predicate value is in record array"); - }); - - it("should handle array predicate with scalar value using AND logic", () => { - const testStore = new Haro(); - const record = {category: "tech"}; - - // Test array predicate with scalar value using AND (every) - const result = testStore.matchesPredicate(record, {category: ["tech"]}, "&&"); - assert.equal(result, true, "Should match when predicate array contains the scalar value"); - - const result2 = testStore.matchesPredicate(record, {category: ["business", "finance"]}, "&&"); - assert.equal(result2, false, "Should not match when predicate array doesn't contain scalar value"); - }); - - it("should handle array predicate with scalar value using OR logic", () => { - const testStore = 
new Haro(); - const record = {category: "tech"}; - - // Test array predicate with scalar value using OR (some) - const result = testStore.matchesPredicate(record, {category: ["business", "tech"]}, "||"); - assert.equal(result, true, "Should match when predicate array contains the scalar value"); - }); - - it("should handle regex predicate with array value using AND logic", () => { - const testStore = new Haro(); - const record = {tags: ["reactjs", "vuejs", "angularjs"]}; - - // Test regex predicate with array value using AND (every) - const result = testStore.matchesPredicate(record, {tags: /js$/}, "&&"); - assert.equal(result, true, "Should match when regex matches all array values"); - - const record2 = {tags: ["javascript", "nodejs", "reactjs"]}; - const result2 = testStore.matchesPredicate(record2, {tags: /js$/}, "&&"); - assert.equal(result2, false, "Should not match when regex doesn't match all array values"); - }); - - it("should handle regex predicate with array value using OR logic", () => { - const testStore = new Haro(); - const record = {tags: ["python", "nodejs", "java"]}; - - // Test regex predicate with array value using OR (some) - const result = testStore.matchesPredicate(record, {tags: /^node/}, "||"); - assert.equal(result, true, "Should match when regex matches at least one array value"); - }); - - it("should handle regex predicate with scalar value", () => { - const testStore = new Haro(); - const record = {name: "javascript"}; - - // Test regex predicate with scalar value - const result = testStore.matchesPredicate(record, {name: /script$/}, "&&"); - assert.equal(result, true, "Should match when regex matches scalar value"); - }); - - it("should handle array value with scalar predicate", () => { - const testStore = new Haro(); - const record = {tags: ["javascript"]}; - - // Test the specific edge case for array values with non-array predicate - const result = testStore.matchesPredicate(record, {tags: "javascript"}, "&&"); - 
assert.equal(result, true, "Should handle array value with scalar predicate"); - }); - }); }); diff --git a/tests/unit/utilities.test.js b/tests/unit/utilities.test.js index 562ec378..b5ded0c5 100644 --- a/tests/unit/utilities.test.js +++ b/tests/unit/utilities.test.js @@ -1,6 +1,6 @@ import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Utility Methods", () => { let store; @@ -9,46 +9,10 @@ describe("Utility Methods", () => { store = new Haro(); }); - describe("clone()", () => { - it("should create deep clone of object", () => { - const original = {name: "John", tags: ["admin", "user"]}; - const cloned = store.clone(original); - - cloned.tags.push("new"); - assert.strictEqual(original.tags.length, 2); - assert.strictEqual(cloned.tags.length, 3); - }); - - it("should clone primitives", () => { - assert.strictEqual(store.clone("string"), "string"); - assert.strictEqual(store.clone(123), 123); - assert.strictEqual(store.clone(true), true); - }); - }); - - describe("each()", () => { - it("should iterate over array with callback", () => { - const items = ["a", "b", "c"]; - const results = []; - - store.each(items, (item, index) => { - results.push(`${index}:${item}`); - }); - - assert.deepStrictEqual(results, ["0:a", "1:b", "2:c"]); - }); - - it("should handle empty array", () => { - const results = []; - store.each([], () => results.push("called")); - assert.strictEqual(results.length, 0); - }); - }); - describe("forEach()", () => { beforeEach(() => { - store.set("user1", {id: "user1", name: "John"}); - store.set("user2", {id: "user2", name: "Jane"}); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); }); it("should iterate over all records", () => { @@ -65,15 +29,15 @@ describe("Utility Methods", () => { describe("map()", () => { 
beforeEach(() => { - store.set("user1", {id: "user1", name: "John", age: 30}); - store.set("user2", {id: "user2", name: "Jane", age: 25}); + store.set("user1", { id: "user1", name: "John", age: 30 }); + store.set("user2", { id: "user2", name: "Jane", age: 25 }); }); it("should transform all records", () => { - const results = store.map(record => record.name); + const results = store.map((record) => record.name); assert.strictEqual(results.length, 2); - assert.strictEqual(results[0][1], "John"); - assert.strictEqual(results[1][1], "Jane"); + assert.strictEqual(results[0], "John"); + assert.strictEqual(results[1], "Jane"); }); it("should throw error for non-function mapper", () => { @@ -83,105 +47,10 @@ describe("Utility Methods", () => { }); }); - describe("reduce()", () => { - beforeEach(() => { - store.set("user1", {id: "user1", age: 30}); - store.set("user2", {id: "user2", age: 25}); - }); - - it("should reduce all records to single value", () => { - const totalAge = store.reduce((sum, record) => sum + record.age, 0); - assert.strictEqual(totalAge, 55); - }); - - it("should use default accumulator", () => { - const names = store.reduce((acc, record) => { - acc.push(record.id); - - return acc; - }); - assert.deepStrictEqual(names, ["user1", "user2"]); - }); - }); - - describe("merge()", () => { - it("should merge objects", () => { - const a = {x: 1, y: 2}; - const b = {y: 3, z: 4}; - const result = store.merge(a, b); - - assert.deepStrictEqual(result, {x: 1, y: 3, z: 4}); - }); - - it("should concatenate arrays", () => { - const a = [1, 2]; - const b = [3, 4]; - const result = store.merge(a, b); - - assert.deepStrictEqual(result, [1, 2, 3, 4]); - }); - - it("should override arrays when override is true", () => { - const a = [1, 2]; - const b = [3, 4]; - const result = store.merge(a, b, true); - - assert.deepStrictEqual(result, [3, 4]); - }); - - it("should replace primitives", () => { - const result = store.merge("old", "new"); - assert.strictEqual(result, "new"); 
- }); - }); - - describe("uuid()", () => { - it("should generate valid UUID", () => { - const id = store.uuid(); - const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i; - assert.strictEqual(uuidRegex.test(id), true); - }); - - it("should generate unique UUIDs", () => { - const id1 = store.uuid(); - const id2 = store.uuid(); - assert.notStrictEqual(id1, id2); - }); - }); - - describe("freeze()", () => { - it("should freeze multiple arguments", () => { - const obj1 = {a: 1}; - const obj2 = {b: 2}; - const result = store.freeze(obj1, obj2); - - assert.strictEqual(Object.isFrozen(result), true); - assert.strictEqual(Object.isFrozen(result[0]), true); - assert.strictEqual(Object.isFrozen(result[1]), true); - }); - }); - - describe("list()", () => { - it("should convert record to [key, value] format", () => { - const record = {id: "user1", name: "John"}; - const result = store.list(record); - - assert.deepStrictEqual(result, ["user1", record]); - }); - - it("should freeze result in immutable mode", () => { - const immutableStore = new Haro({immutable: true}); - const record = {id: "user1", name: "John"}; - const result = immutableStore.list(record); - - assert.strictEqual(Object.isFrozen(result), true); - }); - }); - describe("limit()", () => { beforeEach(() => { for (let i = 0; i < 10; i++) { - store.set(`user${i}`, {id: `user${i}`, name: `User${i}`}); + store.set(`user${i}`, { id: `user${i}`, name: `User${i}` }); } }); @@ -193,7 +62,7 @@ describe("Utility Methods", () => { it("should support offset", () => { const results = store.limit(5, 3); assert.strictEqual(results.length, 3); - assert.strictEqual(results[0][0], "user5"); + assert.strictEqual(results[0].id, "user5"); }); it("should handle offset beyond data size", () => { @@ -204,9 +73,15 @@ describe("Utility Methods", () => { describe("sort()", () => { beforeEach(() => { - store.set("user1", {id: "user1", name: "Charlie", age: 30}); - store.set("user2", {id: "user2", name: 
"Alice", age: 25}); - store.set("user3", {id: "user3", name: "Bob", age: 35}); + store.set("user1", { id: "user1", name: "Charlie", age: 30 }); + store.set("user2", { id: "user2", name: "Alice", age: 25 }); + store.set("user3", { id: "user3", name: "Bob", age: 35 }); + }); + + it("should throw error when fn is not a function", () => { + assert.throws(() => { + store.sort("not a function"); + }, /sort: fn must be a function/); }); it("should sort records with comparator function", () => { @@ -224,8 +99,8 @@ describe("Utility Methods", () => { describe("toArray()", () => { beforeEach(() => { - store.set("user1", {id: "user1", name: "John"}); - store.set("user2", {id: "user2", name: "Jane"}); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); }); it("should convert store to array", () => { @@ -236,10 +111,24 @@ describe("Utility Methods", () => { }); it("should return frozen array in immutable mode", () => { - const immutableStore = new Haro({immutable: true}); - immutableStore.set("user1", {id: "user1", name: "John"}); + const immutableStore = new Haro({ immutable: true }); + immutableStore.set("user1", { id: "user1", name: "John" }); + const results = immutableStore.toArray(); + + assert.strictEqual(Object.isFrozen(results), true); + assert.strictEqual(Object.isFrozen(results[0]), true); + }); + + it("should freeze all nested objects in immutable mode", () => { + const immutableStore = new Haro({ immutable: true }); + immutableStore.set("user1", { + id: "user1", + name: "John", + address: { city: "NYC" }, + }); const results = immutableStore.toArray(); + // The result array and its elements are frozen, but nested objects are not deeply frozen assert.strictEqual(Object.isFrozen(results), true); assert.strictEqual(Object.isFrozen(results[0]), true); }); @@ -247,8 +136,8 @@ describe("Utility Methods", () => { describe("entries(), keys(), values()", () => { beforeEach(() => { - store.set("user1", {id: "user1", name: 
"John"}); - store.set("user2", {id: "user2", name: "Jane"}); + store.set("user1", { id: "user1", name: "John" }); + store.set("user2", { id: "user2", name: "Jane" }); }); it("should return entries iterator", () => { @@ -273,103 +162,94 @@ describe("Utility Methods", () => { }); }); - describe("sortKeys()", () => { - it("should sort strings using localeCompare", () => { - const result = store.sortKeys("apple", "banana"); - assert.strictEqual(result < 0, true, "apple should come before banana"); - - const result2 = store.sortKeys("zebra", "apple"); - assert.strictEqual(result2 > 0, true, "zebra should come after apple"); - - const result3 = store.sortKeys("same", "same"); - assert.strictEqual(result3, 0, "identical strings should return 0"); - }); - - it("should sort numbers using numeric comparison", () => { - const result = store.sortKeys(5, 10); - assert.strictEqual(result, -5, "5 should come before 10"); - - const result2 = store.sortKeys(20, 3); - assert.strictEqual(result2, 17, "20 should come after 3"); - - const result3 = store.sortKeys(7, 7); - assert.strictEqual(result3, 0, "identical numbers should return 0"); - }); - - it("should handle negative numbers correctly", () => { - const result = store.sortKeys(-5, 3); - assert.strictEqual(result, -8, "-5 should come before 3"); - - const result2 = store.sortKeys(-10, -2); - assert.strictEqual(result2, -8, "-10 should come before -2"); - }); - - it("should handle floating point numbers", () => { - const result = store.sortKeys(3.14, 2.71); - assert.strictEqual(result > 0, true, "3.14 should come after 2.71"); - assert.strictEqual(Math.abs(result - 0.43) < 0.01, true, "result should be approximately 0.43"); - - const result2 = store.sortKeys(1.5, 1.5); - assert.strictEqual(result2, 0, "identical floats should return 0"); - }); - - it("should convert mixed types to strings and sort", () => { - const result = store.sortKeys(10, "5"); - assert.strictEqual(result < 0, true, "number 10 as string should come before 
string '5'"); - - const result2 = store.sortKeys("abc", 123); - assert.strictEqual(result2 > 0, true, "string 'abc' should come after number 123 as string"); - }); - - it("should handle null and undefined values", () => { - const result = store.sortKeys(null, "test"); - assert.strictEqual(result < 0, true, "null should come before 'test'"); - - const result2 = store.sortKeys(undefined, "test"); - assert.strictEqual(result2 > 0, true, "undefined should come after 'test'"); - - const result3 = store.sortKeys(null, undefined); - assert.strictEqual(result3 < 0, true, "null should come before undefined"); - }); - - it("should handle boolean values", () => { - const result = store.sortKeys(true, false); - assert.strictEqual(result > 0, true, "true should come after false"); - - const result2 = store.sortKeys(false, "test"); - assert.strictEqual(result2 < 0, true, "false should come before 'test'"); - }); - - it("should handle objects by converting to string", () => { - const obj1 = {name: "test"}; - const obj2 = {value: 123}; - const result = store.sortKeys(obj1, obj2); - - // Objects get converted to "[object Object]" so they should be equal - assert.strictEqual(result, 0, "objects should be equal when converted to string"); - }); - - it("should work as Array.sort comparator", () => { - const mixed = ["zebra", "apple", "banana"]; - mixed.sort(store.sortKeys.bind(store)); - assert.deepStrictEqual(mixed, ["apple", "banana", "zebra"]); - - const numbers = [10, 3, 7, 1]; - numbers.sort(store.sortKeys.bind(store)); - assert.deepStrictEqual(numbers, [1, 3, 7, 10]); - - const mixedTypes = [5, "3", 1, "10"]; - mixedTypes.sort(store.sortKeys.bind(store)); - // When converted to strings: "1", "10", "3", "5" - assert.deepStrictEqual(mixedTypes, [1, "10", "3", 5]); + describe("merge() edge cases via set()", () => { + it("should handle nested arrays", () => { + const versionedStore = new Haro({ versioning: true }); + versionedStore.set("key1", { matrix: [[1, 2]] }); + 
versionedStore.set("key1", { matrix: [[3, 4]] }); + const record = versionedStore.get("key1"); + assert.deepStrictEqual(record.matrix, [ + [1, 2], + [3, 4], + ]); + }); + + it("should use JSON fallback when structuredClone is unavailable", () => { + const originalStructuredClone = globalThis.structuredClone; + globalThis.structuredClone = undefined; + + try { + const store = new Haro({ versioning: true }); + const original = { a: 1, b: { c: 2 } }; + store.set("key1", original); + store.set("key1", { a: 3 }); + const versions = store.versions.get("key1"); + const version = Array.from(versions)[0]; + assert.strictEqual(version.a, 1); + assert.strictEqual(version.b.c, 2); + } finally { + globalThis.structuredClone = originalStructuredClone; + } }); - it("should handle special string characters", () => { - const result = store.sortKeys("café", "cafe"); - assert.strictEqual(typeof result, "number", "should return a number"); - - const result2 = store.sortKeys("ñ", "n"); - assert.strictEqual(typeof result2, "number", "should handle accented characters"); + it("should handle deep nested objects", () => { + const versionedStore = new Haro({ versioning: true }); + versionedStore.set("key1", { a: { b: { c: 1 } } }); + versionedStore.set("key1", { a: { b: { d: 2 } } }); + const record = versionedStore.get("key1"); + assert.deepStrictEqual(record.a.b, { c: 1, d: 2 }); + }); + + it("should handle null values", () => { + const versionedStore = new Haro({ versioning: true }); + versionedStore.set("key1", { a: null }); + versionedStore.set("key1", { b: "value" }); + const record = versionedStore.get("key1"); + assert.strictEqual(record.a, null); + assert.strictEqual(record.b, "value"); + }); + + it("should handle empty source object", () => { + const versionedStore = new Haro({ versioning: true }); + versionedStore.set("key1", { a: 1 }); + versionedStore.set("key1", {}); + const record = versionedStore.get("key1"); + assert.deepStrictEqual(record, { a: 1, id: "key1" }); + }); + + 
it("should handle array to object type mismatch", () => { + const versionedStore = new Haro({ versioning: true }); + versionedStore.set("key1", { tags: ["a"] }); + versionedStore.set("key1", { tags: "b" }); + const record = versionedStore.get("key1"); + assert.strictEqual(record.tags, "b"); + }); + + it("should preserve version history with merges", () => { + const versionedStore = new Haro({ versioning: true }); + versionedStore.set("key1", { a: 1, b: 2 }); + versionedStore.set("key1", { b: 3, c: 4 }); + const versions = versionedStore.versions.get("key1"); + assert.strictEqual(versions.size, 1); + const version = Array.from(versions)[0]; + assert.deepStrictEqual(version.a, 1); + assert.deepStrictEqual(version.b, 2); + }); + + it("should skip prototype pollution keys during merge", () => { + const versionedStore = new Haro({ versioning: true }); + versionedStore.set("key1", { a: 1 }); + versionedStore.set("key1", { + __proto__: { polluted: true }, + constructor: { polluted: true }, + prototype: { polluted: true }, + b: 2, + }); + const record = versionedStore.get("key1"); + assert.strictEqual(record.a, 1); + assert.strictEqual(record.b, 2); + assert.strictEqual(Object.prototype.polluted, undefined); + assert.strictEqual(record.hasOwnProperty("__proto__"), false); }); }); }); diff --git a/tests/unit/versioning.test.js b/tests/unit/versioning.test.js index a81e5c90..ac5a6711 100644 --- a/tests/unit/versioning.test.js +++ b/tests/unit/versioning.test.js @@ -1,17 +1,17 @@ import assert from "node:assert"; -import {describe, it, beforeEach} from "mocha"; -import {Haro} from "../../src/haro.js"; +import { describe, it, beforeEach } from "node:test"; +import { Haro } from "../../src/haro.js"; describe("Versioning", () => { let versionedStore; beforeEach(() => { - versionedStore = new Haro({versioning: true}); + versionedStore = new Haro({ versioning: true }); }); it("should create version when updating record", () => { - versionedStore.set("user1", {id: "user1", name: 
"John", age: 30}); - versionedStore.set("user1", {id: "user1", name: "John", age: 31}); + versionedStore.set("user1", { id: "user1", name: "John", age: 30 }); + versionedStore.set("user1", { id: "user1", name: "John", age: 31 }); const versions = versionedStore.versions.get("user1"); assert.strictEqual(versions.size, 1); @@ -22,15 +22,15 @@ describe("Versioning", () => { }); it("should not create version for new record", () => { - versionedStore.set("user1", {id: "user1", name: "John"}); + versionedStore.set("user1", { id: "user1", name: "John" }); const versions = versionedStore.versions.get("user1"); assert.strictEqual(versions.size, 0); }); it("should delete versions when record is deleted", () => { - versionedStore.set("user1", {id: "user1", name: "John"}); - versionedStore.set("user1", {id: "user1", name: "John Updated"}); + versionedStore.set("user1", { id: "user1", name: "John" }); + versionedStore.set("user1", { id: "user1", name: "John Updated" }); versionedStore.delete("user1"); assert.strictEqual(versionedStore.versions.has("user1"), false); diff --git a/types/haro.d.ts b/types/haro.d.ts index b04d0ba6..de925cb2 100644 --- a/types/haro.d.ts +++ b/types/haro.d.ts @@ -2,12 +2,15 @@ * Configuration object for creating a Haro instance */ export interface HaroConfig { + cache?: boolean; + cacheSize?: number; delimiter?: string; id?: string; immutable?: boolean; index?: string[]; key?: string; versioning?: boolean; + warnOnFullScan?: boolean; } /** @@ -25,6 +28,8 @@ export class Haro { key: string; versions: Map>; versioning: boolean; + warnOnFullScan: boolean; + initialized: boolean; readonly registry: string[]; readonly size: number; @@ -35,41 +40,26 @@ export class Haro { constructor(config?: HaroConfig); /** - * Performs batch operations on multiple records for efficient bulk processing - * @param args - Array of records to process - * @param type - Type of operation: 'set' for upsert, 'del' for delete - * @returns Array of results from the batch operation 
+ * Inserts or updates multiple records + * @param records - Array of records to insert or update + * @returns Array of stored records */ - batch(args: any[], type?: string): any[]; + setMany(records: any[]): any[]; /** - * Lifecycle hook executed before batch operations for custom preprocessing - * @param arg - Arguments passed to batch operation - * @param type - Type of batch operation ('set' or 'del') - * @returns The arguments array (possibly modified) to be processed + * Deletes multiple records + * @param keys - Array of keys to delete + * @returns Array of void */ - beforeBatch(arg: any, type?: string): any; + deleteMany(keys: (string | number)[]): void[]; /** - * Lifecycle hook executed before clear operation for custom preprocessing + * Returns true if currently in a batch operation + * @returns Batch operation status */ - beforeClear(): void; + isBatching: boolean; - /** - * Lifecycle hook executed before delete operation for custom preprocessing - * @param key - Key of record to delete - * @param batch - Whether this is part of a batch operation - */ - beforeDelete(key?: string, batch?: boolean): void; - /** - * Lifecycle hook executed before set operation for custom preprocessing - * @param key - Key of record to set - * @param data - Record data being set - * @param batch - Whether this is part of a batch operation - * @param override - Whether to override existing data - */ - beforeSet(key?: string, data?: any, batch?: boolean, override?: boolean): void; /** * Removes all records, indexes, and versions from the store @@ -87,10 +77,9 @@ export class Haro { /** * Deletes a record from the store and removes it from all indexes * @param key - Key of record to delete - * @param batch - Whether this is part of a batch operation * @throws Throws error if record with the specified key is not found */ - delete(key?: string, batch?: boolean): void; + delete(key?: string): void; /** * Internal method to remove entries from indexes for a deleted record @@ -100,6 
+89,12 @@ export class Haro { */ deleteIndex(key: string, data: any): Haro; + /** + * Initializes the store by building indexes for existing data + * @returns This instance for method chaining + */ + initialize(): Haro; + /** * Exports complete store data or indexes for persistence or debugging * @param type - Type of data to export: 'records' or 'indexes' @@ -124,18 +119,16 @@ export class Haro { /** * Finds records matching the specified criteria using indexes for optimal performance * @param where - Object with field-value pairs to match against - * @param raw - Whether to return raw data without processing * @returns Array of matching records (frozen if immutable mode) */ - find(where?: Record, raw?: boolean): any[]; + find(where?: Record): any[]; /** * Filters records using a predicate function, similar to Array.filter * @param fn - Predicate function to test each record (record, key, store) - * @param raw - Whether to return raw data without processing * @returns Array of records that pass the predicate test */ - filter(fn: (value: any) => boolean, raw?: boolean): any[]; + filter(fn: (value: any, key: string, store: Haro) => boolean): any[]; /** * Executes a function for each record in the store, similar to Array.forEach @@ -186,10 +179,9 @@ export class Haro { * Returns a limited subset of records with offset support for pagination * @param offset - Number of records to skip from the beginning * @param max - Maximum number of records to return - * @param raw - Whether to return raw data without processing * @returns Array of records within the specified range */ - limit(offset?: number, max?: number, raw?: boolean): any[]; + limit(offset?: number, max?: number): any[]; /** * Converts a record into a [key, value] pair array format @@ -201,10 +193,9 @@ export class Haro { /** * Transforms all records using a mapping function, similar to Array.map * @param fn - Function to transform each record (record, key) - * @param raw - Whether to return raw data without 
processing * @returns Array of transformed results */ - map(fn: (value: any, key: string) => any, raw?: boolean): any[]; + map(fn: (value: any, key: string) => any): any[]; /** * Internal helper method for predicate matching with support for arrays and regex @@ -285,19 +276,18 @@ export class Haro { * @param value - Value to search for (string, function, or RegExp) * @param index - Index(es) to search in, or all if not specified * @param raw - Whether to return raw data without processing - * @returns Array of matching records + * @returns Promise resolving to array of matching records */ - search(value: any, index?: string | string[], raw?: boolean): any[]; + search(value: any, index?: string | string[], raw?: boolean): Promise; /** * Sets or updates a record in the store with automatic indexing * @param key - Key for the record, or null to use record's key field * @param data - Record data to set - * @param batch - Whether this is part of a batch operation * @param override - Whether to override existing data instead of merging * @returns The stored record (frozen if immutable mode) */ - set(key?: string | null, data?: any, batch?: boolean, override?: boolean): any; + set(key?: string | null, data?: any, override?: boolean): any; /** * Internal method to add entries to indexes for a record @@ -306,7 +296,7 @@ export class Haro { * @param indice - Specific index to update, or null for all * @returns This instance for method chaining */ - setIndex(key: string, data: any, indice?: string | null): Haro; + setIndex(key: string, data: any, indice: string | null): Haro; /** * Sorts all records using a comparator function @@ -314,7 +304,7 @@ export class Haro { * @param frozen - Whether to return frozen records * @returns Sorted array of records */ - sort(fn: (a: any, b: any) => number, frozen?: boolean): any[]; + sort(fn: (a: any, b: any) => number, frozen?: boolean): any; /** * Comparator function for sorting keys with type-aware comparison logic @@ -327,10 +317,9 @@ 
export class Haro { /** * Sorts records by a specific indexed field in ascending order * @param index - Index field name to sort by - * @param raw - Whether to return raw data without processing * @returns Array of records sorted by the specified field */ - sortBy(index?: string, raw?: boolean): any[]; + sortBy(index?: string): any[]; /** * Converts all store data to a plain array of records @@ -354,9 +343,33 @@ export class Haro { * Advanced filtering with predicate logic supporting AND/OR operations on arrays * @param predicate - Object with field-value pairs for filtering * @param op - Operator for array matching ('||' for OR, '&&' for AND) - * @returns Array of records matching the predicate criteria + * @returns Promise resolving to array of records matching the predicate criteria + */ + where(predicate?: Record, op?: string): Promise; + + /** + * Clears the cache + * @returns This instance for method chaining + */ + clearCache(): this; + + /** + * Returns the current cache size + * @returns Number of entries in cache + */ + getCacheSize(): number; + + /** + * Returns cache statistics + * @returns Statistics object with hits, misses, sets, deletes, evictions */ - where(predicate?: Record, op?: string): any[]; + getCacheStats(): { + hits: number; + misses: number; + sets: number; + deletes: number; + evictions: number; + } | null; } /**