diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3d55f18 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +node_modules +coverage +dist +yarn* diff --git a/README.md b/README.md new file mode 100644 index 0000000..d4788e1 --- /dev/null +++ b/README.md @@ -0,0 +1,96 @@ +[![npm version][npm-image]][npm-url] +[![downloads][downloads-image]][npm-url] +[![build status][build-image]][build-url] +[![coverage status][coverage-image]][coverage-url] +[![Language grade: JavaScript][lgtm-image]][lgtm-url] + + +# edit-json + +Edit a _textual_ JSON (not a JavaScript object) for a minimal diff, either programatically or by applying a [_JSON Patch_ (RFC6902)](https://www.rfc-editor.org/rfc/rfc6902). + +When serializing the result down to a string, it'll resemble the source JSON as much as possible with regards to property order, whitespace (indentation) and _flow types_ (arrays and objects on one line). + +Editing JSON is easy, just `JSON.parse()` and play around, then `JSON.stringify()`. To apply a _JSON Patch_, there are [several](https://www.npmjs.com/package/fast-json-patch) [packages](https://www.npmjs.com/package/rfc6902) [out](https://www.npmjs.com/package/json-bigint-patch) [there](https://www.npmjs.com/package/jsonpatch). + +This package focuses not on working with JSON as a JavaScript object, but as its textual representation. The package parses the JSON string (as e.g. from a file) as tokens, builds up a logical representation of it, and then applies transformations to that representation. Whitespace (tabs, spaces) as well as multi-line or single-line arrays/objects are remembered. + +To do the same with YAML, check out [yaml-diff-patch](https://www.npmjs.com/package/yaml-diff-patch). + + +# Example + +Given: + +```json +{ + "x": "non-alphanumerically ordered properties, obviously", + "foo": [ "same", "line", "array" ], + "bar": { + "some": "object" + } +} +``` + +Applying the JSON Patch: + +```json +[ { + "op": "move", + "from": "/foo", + "path": "/bar/herenow" +} ] +``` + +Produces: + +```json +{ + "x": "non-alphanumerically ordered properties, obviously", + "bar": { + "herenow": [ "same", "line", "array" ], + "some": "object" + } +} +``` + +Properties aren't re-ordered ("x" is still first), but by default, it will try to _insert_ properties orderly, such as when creating "herenow" in "bar". It'll be added before "some", "h" < "s". This is done with a best effort, since it's not always possible (the object might have unordered properties). + +Note also that the array is not split into multiple lines, which would happen with default `JSON.stringify` (unless the whole document is one line of course). The source format is kept if possible. + + +# Install + +`npm i edit-json` or `yarn add edit-json` + +This is a [pure ESM][pure-esm] package, and requires Node.js >=14.13.1 + + +# Simple usage + +### Exports + +The package exports `parseJson` (to be documented) and `jsonPatch`. + +### Definition + +`jsonPatch( json: string, operations: Operations[], options: Options ): string` + +Applies a list of _JSON Patch_ operations to the source `json` and returns the new json string. + +The options are: + + - `whitespace` ('auto' | 'tabs' | number): Specifies whitespace strategy. Defaults to 'auto'. Force tabs using 'tabs' or spaces using number (e.g. 2 or 4). + - `ordered` (boolean): Try to insert new properties in order. 
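+   Defaults to true.
+
+### Usage example
+
+A minimal sketch of programmatic use (the sample document and the
+`/bar/answer` property are made up for illustration):
+
+```ts
+import { jsonPatch } from 'edit-json'
+import type { Operation } from 'edit-json'
+
+const source = `{
+  "foo": [ "same", "line", "array" ],
+  "bar": { "some": "object" }
+}`;
+
+const operations: Operation[ ] = [
+  // With `ordered` (the default), the new property is inserted
+  // alphabetically, i.e. before "some".
+  { op: 'add', path: '/bar/answer', value: 42 },
+];
+
+// Returns a new JSON string; indentation, property order and one-line
+// arrays/objects from the source are kept where possible.
+const result = jsonPatch( source, operations );
+console.log( result );
+```
+
+`jsonPatch` can also be given a `JsonDocument` (as returned by `parseJson`),
+in which case it returns the updated document instead of a string.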
+ + +[npm-image]: https://img.shields.io/npm/v/edit-json.svg +[npm-url]: https://npmjs.org/package/edit-json +[downloads-image]: https://img.shields.io/npm/dm/edit-json.svg +[build-image]: https://img.shields.io/github/workflow/status/grantila/edit-json/Master.svg +[build-url]: https://github.com/grantila/edit-json/actions?query=workflow%3AMaster +[coverage-image]: https://coveralls.io/repos/github/grantila/edit-json/badge.svg?branch=master +[coverage-url]: https://coveralls.io/github/grantila/edit-json?branch=master +[lgtm-image]: https://img.shields.io/lgtm/grade/javascript/g/grantila/edit-json.svg?logo=lgtm&logoWidth=18 +[lgtm-url]: https://lgtm.com/projects/g/grantila/edit-json/context:javascript +[pure-esm]: https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c diff --git a/babel.config.cjs b/babel.config.cjs new file mode 100644 index 0000000..72ced87 --- /dev/null +++ b/babel.config.cjs @@ -0,0 +1,14 @@ +module.exports = { + presets: [ + [ + '@babel/preset-env', + { + modules: false, + targets: { + node: 'current', + }, + }, + ], + '@babel/preset-typescript', + ], +} diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 0000000..e5f3fc3 --- /dev/null +++ b/jest.config.js @@ -0,0 +1,13 @@ +export default { + resolver: 'ts-jest-resolver', + testEnvironment: 'node', + testMatch: [ + '/lib/**/*.test.ts', + ], + modulePathIgnorePatterns: [], + collectCoverageFrom: ['/lib/**/*.ts', 'index.ts'], + coveragePathIgnorePatterns: [ '/node_modules/' ], + coverageReporters: ['lcov', 'text', 'html'], + collectCoverage: true, + extensionsToTreatAsEsm: ['.ts'], +} diff --git a/lib/document/document.test.ts b/lib/document/document.test.ts new file mode 100644 index 0000000..1120ddc --- /dev/null +++ b/lib/document/document.test.ts @@ -0,0 +1,105 @@ +import { fileURLToPath } from 'node:url' +import { resolve as resolvePath, dirname } from 'node:path' +import { readFile } from 'node:fs/promises' + +import { parseJson } from './document.js' + + +const __dirname = dirname( fileURLToPath( import.meta.url ) ); +const rootDir = resolvePath( __dirname, '..', '..' 
); + +describe( 'document', ( ) => +{ + describe( 'primitives', ( ) => + { + it( 'null', ( ) => + { + const parsed = parseJson( 'null' ); + + expect( parsed.toJSON( ) ).toBe( 'null' ); + } ); + + it( 'boolean', ( ) => + { + const parsed = parseJson( 'false' ); + + expect( parsed.toJSON( ) ).toBe( 'false' ); + } ); + + it( 'boolean with whitespace', ( ) => + { + const parsed = parseJson( ' true ' ); + + expect( parsed.toJSON( ) ).toBe( ' true' ); + } ); + + it( 'string', ( ) => + { + const parsed = parseJson( ' "foo bar\\nnext line"' ); + + expect( parsed.toJSON( ) ).toBe( ' "foo bar\\nnext line"' ); + } ); + + it( 'number pos', ( ) => + { + const parsed = parseJson( '3.14' ); + + expect( parsed.toJSON( ) ).toBe( '3.14' ); + } ); + + it( 'number neg', ( ) => + { + const parsed = parseJson( '-3.14' ); + + expect( parsed.toJSON( ) ).toBe( '-3.14' ); + } ); + + it( 'number sci pos', ( ) => + { + const parsed = parseJson( '123e5' ); + + expect( parsed.toJSON( ) ).toBe( '123e5' ); + } ); + + it( 'number sci neg', ( ) => + { + const parsed = parseJson( '-123e5' ); + + expect( parsed.toJSON( ) ).toBe( '-123e5' ); + } ); + } ); + + describe( 'objects', ( ) => + { + it( 'empty', ( ) => + { + const parsed = parseJson( ' {}' ); + + expect( parsed.toJSON( ) ).toBe( ' { }' ); + } ); + + it( 'flow', ( ) => + { + const parsed = parseJson( '{ "foo": "bar" }' ); + + expect( parsed.toJSON( ) ).toBe( '{ "foo": "bar" }' ); + } ); + + it( 'non-flow', ( ) => + { + const parsed = parseJson( '{\n "foo": "bar"}' ); + + expect( parsed.toJSON( ) ).toBe( '{\n "foo": "bar"\n}' ); + } ); + + it( 'self packge.json', async ( ) => + { + const pkgJsonFile = resolvePath( rootDir, 'package.json' ); + const pkgJson = await readFile( pkgJsonFile, 'utf-8' ); + + const parsed = parseJson( pkgJson ); + + expect( parsed.toJSON( ).trimEnd( ) ).toBe( pkgJson.trimEnd( ) ); + } ); + } ); +} ); diff --git a/lib/document/document.ts b/lib/document/document.ts new file mode 100644 index 0000000..2c0c168 --- /dev/null +++ b/lib/document/document.ts @@ -0,0 +1,175 @@ +import type { LocationPath } from 'jsonpos' + +import { + JsonArray, + JsonNodeType, + JsonObject, + JsonPrimitiveBase, +} from './nodes.js' +import { Indentable } from './indentable.js' +import { parse } from './parse-to-nodes.js' +import { ensureNumericIndex } from '../utils.js' +import { type JsonDocumentOptions } from './types.js' +import { getDocumentOptions } from './utils.js' + + +export class JsonDocument extends Indentable +{ + public readonly options: JsonDocumentOptions; + + constructor( + private json: string, + public root: JsonNodeType, + private rootIndentation: Indentable, + options?: Partial< JsonDocumentOptions > + ) + { + super( ); + + this.options = getDocumentOptions( options ); + } + + public getAt( path: LocationPath ): JsonNodeType | undefined + { + if ( path.length === 0 ) + return this.root; + + return path.reduce( + ( prev: JsonNodeType | undefined, cur ) => + { + if ( !prev ) + return undefined; + + if ( prev instanceof JsonObject ) + { + const prop = `${cur}`; + + return prev.properties + .find( entry => entry.name === prop ) + ?.value; + } + else if ( prev instanceof JsonArray ) + { + const index = ensureNumericIndex( cur ); + + return prev.elements[ index ]; + } + + return undefined; + }, + this.root + ); + } + + toString( ): string + { + const chooseTabs = this.#useTabs( + this.root instanceof Indentable + ? 
this.root.tabs + : undefined + ); + + const rootIndent = + this.rootIndentation.indentString( chooseTabs ); + + if ( this.root instanceof JsonPrimitiveBase ) + return rootIndent + this.root.raw; + + const stringify = ( node: JsonNodeType, parentIndent: string ) => + { + if ( node instanceof JsonPrimitiveBase ) + return node.raw; + + const indent = node.flow ? '' : node.indentString( chooseTabs ); + + if ( node instanceof JsonArray ) + { + const ret = [ node.flow ? '[ ' : '[\n' ]; + + node.elements.forEach( ( element, i ) => + { + ret.push( indent + stringify( element, indent ) ); + + if ( i < node.elements.length - 1 ) + ret.push( node.flow ? ', ' : ',\n' ); + else + ret.push( node.flow ? ' ' : `\n${parentIndent}` ); + } ); + + ret.push( ']' ); + return ret.join( '' ); + } + else if ( node instanceof JsonObject ) + { + const ret = [ node.flow ? '{ ' : '{\n' ]; + + node.properties.forEach( ( prop, i ) => + { + ret.push( + indent + + JSON.stringify( prop.name ) + + ': ' + + stringify( prop.value, indent ) + ); + + if ( i < node.properties.length - 1 ) + ret.push( node.flow ? ', ' : ',\n' ); + else + ret.push( node.flow ? ' ' : `\n${parentIndent}` ); + } ); + + ret.push( '}' ); + return ret.join( '' ); + } + else + throw new Error( `Unexpected node type` ); + }; + + return rootIndent + stringify( this.root, rootIndent ); + } + + toJSON( ): string + { + return this.toString( ); + } + + // Detect whether to use tabs or spaces by looking at the input JSON + #useTabs( tabs?: boolean ) + { + if ( tabs === true || tabs === false ) + return tabs; + + const partition = this.json + .split( '\n' ) + .map( line => + line.startsWith( '\t' ) + ? true + : line.startsWith( ' ' ) + ? false + : undefined + ) + .filter( ( v ): v is NonNullable< typeof v > => v !== undefined ); + + if ( partition.length === 0 ) + return undefined; + + if ( partition.filter( val => val ).length * 2 > partition.length ) + return true; + return false; + } +} + +export function parseJson( json: string, options?: JsonDocumentOptions ) +{ + const parsed = parse( json ); + + return new JsonDocument( + json, + parsed.root, + parsed.initialIndentation, + { + whitespace: 'auto', + ...options, + } + ); +} diff --git a/lib/document/indentable.ts b/lib/document/indentable.ts new file mode 100644 index 0000000..ef178ce --- /dev/null +++ b/lib/document/indentable.ts @@ -0,0 +1,78 @@ +export class Indentable +{ + constructor( private _depth = -1, private _tabs?: boolean | undefined ) + { + } + + /** + * The indentation depth of this collection. + * + * Could be zero, and -1 if no depth detected (e.g. empty object/array or + * flow collection). + */ + get depth( ) + { + return this._depth; + } + + /** + * Whether tabs or spaces are used. + * + * True means tabs, false means spaces, and undefined means unknown. + */ + get tabs( ) + { + return this._tabs; + } + + get char( ) + { + return this.tabs ? '\t' : ' '; + } + + setIndent( depth: number, tabs: boolean ): void; + setIndent( from: Indentable ): void; + + setIndent( depth: number | Indentable, tabs?: boolean ) + { + if ( typeof depth === 'number' ) + { + this._depth = depth; + this._tabs = tabs!; + } + else + { + this._depth = depth.depth; + this._tabs = depth.tabs; + } + } + + /** + * Gets the indentation string given the indentable settings. + * + * If `tabs` is set to true or false, this will overwrite the settings in + * this indentable, and change tabs into spaces or vice versa. 
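+	 *
+	 * Note: converting between tabs and spaces assumes one tab corresponds
+	 * to two spaces (the same assumption is noted in tokens.ts).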
+ */ + indentString( tabs?: boolean ) + { + if ( this.depth <= 0 ) + return ''; + + const char = tabs === true ? '\t' : tabs === false ? ' ' : this.char; + const depth = + ( tabs === undefined || !!tabs === this.tabs ) + ? this.depth + : tabs === true ? this.depth / 2 : this.depth * 2; + + return char.repeat( depth ); + } +} + +/** + * Takes a set of Indentables and figures out the most common one + */ +export function decideIndentations( indentations: Indentable[ ] ): Indentable +{ + // TODO: Implement + return indentations[ 0 ] ?? new Indentable( ); +} diff --git a/lib/document/nodes.ts b/lib/document/nodes.ts new file mode 100644 index 0000000..9d1ebca --- /dev/null +++ b/lib/document/nodes.ts @@ -0,0 +1,301 @@ +import { Indentable } from './indentable.js' + + +interface JsonValue +{ + toJS( ): unknown; +} + +export class JsonArray extends Indentable implements JsonValue +{ + /** Flow means a one-line array */ + public flow: boolean = false; + + #_elements: Array< JsonNodeType > = [ ]; + + get elements( ): ReadonlyArray< JsonNodeType > + { + return this.#_elements; + } + + set elements( elements: ReadonlyArray< JsonNodeType > ) + { + this.#_elements = [ ...elements ]; + } + + add( value: JsonNodeType ) + { + this.#_elements.push( value ); + } + + insert( value: JsonNodeType, beforeIndex: number ) + { + this.#_elements.splice( beforeIndex, 0, value ); + } + + get( index: number ) + { + return this.#_elements[ index ]; + } + + removeAt( index: number ) + { + if ( index < 0 || index >= this.#_elements.length ) + throw new Error( `Can't remove element at ${index}` ); + return this.#_elements.splice( index, 1 )[ 0 ]!; + } + + toJS( ): unknown + { + return this.#_elements.map( elem => elem.toJS( ) ); + } +} + +interface JsonObjectProperty +{ + name: string; + value: JsonNodeType; +} + +export class JsonObject extends Indentable implements JsonValue +{ + /** Flow means a one-line object */ + public flow: boolean = false; + + #_properties: Array< JsonObjectProperty > = [ ]; + + get properties( ): ReadonlyArray< JsonObjectProperty > + { + return this.#_properties; + } + + set properties( properties: ReadonlyArray< JsonObjectProperty > ) + { + const uniq = + [ + ...new Map( + properties.map( prop => [ prop.name, prop.value ] ) + ).entries( ) + ] + .map( ( entry ): JsonObjectProperty => ( { + name: entry[ 0 ], + value: entry[ 1 ], + } ) ); + + this.#_properties = uniq; + } + + add( name: string, value: JsonNodeType, ordered: boolean ) + { + const existing = this.#_properties.find( prop => prop.name === name ); + if ( existing ) + existing.value = value; + else + { + if ( !ordered ) + this.#_properties.push( { name, value } ); + else + { + // Find the first good place to put this property. + // Since the source object might not be sorted, this is a best + // effort implementation. 
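+				// We insert before the first existing property whose name
+				// sorts after the new one; if there is none, the new
+				// property is appended at the end.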
+ let i = 0; + for ( ; i < this.#_properties.length; ++i ) + { + const cmp = + this.#_properties[ i ]!.name.localeCompare( name ); + + if ( cmp === 1 ) + break; + } + this.#_properties.splice( i, 0, { name, value } ); + } + } + } + + get( prop: string ) + { + return this.#_properties.find( ( { name } ) => name === prop )?.value; + } + + remove( prop: string ) + { + const index = this.#_properties.findIndex( ( { name } ) => + name === prop + ); + if ( index < 0 || index >= this.#_properties.length ) + throw new Error( `Can't remove property ${prop}, doesn't exist` ); + + const value = this.#_properties[ index ]!.value; + this.#_properties.splice( index, 1 ); + return value; + } + + toJS( ): unknown + { + return Object.fromEntries( + this.#_properties.map( ( { name, value } ) => + [ name, value.toJS( ) ] + ) + ); + } +} + +export class JsonPrimitiveBase< Type > implements JsonValue +{ + public constructor( private _value: Type, private _raw: string ) { } + + get value( ) + { + return this._value; + } + + set value( value: Type ) + { + this._value = value; + this._raw = JSON.stringify( value ); + } + + get raw( ) + { + return this._raw; + } + + toJS( ): unknown + { + return this.value; + } +} + +export class JsonNull extends JsonPrimitiveBase< null > { } +export class JsonBoolean extends JsonPrimitiveBase< boolean > { } +export class JsonNumber extends JsonPrimitiveBase< number > { } +export class JsonString extends JsonPrimitiveBase< string > { } + +export type JsonPrimitive = JsonNull | JsonBoolean | JsonNumber | JsonString; + +export type JsonNodeType = + | JsonArray + | JsonObject + | JsonNull + | JsonBoolean + | JsonNumber + | JsonString; + +export type JsonNodeTypeNames = + | 'array' + | 'object' + | 'null' + | 'boolean' + | 'number' + | 'string'; + +export function getNodeTypeName( node: JsonNodeType ): JsonNodeTypeNames +{ + if ( node instanceof JsonArray ) return 'array'; + else if ( node instanceof JsonObject) return 'object'; + else if ( node instanceof JsonNull) return 'null'; + else if ( node instanceof JsonBoolean) return 'boolean'; + else if ( node instanceof JsonNumber) return 'number'; + else if ( node instanceof JsonString) return 'string'; + else throw new Error( `Internal error` ); +} + +type PrimitiveJSType = + | undefined + | Function + | symbol + | bigint + | number + | string + | boolean + | null; + +type ComplexJSType = ( Record< any, unknown > ) | ( unknown[ ] ); + +function isPrimitiveJS( value: unknown ): value is PrimitiveJSType +{ + return ( + typeof value === 'undefined' + || + typeof value === 'function' + || + typeof value === 'symbol' + || + typeof value === 'bigint' + || + typeof value === 'number' + || + typeof value === 'string' + || + typeof value === 'boolean' + || + value === null + ); +} + +export function nodeFromPrimitiveJS( value: PrimitiveJSType ) +: JsonNodeType | undefined +{ + if ( + typeof value === 'undefined' + || + typeof value === 'function' + || + typeof value === 'symbol' + ) + return undefined; + else if ( typeof value === 'bigint' || typeof value === 'number' ) + // Coerces BigInt to Number internally, but keeps JSON resolution + return new JsonNumber( Number( value ).valueOf( ), `${value}` ); + else if ( typeof value === 'string' ) + return new JsonString( value, JSON.stringify( value ) ); + else if ( typeof value === 'boolean' ) + return new JsonBoolean( value, JSON.stringify( value ) ); + else if ( value === null ) + return new JsonNull( value, JSON.stringify( value ) ); +} + +export function nodeFromJS( value: unknown ): 
JsonNodeType | undefined +{ + if ( isPrimitiveJS( value ) ) + return nodeFromPrimitiveJS( value ); + + const container = value as ComplexJSType; + + if ( Array.isArray( container ) ) + { + const ret = new JsonArray( ); + + container.forEach( elem => + { + const node = nodeFromJS( elem ); + if ( node !== undefined ) + ret.add( node ); + } ); + + return ret; + } + + const ret = new JsonObject( ); + + const props = Object.entries( container ) + .map( ( [ key, value ] ) => + { + const node = nodeFromJS( value ); + + if ( typeof key !== 'string' || node === undefined ) + return; + + return [ key, node ] as const; + } ) + .filter( ( v ): v is NonNullable< typeof v > => !!v ) + .sort( ( a, b ) => a[ 0 ].localeCompare( b[ 0 ] ) ); + + props.forEach( ( [ key, node ] ) => + { + ret.add( key, node, false ); + } ); + + return ret; +} diff --git a/lib/document/parse-to-nodes.ts b/lib/document/parse-to-nodes.ts new file mode 100644 index 0000000..72b9d44 --- /dev/null +++ b/lib/document/parse-to-nodes.ts @@ -0,0 +1,213 @@ +// @ts-expect-error +import lexer from 'json-lexer' + +import type { AnyPrimitiveToken, LexerTokens } from '../types-internal.js' +import { isPrimitiveToken, extractWhitespace, Whitespace } from './tokens.js' +import { decideIndentations, Indentable } from './indentable.js' +import { + JsonObject, + JsonArray, + JsonNumber, + JsonString, + JsonNull, + JsonBoolean, + type JsonPrimitive, + type JsonNodeType, +} from './nodes.js' + + +export interface ParseResult +{ + initialIndentation: Indentable; + root: JsonNodeType; +} + +export function parse( json: string ): ParseResult +{ + const tokens = lexer( json ); + + const { whitespace: initialWhitespace, consumedTokens: pos } = + extractWhitespace( tokens, 0 ); + + return { + initialIndentation: initialWhitespace.indentable, + root: makeJsonAny( tokens, pos ).value, + }; +} + +interface ConstructedStep< T extends JsonNodeType > +{ + value: T; + consumedTokens: number; +} + +function makeJsonPrimitive( token: AnyPrimitiveToken ): JsonPrimitive +{ + return token.type === 'string' + ? new JsonString( token.value, token.raw ) + : token.type === 'number' + ? new JsonNumber( token.value, token.raw ) + : typeof token.value === 'boolean' + ? 
new JsonBoolean( token.value, token.raw ) + : new JsonNull( token.value, token.raw ); +} + +// tokens begins _inside_ the '{' or '[' +function makeJsonObject( tokens: LexerTokens, pos: number ) +: ConstructedStep< JsonObject > +{ + const ret = new JsonObject( ); + + const whitespaces: Array< Whitespace > = [ ]; + + let i = pos; + for ( ; i < tokens.length; ++i ) + { + const { whitespace, consumedTokens } = extractWhitespace( tokens, i ); + + i += consumedTokens; + whitespaces.push( whitespace ); + + const peekToken = tokens[ i ]!; + + if ( peekToken.type === 'string' ) + { + // Property name + const name = peekToken.value; + ++i; + + i += jumpWhitespace( tokens, i ); + + if ( + tokens[ i ]!.type !== 'punctuator' + || + tokens[ i ]!.value !== ':' + ) + throw new Error( `Unexpected token ${ tokens[ i ]!.type }` ); + ++i; + + i += jumpWhitespace( tokens, i ); + + const { value, consumedTokens } = makeJsonAny( tokens, i ); + + i += consumedTokens; + + // Jump whitespace until ',' or '}' + i += jumpWhitespace( tokens, i ); + + // Jump back, since looping will ++ + --i; + + ret.add( name, value, false ); + } + else if ( peekToken.type === 'punctuator' ) + { + if ( peekToken.value === '}' ) + { + // End of object + ++i; + break; + } + else if ( peekToken.value !== ',' ) + throw new Error( + `Unexpected punctuation "${peekToken.value}"` + ); + } + else + throw new Error( + `Failed to parse JSON. Unexpected ${tokens[ i ]!.type}` + ); + } + + const hasNewline = whitespaces.some( whitespace => whitespace.hasNewline ); + ret.setIndent( + decideIndentations( + whitespaces.map( whitespace => whitespace.indentable ) + ) + ); + ret.flow = !hasNewline; + + return { value: ret, consumedTokens: i - pos + 1 }; +} + +function makeJsonArray( tokens: LexerTokens, pos: number ) +: ConstructedStep< JsonArray > +{ + const ret = new JsonArray( ); + + const whitespaces: Array< Whitespace > = [ ]; + + let i = pos; + for ( ; i < tokens.length; ++i ) + { + const { whitespace, consumedTokens } = extractWhitespace( tokens, i ); + + i += consumedTokens; + whitespaces.push( whitespace ); + + const peekToken = tokens[ i ]!; + + if ( peekToken.type === 'punctuator' ) + { + if ( peekToken.value === ']' ) + { + // End of array + ++i; + break; + } + else if ( peekToken.value !== ',' ) + throw new Error( + `Unexpected punctuation "${peekToken.value}"` + ); + } + else + { + const { value, consumedTokens } = makeJsonAny( tokens, i ); + i += consumedTokens; + + // Jump whitespace until ',' or ']' + i += jumpWhitespace( tokens, i ); + + // Jump back, since looping will ++ + --i; + + ret.add( value ); + } + } + + const hasNewline = whitespaces.some( whitespace => whitespace.hasNewline ); + ret.setIndent( + decideIndentations( + whitespaces.map( whitespace => whitespace.indentable ) + ) + ); + ret.flow = !hasNewline; + + return { value: ret, consumedTokens: i - pos + 1 }; +} + +function makeJsonAny( tokens: LexerTokens, pos: number ) +: ConstructedStep< JsonNodeType > +{ + const firstToken = tokens[ pos ]!; + + if ( isPrimitiveToken( firstToken ) ) + return { + consumedTokens: 1, + value: makeJsonPrimitive( firstToken ), + }; + else if ( firstToken.value === '{' ) + return makeJsonObject( tokens, pos + 1 ); + else if ( firstToken.value === '[' ) + return makeJsonArray( tokens, pos + 1 ); + + throw new Error( 'Failed to parse JSON document' ); +} + +/** + * Returns 1 if the first token at is whitespace, otherwise 0 + */ +function jumpWhitespace( tokens: LexerTokens, pos: number ): 0 | 1 +{ + return tokens[ pos ]?.type === 'whitespace' ? 
1 : 0; +} diff --git a/lib/document/tokens.ts b/lib/document/tokens.ts new file mode 100644 index 0000000..d0ad4c1 --- /dev/null +++ b/lib/document/tokens.ts @@ -0,0 +1,82 @@ +import { + AnyPrimitiveToken, + LexerToken, + LexerTokens, + WhitespaceToken, +} from '../types-internal.js' +import { Indentable } from './indentable.js' + + +export function isPrimitiveToken( token: LexerToken ) +: token is AnyPrimitiveToken +{ + return ( + token.type === 'string' + || + token.type === 'number' + || + token.type === 'literal' + ); +} + +export interface ExtractedWhitespace +{ + whitespace: Whitespace; + consumedTokens: number; +} + +export function extractWhitespace( tokens: LexerTokens, pos: number ) +: ExtractedWhitespace +{ + const hasWhitespace = tokens[ pos ]?.type === 'whitespace'; + + const whitespace = + hasWhitespace + ? getWhitespace( tokens[ pos ] as WhitespaceToken ) + : { hasNewline: false, indentable: new Indentable( ) }; + + return { whitespace, consumedTokens: hasWhitespace ? 1 : 0 }; +} + +const reWhitespace = /(?:^|(?:.*[\t ])\n)\n*([^\n]+)$/; + +// Potentially reconsider the assumption of a tab being 2 spaces +// which is also assumed in Indentable + +export interface Whitespace +{ + hasNewline: boolean; + indentable: Indentable; +} + +export function getWhitespace( token: WhitespaceToken ): Whitespace +{ + if ( token.type !== 'whitespace' ) + throw new Error( `Invalid whitespace token: ${token.type}` ); + + const hasNewline = token.raw.includes( '\n' ); + + const m = token.raw.match( reWhitespace ); + + if ( !m || m[ 1 ]!.length === 0 ) + return { hasNewline, indentable: new Indentable( ) }; + + const chars = m[ 1 ]!.split( '' ); + + const chooseTab = chars[ 0 ] === '\t'; + + const numTabs = chars.filter( char => char === '\t' ).length; + const numSpaces = chars.length - numTabs; + + const numAllSpaces = numSpaces + numTabs * 2; + + if ( chooseTab ) + return { + hasNewline, + indentable: new Indentable( numAllSpaces / 2, true ), + }; + return { + hasNewline, + indentable: new Indentable( numAllSpaces, false ), + }; +} diff --git a/lib/document/types.ts b/lib/document/types.ts new file mode 100644 index 0000000..1f9a99c --- /dev/null +++ b/lib/document/types.ts @@ -0,0 +1,18 @@ +export interface JsonDocumentOptions +{ + /** + * Whitespace strategy + * + * - 'auto': Try to maintain the whitespace strategy from the source + * - 'tabs': Force tabs + * - [number]: Force [number] spaces + * + * @default 'auto' + */ + whitespace: 'auto' | 'tabs' | number; + + /** + * Try to insert new properties in order + */ + ordered: boolean; +} diff --git a/lib/document/utils.ts b/lib/document/utils.ts new file mode 100644 index 0000000..fc075ab --- /dev/null +++ b/lib/document/utils.ts @@ -0,0 +1,12 @@ +import type { JsonDocumentOptions } from './types.js' + + +export function getDocumentOptions( options?: Partial< JsonDocumentOptions > ) +: JsonDocumentOptions +{ + return { + ordered: true, + whitespace: 'auto', + ...options, + }; +} diff --git a/lib/examples.test.ts b/lib/examples.test.ts new file mode 100644 index 0000000..765151d --- /dev/null +++ b/lib/examples.test.ts @@ -0,0 +1,38 @@ +import { jsonPatch } from './rfc6902.js' +import type { Operation } from './types-rfc6902.js' + + +describe( 'readme', ( ) => +{ + it( 'first example', ( ) => + { + const before = +`{ + "x": "non-alphanumerically ordered properties, obviously", + "foo": [ "same", "line", "array" ], + "bar": { + "some": "object" + } +}`; + const after = +`{ + "x": "non-alphanumerically ordered properties, obviously", + "bar": { 
+ "herenow": [ "same", "line", "array" ], + "some": "object" + } +}`; + + const operations: Operation[ ] = [ + { + "op": "move", + "from": "/foo", + "path": "/bar/herenow" + } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); +} ); diff --git a/lib/index.ts b/lib/index.ts new file mode 100644 index 0000000..dc9912a --- /dev/null +++ b/lib/index.ts @@ -0,0 +1,13 @@ +export { JsonDocumentOptions } from './document/types.js' +export { JsonDocument, parseJson } from './document/document.js' + +export { jsonPatch } from './rfc6902.js' +export { + Operation, + AddOperation, + RemoveOperation, + ReplaceOperation, + MoveOperation, + CopyOperation, + TestOperation, +} from './types-rfc6902.js' diff --git a/lib/rfc6902.test.ts b/lib/rfc6902.test.ts new file mode 100644 index 0000000..a8442e3 --- /dev/null +++ b/lib/rfc6902.test.ts @@ -0,0 +1,404 @@ +import { jsonPatch } from './rfc6902.js' +import type { Operation } from './types-rfc6902.js' + + +describe( 'rfc6902', ( ) => +{ + describe( 'add', ( ) => + { + it( 'object add ordered (after)', ( ) => + { + const before = `{\n "foo": "bar"\n}`; + const after = `{\n "foo": "bar",\n "next": 42\n}`; + + const operations: Operation[ ] = [ + { op: 'add', path: '/next', value: 42 } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'object add ordered (before)', ( ) => + { + const before = `{\n "foo": "bar"\n}`; + const after = `{\n "bar": 42,\n "foo": "bar"\n}`; + + const operations: Operation[ ] = [ + { op: 'add', path: '/bar', value: 42 } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'array add at index', ( ) => + { + const before = `[\n "foo",\n "bar"\n]`; + const after = `[\n "foo",\n "baz",\n "bar"\n]`; + + const operations: Operation[ ] = [ + { op: 'add', path: '/1', value: "baz" } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'array add at the end', ( ) => + { + const before = `[\n "foo",\n "bar"\n]`; + const after = `[\n "foo",\n "bar",\n "baz"\n]`; + + const operations: Operation[ ] = [ + { op: 'add', path: '/-', value: "baz" } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'array add invalid index', ( ) => + { + const before = `[\n "foo",\n "bar"\n]`; + + const operations: Operation[ ] = [ + { op: 'add', path: '/x', value: "baz" } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /invalid numeric index/i ); + } ); + } ); + + describe( 'replace', ( ) => + { + it( 'object replace doesn\'t exist', ( ) => + { + const before = `{\n "foo": "bar"\n}`; + + const operations: Operation[ ] = [ + { op: 'replace', path: '/next', value: 42 } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /since.*previous.*exist/i ); + } ); + + it( 'object replace ordered', ( ) => + { + const before = `{\n "foo": "bar"\n}`; + const after = `{\n "foo": 42\n}`; + + const operations: Operation[ ] = [ + { op: 'replace', path: '/foo', value: 42 } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'array replace at index', ( ) => + { + const before = `[\n "foo",\n "bar"\n]`; + const after = `[\n "foo",\n "baz"\n]`; + + const operations: Operation[ ] = [ + { op: 'replace', path: '/1', value: "baz" } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'array replace at the end', 
( ) => + { + const before = `[\n "foo",\n "bar"\n]`; + + const operations: Operation[ ] = [ + { op: 'replace', path: '/-', value: "baz" } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /invalid index/i ); + } ); + + it( 'array replace out-of-bounds', ( ) => + { + const before = `[\n "foo",\n "bar"\n]`; + + const operations: Operation[ ] = [ + { op: 'replace', path: '/3', value: "baz" } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /invalid index/i ); + } ); + + it( 'array replace invalid index', ( ) => + { + const before = `[\n "foo",\n "bar"\n]`; + + const operations: Operation[ ] = [ + { op: 'replace', path: '/x', value: "baz" } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /invalid numeric index/i ); + } ); + } ); + + describe( 'copy', ( ) => + { + it( 'from object to object', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": { }\n}`; + const after = + `{\n "foo": { "bar": "baz" },\n "fee": { "bar": "baz" }\n}`; + + const operations: Operation[ ] = [ + { op: 'copy', from: '/foo/bar', path: '/fee/bar' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from object to array at index', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": [ 1 ]\n}`; + const after = + `{\n "foo": { "bar": "baz" },\n "fee": [ "baz", 1 ]\n}`; + + const operations: Operation[ ] = [ + { op: 'copy', from: '/foo/bar', path: '/fee/0' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from object to array at end', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": [ 1 ]\n}`; + const after = + `{\n "foo": { "bar": "baz" },\n "fee": [ 1, "baz" ]\n}`; + + const operations: Operation[ ] = [ + { op: 'copy', from: '/foo/bar', path: '/fee/-' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from object that doesn\'t exist', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": { }\n}`; + + const operations: Operation[ ] = [ + { op: 'copy', from: '/fxx/bar', path: '/fee/bar' } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /which.*doesn.*exist/i ); + } ); + + it( 'from object without such property', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": { }\n}`; + + const operations: Operation[ ] = [ + { op: 'copy', from: '/foo/bad', path: '/fee/bar' } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /no value found/i ); + } ); + + it( 'from array to array', ( ) => + { + const before = `{\n "foo": [ "bar", "baz" ],\n "fee": [ ]\n}`; + const after = + `{\n "foo": [ "bar", "baz" ],\n "fee": [ "bar" ]\n}`; + + const operations: Operation[ ] = [ + { op: 'copy', from: '/foo/0', path: '/fee/-' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from array to object', ( ) => + { + const before = `{\n "foo": [ "bar", "baz" ],\n "fee": { }\n}`; + const after = + `{\n "foo": [ "bar", "baz" ],\n "fee": { "bak": "bar" }\n}`; + + const operations: Operation[ ] = [ + { op: 'copy', from: '/foo/0', path: '/fee/bak' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from array that doesn\'t exist', ( ) => + { + const before = `{\n "foo": [ "bar", "baz" ],\n "fee": { }\n}`; + + const operations: Operation[ ] = [ + { op: 'copy', from: '/fxx/0', path: '/fee/bar' } + ]; + 
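+			// "/fxx" doesn't exist in the source document, so applying
+			// the patch is expected to throw.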
+ expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /which.*doesn.*exist/i ); + } ); + + it( 'from array without such element', ( ) => + { + const before = `{\n "foo": [ "bar", "baz" ],\n "fee": { }\n}`; + + const operations: Operation[ ] = [ + { op: 'copy', from: '/foo/4', path: '/fee/bar' } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /no value found/i ); + } ); + } ); + + describe( 'move', ( ) => + { + it( 'from object to object', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": { }\n}`; + const after = `{\n "foo": { },\n "fee": { "bar": "baz" }\n}`; + + const operations: Operation[ ] = [ + { op: 'move', from: '/foo/bar', path: '/fee/bar' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from object to array at index', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": [ 1 ]\n}`; + const after = `{\n "foo": { },\n "fee": [ "baz", 1 ]\n}`; + + const operations: Operation[ ] = [ + { op: 'move', from: '/foo/bar', path: '/fee/0' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from object to array at end', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": [ 1 ]\n}`; + const after = `{\n "foo": { },\n "fee": [ 1, "baz" ]\n}`; + + const operations: Operation[ ] = [ + { op: 'move', from: '/foo/bar', path: '/fee/-' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from object that doesn\'t exist', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": { }\n}`; + + const operations: Operation[ ] = [ + { op: 'move', from: '/fxx/bar', path: '/fee/bar' } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /which.*doesn.*exist/i ); + } ); + + it( 'from object without such property', ( ) => + { + const before = `{\n "foo": { "bar": "baz" },\n "fee": { }\n}`; + + const operations: Operation[ ] = [ + { op: 'move', from: '/foo/bad', path: '/fee/bar' } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /remove property/i ); + } ); + + it( 'from array to array', ( ) => + { + const before = `{\n "foo": [ "bar", "baz" ],\n "fee": [ ]\n}`; + const after = `{\n "foo": [ "baz" ],\n "fee": [ "bar" ]\n}`; + + const operations: Operation[ ] = [ + { op: 'move', from: '/foo/0', path: '/fee/-' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from array to object', ( ) => + { + const before = `{\n "foo": [ "bar", "baz" ],\n "fee": { }\n}`; + const after = + `{\n "foo": [ "baz" ],\n "fee": { "bak": "bar" }\n}`; + + const operations: Operation[ ] = [ + { op: 'move', from: '/foo/0', path: '/fee/bak' } + ]; + + const res = jsonPatch( before, operations ); + + expect( res ).toBe( after ); + } ); + + it( 'from array that doesn\'t exist', ( ) => + { + const before = `{\n "foo": [ "bar", "baz" ],\n "fee": { }\n}`; + + const operations: Operation[ ] = [ + { op: 'move', from: '/fxx/0', path: '/fee/bar' } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /which.*doesn.*exist/i ); + } ); + + it( 'from array without such element', ( ) => + { + const before = `{\n "foo": [ "bar", "baz" ],\n "fee": { }\n}`; + + const operations: Operation[ ] = [ + { op: 'move', from: '/foo/4', path: '/fee/bar' } + ]; + + expect( ( ) => jsonPatch( before, operations ) ) + .toThrowError( /remove element/i ); + } ); + } ); +} ); diff --git a/lib/rfc6902.ts 
b/lib/rfc6902.ts new file mode 100644 index 0000000..017f585 --- /dev/null +++ b/lib/rfc6902.ts @@ -0,0 +1,290 @@ +import { encodeJsonPointerPath } from 'jsonpos' +import deepEqual from 'fast-deep-equal' + +import { type JsonDocument, parseJson } from './document/document.js' +import { + JsonArray, + JsonObject, + getNodeTypeName, + nodeFromJS, +} from './document/nodes.js' +import { type JsonDocumentOptions } from './document/types.js' +import { getDocumentOptions } from './document/utils.js' +import { type Operation } from './types-rfc6902.js' +import { ensureRFC6902ArrayIndex, getJsonPath } from './utils.js' + + +export function jsonPatch( + json: string, + operations: ReadonlyArray< Operation >, + options?: Partial< JsonDocumentOptions > +): string; + +export function jsonPatch( + doc: JsonDocument, + operations: ReadonlyArray< Operation >, + options?: Partial< JsonDocumentOptions > +): JsonDocument; + +export function jsonPatch( + jsonOrDoc: string | JsonDocument, + operations: ReadonlyArray< Operation >, + _options?: Partial< JsonDocumentOptions > +) +: string | JsonDocument +{ + const options = getDocumentOptions( _options ); + const { ordered } = options; + + const doc = + typeof jsonOrDoc === 'string' + ? parseJson( jsonOrDoc, options ) + : jsonOrDoc; + + operations.forEach( operation => + { + const path = getJsonPath( operation.path ); + + if ( operation.op === 'add' || operation.op === 'replace' ) + { + const child = path.pop( ); + + const parent = doc.getAt( path ); + + const { op } = operation; + + if ( child === undefined ) + throw new Error( + `Can't ${op} value to "${child}" at ` + + `${encodeJsonPointerPath( path )} ` + ); + + if ( !parent ) + throw new Error( + `Can't ${op} value at ${encodeJsonPointerPath( path )} ` + + `which doesn't exist.` + ); + + const node = nodeFromJS( operation.value ); + if ( node === undefined ) + return; + + if ( + !( parent instanceof JsonObject ) + && + !( parent instanceof JsonArray ) + ) + throw new Error( + `Can't ${op} value at ${encodeJsonPointerPath( path )} ` + + `of type ${getNodeTypeName( parent )}` + ); + + if ( parent instanceof JsonObject ) + { + const cur = parent.properties.find( ( { name } ) => + name === `${child}` + ); + + if ( op === 'replace' && !cur ) + throw new Error( + `Can't ${op} value at ` + + `${encodeJsonPointerPath( [ ...path, child ] )} ` + + `since no previous value exist` + ); + + parent.add( `${child}`, node, ordered ); + } + else + { + const index = ensureRFC6902ArrayIndex( child ); + + if ( + op === 'replace' + && + ( index === -1 || index >= parent.elements.length ) + ) + throw new Error( + `Can't ${op} value at ` + + `${encodeJsonPointerPath( [ ...path, child ] )},` + + ` either invalid index, or no previous value exist` + ); + + if ( op === 'replace' ) + // Remove last + parent.removeAt( index ); + + if ( index === -1 ) + parent.add( node ); + else + parent.insert( node, index ); + } + } + else if ( operation.op === 'copy' || operation.op === 'move' ) + { + const fromPath = getJsonPath( operation.from ); + const fromChild = fromPath.pop( ); + const toChild = path.pop( ); + + const fromParent = doc.getAt( fromPath ); + const toParent = doc.getAt( path ); + + const { op } = operation; + + if ( fromChild === undefined ) + throw new Error( + `Can't ${op} value from "${fromChild}" at ` + + `${encodeJsonPointerPath( fromPath )} ` + ); + + if ( toChild === undefined ) + throw new Error( + `Can't ${op} value to "${toChild}" at ` + + `${encodeJsonPointerPath( path )} ` + ); + + if ( !fromParent ) + throw new 
Error( + `Can't ${op} value from ${encodeJsonPointerPath( fromPath )}` + + ` which doesn't exist.` + ); + + if ( !toParent ) + throw new Error( + `Can't ${op} value to ${encodeJsonPointerPath( path )}` + + ` which doesn't exist.` + ); + + if ( + !( fromParent instanceof JsonObject ) + && + !( fromParent instanceof JsonArray ) + ) + throw new Error( + `Can't ${op} value from ${encodeJsonPointerPath( path )}` + + ` of type ${getNodeTypeName( fromParent )}` + ); + + if ( + !( toParent instanceof JsonObject ) + && + !( toParent instanceof JsonArray ) + ) + throw new Error( + `Can't ${op} value to ${encodeJsonPointerPath( path )}` + + ` of type ${getNodeTypeName( toParent )}` + ); + + const node = + fromParent instanceof JsonArray + ? op === 'move' + ? fromParent.removeAt( ensureRFC6902ArrayIndex( fromChild ) ) + : fromParent.get( ensureRFC6902ArrayIndex( fromChild ) ) + : op === 'move' + ? fromParent.remove( `${fromChild}` ) + : fromParent.get( `${fromChild}` ); + + if ( node === undefined ) + throw new Error( + `Can't ${op} from ` + + encodeJsonPointerPath( [ ...fromPath, fromChild ] ) + + `: no value found` + ); + + if ( toParent instanceof JsonObject ) + { + toParent.add( `${toChild}`, node, ordered ); + } + else + { + const index = ensureRFC6902ArrayIndex( toChild ); + if ( index === -1 ) + toParent.add( node ); + else + toParent.insert( node, index ); + } + } + else if ( operation.op === 'remove' ) + { + const child = path.pop( ); + + const parent = doc.getAt( path ); + + if ( child === undefined ) + throw new Error( + `Can't remove value "${child}" at ` + + `${encodeJsonPointerPath( path )}, doesn't exist` + ); + + if ( !parent ) + throw new Error( + `Can't remove value at ${encodeJsonPointerPath( path )} ` + + `which doesn't exist.` + ); + + if ( + !( parent instanceof JsonObject ) + && + !( parent instanceof JsonArray ) + ) + throw new Error( + `Can't remove value at ${encodeJsonPointerPath( path )} ` + + `of type ${getNodeTypeName( parent )}` + ); + + if ( parent instanceof JsonObject ) + { + parent.remove( `${child}` ); + } + else + { + const index = ensureRFC6902ArrayIndex( child ); + parent.removeAt( index ); + } + } + else if ( operation.op === 'test' ) + { + const child = path.pop( ); + + const parent = doc.getAt( path ); + + if ( child === undefined ) + throw new Error( + `Can't test value "${child}" at ` + + `${encodeJsonPointerPath( path )}, doesn't exist` + ); + + if ( !parent ) + throw new Error( + `Can't test value at ${encodeJsonPointerPath( path )} ` + + `which doesn't exist.` + ); + + if ( + !( parent instanceof JsonObject ) + && + !( parent instanceof JsonArray ) + ) + throw new Error( + `Can't test value at ${encodeJsonPointerPath( path )} ` + + `of type ${getNodeTypeName( parent )}` + ); + + const node = + parent instanceof JsonArray + ? parent.get( ensureRFC6902ArrayIndex( child ) ) + : parent.get( `${child}` ); + + if ( node === undefined ) + throw new Error( + `Can't test value at ` + + encodeJsonPointerPath( [ ...path, child ] ) + + `: no value found` + ); + + if ( !deepEqual( node.toJS( ), operation.value ) ) + throw new Error( `test operation failed, values mismatch` ); + } + } ); + + return typeof jsonOrDoc === 'string' ? 
`${doc}` : doc; +} diff --git a/lib/types-internal.ts b/lib/types-internal.ts new file mode 100644 index 0000000..89a2276 --- /dev/null +++ b/lib/types-internal.ts @@ -0,0 +1,43 @@ +export type LexerTokenType = + | 'whitespace' + | 'punctuator' + | 'string' + | 'number' + | 'literal'; + +export type NonPrimitiveLexerTokenType = + Exclude< LexerTokenType, 'string' | 'number' >; + +export type PunctuationOpen = '{' | '['; +export type PunctuationClose = '}' | ']'; +export type PunctuationColon = ':'; +export type PunctuationNext = ','; +export type Punctuation = + | PunctuationOpen + | PunctuationClose + | PunctuationColon + | PunctuationNext; + +export type WhitespaceToken = + { type: 'whitespace', value: string, raw: string }; +export type PunctuatorToken = + { type: 'punctuator', value: Punctuation, raw: string }; +export type StringToken = + { type: 'string', value: string, raw: string }; +export type NumberToken = + { type: 'number', value: number, raw: string }; +export type LiteralToken = + { type: 'literal', value: boolean | null, raw: string }; + +export type AnyPrimitiveToken = StringToken | NumberToken | LiteralToken; + +export type LexerToken = +| WhitespaceToken +| PunctuatorToken +| StringToken +| NumberToken +| LiteralToken; + +export type NonWhitespaceToken = Exclude< LexerToken, WhitespaceToken >; + +export type LexerTokens = Array< LexerToken >; diff --git a/lib/types-rfc6902.ts b/lib/types-rfc6902.ts new file mode 100644 index 0000000..54829c3 --- /dev/null +++ b/lib/types-rfc6902.ts @@ -0,0 +1,51 @@ +interface OperationBase +{ + /** + * An extended RFC6901 path which is either a JSON Pointer, or an array of + * unencoded path segments. + */ + path: string | string[ ]; +} + +export interface AddOperation extends OperationBase +{ + op: 'add'; + value: any; +} + +export interface RemoveOperation extends OperationBase +{ + op: 'remove'; +} + +export interface ReplaceOperation extends OperationBase +{ + op: 'replace'; + value: any; +} + +export interface MoveOperation extends OperationBase +{ + op: 'move'; + from: string; +} + +export interface CopyOperation extends OperationBase +{ + op: 'copy'; + from: string; +} + +export interface TestOperation extends OperationBase +{ + op: 'test'; + value: any; +} + +export type Operation = + | AddOperation + | RemoveOperation + | ReplaceOperation + | MoveOperation + | CopyOperation + | TestOperation; diff --git a/lib/utils.ts b/lib/utils.ts new file mode 100644 index 0000000..cb5776d --- /dev/null +++ b/lib/utils.ts @@ -0,0 +1,31 @@ +import { parseJsonPointerPath, LocationPath } from 'jsonpos' + +export function getJsonPath( path: string | LocationPath ): LocationPath +{ + return Array.isArray( path ) ? 
path : parseJsonPointerPath( path ); +} + +export function ensureNumericIndex( val: string | number ): number +{ + if ( typeof val === 'number' ) + return val; + + const num = parseInt( val ); + + if ( val !== `${num}` ) + throw new Error( `Not numeric: "${val}"` ); + + return num; +} + +export function ensureRFC6902ArrayIndex( val: string | number ): number +{ + if ( typeof val === 'number' ) + return val; + else if ( val === '-' ) + return -1; + const num = parseInt( val ); + if ( `${num}` !== val ) + throw new Error( `Invalid numeric index: "${val}"` ); + return num; +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..319d260 --- /dev/null +++ b/package.json @@ -0,0 +1,58 @@ +{ + "name": "edit-json", + "version": "0.0.0-development", + "description": "Edit JSON text in-place for a minimal diff", + "author": "Gustaf Räntilä", + "license": "MIT", + "bugs": { + "url": "https://github.com/grantila/edit-json/issues" + }, + "homepage": "https://github.com/grantila/edit-json#readme", + "main": "./dist/index.js", + "exports": "./dist/index.js", + "types": "./dist/index.d.ts", + "type": "module", + "sideEffects": false, + "engines": { + "node": "^14.13.1 || >=16.0.0" + }, + "files": [ + "dist" + ], + "scripts": { + "build": "rimraf dist && tsc -p tsconfig.prod.json", + "test": "NODE_OPTIONS=--experimental-vm-modules jest", + "cz": "git-cz" + }, + "repository": { + "type": "git", + "url": "https://github.com/grantila/edit-json" + }, + "keywords": [ + "edit", + "json", + "inplace", + "in-place" + ], + "devDependencies": { + "@babel/preset-env": "^7.16.11", + "@babel/preset-typescript": "^7.16.7", + "@types/jest": "^27.4.1", + "cz-conventional-changelog": "^3.3.0", + "jest": "^27.5.1", + "rimraf": "^3.0.2", + "ts-jest-resolver": "^2.0.0", + "ts-node": "^10.7.0", + "typescript": "^4.6.3" + }, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "json-lexer": "^1.1.1", + "jsonpos": "^3.2.0" + }, + "config": { + "commitizen": { + "path": "./node_modules/cz-conventional-changelog" + } + } +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..dfe02f7 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "allowJs": false, + "declaration": true, + "sourceMap": true, + "lib": [ "ES2019" ], + "types": [ + "node", + "jest" + ], + "noEmit": true, + "target": "ES2019", + "module": "ES2020", + "esModuleInterop": true, + "moduleResolution": "node", + "noImplicitAny": true, + "noUnusedLocals": true, + "pretty": true, + "noUncheckedIndexedAccess": true, + "strict": true, + "alwaysStrict": true, + }, + "include": [ + "lib" + ] +} diff --git a/tsconfig.prod.json b/tsconfig.prod.json new file mode 100644 index 0000000..6d1f6af --- /dev/null +++ b/tsconfig.prod.json @@ -0,0 +1,11 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "sourceMap": false, + "outDir": "dist", + "noEmit": false + }, + "exclude": [ + "**/*.test.ts" + ] +}