diff --git a/src/init/RootContainerInitializer.ts b/src/init/RootContainerInitializer.ts index 53c0bb20a6..2d4e6a7196 100644 --- a/src/init/RootContainerInitializer.ts +++ b/src/init/RootContainerInitializer.ts @@ -8,7 +8,6 @@ import { NotFoundHttpError } from '../util/errors/NotFoundHttpError'; import { ensureTrailingSlash } from '../util/PathUtil'; import { generateResourceQuads } from '../util/ResourceUtil'; import { guardedStreamFrom } from '../util/StreamUtil'; -import { toCachedNamedNode } from '../util/UriUtil'; import { PIM, RDF } from '../util/Vocabularies'; import { Initializer } from './Initializer'; import namedNode = DataFactory.namedNode; @@ -60,7 +59,7 @@ export class RootContainerInitializer extends Initializer { // Make sure the root container is a pim:Storage // This prevents deletion of the root container as storage root containers can not be deleted - metadata.add(RDF.type, toCachedNamedNode(PIM.Storage)); + metadata.add(RDF.type, PIM.terms.Storage); metadata.contentType = TEXT_TURTLE; diff --git a/src/ldp/representation/RepresentationMetadata.ts b/src/ldp/representation/RepresentationMetadata.ts index 9ee337ddb7..879790586d 100644 --- a/src/ldp/representation/RepresentationMetadata.ts +++ b/src/ldp/representation/RepresentationMetadata.ts @@ -2,6 +2,7 @@ import { DataFactory, Store } from 'n3'; import type { BlankNode, Literal, NamedNode, Quad, Term } from 'rdf-js'; import { getLoggerFor } from '../../logging/LogUtil'; import { toObjectTerm, toCachedNamedNode, isTerm } from '../../util/UriUtil'; +import { CONTENT_TYPE_TERM } from '../../util/Vocabularies'; import type { ResourceIdentifier } from './ResourceIdentifier'; import { isResourceIdentifier } from './ResourceIdentifier'; @@ -223,10 +224,10 @@ export class RepresentationMetadata { * Shorthand for the CONTENT_TYPE predicate. 
*/ public get contentType(): string | undefined { - return this.get(toCachedNamedNode('contentType'))?.value; + return this.get(CONTENT_TYPE_TERM)?.value; } public set contentType(input) { - this.set(toCachedNamedNode('contentType'), input); + this.set(CONTENT_TYPE_TERM, input); } } diff --git a/src/storage/accessors/FileDataAccessor.ts b/src/storage/accessors/FileDataAccessor.ts index 19e07198fb..0bd6786da0 100644 --- a/src/storage/accessors/FileDataAccessor.ts +++ b/src/storage/accessors/FileDataAccessor.ts @@ -16,8 +16,8 @@ import type { Guarded } from '../../util/GuardedStream'; import { isContainerIdentifier } from '../../util/PathUtil'; import { parseQuads, pushQuad, serializeQuads } from '../../util/QuadUtil'; import { generateContainmentQuads, generateResourceQuads } from '../../util/ResourceUtil'; -import { toCachedNamedNode, toLiteral } from '../../util/UriUtil'; -import { CONTENT_TYPE, DCTERMS, LDP, POSIX, RDF, XSD } from '../../util/Vocabularies'; +import { toLiteral } from '../../util/UriUtil'; +import { CONTENT_TYPE, DC, LDP, POSIX, RDF, XSD } from '../../util/Vocabularies'; import type { FileIdentifierMapper, ResourceLink } from '../mapping/FileIdentifierMapper'; import type { DataAccessor } from './DataAccessor'; @@ -210,9 +210,9 @@ export class FileDataAccessor implements DataAccessor { */ private async writeMetadata(link: ResourceLink, metadata: RepresentationMetadata): Promise { // These are stored by file system conventions - metadata.remove(RDF.type, toCachedNamedNode(LDP.Resource)); - metadata.remove(RDF.type, toCachedNamedNode(LDP.Container)); - metadata.remove(RDF.type, toCachedNamedNode(LDP.BasicContainer)); + metadata.remove(RDF.type, LDP.terms.Resource); + metadata.remove(RDF.type, LDP.terms.Container); + metadata.remove(RDF.type, LDP.terms.BasicContainer); metadata.removeAll(CONTENT_TYPE); const quads = metadata.quads(); const metadataLink = await this.getMetadataLink(link.identifier); @@ -329,10 +329,10 @@ export class FileDataAccessor 
implements DataAccessor { */ private generatePosixQuads(subject: NamedNode, stats: Stats): Quad[] { const quads: Quad[] = []; - pushQuad(quads, subject, toCachedNamedNode(POSIX.size), toLiteral(stats.size, XSD.integer)); - pushQuad(quads, subject, toCachedNamedNode(DCTERMS.modified), toLiteral(stats.mtime.toISOString(), XSD.dateTime)); - pushQuad(quads, subject, toCachedNamedNode(POSIX.mtime), toLiteral( - Math.floor(stats.mtime.getTime() / 1000), XSD.integer, + pushQuad(quads, subject, POSIX.terms.size, toLiteral(stats.size, XSD.terms.integer)); + pushQuad(quads, subject, DC.terms.modified, toLiteral(stats.mtime.toISOString(), XSD.terms.dateTime)); + pushQuad(quads, subject, POSIX.terms.mtime, toLiteral( + Math.floor(stats.mtime.getTime() / 1000), XSD.terms.integer, )); return quads; } diff --git a/src/storage/accessors/SparqlDataAccessor.ts b/src/storage/accessors/SparqlDataAccessor.ts index 2b3dea737d..db1039cc13 100644 --- a/src/storage/accessors/SparqlDataAccessor.ts +++ b/src/storage/accessors/SparqlDataAccessor.ts @@ -27,7 +27,6 @@ import { guardStream } from '../../util/GuardedStream'; import type { Guarded } from '../../util/GuardedStream'; import type { IdentifierStrategy } from '../../util/identifiers/IdentifierStrategy'; import { isContainerIdentifier } from '../../util/PathUtil'; -import { toCachedNamedNode } from '../../util/UriUtil'; import { CONTENT_TYPE, LDP } from '../../util/Vocabularies'; import type { DataAccessor } from './DataAccessor'; @@ -226,7 +225,7 @@ export class SparqlDataAccessor implements DataAccessor { // Insert new metadata and containment triple const insert: GraphQuads[] = [ this.sparqlUpdateGraph(metaName, metadata.quads()) ]; if (parent) { - insert.push(this.sparqlUpdateGraph(parent, [ quad(parent, toCachedNamedNode(LDP.contains), name) ])); + insert.push(this.sparqlUpdateGraph(parent, [ quad(parent, LDP.terms.contains, name) ])); } // Necessary updates: delete metadata and insert new data @@ -272,7 +271,7 @@ export class 
SparqlDataAccessor implements DataAccessor { if (parent) { update.updates.push({ updateType: 'delete', - delete: [ this.sparqlUpdateGraph(parent, [ quad(parent, toCachedNamedNode(LDP.contains), name) ]) ], + delete: [ this.sparqlUpdateGraph(parent, [ quad(parent, LDP.terms.contains, name) ]) ], }); } diff --git a/src/util/ResourceUtil.ts b/src/util/ResourceUtil.ts index a3c2011aab..98ce2f9a7f 100644 --- a/src/util/ResourceUtil.ts +++ b/src/util/ResourceUtil.ts @@ -2,7 +2,6 @@ import { DataFactory } from 'n3'; import type { NamedNode, Quad } from 'rdf-js'; import { RepresentationMetadata } from '../ldp/representation/RepresentationMetadata'; import { pushQuad } from './QuadUtil'; -import { toCachedNamedNode } from './UriUtil'; import { LDP, RDF } from './Vocabularies'; /** @@ -15,10 +14,10 @@ import { LDP, RDF } from './Vocabularies'; export const generateResourceQuads = (subject: NamedNode, isContainer: boolean): Quad[] => { const quads: Quad[] = []; if (isContainer) { - pushQuad(quads, subject, toCachedNamedNode(RDF.type), toCachedNamedNode(LDP.Container)); - pushQuad(quads, subject, toCachedNamedNode(RDF.type), toCachedNamedNode(LDP.BasicContainer)); + pushQuad(quads, subject, RDF.terms.type, LDP.terms.Container); + pushQuad(quads, subject, RDF.terms.type, LDP.terms.BasicContainer); } - pushQuad(quads, subject, toCachedNamedNode(RDF.type), toCachedNamedNode(LDP.Resource)); + pushQuad(quads, subject, RDF.terms.type, LDP.terms.Resource); return quads; }; diff --git a/src/util/UriUtil.ts b/src/util/UriUtil.ts index 4e461e726d..6a9c4391bb 100644 --- a/src/util/UriUtil.ts +++ b/src/util/UriUtil.ts @@ -1,12 +1,12 @@ import { DataFactory } from 'n3'; import type { Literal, NamedNode, Term } from 'rdf-js'; -import { CONTENT_TYPE } from './Vocabularies'; +import { CONTENT_TYPE_TERM } from './Vocabularies'; const { namedNode, literal } = DataFactory; // Shorthands for commonly used predicates const shorthands: Record = { - contentType: DataFactory.namedNode(CONTENT_TYPE), 
+ contentType: CONTENT_TYPE_TERM, }; // Caches named node conversions @@ -63,5 +63,5 @@ export const toObjectTerm = (object: T | string, preferLiteral = * @param object - Object value. * @param dataType - Object data type (as string). */ -export const toLiteral = (object: string | number, dataType: string | NamedNode): Literal => +export const toLiteral = (object: string | number, dataType: NamedNode): Literal => DataFactory.literal(object, toCachedNamedNode(dataType)); diff --git a/src/util/Vocabularies.ts b/src/util/Vocabularies.ts index 6850721a1c..f18fefa574 100644 --- a/src/util/Vocabularies.ts +++ b/src/util/Vocabularies.ts @@ -1,4 +1,6 @@ /* eslint-disable @typescript-eslint/naming-convention, function-paren-newline */ +import { namedNode } from '@rdfjs/data-model'; +import type { NamedNode } from 'rdf-js'; type PrefixResolver = (localName: string) => T; type RecordOf = Record; @@ -10,25 +12,54 @@ export type Namespace = * Creates a function that expands local names from the given base URI, * and exports the given local names as properties on the returned object. 
*/ -export const createNamespace = (baseUri: string, ...localNames: T[]): -Namespace => { +export const createNamespace = ( + baseUri: string, + toValue: (expanded: string) => TValue, + ...localNames: TKey[]): +Namespace => { // Create a function that expands local names - const expanded = {} as Record; - const namespace = ((localName: string): string => { + const expanded = {} as Record; + const namespace = ((localName: string): TValue => { if (!(localName in expanded)) { - expanded[localName] = `${baseUri}${localName}`; + expanded[localName] = toValue(`${baseUri}${localName}`); } return expanded[localName]; - }) as Namespace; + }) as Namespace; // Expose the listed local names as properties for (const localName of localNames) { - (namespace as RecordOf)[localName] = namespace(localName); + (namespace as RecordOf)[localName] = namespace(localName); } return namespace; }; -export const ACL = createNamespace('http://www.w3.org/ns/auth/acl#', +/** + * Creates a function that expands local names from the given base URI into strings, + * and exports the given local names as properties on the returned object. + */ +export const createUriNamespace = (baseUri: string, ...localNames: T[]): +Namespace => + createNamespace(baseUri, (expanded): string => expanded, ...localNames); + +/** + * Creates a function that expands local names from the given base URI into named nodes, + * and exports the given local names as properties on the returned object. + */ +export const createTermNamespace = (baseUri: string, ...localNames: T[]): +Namespace => + createNamespace(baseUri, namedNode, ...localNames); + +/** + * Creates a function that expands local names from the given base URI into strings, + * and exports the given local names as properties on the returned object. + * Under the `terms` property, it exposes the expanded local names as named nodes. 
+ */ +export const createUriAndTermNamespace = (baseUri: string, ...localNames: T[]): +Namespace & { terms: Namespace } => + Object.assign(createUriNamespace(baseUri, ...localNames), + { terms: createTermNamespace(baseUri, ...localNames) }); + +export const ACL = createUriAndTermNamespace('http://www.w3.org/ns/auth/acl#', 'accessTo', 'agent', 'agentClass', @@ -41,21 +72,21 @@ export const ACL = createNamespace('http://www.w3.org/ns/auth/acl#', 'Control', ); -export const DCTERMS = createNamespace('http://purl.org/dc/terms/', +export const DC = createUriAndTermNamespace('http://purl.org/dc/terms/', 'modified', ); -export const FOAF = createNamespace('http://xmlns.com/foaf/0.1/', +export const FOAF = createUriAndTermNamespace('http://xmlns.com/foaf/0.1/', 'Agent', 'AuthenticatedAgent', ); -export const HTTP = createNamespace('urn:solid:http:', +export const HTTP = createUriAndTermNamespace('urn:solid:http:', 'location', 'slug', ); -export const LDP = createNamespace('http://www.w3.org/ns/ldp#', +export const LDP = createUriAndTermNamespace('http://www.w3.org/ns/ldp#', 'contains', 'BasicContainer', @@ -63,27 +94,28 @@ export const LDP = createNamespace('http://www.w3.org/ns/ldp#', 'Resource', ); -export const MA = createNamespace('http://www.w3.org/ns/ma-ont#', +export const MA = createUriAndTermNamespace('http://www.w3.org/ns/ma-ont#', 'format', ); -export const PIM = createNamespace('http://www.w3.org/ns/pim/space#', +export const PIM = createUriAndTermNamespace('http://www.w3.org/ns/pim/space#', 'Storage', ); -export const POSIX = createNamespace('http://www.w3.org/ns/posix/stat#', +export const POSIX = createUriAndTermNamespace('http://www.w3.org/ns/posix/stat#', 'mtime', 'size', ); -export const RDF = createNamespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#', +export const RDF = createUriAndTermNamespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#', 'type', ); -export const XSD = createNamespace('http://www.w3.org/2001/XMLSchema#', +export const XSD = 
createUriAndTermNamespace('http://www.w3.org/2001/XMLSchema#', 'dateTime', 'integer', ); // Alias for most commonly used URI export const CONTENT_TYPE = MA.format; +export const CONTENT_TYPE_TERM = MA.terms.format; diff --git a/test/unit/ldp/http/metadata/LinkRelMetadataWriter.test.ts b/test/unit/ldp/http/metadata/LinkRelMetadataWriter.test.ts index 39df5155a4..b77cc33a2b 100644 --- a/test/unit/ldp/http/metadata/LinkRelMetadataWriter.test.ts +++ b/test/unit/ldp/http/metadata/LinkRelMetadataWriter.test.ts @@ -1,7 +1,6 @@ import { createResponse } from 'node-mocks-http'; import { LinkRelMetadataWriter } from '../../../../../src/ldp/http/metadata/LinkRelMetadataWriter'; import { RepresentationMetadata } from '../../../../../src/ldp/representation/RepresentationMetadata'; -import { toCachedNamedNode } from '../../../../../src/util/UriUtil'; import { LDP, RDF } from '../../../../../src/util/Vocabularies'; describe('A LinkRelMetadataWriter', (): void => { @@ -9,7 +8,7 @@ describe('A LinkRelMetadataWriter', (): void => { it('adds the correct link headers.', async(): Promise => { const response = createResponse(); - const metadata = new RepresentationMetadata({ [RDF.type]: toCachedNamedNode(LDP.Resource), unused: 'text' }); + const metadata = new RepresentationMetadata({ [RDF.type]: LDP.terms.Resource, unused: 'text' }); await expect(writer.handle({ response, metadata })).resolves.toBeUndefined(); expect(response.getHeaders()).toEqual({ link: `<${LDP.Resource}>; rel="type"` }); }); diff --git a/test/unit/storage/DataAccessorBasedStore.test.ts b/test/unit/storage/DataAccessorBasedStore.test.ts index 4b9449d2da..5f3d2746f3 100644 --- a/test/unit/storage/DataAccessorBasedStore.test.ts +++ b/test/unit/storage/DataAccessorBasedStore.test.ts @@ -18,7 +18,6 @@ import type { Guarded } from '../../../src/util/GuardedStream'; import { SingleRootIdentifierStrategy } from '../../../src/util/identifiers/SingleRootIdentifierStrategy'; import * as quadUtil from 
'../../../src/util/QuadUtil'; import { guardedStreamFrom } from '../../../src/util/StreamUtil'; -import { toCachedNamedNode } from '../../../src/util/UriUtil'; import { CONTENT_TYPE, HTTP, LDP, PIM, RDF } from '../../../src/util/Vocabularies'; import quad = DataFactory.quad; import namedNode = DataFactory.namedNode; @@ -160,7 +159,7 @@ describe('A DataAccessorBasedStore', (): void => { it('errors when trying to create a container with non-RDF data.', async(): Promise => { const resourceID = { path: root }; - representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); + representation.metadata.add(RDF.type, LDP.terms.Container); await expect(store.addResource(resourceID, representation)).rejects.toThrow(BadRequestHttpError); }); @@ -169,7 +168,7 @@ describe('A DataAccessorBasedStore', (): void => { const mock = jest.spyOn(quadUtil, 'parseQuads').mockImplementationOnce(async(): Promise => { throw 'apple'; }); - representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); + representation.metadata.add(RDF.type, LDP.terms.Container); await expect(store.addResource(resourceID, representation)).rejects.toBe('apple'); mock.mockRestore(); }); @@ -186,7 +185,7 @@ describe('A DataAccessorBasedStore', (): void => { it('can write containers.', async(): Promise => { const resourceID = { path: root }; - representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); + representation.metadata.add(RDF.type, LDP.terms.Container); representation.metadata.contentType = 'text/turtle'; representation.data = guardedStreamFrom([ `<${`${root}resource/`}> a .` ]); const result = await store.addResource(resourceID, representation); @@ -269,14 +268,14 @@ describe('A DataAccessorBasedStore', (): void => { representation.metadata.identifier = DataFactory.namedNode(resourceID.path); const newRepresentation = { ...representation }; newRepresentation.metadata = new RepresentationMetadata(representation.metadata); - newRepresentation.metadata.add(RDF.type, 
toCachedNamedNode(LDP.Container)); + newRepresentation.metadata.add(RDF.type, LDP.terms.Container); await expect(store.setRepresentation(resourceID, newRepresentation)) .rejects.toThrow(new ConflictHttpError('Input resource type does not match existing resource type.')); }); it('will error if the ending slash does not match its resource type.', async(): Promise => { const resourceID = { path: `${root}resource` }; - representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); + representation.metadata.add(RDF.type, LDP.terms.Container); await expect(store.setRepresentation(resourceID, representation)).rejects.toThrow( new BadRequestHttpError('Containers should have a `/` at the end of their path, resources should not.'), ); @@ -294,7 +293,7 @@ describe('A DataAccessorBasedStore', (): void => { it('errors when trying to create a container with non-RDF data.', async(): Promise => { const resourceID = { path: `${root}container/` }; - representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); + representation.metadata.add(RDF.type, LDP.terms.Container); await expect(store.setRepresentation(resourceID, representation)).rejects.toThrow(BadRequestHttpError); }); @@ -332,7 +331,7 @@ describe('A DataAccessorBasedStore', (): void => { it('errors when trying to create a container with containment triples.', async(): Promise => { const resourceID = { path: `${root}container/` }; - representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); + representation.metadata.add(RDF.type, LDP.terms.Container); representation.metadata.contentType = 'text/turtle'; representation.metadata.identifier = DataFactory.namedNode(`${root}resource/`); representation.data = guardedStreamFrom( @@ -390,7 +389,7 @@ describe('A DataAccessorBasedStore', (): void => { }); it('will error when deleting a root storage container.', async(): Promise => { - representation.metadata.add(RDF.type, toCachedNamedNode(PIM.Storage)); + representation.metadata.add(RDF.type, 
PIM.terms.Storage); accessor.data[`${root}container`] = representation; await expect(store.deleteResource({ path: `${root}container` })) .rejects.toThrow(new MethodNotAllowedHttpError('Cannot delete a root storage container.')); diff --git a/test/unit/storage/accessors/FileDataAccessor.test.ts b/test/unit/storage/accessors/FileDataAccessor.test.ts index 6d4f208572..74ebb09c34 100644 --- a/test/unit/storage/accessors/FileDataAccessor.test.ts +++ b/test/unit/storage/accessors/FileDataAccessor.test.ts @@ -1,6 +1,6 @@ import 'jest-rdf'; import type { Readable } from 'stream'; -import { DataFactory } from 'n3'; +import { namedNode } from '@rdfjs/data-model'; import type { Representation } from '../../../../src/ldp/representation/Representation'; import { RepresentationMetadata } from '../../../../src/ldp/representation/RepresentationMetadata'; import { FileDataAccessor } from '../../../../src/storage/accessors/FileDataAccessor'; @@ -12,8 +12,8 @@ import type { SystemError } from '../../../../src/util/errors/SystemError'; import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/UnsupportedMediaTypeHttpError'; import type { Guarded } from '../../../../src/util/GuardedStream'; import { guardedStreamFrom, readableToString } from '../../../../src/util/StreamUtil'; -import { toCachedNamedNode, toLiteral } from '../../../../src/util/UriUtil'; -import { CONTENT_TYPE, DCTERMS, LDP, POSIX, RDF, XSD } from '../../../../src/util/Vocabularies'; +import { toLiteral } from '../../../../src/util/UriUtil'; +import { CONTENT_TYPE, DC, LDP, POSIX, RDF, XSD } from '../../../../src/util/Vocabularies'; import { mockFs } from '../../../util/Util'; jest.mock('fs'); @@ -98,9 +98,9 @@ describe('A FileDataAccessor', (): void => { expect(metadata.identifier.value).toBe(`${base}resource.ttl`); expect(metadata.contentType).toBe('text/turtle'); expect(metadata.get(RDF.type)?.value).toBe(LDP.Resource); - expect(metadata.get(POSIX.size)).toEqualRdfTerm(toLiteral('data'.length, 
XSD.integer)); - expect(metadata.get(DCTERMS.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.dateTime)); - expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.integer)); + expect(metadata.get(POSIX.size)).toEqualRdfTerm(toLiteral('data'.length, XSD.terms.integer)); + expect(metadata.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime)); + expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.terms.integer)); }); it('generates the metadata for a container and its non-meta children.', async(): Promise => { @@ -108,23 +108,23 @@ describe('A FileDataAccessor', (): void => { metadata = await accessor.getMetadata({ path: `${base}container/` }); expect(metadata.identifier.value).toBe(`${base}container/`); expect(metadata.getAll(RDF.type)).toEqualRdfTermArray( - [ toCachedNamedNode(LDP.Container), toCachedNamedNode(LDP.BasicContainer), toCachedNamedNode(LDP.Resource) ], + [ LDP.terms.Container, LDP.terms.BasicContainer, LDP.terms.Resource ], ); - expect(metadata.get(POSIX.size)).toEqualRdfTerm(toLiteral(0, XSD.integer)); - expect(metadata.get(DCTERMS.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.dateTime)); - expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.integer)); + expect(metadata.get(POSIX.size)).toEqualRdfTerm(toLiteral(0, XSD.terms.integer)); + expect(metadata.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime)); + expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.terms.integer)); expect(metadata.getAll(LDP.contains)).toEqualRdfTermArray( - [ toCachedNamedNode(`${base}container/resource`), toCachedNamedNode(`${base}container/container2/`) ], + [ namedNode(`${base}container/resource`), namedNode(`${base}container/container2/`) ], ); const childQuads = metadata.quads().filter((quad): boolean => 
quad.subject.value === `${base}container/resource`); const childMetadata = new RepresentationMetadata({ path: `${base}container/resource` }).addQuads(childQuads); expect(childMetadata.get(RDF.type)?.value).toBe(LDP.Resource); - expect(childMetadata.get(POSIX.size)).toEqualRdfTerm(toLiteral('data'.length, XSD.integer)); - expect(childMetadata.get(DCTERMS.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.dateTime)); + expect(childMetadata.get(POSIX.size)).toEqualRdfTerm(toLiteral('data'.length, XSD.terms.integer)); + expect(childMetadata.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime)); expect(childMetadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), - XSD.integer)); + XSD.terms.integer)); }); it('adds stored metadata when requesting metadata.', async(): Promise => { @@ -168,7 +168,7 @@ describe('A FileDataAccessor', (): void => { }); it('does not write metadata that is stored by the file system.', async(): Promise => { - metadata.add(RDF.type, toCachedNamedNode(LDP.Resource)); + metadata.add(RDF.type, LDP.terms.Resource); await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined(); expect(cache.data.resource).toBe('data'); expect(cache.data['resource.meta']).toBeUndefined(); @@ -212,7 +212,7 @@ describe('A FileDataAccessor', (): void => { it('updates the filename if the content-type gets updated.', async(): Promise => { cache.data = { 'resource$.ttl': ' .', 'resource.meta': ' .' 
}; - metadata.identifier = DataFactory.namedNode(`${base}resource`); + metadata.identifier = namedNode(`${base}resource`); metadata.contentType = 'text/plain'; metadata.add('new', 'metadata'); await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)) @@ -224,7 +224,7 @@ describe('A FileDataAccessor', (): void => { }); it('does not try to update the content-type if there is no original file.', async(): Promise => { - metadata.identifier = DataFactory.namedNode(`${base}resource.txt`); + metadata.identifier = namedNode(`${base}resource.txt`); metadata.contentType = 'text/turtle'; metadata.add('new', 'metadata'); await expect(accessor.writeDocument({ path: `${base}resource.txt` }, data, metadata)) @@ -289,7 +289,7 @@ describe('A FileDataAccessor', (): void => { it('does not write metadata that is stored by the file system.', async(): Promise => { metadata = new RepresentationMetadata( { path: `${base}container/` }, - { [RDF.type]: [ toCachedNamedNode(LDP.BasicContainer), toCachedNamedNode(LDP.Resource) ]}, + { [RDF.type]: [ LDP.terms.BasicContainer, LDP.terms.Resource ]}, ); await expect(accessor.writeContainer({ path: `${base}container/` }, metadata)).resolves.toBeUndefined(); expect(cache.data.container).toEqual({}); diff --git a/test/unit/storage/accessors/InMemoryDataAccessor.test.ts b/test/unit/storage/accessors/InMemoryDataAccessor.test.ts index 8a9ce7e703..706a8e9162 100644 --- a/test/unit/storage/accessors/InMemoryDataAccessor.test.ts +++ b/test/unit/storage/accessors/InMemoryDataAccessor.test.ts @@ -1,12 +1,12 @@ import 'jest-rdf'; import type { Readable } from 'stream'; +import { DataFactory } from 'n3'; import { RepresentationMetadata } from '../../../../src/ldp/representation/RepresentationMetadata'; import { InMemoryDataAccessor } from '../../../../src/storage/accessors/InMemoryDataAccessor'; import { APPLICATION_OCTET_STREAM } from '../../../../src/util/ContentTypes'; import { NotFoundHttpError } from 
'../../../../src/util/errors/NotFoundHttpError'; import type { Guarded } from '../../../../src/util/GuardedStream'; import { guardedStreamFrom, readableToString } from '../../../../src/util/StreamUtil'; -import { toCachedNamedNode } from '../../../../src/util/UriUtil'; import { CONTENT_TYPE, LDP, RDF } from '../../../../src/util/Vocabularies'; describe('An InMemoryDataAccessor', (): void => { @@ -80,13 +80,13 @@ describe('An InMemoryDataAccessor', (): void => { await expect(accessor.writeContainer({ path: `${base}container/container2` }, metadata)).resolves.toBeUndefined(); metadata = await accessor.getMetadata({ path: `${base}container/` }); expect(metadata.getAll(LDP.contains)).toEqualRdfTermArray( - [ toCachedNamedNode(`${base}container/resource`), toCachedNamedNode(`${base}container/container2/`) ], + [ DataFactory.namedNode(`${base}container/resource`), DataFactory.namedNode(`${base}container/container2/`) ], ); }); it('adds stored metadata when requesting document metadata.', async(): Promise => { const identifier = { path: `${base}resource` }; - const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: toCachedNamedNode(LDP.Resource) }); + const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: LDP.terms.Resource }); await expect(accessor.writeDocument(identifier, data, inputMetadata)).resolves.toBeUndefined(); metadata = await accessor.getMetadata(identifier); expect(metadata.identifier.value).toBe(`${base}resource`); @@ -97,7 +97,7 @@ describe('An InMemoryDataAccessor', (): void => { it('adds stored metadata when requesting container metadata.', async(): Promise => { const identifier = { path: `${base}container/` }; - const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: toCachedNamedNode(LDP.Container) }); + const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: LDP.terms.Container }); await expect(accessor.writeContainer(identifier, inputMetadata)).resolves.toBeUndefined(); 
metadata = await accessor.getMetadata(identifier); @@ -109,7 +109,7 @@ describe('An InMemoryDataAccessor', (): void => { it('can overwrite the metadata of an existing container without overwriting children.', async(): Promise => { const identifier = { path: `${base}container/` }; - const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: toCachedNamedNode(LDP.Container) }); + const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: LDP.terms.Container }); await expect(accessor.writeContainer(identifier, inputMetadata)).resolves.toBeUndefined(); const resourceMetadata = new RepresentationMetadata(); await expect(accessor.writeDocument( @@ -117,7 +117,7 @@ describe('An InMemoryDataAccessor', (): void => { )).resolves.toBeUndefined(); const newMetadata = new RepresentationMetadata(inputMetadata); - newMetadata.add(RDF.type, toCachedNamedNode(LDP.BasicContainer)); + newMetadata.add(RDF.type, LDP.terms.BasicContainer); await expect(accessor.writeContainer(identifier, newMetadata)).resolves.toBeUndefined(); metadata = await accessor.getMetadata(identifier); @@ -135,7 +135,7 @@ describe('An InMemoryDataAccessor', (): void => { it('can write to the root container without overriding its children.', async(): Promise => { const identifier = { path: `${base}` }; - const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: toCachedNamedNode(LDP.Container) }); + const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: LDP.terms.Container }); await expect(accessor.writeContainer(identifier, inputMetadata)).resolves.toBeUndefined(); const resourceMetadata = new RepresentationMetadata(); await expect(accessor.writeDocument( diff --git a/test/unit/storage/accessors/SparqlDataAccessor.test.ts b/test/unit/storage/accessors/SparqlDataAccessor.test.ts index d414c71a87..98d77b5495 100644 --- a/test/unit/storage/accessors/SparqlDataAccessor.test.ts +++ b/test/unit/storage/accessors/SparqlDataAccessor.test.ts @@ -14,8 
+14,7 @@ import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/Unsup import type { Guarded } from '../../../../src/util/GuardedStream'; import { SingleRootIdentifierStrategy } from '../../../../src/util/identifiers/SingleRootIdentifierStrategy'; import { guardedStreamFrom } from '../../../../src/util/StreamUtil'; -import { toCachedNamedNode } from '../../../../src/util/UriUtil'; -import { CONTENT_TYPE, LDP, RDF } from '../../../../src/util/Vocabularies'; +import { CONTENT_TYPE_TERM, LDP, RDF } from '../../../../src/util/Vocabularies'; const { literal, namedNode, quad } = DataFactory; @@ -94,7 +93,7 @@ describe('A SparqlDataAccessor', (): void => { metadata = await accessor.getMetadata({ path: 'http://identifier' }); expect(metadata.quads()).toBeRdfIsomorphic([ quad(namedNode('this'), namedNode('a'), namedNode('triple')), - quad(namedNode('http://identifier'), toCachedNamedNode(CONTENT_TYPE), literal(INTERNAL_QUADS)), + quad(namedNode('http://identifier'), CONTENT_TYPE_TERM, literal(INTERNAL_QUADS)), ]); expect(fetchTriples).toHaveBeenCalledTimes(1); @@ -135,7 +134,7 @@ describe('A SparqlDataAccessor', (): void => { it('overwrites the metadata when writing a container and updates parent.', async(): Promise => { metadata = new RepresentationMetadata({ path: 'http://test.com/container/' }, - { [RDF.type]: [ toCachedNamedNode(LDP.Resource), toCachedNamedNode(LDP.Container) ]}); + { [RDF.type]: [ LDP.terms.Resource, LDP.terms.Container ]}); await expect(accessor.writeContainer({ path: 'http://test.com/container/' }, metadata)).resolves.toBeUndefined(); expect(fetchUpdate).toHaveBeenCalledTimes(1); @@ -154,7 +153,7 @@ describe('A SparqlDataAccessor', (): void => { it('does not write containment triples when writing to a root container.', async(): Promise => { metadata = new RepresentationMetadata({ path: 'http://test.com/' }, - { [RDF.type]: [ toCachedNamedNode(LDP.Resource), toCachedNamedNode(LDP.Container) ]}); + { [RDF.type]: [ LDP.terms.Resource, 
LDP.terms.Container ]}); await expect(accessor.writeContainer({ path: 'http://test.com/' }, metadata)).resolves.toBeUndefined(); expect(fetchUpdate).toHaveBeenCalledTimes(1); @@ -172,7 +171,7 @@ describe('A SparqlDataAccessor', (): void => { it('overwrites the data and metadata when writing a resource and updates parent.', async(): Promise => { metadata = new RepresentationMetadata({ path: 'http://test.com/container/resource' }, - { [RDF.type]: [ toCachedNamedNode(LDP.Resource) ]}); + { [RDF.type]: [ LDP.terms.Resource ]}); await expect(accessor.writeDocument({ path: 'http://test.com/container/resource' }, data, metadata)) .resolves.toBeUndefined(); @@ -191,7 +190,7 @@ describe('A SparqlDataAccessor', (): void => { it('overwrites the data and metadata when writing an empty resource.', async(): Promise => { metadata = new RepresentationMetadata({ path: 'http://test.com/container/resource' }, - { [RDF.type]: [ toCachedNamedNode(LDP.Resource) ]}); + { [RDF.type]: [ LDP.terms.Resource ]}); const empty = guardedStreamFrom([]); await expect(accessor.writeDocument({ path: 'http://test.com/container/resource' }, empty, metadata)) .resolves.toBeUndefined(); @@ -210,7 +209,7 @@ describe('A SparqlDataAccessor', (): void => { it('removes all references when deleting a resource.', async(): Promise => { metadata = new RepresentationMetadata({ path: 'http://test.com/container/' }, - { [RDF.type]: [ toCachedNamedNode(LDP.Resource), toCachedNamedNode(LDP.Container) ]}); + { [RDF.type]: [ LDP.terms.Resource, LDP.terms.Container ]}); await expect(accessor.deleteResource({ path: 'http://test.com/container/' })).resolves.toBeUndefined(); expect(fetchUpdate).toHaveBeenCalledTimes(1); @@ -224,7 +223,7 @@ describe('A SparqlDataAccessor', (): void => { it('does not try to remove containment triples when deleting a root container.', async(): Promise => { metadata = new RepresentationMetadata({ path: 'http://test.com/' }, - { [RDF.type]: [ toCachedNamedNode(LDP.Resource), 
toCachedNamedNode(LDP.Container) ]}); + { [RDF.type]: [ LDP.terms.Resource, LDP.terms.Container ]}); await expect(accessor.deleteResource({ path: 'http://test.com/' })).resolves.toBeUndefined(); expect(fetchUpdate).toHaveBeenCalledTimes(1); diff --git a/test/unit/util/UriUtil.test.ts b/test/unit/util/UriUtil.test.ts index bb8add3476..4290963e84 100644 --- a/test/unit/util/UriUtil.test.ts +++ b/test/unit/util/UriUtil.test.ts @@ -84,7 +84,7 @@ describe('An UriUtil', (): void => { describe('toLiteral function', (): void => { it('converts the input to a valid literal with the given type.', async(): Promise => { const expected = literal('5', namedNode(XSD.integer)); - expect(toLiteral(5, XSD.integer)).toEqualRdfTerm(expected); + expect(toLiteral(5, XSD.terms.integer)).toEqualRdfTerm(expected); }); }); }); diff --git a/test/unit/util/Vocabularies.test.ts b/test/unit/util/Vocabularies.test.ts index 30775c7c1e..0885bea6d6 100644 --- a/test/unit/util/Vocabularies.test.ts +++ b/test/unit/util/Vocabularies.test.ts @@ -1,3 +1,4 @@ +import { namedNode } from '@rdfjs/data-model'; import { LDP } from '../../../src/util/Vocabularies'; describe('Vocabularies', (): void => { @@ -6,12 +7,24 @@ describe('Vocabularies', (): void => { expect(LDP('new')).toBe('http://www.w3.org/ns/ldp#new'); }); - it('caches new properties.', (): void => { - expect(LDP('new')).toBe(LDP('new')); + it('can create new properties as terms.', (): void => { + expect(LDP.terms('new')).toEqual(namedNode('http://www.w3.org/ns/ldp#new')); + }); + + it('caches new properties as terms.', (): void => { + expect(LDP.terms('new')).toBe(LDP.terms('new')); }); it('exposes ldp:contains.', (): void => { expect(LDP.contains).toBe('http://www.w3.org/ns/ldp#contains'); }); + + it('exposes ldp:contains as a term.', (): void => { + expect(LDP.terms.contains).toEqual(namedNode('http://www.w3.org/ns/ldp#contains')); + }); + + it('caches ldp:contains as a term.', (): void => { + expect(LDP.terms.contains).toBe(LDP.terms.contains); 
+ }); }); });