Skip to content

Commit

Permalink
Merge pull request #1513 from zazuko/datePublished
Browse files Browse the repository at this point in the history
Date published
  • Loading branch information
giacomociti authored Jun 3, 2024
2 parents 9eeb42e + 3fb4e00 commit 6ea3bb1
Show file tree
Hide file tree
Showing 5 changed files with 55 additions and 5 deletions.
6 changes: 6 additions & 0 deletions .changeset/eighty-geckos-burn.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
---
"@cube-creator/core-api": patch
"@cube-creator/cli": patch
---

Avoid duplicate datePublished on first revision
2 changes: 1 addition & 1 deletion apis/core/lib/domain/job/update.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ export async function update({ resource, store }: JobUpdateParams): Promise<Grap
project.incrementPublishedRevision()

const dataset = await store.getResource<Dataset>(project.dataset.id)
if (job.revision === 1) {
if (job.revision === 1 && !dataset.published) {
dataset.setPublishedDate(job.modified)
}
} else if (isTransformJob(job)) {
Expand Down
27 changes: 27 additions & 0 deletions apis/core/test/domain/job/update.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -202,6 +202,33 @@ describe('domain/job/update', () => {
})
})

// Regression test: when the dataset already carries schema:datePublished,
// completing revision 1 must leave that exact date untouched (no duplicate).
it('does not change dataset published date on first revision', async () => {
  // given — a completed job resource for revision 1
  const jobResource = clownface({ dataset: $rdf.dataset() })
    .namedNode('job')
    .addOut(schema.actionStatus, schema.CompletedActionStatus)
    .addOut(dcterms.modified, $rdf.literal('2020-12-12T11:30:30', xsd.dateTime))
  job.addOut(cc.revision, 1)
  // the dataset was published once already
  dataset.addOut(schema.datePublished, $rdf.literal('2020-10-12', xsd.date))

  // when
  await update({ resource: jobResource, store })

  // then — exactly one datePublished triple remains, and it is the pre-existing one
  expect(dataset).to.matchShape({
    property: {
      path: schema.datePublished,
      hasValue: $rdf.literal('2020-10-12', xsd.date),
      minCount: 1,
      maxCount: 1,
    },
  })
})

it('does not change dataset published date on revision>1', async () => {
// given
const resource = clownface({ dataset: $rdf.dataset() })
Expand Down
14 changes: 10 additions & 4 deletions cli/lib/metadata.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import type { Literal, NamedNode, Quad } from '@rdfjs/types'
import { obj } from 'through2'
import $rdf from 'rdf-ext'
import { dcat, dcterms, rdf, schema, sh, _void, foaf } from '@tpluscode/rdf-ns-builders'
import { dcat, dcterms, rdf, schema, sh, _void, foaf, xsd } from '@tpluscode/rdf-ns-builders'
import { cc, cube } from '@cube-creator/core/namespace'
import { Dataset, Project, PublishJob } from '@cube-creator/model'
import { HydraClient } from 'alcaeus/alcaeus'
Expand All @@ -10,7 +10,7 @@ import { Published } from '@cube-creator/model/Cube'
import { CONSTRUCT, sparql } from '@tpluscode/sparql-builder'
import StreamClient from 'sparql-http-client/StreamClient'
import { Readable } from 'readable-stream'
import { toRdf } from 'rdf-literal'
import { fromRdf, toRdf } from 'rdf-literal'
import { tracer } from './otel/tracer'
import { loadProject } from './project'

Expand Down Expand Up @@ -54,6 +54,7 @@ interface QueryParams {
project: Project
revision: Literal
cubeIdentifier: string
timestamp: Literal
}

function sourceCubeAndShape({ project, revision, cubeIdentifier }: QueryParams) {
Expand Down Expand Up @@ -87,7 +88,8 @@ function sourceCubeAndShape({ project, revision, cubeIdentifier }: QueryParams)
`
}

function cubeMetadata({ project, revision }: QueryParams) {
function cubeMetadata({ project, revision, timestamp }: QueryParams) {
const defaultDatePublished = toRdf(fromRdf(timestamp), { datatype: xsd.date })
return sparql`
graph ${project.cubeGraph} {
?cube a ${cube.Cube} .
Expand All @@ -96,6 +98,9 @@ function cubeMetadata({ project, revision }: QueryParams) {
graph ${project.dataset.id} {
${project.dataset.id} ?cubeProp ?cubeMeta .
FILTER (?cubeProp != ${schema.datePublished})
OPTIONAL { ${project.dataset.id} ${schema.datePublished} ?d }
BIND(COALESCE(?d, ${defaultDatePublished}) AS ?datePublished)
MINUS {
${project.dataset.id} ${schema.hasPart}|${cc.dimensionMetadata} ?cubeMeta
Expand Down Expand Up @@ -188,6 +193,7 @@ export async function loadCubeMetadata(this: Context, { jobUri, endpoint, user,
project,
cubeIdentifier,
revision,
timestamp,
}

const stream = await CONSTRUCT`
Expand All @@ -202,7 +208,7 @@ export async function loadCubeMetadata(this: Context, { jobUri, endpoint, user,
${dcat.accessURL} ${maintainer.accessURL} ;
${_void.sparqlEndpoint} ${maintainer.sparqlEndpoint} ;
${revision.value === '1' ? sparql`${schema.datePublished} ${timestamp}` : ''}
${schema.datePublished} ?datePublished ;
.
?cube a ${schema.CreativeWork} ; ${schema.hasPart} ?cubeVersion .
Expand Down
11 changes: 11 additions & 0 deletions cli/test/lib/commands/publish.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -306,6 +306,17 @@ describe('@cube-creator/cli/lib/commands/publish', function () {
})
})

// The published cube must expose exactly one schema:datePublished typed xsd:date.
it('adds published date to published cube', async function () {
  // build the SHACL-like shape first, then match the target cube pointer against it
  const datePublishedShape = {
    property: [{
      path: schema.datePublished,
      datatype: xsd.date,
      minCount: 1,
      maxCount: 1,
    }],
  }
  expect(cubePointer.namedNode(targetCube())).to.matchShape(datePublishedShape)
})

it('adds project identifier to dataset metadata', async function () {
expect(cubePointer.namedNode(targetCube())).to.matchShape({
property: [{
Expand Down

0 comments on commit 6ea3bb1

Please sign in to comment.