diff --git a/src/language/context-mapper-dsl.langium b/src/language/context-mapper-dsl.langium index 6b36211..e6f73ce 100644 --- a/src/language/context-mapper-dsl.langium +++ b/src/language/context-mapper-dsl.langium @@ -2,15 +2,13 @@ grammar ContextMapperDsl terminal ID: /[_a-zA-Z][\w_]*/; terminal STRING: /"(\\.|[^"\\])*"|'(\\.|[^'\\])*'/; -terminal ML_COMMENT: /\/\*[\s\S]*?\*\//; -terminal SL_COMMENT: /\/\/[^\n\r]*/; terminal OPEN: '{'; terminal CLOSE: '}'; hidden terminal WS: /\s+/; -hidden terminal HIDDEN_ML_COMMENT: ML_COMMENT; -hidden terminal HIDDEN_SL_COMMENT: SL_COMMENT; +hidden terminal ML_COMMENT: /\/\*[\s\S]*?\*\//; +hidden terminal SL_COMMENT: /\/\/[^\n\r]*/; entry ContextMappingModel: ( @@ -169,7 +167,7 @@ CustomerSupplierRelationship: (OPEN ( ('implementationTechnology' ('=')? implementationTechnology=STRING) & - (('exposedAggregates' ('=')? upstreamExposedAggregates+=[Aggregate]) ("," upstreamExposedAggregates+=[Aggregate])* (exposedAggregatesComment=SL_COMMENT)?) & + (('exposedAggregates' ('=')? upstreamExposedAggregates+=[Aggregate]) ("," upstreamExposedAggregates+=[Aggregate])*) & ('downstreamRights' ('=')? downstreamGovernanceRights=DownstreamGovernanceRights) ) CLOSE)? @@ -182,8 +180,8 @@ Aggregate: ( (('responsibilities' ('=')? responsibilities+=STRING) ("," responsibilities+=STRING)*) & ( - (('useCases' ('=')? userRequirements+=[UseCase]) ("," userRequirements+=[UseCase])*) | - (('userStories' ('=')? userRequirements+=[UserStory]) ("," userRequirements+=[UserStory])*) | + (('useCases' ('=')? useCases+=[UseCase]) ("," useCases+=[UseCase])*) | + (('userStories' ('=')? userStories+=[UserStory]) ("," userStories+=[UserStory])*) | ((('features' | 'userRequirements') ('=')? userRequirements+=[UserRequirement]) ("," userRequirements+=[UserRequirement])*) ) & ('owner' ('=')? 
owner=[BoundedContext]) & diff --git a/src/language/semantictokens/HighlightingHelper.ts b/src/language/semantictokens/HighlightingHelper.ts index f8214ac..d61f17e 100644 --- a/src/language/semantictokens/HighlightingHelper.ts +++ b/src/language/semantictokens/HighlightingHelper.ts @@ -18,6 +18,14 @@ export function highlightKeyword (node: AstNode, acceptor: SemanticTokenAcceptor }) } +export function highlightOperator (node: AstNode, acceptor: SemanticTokenAcceptor, operator: string) { + acceptor({ + node, + type: SemanticTokenTypes.operator, + keyword: operator + }) +} + export function highlightType (node: AstNode, acceptor: SemanticTokenAcceptor, property: string, modifiers: string[] = []) { acceptor({ node, diff --git a/src/language/semantictokens/boundedContext/AggregateSemanticTokenProvider.ts b/src/language/semantictokens/boundedContext/AggregateSemanticTokenProvider.ts index f031599..f180fb5 100644 --- a/src/language/semantictokens/boundedContext/AggregateSemanticTokenProvider.ts +++ b/src/language/semantictokens/boundedContext/AggregateSemanticTokenProvider.ts @@ -1,4 +1,4 @@ -import { Aggregate, isAggregate, isUseCase, isUserRequirement, isUserStory } from '../../generated/ast.js' +import { Aggregate, isAggregate } from '../../generated/ast.js' import { SemanticTokenAcceptor } from 'langium/lsp' import { SemanticTokenTypes } from 'vscode-languageserver-types' import { highlightField, highlightString, highlightTypeDeclaration } from '../HighlightingHelper.js' @@ -22,7 +22,15 @@ export class AggregateSemanticTokenProvider implements ContextMapperSemanticToke } if (node.userRequirements.length > 0) { - this.highlightUserRequirements(node, acceptor) + highlightField(node, acceptor, ['userRequirements', 'features'], 'userRequirements', SemanticTokenTypes.type) + } + + if (node.useCases.length > 0) { + highlightField(node, acceptor, ['useCases'], 'useCases', SemanticTokenTypes.type) + } + + if (node.userStories.length > 0) { + highlightField(node, acceptor, 
['userStories'], 'userStories', SemanticTokenTypes.type) } if (node.owner) { @@ -53,10 +61,6 @@ export class AggregateSemanticTokenProvider implements ContextMapperSemanticToke highlightField(node, acceptor, ['storageSimilarity'], 'storageSimilarity') } - if (node.storageSimilarity) { - highlightField(node, acceptor, ['storageSimilarity'], 'storageSimilarity') - } - if (node.securityCriticality) { highlightField(node, acceptor, ['securityCriticality'], 'securityCriticality') } @@ -69,18 +73,4 @@ export class AggregateSemanticTokenProvider implements ContextMapperSemanticToke highlightField(node, acceptor, ['securityAccessGroup'], 'securityAccessGroup', SemanticTokenTypes.string) } } - - private highlightUserRequirements (node: Aggregate, acceptor: SemanticTokenAcceptor) { - const keywords = [] - if (isUseCase(node.userRequirements[0])) { - keywords.push('useCases') - } else if (isUserRequirement(node.userRequirements[0])) { - keywords.push('userRequirements') - keywords.push('features') - } else if (isUserStory(node.userRequirements[0])) { - keywords.push('userStories') - } - - highlightField(node, acceptor, keywords, 'userRequirements', SemanticTokenTypes.type) - } } diff --git a/src/language/semantictokens/contextMap/RelationshipSemanticTokenProvider.ts b/src/language/semantictokens/contextMap/RelationshipSemanticTokenProvider.ts index 19e949a..0f3fb2d 100644 --- a/src/language/semantictokens/contextMap/RelationshipSemanticTokenProvider.ts +++ b/src/language/semantictokens/contextMap/RelationshipSemanticTokenProvider.ts @@ -15,6 +15,7 @@ import { import { SemanticTokenAcceptor } from 'langium/lsp' import { SemanticTokenModifiers, SemanticTokenTypes } from 'vscode-languageserver-types' import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { highlightField, highlightKeyword, highlightOperator, highlightType } from '../HighlightingHelper.js' export class RelationshipSemanticTokenProvider implements 
ContextMapperSemanticTokenProvider { supports (node: Relationship): node is Relationship { @@ -22,6 +23,14 @@ export class RelationshipSemanticTokenProvider implements ContextMapperSemanticT } highlight (node: Relationship, acceptor: SemanticTokenAcceptor) { + if (node.name) { + highlightType(node, acceptor, 'name', [SemanticTokenModifiers.declaration]) + } + + if (node.implementationTechnology) { + highlightField(node, acceptor, ['implementationTechnology'], 'implementationTechnology', SemanticTokenTypes.string) + } + if (isSymmetricRelationship(node)) { this.highlightSymmetricRelationship(node, acceptor) } else if (isUpstreamDownstreamRelationship(node)) { @@ -30,38 +39,8 @@ export class RelationshipSemanticTokenProvider implements ContextMapperSemanticT } private highlightSymmetricRelationship (node: SymmetricRelationship, acceptor: SemanticTokenAcceptor) { - acceptor({ - node, - type: SemanticTokenTypes.type, - property: 'participant1' - }) - acceptor({ - node, - type: SemanticTokenTypes.type, - property: 'participant2' - }) - - if (node.name) { - acceptor({ - node, - type: SemanticTokenTypes.type, - modifier: SemanticTokenModifiers.declaration, - property: 'name' - }) - } - - if (node.implementationTechnology) { - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'implementationTechnology' - }) - acceptor({ - node, - type: SemanticTokenTypes.string, - property: 'implementationTechnology' - }) - } + highlightType(node, acceptor, 'participant1') + highlightType(node, acceptor, 'participant2') if (isPartnership(node)) { this.highlightPartnership(node, acceptor) @@ -71,99 +50,30 @@ export class RelationshipSemanticTokenProvider implements ContextMapperSemanticT } private highlightPartnership (node: Partnership, acceptor: SemanticTokenAcceptor) { - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'P' - }) - acceptor({ - node, - type: SemanticTokenTypes.operator, - keyword: '<->' - }) - acceptor({ - node, - type: 
SemanticTokenTypes.keyword, - keyword: 'Partnership' - }) + highlightKeyword(node, acceptor, 'P') + highlightOperator(node, acceptor, '<->') + highlightKeyword(node, acceptor, 'Partnership') } private highlightSharedKernel (node: SharedKernel, acceptor: SemanticTokenAcceptor) { - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'SK' - }) - acceptor({ - node, - type: SemanticTokenTypes.operator, - keyword: '<->' - }) - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'Shared-Kernel' - }) + highlightKeyword(node, acceptor, 'SK') + highlightOperator(node, acceptor, '<->') + highlightKeyword(node, acceptor, 'Shared-Kernel') } private highlightUpstreamDownstreamRelationship (node: UpstreamDownstreamRelationship, acceptor: SemanticTokenAcceptor) { - if (isCustomerSupplierRelationship(node)) { - this.highlightCustomerSupplierRelationship(node, acceptor) - return - } - - if (node.name) { + highlightType(node, acceptor, 'upstream') + highlightKeyword(node, acceptor, 'U') + if (node.upstreamRoles.length > 0) { acceptor({ node, - type: SemanticTokenTypes.type, - modifier: SemanticTokenModifiers.declaration, - property: 'name' + type: SemanticTokenTypes.enumMember, + property: 'upstreamRoles' }) } - acceptor({ - node, - type: SemanticTokenTypes.type, - property: 'downstream' - }) - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'D' - }) - - acceptor({ - node, - type: SemanticTokenTypes.type, - property: 'upstream' - }) - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'U' - }) - - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'Upstream-Downstream' - }) - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'Downstream-Upstream' - }) - acceptor({ - node, - type: SemanticTokenTypes.operator, - keyword: '<-' - }) - acceptor({ - node, - type: SemanticTokenTypes.operator, - keyword: '->' - }) - + highlightType(node, acceptor, 'downstream') + highlightKeyword(node, acceptor, 'D') 
if (node.downstreamRoles.length > 0) { acceptor({ node, @@ -172,120 +82,31 @@ export class RelationshipSemanticTokenProvider implements ContextMapperSemanticT }) } - if (node.upstreamRoles.length > 0) { - acceptor({ - node, - type: SemanticTokenTypes.keyword, - property: 'upstreamRoles' - }) - } - - if (node.implementationTechnology) { - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'implementationTechnology' - }) - acceptor({ - node, - type: SemanticTokenTypes.string, - property: 'implementationTechnology' - }) - } + highlightOperator(node, acceptor, '->') + highlightOperator(node, acceptor, '<-') if (node.upstreamExposedAggregates.length > 0) { - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'exposedAggregates' - }) - acceptor({ - node, - type: SemanticTokenTypes.type, - property: 'upstreamExposedAggregates' - }) + highlightField(node, acceptor, ['exposedAggregates'], 'upstreamExposedAggregates', SemanticTokenTypes.type) } if (node.downstreamGovernanceRights) { - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'downstreamRights' - }) - acceptor({ - node, - type: SemanticTokenTypes.enumMember, - property: 'downstreamGovernanceRights' - }) + highlightField(node, acceptor, ['downstreamRights'], 'downstreamGovernanceRights', SemanticTokenTypes.enumMember) } - } - highlightCustomerSupplierRelationship (node: CustomerSupplierRelationship, acceptor: SemanticTokenAcceptor) { - acceptor({ - node, - type: SemanticTokenTypes.type, - property: 'upstream' - }) - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'U' - }) - if (node.upstreamRoles.length > 0) { - acceptor({ - node, - type: SemanticTokenTypes.keyword, - property: 'upstreamRoles' - }) + if (isCustomerSupplierRelationship(node)) { + this.highlightCustomerSupplierRelationship(node, acceptor) + return } - acceptor({ - node, - type: SemanticTokenTypes.operator, - keyword: '->' - }) - acceptor({ - node, - type: SemanticTokenTypes.keyword, - 
keyword: 'Supplier-Customer' - }) - acceptor({ - node, - type: SemanticTokenTypes.type, - property: 'downstream' - }) - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'D' - }) - if (node.downstreamRoles.length > 0) { - acceptor({ - node, - type: SemanticTokenTypes.keyword, - property: 'downstreamRoles' - }) - } - acceptor({ - node, - type: SemanticTokenTypes.operator, - keyword: '<-' - }) - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'Customer-Supplier' - }) + highlightKeyword(node, acceptor, 'Upstream-Downstream') + highlightKeyword(node, acceptor, 'Downstream-Upstream') + } + + highlightCustomerSupplierRelationship (node: CustomerSupplierRelationship, acceptor: SemanticTokenAcceptor) { + highlightKeyword(node, acceptor, 'Supplier-Customer') + highlightKeyword(node, acceptor, 'Customer-Supplier') - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'C' - }) - acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'S' - }) + highlightKeyword(node, acceptor, 'S') + highlightKeyword(node, acceptor, 'C') } } diff --git a/src/language/semantictokens/requirements/FeatureSemanticTokenProvider.ts b/src/language/semantictokens/requirements/FeatureSemanticTokenProvider.ts index abd6101..fe92b8a 100644 --- a/src/language/semantictokens/requirements/FeatureSemanticTokenProvider.ts +++ b/src/language/semantictokens/requirements/FeatureSemanticTokenProvider.ts @@ -1,5 +1,11 @@ import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' -import { Feature, isFeature, isNormalFeature, isStoryFeature, NormalFeature, StoryFeature } from '../../generated/ast.js' +import { + Feature, + isFeature, + isStoryFeature, + isUserActivityDefaultVerb, + StoryFeature +} from '../../generated/ast.js' import { AstNode } from 'langium' import { SemanticTokenAcceptor } from 'langium/lsp' import { highlightKeyword, highlightString } from '../HighlightingHelper.js' @@ -10,15 +16,19 @@ export class 
FeatureSemanticTokenProvider implements ContextMapperSemanticTokenP } public highlight (node: Feature, acceptor: SemanticTokenAcceptor) { - if (isNormalFeature(node)) { - this.highlightNormalFeature(node, acceptor) - } else if (isStoryFeature(node)) { + if (isStoryFeature(node)) { this.highlightStoryFeature(node, acceptor) } + + this.highlightNormalFeature(node, acceptor) } - private highlightNormalFeature (node: NormalFeature, acceptor: SemanticTokenAcceptor) { - highlightString(node, acceptor, 'verb') + private highlightNormalFeature (node: Feature, acceptor: SemanticTokenAcceptor) { + if (isUserActivityDefaultVerb(node.verb)) { + highlightKeyword(node, acceptor, node.verb) + } else { + highlightString(node, acceptor, 'verb') + } if (node.entityArticle) { highlightKeyword(node, acceptor, node.entityArticle) diff --git a/src/language/semantictokens/requirements/RequirementsSemanticTokenProvider.ts b/src/language/semantictokens/requirements/RequirementsSemanticTokenProvider.ts index 3a21850..7ed270b 100644 --- a/src/language/semantictokens/requirements/RequirementsSemanticTokenProvider.ts +++ b/src/language/semantictokens/requirements/RequirementsSemanticTokenProvider.ts @@ -33,7 +33,7 @@ export class RequirementsSemanticTokenProvider implements ContextMapperSemanticT highlightTypeDeclaration(node, acceptor, 'UseCase') if (node.role) { - highlightField(node, acceptor, ['actor'], 'role') + highlightField(node, acceptor, ['actor'], 'role', SemanticTokenTypes.string) } if (node.secondaryActors.length > 0) { diff --git a/src/language/semantictokens/requirements/StoryValuationSemanticTokenProvider.ts b/src/language/semantictokens/requirements/StoryValuationSemanticTokenProvider.ts index 8c40247..563b528 100644 --- a/src/language/semantictokens/requirements/StoryValuationSemanticTokenProvider.ts +++ b/src/language/semantictokens/requirements/StoryValuationSemanticTokenProvider.ts @@ -20,8 +20,7 @@ export class StoryValuationSemanticTokenProvider implements 
ContextMapperSemanti highlightKeyword(node, acceptor, 'accepting that') highlightString(node, acceptor, 'harmedValues') - highlightKeyword(node, acceptor, 'is') - highlightKeyword(node, acceptor, 'are') + // is/are already covered highlightKeyword(node, acceptor, 'reduced') highlightKeyword(node, acceptor, 'harmed') } diff --git a/src/language/semantictokens/vdad/ActionSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ActionSemanticTokenProvider.ts index c8b9c03..83cdf66 100644 --- a/src/language/semantictokens/vdad/ActionSemanticTokenProvider.ts +++ b/src/language/semantictokens/vdad/ActionSemanticTokenProvider.ts @@ -3,6 +3,7 @@ import { Action, isAction } from '../../generated/ast.js' import { AstNode } from 'langium' import { SemanticTokenAcceptor } from 'langium/lsp' import { highlightKeyword, highlightString } from '../HighlightingHelper.js' +import { SemanticTokenTypes } from 'vscode-languageserver-types' export class ActionSemanticTokenProvider implements ContextMapperSemanticTokenProvider { supports (node: AstNode): node is Action { @@ -15,7 +16,11 @@ export class ActionSemanticTokenProvider implements ContextMapperSemanticTokenPr const typeKeywords = ['ACT', 'MONITOR'] if (typeKeywords.includes(node.type)) { - highlightKeyword(node, acceptor, node.type) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + property: 'type' + }) } else { highlightString(node, acceptor, 'type') } diff --git a/src/language/semantictokens/vdad/ValueClusterSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ValueClusterSemanticTokenProvider.ts index 2984c25..60b9e6b 100644 --- a/src/language/semantictokens/vdad/ValueClusterSemanticTokenProvider.ts +++ b/src/language/semantictokens/vdad/ValueClusterSemanticTokenProvider.ts @@ -14,7 +14,7 @@ export class ValueClusterSemanticTokenProvider implements ContextMapperSemanticT highlightTypeDeclaration(node, acceptor, 'ValueCluster') if (node.coreValue) { - highlightField(node, acceptor, ['core'], 'coreValue') + 
highlightField(node, acceptor, ['core'], 'coreValue', SemanticTokenTypes.string) } if (node.coreValue7000) { highlightField(node, acceptor, ['core'], 'coreValue7000') diff --git a/src/language/semantictokens/vdad/ValueRegisterSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ValueRegisterSemanticTokenProvider.ts index 2609f59..6ebdb8a 100644 --- a/src/language/semantictokens/vdad/ValueRegisterSemanticTokenProvider.ts +++ b/src/language/semantictokens/vdad/ValueRegisterSemanticTokenProvider.ts @@ -2,7 +2,7 @@ import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticToke import { isValueRegister, ValueRegister } from '../../generated/ast.js' import { AstNode } from 'langium' import { SemanticTokenAcceptor } from 'langium/lsp' -import { highlightAttribute, highlightKeyword } from '../HighlightingHelper.js' +import { highlightAttribute, highlightTypeDeclaration } from '../HighlightingHelper.js' export class ValueRegisterSemanticTokenProvider implements ContextMapperSemanticTokenProvider { supports (node: AstNode): node is ValueRegister { @@ -10,10 +10,10 @@ export class ValueRegisterSemanticTokenProvider implements ContextMapperSemantic } highlight (node: ValueRegister, acceptor: SemanticTokenAcceptor) { - highlightKeyword(node, acceptor, 'ValueRegister') + highlightTypeDeclaration(node, acceptor, 'ValueRegister') if (node.context) { - highlightAttribute(node, acceptor, ['of'], 'context') + highlightAttribute(node, acceptor, ['for'], 'context') } } } diff --git a/test/parsing/CommentParsing.test.ts b/test/parsing/CommentParsing.test.ts new file mode 100644 index 0000000..63595dd --- /dev/null +++ b/test/parsing/CommentParsing.test.ts @@ -0,0 +1,104 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' 
+import { afterEach, beforeAll, describe, test } from 'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('Comment semantic token tests', () => { + test('parse multiline comment at snippet start', async () => { + document = await parseValidInput(parse, ` + /* + This is a multiline comment + */ + ContextMap {} + `) + }) + + test('parse multiline comment at snippet end', async () => { + document = await parseValidInput(parse, ` + ContextMap {} + /* + This is a multiline comment + */ + `) + }) + + test('parse single-line comment at snippet start', async () => { + document = await parseValidInput(parse, ` + // This is a single-line comment + ContextMap {} + `) + }) + + test('parse single-line comment at snippet end', async () => { + document = await parseValidInput(parse, ` + ContextMap {} + // This is a single-line comment + `) + }) + + test('parse multiple multiline comments', async () => { + document = await parseValidInput(parse, ` + /* + TestContext description + */ + BoundedContext TestContext + /* + AnotherContext description + */ + BoundedContext AnotherContext + `) + }) + + test('parse multiple single-line comments', async () => { + document = await parseValidInput(parse, ` + // TestContext description + BoundedContext TestContext + + // AnotherContext description + BoundedContext AnotherContext + `) + }) + + test('parse multiline comment in nested structure', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + /* + This is a multiline comment + */ + Module TestModule + } + `) + }) + + test('parse single-line comment in nested structure', async () => { + document = await 
parseValidInput(parse, ` + BoundedContext TestContext { + // This is a multiline comment + Module TestModule + } + `) + }) + + test('parse single-line comment after a field', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + type TEAM // This is a single-line comment + } + `) + }) +}) diff --git a/test/parsing/AggregateParsing.test.ts b/test/parsing/boundedContext/AggregateParsing.test.ts similarity index 86% rename from test/parsing/AggregateParsing.test.ts rename to test/parsing/boundedContext/AggregateParsing.test.ts index 44c4851..d73d859 100644 --- a/test/parsing/AggregateParsing.test.ts +++ b/test/parsing/boundedContext/AggregateParsing.test.ts @@ -1,9 +1,9 @@ -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' import { parseHelper } from 'langium/test' -import { Aggregate, ContextMappingModel } from '../../src/language/generated/ast.js' +import { Aggregate, ContextMappingModel } from '../../../src/language/generated/ast.js' import { EmptyFileSystem, LangiumDocument } from 'langium' import { beforeAll, describe, expect, test } from 'vitest' -import { parseValidInput } from './ParsingTestHelper.js' +import { parseValidInput } from '../ParsingTestHelper.js' let services: ReturnType let parse: ReturnType> @@ -71,7 +71,9 @@ describe('Aggregate parsing tests', () => { expect(aggregate.responsibilities[0]).toEqual('resp1') expect(aggregate.responsibilities[1]).toEqual('resp2') expect(aggregate.owner).not.toBeUndefined() - expect(aggregate.userRequirements).toHaveLength(1) + expect(aggregate.userRequirements).toHaveLength(0) + expect(aggregate.userStories).toHaveLength(0) + expect(aggregate.useCases).toHaveLength(1) expect(aggregate.knowledgeLevel).toEqual('META') expect(aggregate.contentVolatility).toEqual('RARELY') expect(aggregate.likelihoodForChange).toEqual('NORMAL') @@ -109,7 
+111,10 @@ describe('Aggregate parsing tests', () => { expect(document.parseResult.value.boundedContexts).toHaveLength(1) expect(document.parseResult.value.boundedContexts[0].aggregates).toHaveLength(1) - expect(document.parseResult.value.boundedContexts[0].aggregates[0].userRequirements).toHaveLength(1) + const aggregate = document.parseResult.value.boundedContexts[0].aggregates[0] + expect(aggregate.userStories).toHaveLength(1) + expect(aggregate.userRequirements).toHaveLength(0) + expect(aggregate.useCases).toHaveLength(0) }) test('parse features', async () => { @@ -125,7 +130,10 @@ describe('Aggregate parsing tests', () => { expect(document.parseResult.value.boundedContexts).toHaveLength(1) expect(document.parseResult.value.boundedContexts[0].aggregates).toHaveLength(1) - expect(document.parseResult.value.boundedContexts[0].aggregates[0].userRequirements).toHaveLength(2) + const aggregate = document.parseResult.value.boundedContexts[0].aggregates[0] + expect(aggregate.userStories).toHaveLength(0) + expect(aggregate.userRequirements).toHaveLength(2) + expect(aggregate.useCases).toHaveLength(0) }) test('parse userRequirements', async () => { @@ -150,6 +158,8 @@ function expectAggregateToBeEmpty (aggregate: Aggregate) { expect(aggregate.name).toEqual('TestAggregate') expect(aggregate.responsibilities).toHaveLength(0) expect(aggregate.userRequirements).toHaveLength(0) + expect(aggregate.useCases).toHaveLength(0) + expect(aggregate.userStories).toHaveLength(0) expect(aggregate.owner).toBeUndefined() expect(aggregate.knowledgeLevel).toBeUndefined() expect(aggregate.likelihoodForChange).toBeUndefined() diff --git a/test/parsing/BoundedContextParsing.test.ts b/test/parsing/boundedContext/BoundedContextParsing.test.ts similarity index 95% rename from test/parsing/BoundedContextParsing.test.ts rename to test/parsing/boundedContext/BoundedContextParsing.test.ts index 09b988e..88d9a41 100644 --- a/test/parsing/BoundedContextParsing.test.ts +++ 
b/test/parsing/boundedContext/BoundedContextParsing.test.ts @@ -1,9 +1,9 @@ -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' import { parseHelper } from 'langium/test' -import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' import { EmptyFileSystem, LangiumDocument } from 'langium' import { beforeAll, describe, test, expect } from 'vitest' -import { parseValidInput } from './ParsingTestHelper.js' +import { parseValidInput } from '../ParsingTestHelper.js' let services: ReturnType let parse: ReturnType> diff --git a/test/parsing/SculptorModuleParsing.test.ts b/test/parsing/boundedContext/SculptorModuleParsing.test.ts similarity index 91% rename from test/parsing/SculptorModuleParsing.test.ts rename to test/parsing/boundedContext/SculptorModuleParsing.test.ts index 0d810c4..28d589e 100644 --- a/test/parsing/SculptorModuleParsing.test.ts +++ b/test/parsing/boundedContext/SculptorModuleParsing.test.ts @@ -1,9 +1,9 @@ -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' import { parseHelper } from 'langium/test' -import { ContextMappingModel, SculptorModule } from '../../src/language/generated/ast.js' +import { ContextMappingModel, SculptorModule } from '../../../src/language/generated/ast.js' import { EmptyFileSystem, LangiumDocument } from 'langium' import { beforeAll, describe, expect, test } from 'vitest' -import { parseValidInput } from './ParsingTestHelper.js' +import { parseValidInput } from '../ParsingTestHelper.js' let services: ReturnType let parse: ReturnType> diff --git a/test/parsing/ContextMapParsing.test.ts b/test/parsing/contextMap/ContextMapParsing.test.ts similarity index 92% 
rename from test/parsing/ContextMapParsing.test.ts rename to test/parsing/contextMap/ContextMapParsing.test.ts index a324fd4..576d847 100644 --- a/test/parsing/ContextMapParsing.test.ts +++ b/test/parsing/contextMap/ContextMapParsing.test.ts @@ -1,9 +1,9 @@ -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' import { parseHelper } from 'langium/test' -import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' import { EmptyFileSystem, LangiumDocument } from 'langium' import { beforeAll, describe, expect, test } from 'vitest' -import { parseValidInput } from './ParsingTestHelper.js' +import { parseValidInput } from '../ParsingTestHelper.js' let services: ReturnType let parse: ReturnType> diff --git a/test/parsing/RelationshipParsing.test.ts b/test/parsing/contextMap/RelationshipParsing.test.ts similarity index 95% rename from test/parsing/RelationshipParsing.test.ts rename to test/parsing/contextMap/RelationshipParsing.test.ts index c57b160..1361221 100644 --- a/test/parsing/RelationshipParsing.test.ts +++ b/test/parsing/contextMap/RelationshipParsing.test.ts @@ -1,14 +1,14 @@ -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' import { parseHelper } from 'langium/test' import { ContextMappingModel, CustomerSupplierRelationship, Partnership, SharedKernel, UpstreamDownstreamRelationship -} from '../../src/language/generated/ast.js' +} from '../../../src/language/generated/ast.js' import { EmptyFileSystem, LangiumDocument } from 'langium' import { beforeAll, describe, expect, test } from 'vitest' -import { parseValidInput } from './ParsingTestHelper.js' +import { parseValidInput } from 
'../ParsingTestHelper.js' let services: ReturnType let parse: ReturnType> @@ -112,7 +112,7 @@ describe('Relationship parsing tests', () => { expectRelationshipType(document, 'SharedKernel') }) - test('parse Participant relationship properties', async () => { + test('parse Partnership relationship properties', async () => { document = await parseValidInput(parse, ` ContextMap { TestContext [P] <-> [P] FirstContext : RelName { @@ -133,7 +133,7 @@ describe('Relationship parsing tests', () => { expect(relationship.participant2).not.toBeUndefined() }) - test('parse Participant relationship variation 1', async () => { + test('parse Partnership relationship variation 1', async () => { document = await parseValidInput(parse, ` ContextMap { TestContext [P] <-> [P] FirstContext @@ -145,7 +145,7 @@ describe('Relationship parsing tests', () => { expectRelationshipType(document, 'Partnership') }) - test('parse Participant relationship variation 2', async () => { + test('parse Partnership relationship variation 2', async () => { document = await parseValidInput(parse, ` ContextMap { [P] TestContext <-> [P] FirstContext @@ -157,7 +157,7 @@ describe('Relationship parsing tests', () => { expectRelationshipType(document, 'Partnership') }) - test('parse Participant relationship variation 3', async () => { + test('parse Partnership relationship variation 3', async () => { document = await parseValidInput(parse, ` ContextMap { TestContext [P] <-> FirstContext [P] @@ -169,7 +169,7 @@ describe('Relationship parsing tests', () => { expectRelationshipType(document, 'Partnership') }) - test('parse Participant relationship variation 4', async () => { + test('parse Partnership relationship variation 4', async () => { document = await parseValidInput(parse, ` ContextMap { [P] TestContext <-> FirstContext [P] @@ -181,7 +181,7 @@ describe('Relationship parsing tests', () => { expectRelationshipType(document, 'Partnership') }) - test('parse Participant relationship variation 5', async () => { + 
test('parse Partnership relationship variation 5', async () => { document = await parseValidInput(parse, ` ContextMap { TestContext Partnership FirstContext diff --git a/test/parsing/DomainParsing.test.ts b/test/parsing/domain/DomainParsing.test.ts similarity index 92% rename from test/parsing/DomainParsing.test.ts rename to test/parsing/domain/DomainParsing.test.ts index d8981eb..58b7479 100644 --- a/test/parsing/DomainParsing.test.ts +++ b/test/parsing/domain/DomainParsing.test.ts @@ -1,9 +1,9 @@ -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' import { parseHelper } from 'langium/test' -import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' import { EmptyFileSystem, LangiumDocument } from 'langium' import { beforeAll, describe, expect, test } from 'vitest' -import { parseValidInput } from './ParsingTestHelper.js' +import { parseValidInput } from '../ParsingTestHelper.js' let services: ReturnType let parse: ReturnType> diff --git a/test/parsing/UserRequirementParsing.test.ts b/test/parsing/requirements/UserRequirementParsing.test.ts similarity index 97% rename from test/parsing/UserRequirementParsing.test.ts rename to test/parsing/requirements/UserRequirementParsing.test.ts index 501cea9..7b6a8bd 100644 --- a/test/parsing/UserRequirementParsing.test.ts +++ b/test/parsing/requirements/UserRequirementParsing.test.ts @@ -1,9 +1,9 @@ -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' import { parseHelper } from 'langium/test' -import { ContextMappingModel, NormalFeature, StoryFeature, UseCase, UserStory } from '../../src/language/generated/ast.js' +import { ContextMappingModel, 
NormalFeature, StoryFeature, UseCase, UserStory } from '../../../src/language/generated/ast.js' import { EmptyFileSystem, LangiumDocument } from 'langium' import { beforeAll, describe, expect, test } from 'vitest' -import { parseValidInput } from './ParsingTestHelper.js' +import { parseValidInput } from '../ParsingTestHelper.js' let services: ReturnType let parse: ReturnType> diff --git a/test/parsing/StakeholdersParsing.test.ts b/test/parsing/vdad/StakeholdersParsing.test.ts similarity index 96% rename from test/parsing/StakeholdersParsing.test.ts rename to test/parsing/vdad/StakeholdersParsing.test.ts index b5227f4..0b0970b 100644 --- a/test/parsing/StakeholdersParsing.test.ts +++ b/test/parsing/vdad/StakeholdersParsing.test.ts @@ -1,9 +1,9 @@ -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' import { parseHelper } from 'langium/test' -import { ContextMappingModel, Stakeholder, StakeholderGroup, Stakeholders } from '../../src/language/generated/ast.js' +import { ContextMappingModel, Stakeholder, StakeholderGroup, Stakeholders } from '../../../src/language/generated/ast.js' import { EmptyFileSystem, LangiumDocument } from 'langium' import { beforeAll, describe, expect, test } from 'vitest' -import { parseValidInput } from './ParsingTestHelper.js' +import { parseValidInput } from '../ParsingTestHelper.js' let services: ReturnType let parse: ReturnType> diff --git a/test/parsing/ValueRegisterParsing.test.ts b/test/parsing/vdad/ValueRegisterParsing.test.ts similarity index 98% rename from test/parsing/ValueRegisterParsing.test.ts rename to test/parsing/vdad/ValueRegisterParsing.test.ts index 79e22ee..276742f 100644 --- a/test/parsing/ValueRegisterParsing.test.ts +++ b/test/parsing/vdad/ValueRegisterParsing.test.ts @@ -1,4 +1,4 @@ -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' 
+import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' import { parseHelper } from 'langium/test' import { ContextMappingModel, @@ -6,10 +6,10 @@ import { ValueElicitation, ValueEpic, ValueRegister -} from '../../src/language/generated/ast.js' +} from '../../../src/language/generated/ast.js' import { EmptyFileSystem, LangiumDocument } from 'langium' import { beforeAll, describe, expect, test } from 'vitest' -import { parseInvalidInput, parseValidInput } from './ParsingTestHelper.js' +import { parseInvalidInput, parseValidInput } from '../ParsingTestHelper.js' let services: ReturnType let parse: ReturnType> @@ -20,7 +20,7 @@ beforeAll(async () => { parse = parseHelper(services.ContextMapperDsl) }) -describe('Value cluster parsing tests', () => { +describe('Value register parsing tests', () => { test('parse value register without body', async () => { document = await parseValidInput(parse, ` ValueRegister TestRegister diff --git a/test/semnantictokens/BoundedContextSemanticTokens.test.ts b/test/semnantictokens/BoundedContextSemanticTokens.test.ts deleted file mode 100644 index 4714e16..0000000 --- a/test/semnantictokens/BoundedContextSemanticTokens.test.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { afterEach, beforeAll, describe, test } from 'vitest' -import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' -import { EmptyFileSystem, type LangiumDocument } from 'langium' -import { SemanticTokenProvider } from 'langium/lsp' -import { clearDocuments, parseHelper } from 'langium/test' -import { ContextMappingModel } from '../../src/language/generated/ast.js' -import { - assertSemanticToken, - assertSemanticTokenLength, createSemanticTokenParams, - extractSemanticTokens -} from './SemanticTokenTestHelper.js' -import { SemanticTokens } from 'vscode-languageserver-types' - -let services: ReturnType -let parse: ReturnType> -let document: LangiumDocument | undefined -let semanticTokenProvider: 
SemanticTokenProvider - -beforeAll(async () => { - services = createContextMapperDslServices(EmptyFileSystem) - parse = parseHelper(services.ContextMapperDsl) - semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! -}) - -afterEach(async () => { - document && await clearDocuments(services.shared, [document]) -}) - -describe('BoundedContext semantic token test', () => { - test('check bounded context without body', async () => { - document = await parse('BoundedContext TestContext') - const params = createSemanticTokenParams(document) - const result = await semanticTokenProvider.semanticHighlight(document, params) - assertEmptyBoundedContext(result) - }) - - test('check bounded context with empty body', async () => { - document = await parse('BoundedContext TestContext {}') - const params = createSemanticTokenParams(document) - const result = await semanticTokenProvider.semanticHighlight(document, params) - assertEmptyBoundedContext(result) - }) - - test('check bounded context with member attributes', async () => { - document = await parse(` - BoundedContext TestContext { - type = UNDEFINED - implementationTechnology = "java" - responsibilities = "resp1", "resp2" - businessModel = "model" - domainVisionStatement = "Test" - knowledgeLevel = CONCRETE - evolution GENESIS - } - `) - const params = createSemanticTokenParams(document) - const result = await semanticTokenProvider.semanticHighlight(document, params) - - const expectedNumberOfTokens = 17 - assertSemanticTokenLength(result, expectedNumberOfTokens) - const tokens = extractSemanticTokens(result, expectedNumberOfTokens) - - assertSemanticToken(tokens[2], 1, 6, 4, semanticTokenProvider.tokenTypes.keyword, 0) - assertSemanticToken(tokens[3], 0, 7, 9, semanticTokenProvider.tokenTypes.enumMember, 0) - - assertSemanticToken(tokens[4], 1, 6, 24, semanticTokenProvider.tokenTypes.keyword, 0) - assertSemanticToken(tokens[5], 0, 27, 6, semanticTokenProvider.tokenTypes.string, 0) - - 
assertSemanticToken(tokens[6], 1, 6, 16, semanticTokenProvider.tokenTypes.keyword, 0) - assertSemanticToken(tokens[7], 0, 19, 7, semanticTokenProvider.tokenTypes.string, 0) - assertSemanticToken(tokens[8], 0, 9, 7, semanticTokenProvider.tokenTypes.string, 0) - - assertSemanticToken(tokens[9], 1, 6, 13, semanticTokenProvider.tokenTypes.keyword, 0) - assertSemanticToken(tokens[10], 0, 16, 7, semanticTokenProvider.tokenTypes.string, 0) - - assertSemanticToken(tokens[11], 1, 6, 21, semanticTokenProvider.tokenTypes.keyword, 0) - assertSemanticToken(tokens[12], 0, 24, 6, semanticTokenProvider.tokenTypes.string, 0) - - assertSemanticToken(tokens[13], 1, 6, 14, semanticTokenProvider.tokenTypes.keyword, 0) - assertSemanticToken(tokens[14], 0, 17, 8, semanticTokenProvider.tokenTypes.enumMember, 0) - - assertSemanticToken(tokens[15], 1, 6, 9, semanticTokenProvider.tokenTypes.keyword, 0) - assertSemanticToken(tokens[16], 0, 10, 7, semanticTokenProvider.tokenTypes.enumMember, 0) - }) -}) - -function assertEmptyBoundedContext (result: SemanticTokens) { - const expectedNumberOfTokens = 2 - assertSemanticTokenLength(result, expectedNumberOfTokens) - - const tokens = extractSemanticTokens(result, expectedNumberOfTokens) - - assertSemanticToken( - tokens[0], - 0, - 0, - 14, - semanticTokenProvider.tokenTypes.keyword, - 0 - ) - - assertSemanticToken( - tokens[1], - 0, - 15, - 11, - semanticTokenProvider.tokenTypes.type, - semanticTokenProvider.tokenModifiers.declaration - ) -} diff --git a/test/semnantictokens/CommentSemanticTokenProvider.test.ts b/test/semnantictokens/CommentSemanticTokenProvider.test.ts new file mode 100644 index 0000000..30740e8 --- /dev/null +++ b/test/semnantictokens/CommentSemanticTokenProvider.test.ts @@ -0,0 +1,194 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { 
EmptyFileSystem, LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { afterEach, beforeAll, describe, test } from 'vitest' +import { + createSemanticTokenParams, expectSemanticTokensToEqual, + expectSemanticTokensToHaveLength, + extractSemanticTokens +} from './SemanticTokenTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! +}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('Comment semantic token tests', () => { + test('check semantic tokens for multiline comment at snippet start', async () => { + document = await parse(` + /* + This is a multiline comment + */ + ContextMap {} + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 2 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 47, semanticTokenProvider.tokenTypes.comment, 0) + }) + + test('check semantic tokens for multiline comment at snippet end', async () => { + document = await parse(` + ContextMap {} + /* + This is a multiline comment + */ + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 2 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 6, 47, 
semanticTokenProvider.tokenTypes.comment, 0) + }) + + test('check semantic tokens for single-line comment at snippet start', async () => { + document = await parse(` + // This is a single-line comment + ContextMap {} + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 2 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 32, semanticTokenProvider.tokenTypes.comment, 0) + }) + + test('check semantic tokens for single-line comment at snippet end', async () => { + document = await parse(` + ContextMap {} + // This is a single-line comment + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 2 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 6, 32, semanticTokenProvider.tokenTypes.comment, 0) + }) + + test('check semantic tokens for multiple multiline comments', async () => { + document = await parse(` + /* + TestContext description + */ + BoundedContext TestContext + /* + AnotherContext description + */ + BoundedContext AnotherContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 6 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 43, semanticTokenProvider.tokenTypes.comment, 0) + expectSemanticTokensToEqual(tokens[3], 1, 6, 46, semanticTokenProvider.tokenTypes.comment, 0) + }) + + test('check 
semantic tokens for multiple single-line comments', async () => { + document = await parse(` + // TestContext description + BoundedContext TestContext + + // AnotherContext description + BoundedContext AnotherContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 6 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 26, semanticTokenProvider.tokenTypes.comment, 0) + expectSemanticTokensToEqual(tokens[3], 2, 6, 29, semanticTokenProvider.tokenTypes.comment, 0) + }) + + test('check semantic tokens for multiline comment in nested structure', async () => { + document = await parse(` + BoundedContext TestContext { + /* + This is a multiline comment + */ + Module TestModule + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 5 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 51, semanticTokenProvider.tokenTypes.comment, 0) + }) + + test('check semantic tokens for single-line comment in nested structure', async () => { + document = await parse(` + BoundedContext TestContext { + // This is a multiline comment + Module TestModule + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 5 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 30, semanticTokenProvider.tokenTypes.comment, 0) + }) + + 
test('check semantic tokens for single-line comment after a field', async () => { + document = await parse(` + BoundedContext TestContext { + type TEAM // This is a single-line comment + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 5 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[4], 0, 5, 32, semanticTokenProvider.tokenTypes.comment, 0) + }) +}) diff --git a/test/semnantictokens/SemanticTokenTestHelper.ts b/test/semnantictokens/SemanticTokenTestHelper.ts index fba3fe8..b985631 100644 --- a/test/semnantictokens/SemanticTokenTestHelper.ts +++ b/test/semnantictokens/SemanticTokenTestHelper.ts @@ -19,7 +19,7 @@ export function createSemanticTokenParams (document: LangiumDocument +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! 
+}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('Aggregate semantic token tests', () => { + test('check semantic tokens for Aggregate without body', async () => { + document = await parse(` + BoundedContext TestContext { + Aggregate TestAggregate + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyAggregate(result) + }) + + test('check semantic tokens for Aggregate with empty body', async () => { + document = await parse(` + BoundedContext TestContext { + Aggregate TestAggregate {} + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyAggregate(result) + }) + + test('check semantic tokens for Aggregate with full body', async () => { + document = await parse(` + BoundedContext TestContext { + "doc" + Aggregate TestAggregate { + responsibilities "resp1", "resp2" + owner = TestContext + useCases TestUseCase + knowledgeLevel = META + contentVolatility = RARELY + likelihoodForChange = NORMAL + availabilityCriticality = HIGH + consistencyCriticality = HIGH + securityZone "testZone" + securityCriticality = LOW + securityAccessGroup = "testGroup" + storageSimilarity = TINY + } + } + UseCase TestUseCase + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 32 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 5, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[3], 1, 8, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[4], 0, 10, 13, semanticTokenProvider.tokenTypes.type, 
semanticTokenProvider.tokenModifiers.declaration) + + // responsibilities + expectSemanticTokensToEqual(tokens[5], 1, 10, 16, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 17, 7, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[7], 0, 9, 7, semanticTokenProvider.tokenTypes.string, 0) + + // owner + expectSemanticTokensToEqual(tokens[8], 1, 10, 5, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[9], 0, 8, 11, semanticTokenProvider.tokenTypes.type, 0) + + // useCases + expectSemanticTokensToEqual(tokens[10], 1, 10, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[11], 0, 9, 11, semanticTokenProvider.tokenTypes.type, 0) + + // knowledgeLevel + expectSemanticTokensToEqual(tokens[12], 1, 10, 14, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[13], 0, 17, 4, semanticTokenProvider.tokenTypes.enumMember, 0) + + // contentVolatility + expectSemanticTokensToEqual(tokens[14], 1, 10, 17, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[15], 0, 20, 6, semanticTokenProvider.tokenTypes.enumMember, 0) + + // likelihoodForChange + expectSemanticTokensToEqual(tokens[16], 1, 10, 19, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[17], 0, 22, 6, semanticTokenProvider.tokenTypes.enumMember, 0) + + // availabilityCriticality + expectSemanticTokensToEqual(tokens[18], 1, 10, 23, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[19], 0, 26, 4, semanticTokenProvider.tokenTypes.enumMember, 0) + + // consistencyCriticality + expectSemanticTokensToEqual(tokens[20], 1, 10, 22, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[21], 0, 25, 4, semanticTokenProvider.tokenTypes.enumMember, 0) + + // securityZone + expectSemanticTokensToEqual(tokens[22], 1, 10, 12, 
semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[23], 0, 13, 10, semanticTokenProvider.tokenTypes.string, 0) + + // securityCriticality + expectSemanticTokensToEqual(tokens[24], 1, 10, 19, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[25], 0, 22, 3, semanticTokenProvider.tokenTypes.enumMember, 0) + + // securityAccessGroup + expectSemanticTokensToEqual(tokens[26], 1, 10, 19, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[27], 0, 22, 11, semanticTokenProvider.tokenTypes.string, 0) + + // storageSimilarity + expectSemanticTokensToEqual(tokens[28], 1, 10, 17, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[29], 0, 20, 4, semanticTokenProvider.tokenTypes.enumMember, 0) + }) + + test('check semantic tokens for Aggregate with UserStory', async () => { + document = await parse(` + BoundedContext TestContext { + Aggregate TestAggregate { + userStories = TestStory + } + } + UserStory TestStory + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[4], 1, 10, 11, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 14, 9, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens for Aggregate with features', async () => { + document = await parse(` + BoundedContext TestContext { + Aggregate TestAggregate { + features TestStory + } + } + UserStory TestStory + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, 
expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[4], 1, 10, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 9, 9, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens for Aggregate with user requirements', async () => { + document = await parse(` + BoundedContext TestContext { + Aggregate TestAggregate { + userRequirements TestStory + } + } + UserStory TestStory + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[4], 1, 10, 16, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 17, 9, semanticTokenProvider.tokenTypes.type, 0) + }) +}) + +function expectEmptyAggregate (result: SemanticTokens) { + const expectedNumberOfTokens = 4 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 10, 13, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} diff --git a/test/semnantictokens/boundedContext/BoundedContextSemanticTokens.test.ts b/test/semnantictokens/boundedContext/BoundedContextSemanticTokens.test.ts new file mode 100644 index 0000000..cdf0e80 --- /dev/null +++ b/test/semnantictokens/boundedContext/BoundedContextSemanticTokens.test.ts @@ -0,0 +1,138 @@ +import { afterEach, beforeAll, describe, test } from 'vitest' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' +import { 
EmptyFileSystem, type LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' +import { + expectSemanticTokensToEqual, + expectSemanticTokensToHaveLength, createSemanticTokenParams, + extractSemanticTokens +} from '../SemanticTokenTestHelper.js' +import { SemanticTokens } from 'vscode-languageserver-types' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! +}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('BoundedContext semantic token tests', () => { + test('check semantic tokens for bounded context without body', async () => { + document = await parse('BoundedContext TestContext') + const params = createSemanticTokenParams(document) + const result = await semanticTokenProvider.semanticHighlight(document, params) + expectEmptyBoundedContext(result) + }) + + test('check semantic tokens for bounded context with empty body', async () => { + document = await parse('BoundedContext TestContext {}') + const params = createSemanticTokenParams(document) + const result = await semanticTokenProvider.semanticHighlight(document, params) + expectEmptyBoundedContext(result) + }) + + test('check semantic tokens bounded context with full body', async () => { + document = await parse(` + BoundedContext TestContext { + type = UNDEFINED + implementationTechnology = "java" + responsibilities = "resp1", "resp2" + businessModel = "model" + domainVisionStatement = "Test" + knowledgeLevel = CONCRETE + evolution GENESIS + } + `) + const params = 
createSemanticTokenParams(document) + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 17 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 6, 4, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 7, 9, semanticTokenProvider.tokenTypes.enumMember, 0) + + expectSemanticTokensToEqual(tokens[4], 1, 6, 24, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 27, 6, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[6], 1, 6, 16, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[7], 0, 19, 7, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[8], 0, 9, 7, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[9], 1, 6, 13, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[10], 0, 16, 7, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[11], 1, 6, 21, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[12], 0, 24, 6, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[13], 1, 6, 14, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[14], 0, 17, 8, semanticTokenProvider.tokenTypes.enumMember, 0) + + expectSemanticTokensToEqual(tokens[15], 1, 6, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[16], 0, 10, 7, semanticTokenProvider.tokenTypes.enumMember, 0) + }) + + test('check semantic tokens bounded context with attributes', async () => { + document = await parse(` + BoundedContext TestContext + implements TestDomain + realizes OtherContext + refines NextContext { + } + BoundedContext 
OtherContext + BoundedContext NextContext + Domain TestDomain + `) + const params = createSemanticTokenParams(document) + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 14 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 10, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 11, 10, semanticTokenProvider.tokenTypes.type, 0) + + expectSemanticTokensToEqual(tokens[4], 1, 8, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 9, 12, semanticTokenProvider.tokenTypes.type, 0) + + expectSemanticTokensToEqual(tokens[6], 1, 8, 7, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[7], 0, 8, 11, semanticTokenProvider.tokenTypes.type, 0) + }) +}) + +function expectEmptyBoundedContext (result: SemanticTokens) { + const expectedNumberOfTokens = 2 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual( + tokens[0], + 0, + 0, + 14, + semanticTokenProvider.tokenTypes.keyword, + 0 + ) + + expectSemanticTokensToEqual( + tokens[1], + 0, + 15, + 11, + semanticTokenProvider.tokenTypes.type, + semanticTokenProvider.tokenModifiers.declaration + ) +} diff --git a/test/semnantictokens/boundedContext/SculptorModuleSemanticTokenProvider.test.ts b/test/semnantictokens/boundedContext/SculptorModuleSemanticTokenProvider.test.ts new file mode 100644 index 0000000..8aa07e1 --- /dev/null +++ b/test/semnantictokens/boundedContext/SculptorModuleSemanticTokenProvider.test.ts @@ -0,0 +1,103 @@ +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel 
} from '../../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { afterEach, beforeAll, describe, test } from 'vitest' +import { SemanticTokens } from 'vscode-languageserver-types' +import { + createSemanticTokenParams, + expectSemanticTokensToEqual, + expectSemanticTokensToHaveLength, + extractSemanticTokens +} from '../SemanticTokenTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! +}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('SculptorModule semantic token tests', () => { + test('check semantic tokens of SculptorModule without body', async () => { + document = await parse(` + BoundedContext TestContext { + Module TestModule + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptySculptorModule(result) + }) + + test('check semantic tokens of SculptorModule with empty body', async () => { + document = await parse(` + BoundedContext TestContext { + Module TestModule { + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptySculptorModule(result) + }) + + test('check semantic tokens of SculptorModule with full body', async () => { + document = await parse(` + BoundedContext TestContext { + "doc" + Module TestModule { + hint = "hint" + external + basePackage = base.package + Aggregate SecondAggregate + } + } + `) + const params = createSemanticTokenParams(document) + + 
const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 12 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 5, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[3], 1, 8, 6, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[4], 0, 7, 10, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[5], 1, 10, 4, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 7, 6, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[7], 1, 10, 8, semanticTokenProvider.tokenTypes.keyword, 0) + + expectSemanticTokensToEqual(tokens[8], 1, 10, 11, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[9], 0, 14, 12, semanticTokenProvider.tokenTypes.namespace, 0) + + expectSemanticTokensToEqual(tokens[10], 1, 10, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[11], 0, 10, 15, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + }) +}) + +function expectEmptySculptorModule (result: SemanticTokens) { + const expectedNumberOfTokens = 4 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 6, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 7, 10, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} diff --git a/test/semnantictokens/contextMap/ContextMapSemanticTokenProvider.test.ts b/test/semnantictokens/contextMap/ContextMapSemanticTokenProvider.test.ts new file mode 100644 index 
0000000..6582440 --- /dev/null +++ b/test/semnantictokens/contextMap/ContextMapSemanticTokenProvider.test.ts @@ -0,0 +1,81 @@ +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { afterEach, beforeAll, describe, test } from 'vitest' +import { + createSemanticTokenParams, + expectSemanticTokensToEqual, expectSemanticTokensToHaveLength, + extractSemanticTokens +} from '../SemanticTokenTestHelper.js' +import { SemanticTokens } from 'vscode-languageserver-types' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! 
+}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('ContextMap semantic token tests', () => { + test('check semantic tokens of ContextMap with empty body', async () => { + document = await parse(` + ContextMap {} + `) + const params = createSemanticTokenParams(document) + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyContextMap(result) + }) + + test('check semantic tokens of ContextMap with full body', async () => { + document = await parse(` + ContextMap TestMap { + state = AS_IS + type = ORGANIZATIONAL + contains FirstContext, SecondContext + + FirstContext [SK] <-> [SK] SecondContext + } + + BoundedContext FirstContext + BoundedContext SecondContext + `) + + const params = createSemanticTokenParams(document) + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 18 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 10, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[1], 0, 11, 7, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 5, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 8, 5, semanticTokenProvider.tokenTypes.enumMember, 0) + + expectSemanticTokensToEqual(tokens[4], 1, 8, 4, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 7, 14, semanticTokenProvider.tokenTypes.enumMember, 0) + + expectSemanticTokensToEqual(tokens[6], 1, 8, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[7], 0, 9, 12, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[8], 0, 14, 13, semanticTokenProvider.tokenTypes.type, 0) 
+ }) +}) + +function expectEmptyContextMap (result: SemanticTokens) { + const expectedNumberOfTokens = 1 + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 10, semanticTokenProvider.tokenTypes.keyword, 0) +} diff --git a/test/semnantictokens/contextMap/RelationshipSemanticTokenProvider.test.ts b/test/semnantictokens/contextMap/RelationshipSemanticTokenProvider.test.ts new file mode 100644 index 0000000..bb49ba6 --- /dev/null +++ b/test/semnantictokens/contextMap/RelationshipSemanticTokenProvider.test.ts @@ -0,0 +1,588 @@ +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { afterEach, beforeAll, describe, test } from 'vitest' +import { parseValidInput } from '../../parsing/ParsingTestHelper.js' +import { + createSemanticTokenParams, expectSemanticTokensToEqual, + expectSemanticTokensToHaveLength, + extractSemanticTokens +} from '../SemanticTokenTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! 
+}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('Relationship semantic token tests', () => { + test('check semantic tokens of Partnership with full body', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [P] <-> [P] FirstContext : RelName { + implementationTechnology "Java" + } + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 13 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 3, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 5, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 3, 12, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[6], 0, 15, 7, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[7], 1, 12, 24, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[8], 0, 25, 6, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of Partnership variation 1', async () => { + document = await parse(` + ContextMap { + TestContext [P] <-> [P] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + 
expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 3, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 5, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 3, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of Partnership variation 2', async () => { + document = await parseValidInput(parse, ` + ContextMap { + [P] TestContext <-> [P] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 11, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[2], 0, 3, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[3], 0, 12, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 5, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 3, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of Partnership variation 3', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [P] <-> FirstContext [P] + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await 
semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 3, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[5], 0, 14, 1, semanticTokenProvider.tokenTypes.keyword, 0) + }) + + test('check semantic tokens of Partnership variation 4', async () => { + document = await parseValidInput(parse, ` + ContextMap { + [P] TestContext <-> FirstContext [P] + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 11, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[2], 0, 3, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[3], 0, 12, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[5], 0, 14, 1, semanticTokenProvider.tokenTypes.keyword, 0) + }) + + test('check semantic tokens of Partnership variation 5', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Partnership FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const 
params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 12, 11, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 12, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of SharedKernel with full body', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext <-> FirstContext : RelName { + implementationTechnology "Java" + } + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 11 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 12, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[3], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[4], 0, 15, 7, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[5], 1, 12, 24, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 25, 6, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of SharedKernel variation 1', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [SK] <-> [SK] FirstContext + } 
+ BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 4, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 5, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of SharedKernel variation 2', async () => { + document = await parseValidInput(parse, ` + ContextMap { + [SK] TestContext <-> [SK] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 11, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[2], 0, 4, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[3], 0, 12, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 5, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of SharedKernel variation 3', async () => { + document = await 
parseValidInput(parse, ` + ContextMap { + TestContext [SK] <-> FirstContext [SK] + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 4, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[5], 0, 14, 2, semanticTokenProvider.tokenTypes.keyword, 0) + }) + + test('check semantic tokens of SharedKernel variation 4', async () => { + document = await parseValidInput(parse, ` + ContextMap { + [SK] TestContext <-> FirstContext [SK] + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 11, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[2], 0, 4, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[3], 0, 12, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[5], 0, 14, 2, semanticTokenProvider.tokenTypes.keyword, 0) + }) + + test('check 
semantic tokens of SharedKernel variation 5', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Shared-Kernel FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 12, 13, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 14, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of SharedKernel variation 6', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext <-> FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 12, 3, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[3], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of CustomSupplier relationship with full body', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [S,OHS] -> [C,CF] FirstContext : RelName { + implementationTechnology "Java" + downstreamRights INFLUENCER + exposedAggregates = TestAggregate + } + } + 
BoundedContext FirstContext + BoundedContext TestContext { + Aggregate TestAggregate + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 21 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 2, 3, semanticTokenProvider.tokenTypes.enumMember, 0) + expectSemanticTokensToEqual(tokens[4], 0, 5, 2, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[5], 0, 4, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 2, 2, semanticTokenProvider.tokenTypes.enumMember, 0) + expectSemanticTokensToEqual(tokens[7], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[8], 0, 15, 7, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[9], 1, 12, 24, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[10], 0, 25, 6, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[11], 1, 12, 16, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[12], 0, 17, 10, semanticTokenProvider.tokenTypes.enumMember, 0) + + expectSemanticTokensToEqual(tokens[13], 1, 12, 17, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[14], 0, 20, 13, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of CustomerSupplier variation 1', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [S] -> [C] FirstContext + } + 
BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 3, 2, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 4, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 3, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of CustomerSupplier variation 2', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [C] <- [S] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 3, 2, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 4, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 3, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of CustomerSupplier variation 3', async () => { + document = await 
parseValidInput(parse, ` + ContextMap { + TestContext Customer-Supplier FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 12, 17, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 18, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of CustomerSupplier variation 4', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Supplier-Customer FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 12, 17, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 18, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of UpstreamDownstream relationship with full body', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [U,OHS] -> [D,CF] FirstContext : RelName { + downstreamRights INFLUENCER + exposedAggregates = TestAggregate + implementationTechnology "Java" + } + } + BoundedContext FirstContext + BoundedContext TestContext + `) + 
const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 19 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 2, 3, semanticTokenProvider.tokenTypes.enumMember, 0) + expectSemanticTokensToEqual(tokens[4], 0, 5, 2, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[5], 0, 4, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 2, 2, semanticTokenProvider.tokenTypes.enumMember, 0) + expectSemanticTokensToEqual(tokens[7], 0, 4, 12, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[8], 0, 15, 7, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[9], 1, 12, 16, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[10], 0, 17, 10, semanticTokenProvider.tokenTypes.enumMember, 0) + + expectSemanticTokensToEqual(tokens[11], 1, 12, 17, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[12], 0, 20, 13, semanticTokenProvider.tokenTypes.type, 0) + + expectSemanticTokensToEqual(tokens[13], 1, 12, 24, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[14], 0, 25, 6, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of UpstreamDownstream variation 1', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [U] -> [D] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = 
createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 3, 2, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 4, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 3, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of UpstreamDownstream variation 2', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [D] <- [U] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 10 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 13, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 3, 2, semanticTokenProvider.tokenTypes.operator, 0) + expectSemanticTokensToEqual(tokens[4], 0, 4, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 3, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of UpstreamDownstream variation 3', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Upstream-Downstream 
FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 12, 19, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 20, 12, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of UpstreamDownstream variation 4', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Downstream-Upstream FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 10, 11, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[2], 0, 12, 19, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 20, 12, semanticTokenProvider.tokenTypes.type, 0) + }) +}) diff --git a/test/semnantictokens/domain/DomainSemanticTokenProvider.test.ts b/test/semnantictokens/domain/DomainSemanticTokenProvider.test.ts new file mode 100644 index 0000000..ea6c507 --- /dev/null +++ b/test/semnantictokens/domain/DomainSemanticTokenProvider.test.ts @@ -0,0 +1,153 @@ +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' +import { clearDocuments, parseHelper } from 'langium/test' 
+import { ContextMappingModel } from '../../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { afterEach, beforeAll, describe, test } from 'vitest' +import { + createSemanticTokenParams, expectSemanticTokensToEqual, + expectSemanticTokensToHaveLength, + extractSemanticTokens +} from '../SemanticTokenTestHelper.js' +import { SemanticTokens } from 'vscode-languageserver-types' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! +}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('Domain semantic token tests', () => { + test('check semantic tokens of Domain without body', async () => { + document = await parse(` + Domain TestDomain + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyDomain(result) + }) + + test('check semantic tokens of Domain with empty body', async () => { + document = await parse(` + Domain TestDomain { + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyDomain(result) + }) + + test('check semantic tokens of Domain with full body', async () => { + document = await parse(` + Domain TestDomain { + domainVisionStatement = "vision" + Subdomain FirstSubdomain + Subdomain SecondSubdomain + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + 
expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 6, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[1], 0, 7, 10, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[2], 1, 10, 21, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 24, 8, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of Subdomain without body', async () => { + document = await parse(` + Domain TestDomain { + Subdomain TestSubdomain + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptySubdomain(result) + }) + + test('check semantic tokens of Subdomain with empty body', async () => { + document = await parse(` + Domain TestDomain { + Subdomain TestSubdomain { + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptySubdomain(result) + }) + + test('check semantic tokens of Subdomain with full body', async () => { + document = await parse(` + Domain TestDomain { + Subdomain TestSubdomain + supports TestUseCase + { + domainVisionStatement "vision" + type = CORE_DOMAIN + } + } + UseCase TestUseCase + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 12 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 10, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 10, 13, 
semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[4], 1, 12, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 9, 11, semanticTokenProvider.tokenTypes.type, 0) + + expectSemanticTokensToEqual(tokens[6], 2, 12, 21, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[7], 0, 22, 8, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[8], 1, 12, 4, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[9], 0, 7, 11, semanticTokenProvider.tokenTypes.enumMember, 0) + }) +}) + +function expectEmptyDomain (result: SemanticTokens) { + const expectedNumberOfTokens = 2 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 6, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[1], 0, 7, 10, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} + +function expectEmptySubdomain (result: SemanticTokens) { + const expectedNumberOfTokens = 4 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 10, 13, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} diff --git a/test/semnantictokens/requirements/FeatureSemanticTokenProvider.test.ts b/test/semnantictokens/requirements/FeatureSemanticTokenProvider.test.ts new file mode 100644 index 0000000..15f26fd --- /dev/null +++ b/test/semnantictokens/requirements/FeatureSemanticTokenProvider.test.ts @@ -0,0 +1,79 @@ +import { createContextMapperDslServices } from 
'../../../src/language/ContextMapperDslModule.js' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { afterEach, beforeAll, describe, test } from 'vitest' +import { + createSemanticTokenParams, expectSemanticTokensToEqual, + expectSemanticTokensToHaveLength, + extractSemanticTokens +} from '../SemanticTokenTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! +}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('Feature semantic token tests', () => { + test('check semantic tokens of NormalFeature', async () => { + document = await parse(` + UseCase TestUseCase { + interactions = create an "order" with its "products", "prices" in a "cart" + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 12 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[3], 0, 15, 6, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[4], 0, 7, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 3, 7, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[6], 0, 8, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[7], 0, 9, 10, 
semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[8], 0, 12, 8, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[9], 0, 9, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[10], 0, 3, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[11], 0, 2, 6, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of StoryFeature', async () => { + document = await parse(` + UseCase TestUseCase { + interactions = I want to "place" an "order" with its "products", "prices" in a "cart" + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 13 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[3], 0, 15, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[4], 0, 10, 7, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[5], 0, 8, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 3, 7, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[7], 0, 8, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[8], 0, 9, 10, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[9], 0, 12, 8, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[10], 0, 9, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[11], 0, 3, 1, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[12], 0, 2, 6, semanticTokenProvider.tokenTypes.string, 0) + }) +}) diff --git 
a/test/semnantictokens/requirements/RequirementsSemanticTokenProvider.test.ts b/test/semnantictokens/requirements/RequirementsSemanticTokenProvider.test.ts new file mode 100644 index 0000000..915ef3b --- /dev/null +++ b/test/semnantictokens/requirements/RequirementsSemanticTokenProvider.test.ts @@ -0,0 +1,177 @@ +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { afterEach, beforeAll, describe, test } from 'vitest' +import { + createSemanticTokenParams, expectSemanticTokensToEqual, + expectSemanticTokensToHaveLength, + extractSemanticTokens +} from '../SemanticTokenTestHelper.js' +import { SemanticTokens } from 'vscode-languageserver-types' +import { parseValidInput } from '../../parsing/ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! 
+}) + + afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + + describe('User Requirements semantic token tests', () => { + test('check semantic tokens of UseCase without body', async () => { + document = await parse(` + UseCase TestUseCase + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyUseCase(result) + }) + + test('check semantic tokens of UseCase with empty body', async () => { + document = await parse(` + UseCase TestUseCase { + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyUseCase(result) + }) + + test('check semantic tokens of UseCase with full body', async () => { + document = await parseValidInput(parse, ` + UseCase TestUseCase { + secondaryActors = "actor1", "actor2" + actor "role" + benefit = "benefit" + level = "level" + scope = "scope" + interactions + create an "order", + "edit" an "order" + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 20 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + // secondaryActors + expectSemanticTokensToEqual(tokens[2], 1, 8, 15, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 18, 8, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[4], 0, 10, 8, semanticTokenProvider.tokenTypes.string, 0) + + // actor + expectSemanticTokensToEqual(tokens[5], 1, 8, 5, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 6, 6, semanticTokenProvider.tokenTypes.string, 0) + + // benefit + expectSemanticTokensToEqual(tokens[7], 1, 8, 7, 
semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[8], 0, 10, 9, semanticTokenProvider.tokenTypes.string, 0) + + // level + expectSemanticTokensToEqual(tokens[9], 1, 8, 5, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[10], 0, 8, 7, semanticTokenProvider.tokenTypes.string, 0) + + // scope + expectSemanticTokensToEqual(tokens[11], 1, 8, 5, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[12], 0, 8, 7, semanticTokenProvider.tokenTypes.string, 0) + + // interactions + expectSemanticTokensToEqual(tokens[13], 1, 8, 12, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[14], 1, 10, 6, semanticTokenProvider.tokenTypes.keyword, 0) + }) + + test('check semantic tokens of UserStory without body', async () => { + document = await parseValidInput(parse, ` + UserStory TestUserStory + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyUserStory(result) + }) + + test('check semantic tokens of UserStory with empty body', async () => { + document = await parseValidInput(parse, ` + UserStory TestUserStory { + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyUserStory(result) + }) + + test('check semantic tokens of UserStory with full body', async () => { + document = await parseValidInput(parse, ` + UserStory TestUserStory + split by AnotherUserStory + { + As a "user" I want to create an "order" so that "I can buy stuff" and that "consumption" is promoted, accepting that "savings" are reduced + } + UserStory AnotherUserStory + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 22 + expectSemanticTokensToHaveLength(result, 
expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 9, 16, semanticTokenProvider.tokenTypes.type, 0) + + expectSemanticTokensToEqual(tokens[4], 2, 8, 4, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 5, 6, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[10], 0, 8, 7, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[11], 0, 8, 17, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[12], 0, 18, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[13], 0, 9, 13, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[14], 0, 14, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[15], 0, 3, 8, semanticTokenProvider.tokenTypes.keyword, 0) + + expectSemanticTokensToEqual(tokens[16], 0, 10, 14, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[17], 0, 15, 9, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[18], 0, 10, 3, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[19], 0, 4, 7, semanticTokenProvider.tokenTypes.keyword, 0) + }) +}) + +function expectEmptyUseCase (result: SemanticTokens) { + const expectedNumberOfTokens = 2 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 7, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[1], 0, 8, 11, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} + +function expectEmptyUserStory (result: SemanticTokens) { + const 
expectedNumberOfTokens = 2 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[1], 0, 10, 13, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} diff --git a/test/semnantictokens/vdad/StakeholdersSemanticTokenProvider.test.ts b/test/semnantictokens/vdad/StakeholdersSemanticTokenProvider.test.ts new file mode 100644 index 0000000..6b87f3b --- /dev/null +++ b/test/semnantictokens/vdad/StakeholdersSemanticTokenProvider.test.ts @@ -0,0 +1,199 @@ +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { afterEach, beforeAll, describe, test } from 'vitest' +import { + createSemanticTokenParams, expectSemanticTokensToEqual, + expectSemanticTokensToHaveLength, + extractSemanticTokens +} from '../SemanticTokenTestHelper.js' +import { SemanticTokens } from 'vscode-languageserver-types' +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! 
+}) + + afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + + describe('Stakeholders semantic token tests', () => { + test('check semantic tokens of Stakeholders without body', async () => { + document = await parse(` + Stakeholders + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyStakeholders(result) + }) + + test('check semantic tokens of Stakeholders with empty body', async () => { + document = await parse(` + Stakeholders { + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyStakeholders(result) + }) + + test('check semantic tokens of Stakeholders without attributes', async () => { + document = await parse(` + Stakeholders of BC1, BC2 { + } + BoundedContext BC1 + BoundedContext BC2 + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 8 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 12, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[1], 0, 13, 2, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[2], 0, 3, 3, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[3], 0, 5, 3, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of Stakeholder without body', async () => { + document = await parse(` + Stakeholders { + Stakeholder TestStakeholder + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyStakeholder(result) + }) + + test('check 
semantic tokens of Stakeholder with empty body', async () => { + document = await parse(` + Stakeholders { + Stakeholder TestStakeholder { + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyStakeholder(result) + }) + + test('check semantic tokens of Stakeholder with full body', async () => { + document = await parse(` + Stakeholders { + Stakeholder TestStakeholder { + interest = HIGH + influence MEDIUM + description = "description" + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 9 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[3], 1, 10, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[4], 0, 11, 4, semanticTokenProvider.tokenTypes.enumMember, 0) + + expectSemanticTokensToEqual(tokens[5], 1, 10, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 10, 6, semanticTokenProvider.tokenTypes.enumMember, 0) + + expectSemanticTokensToEqual(tokens[7], 1, 10, 11, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[8], 0, 14, 13, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of StakeholderGroup without body', async () => { + document = await parse(` + Stakeholders { + StakeholderGroup TestStakeholderGroup + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyStakeholderGroup(result) + }) + + test('check semantic tokens of StakeholderGroup with empty body', async () => { + document = await parse(` + Stakeholders { + StakeholderGroup TestStakeholderGroup { + } + } 
+ `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyStakeholderGroup(result) + }) + + test('check semantic tokens of StakeholderGroup with full body', async () => { + document = await parse(` + Stakeholders { + StakeholderGroup TestStakeholderGroup { + Stakeholder TestStakeholder + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 5 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[3], 1, 10, 11, semanticTokenProvider.tokenTypes.keyword, 0) + }) +}) + +function expectEmptyStakeholders (result: SemanticTokens) { + const expectedNumberOfTokens = 1 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 12, semanticTokenProvider.tokenTypes.keyword, 0) +} + +function expectEmptyStakeholder (result: SemanticTokens) { + const expectedNumberOfTokens = 3 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 8, 11, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[2], 0, 12, 15, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} + +function expectEmptyStakeholderGroup (result: SemanticTokens) { + const expectedNumberOfTokens = 3 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[1], 1, 8, 16, semanticTokenProvider.tokenTypes.keyword, 0) + 
expectSemanticTokensToEqual(tokens[2], 0, 17, 20, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} diff --git a/test/semnantictokens/vdad/ValueRegisterSemanticTokenProvider.test.ts b/test/semnantictokens/vdad/ValueRegisterSemanticTokenProvider.test.ts new file mode 100644 index 0000000..30c90a3 --- /dev/null +++ b/test/semnantictokens/vdad/ValueRegisterSemanticTokenProvider.test.ts @@ -0,0 +1,529 @@ +import { createContextMapperDslServices } from '../../../src/language/ContextMapperDslModule.js' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { afterEach, beforeAll, describe, test } from 'vitest' +import { SemanticTokens } from 'vscode-languageserver-types' +import { + createSemanticTokenParams, + expectSemanticTokensToEqual, + expectSemanticTokensToHaveLength, + extractSemanticTokens +} from '../SemanticTokenTestHelper.js' +import { parseValidInput } from '../../parsing/ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! 
+}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('ValueRegister semantic token tests', () => { + test('check semantic tokens of ValueRegister without body', async () => { + document = await parse(` + ValueRegister TestRegister + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValueRegister(result) + }) + + test('check semantic tokens of ValueRegister with empty body', async () => { + document = await parse(` + ValueRegister TestRegister { + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValueRegister(result) + }) + + test('check semantic tokens of ValueRegister with attribute', async () => { + document = await parse(` + ValueRegister TestRegister for TestContext { + } + BoundedContext TestContext + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 6 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 0, 13, 3, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 4, 11, semanticTokenProvider.tokenTypes.type, 0) + }) + + test('check semantic tokens of Value without body', async () => { + document = await parse(` + ValueRegister TestRegister { + Value TestValue + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValue(result) + }) + + test('check semantic tokens of Value with empty body', async () => { + document = await parse(` + ValueRegister TestRegister { + Value TestValue { + } + } + `) + const 
params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValue(result) + }) + + test('check semantic tokens of Value with full body', async () => { + document = await parse(` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestValue { + relatedValue = "relVal" + isCore + opposingValue "oppo" + demonstrator = "dem" + + Stakeholder TestStakeholder + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 16 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[5], 1, 8, 5, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 6, 9, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[7], 1, 10, 12, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[8], 0, 15, 8, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[9], 1, 10, 6, semanticTokenProvider.tokenTypes.keyword, 0) + + expectSemanticTokensToEqual(tokens[10], 1, 10, 13, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[11], 0, 14, 6, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[12], 1, 10, 12, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[13], 0, 15, 5, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of ValueCluster without body', async () => { + document = await parse(` + ValueRegister TestRegister { + ValueCluster TestValueCluster + } + `) + const params = createSemanticTokenParams(document) + + const result = await 
semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValueCluster(result) + }) + + test('check semantic tokens of ValueCluster with empty body', async () => { + document = await parse(` + ValueRegister TestRegister { + ValueCluster TestValueCluster { + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValueCluster(result) + }) + + test('check semantic tokens of ValueCluster with full body', async () => { + document = await parse(` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + ValueCluster TestCluster { + core "testCore" + relatedValue = "relVal" + demonstrator = "dem" + opposingValue "oppo" + + Stakeholder TestStakeholder + Value TestValue + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 19 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[7], 1, 10, 4, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[8], 0, 5, 10, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[9], 1, 10, 12, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[10], 0, 15, 8, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[11], 1, 10, 12, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[12], 0, 15, 5, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[13], 1, 10, 13, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[14], 0, 14, 6, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of ValueCluster with coreValue7000', 
async () => { + document = await parse(` + ValueRegister TestRegister { + ValueCluster TestCluster { + core CARE + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const numberOfExpectedTokens = 6 + expectSemanticTokensToHaveLength(result, numberOfExpectedTokens) + const tokens = extractSemanticTokens(result, numberOfExpectedTokens) + + expectSemanticTokensToEqual(tokens[4], 1, 10, 4, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 5, 4, semanticTokenProvider.tokenTypes.enumMember, 0) + }) + + test('check semantic tokens of ValueEpic without body', async () => { + document = await parse(` + ValueRegister TestRegister { + ValueEpic TestValueEpic + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValueEpic(result) + }) + + test('check semantic tokens of ValueEpic with empty body', async () => { + document = await parse(` + ValueRegister TestRegister { + ValueEpic TestValueEpic { + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValueEpic(result) + }) + + test('check semantic tokens of ValueEpic with full body', async () => { + document = await parse(` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + ValueEpic TestEpic { + As a TestStakeholder I value "val" as demonstrated in + reduction of "redVal1" + realization of "relVal1" + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 16 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + 
expectSemanticTokensToEqual(tokens[7], 1, 10, 4, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[8], 0, 5, 15, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[9], 0, 16, 7, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[10], 0, 8, 5, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[11], 0, 6, 18, semanticTokenProvider.tokenTypes.keyword, 0) + + expectSemanticTokensToEqual(tokens[12], 1, 10, 12, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[13], 0, 13, 9, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[14], 1, 10, 14, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[15], 0, 15, 9, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of ValueNarrative', async () => { + document = await parse(` + ValueRegister TestRegister { + ValueNarrative TestNarrative { + When the SOI executes "feat", + stakeholders expect it to promote, protect or create "promoValue", + possibly degrading or prohibiting "harmValue" + with the following externally observable and/or internally auditable behavior: "conditions" + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 12 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[4], 1, 10, 21, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[5], 0, 22, 6, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[6], 1, 10, 52, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[7], 0, 53, 12, semanticTokenProvider.tokenTypes.string, 0) + + 
expectSemanticTokensToEqual(tokens[8], 1, 10, 33, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[9], 0, 34, 11, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[10], 1, 10, 78, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[11], 0, 79, 12, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of ValueWeighting', async () => { + document = await parse(` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + ValueWeighting TestWeighting { + In the context of the SOI, + stakeholder TestStakeholder values "val1" more than "val2" + expecting benefits such as "benefits" + running the risk of harms such as "harms" + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 18 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[5], 1, 8, 14, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[6], 0, 15, 13, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) + + expectSemanticTokensToEqual(tokens[7], 1, 10, 26, semanticTokenProvider.tokenTypes.keyword, 0) + + expectSemanticTokensToEqual(tokens[8], 1, 10, 11, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[9], 0, 12, 15, semanticTokenProvider.tokenTypes.type, 0) + expectSemanticTokensToEqual(tokens[10], 0, 16, 6, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[11], 0, 7, 6, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[12], 0, 7, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[13], 0, 10, 6, 
semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[14], 1, 10, 26, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[15], 0, 27, 10, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[16], 1, 10, 33, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[17], 0, 34, 7, semanticTokenProvider.tokenTypes.string, 0) + }) + + test('check semantic tokens of ValueElicitation without body', async () => { + document = await parse(` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValueElicitation(result) + }) + + test('check semantic tokens of ValueElicitation with empty body', async () => { + document = await parse(` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder { + } + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + expectEmptyValueElicitation(result) + }) + + test('check semantic tokens of ValueElicitation with full body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder { + impact = MEDIUM + consequences good "conseq" + priority = LOW + } + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 16 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + 
expectSemanticTokensToEqual(tokens[9], 1, 12, 6, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[10], 0, 9, 6, semanticTokenProvider.tokenTypes.enumMember, 0) + + expectSemanticTokensToEqual(tokens[11], 1, 12, 12, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[12], 0, 13, 4, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[13], 0, 5, 8, semanticTokenProvider.tokenTypes.string, 0) + + expectSemanticTokensToEqual(tokens[14], 1, 12, 8, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[15], 0, 11, 3, semanticTokenProvider.tokenTypes.enumMember, 0) + }) + + test('check semantic tokens of ValueElicitation with Consequence Action', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder { + impact = MEDIUM + consequences good "conseq" action "do" ACT + priority = LOW + } + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 19 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[14], 0, 9, 6, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[15], 0, 7, 4, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[16], 0, 5, 3, semanticTokenProvider.tokenTypes.keyword, 0) + }) + + test('check semantic tokens of ValueElicitation with Consequence Action using custom type', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder { + impact = MEDIUM + consequences good "conseq" action 
"do" "TEST" + priority = LOW + } + } + } + `) + const params = createSemanticTokenParams(document) + + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 19 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[14], 0, 9, 6, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[15], 0, 7, 4, semanticTokenProvider.tokenTypes.string, 0) + expectSemanticTokensToEqual(tokens[16], 0, 5, 6, semanticTokenProvider.tokenTypes.string, 0) + }) +}) + +function expectEmptyValueRegister (result: SemanticTokens) { + const expectedNumberOfTokens = 2 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[0], 1, 6, 13, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[1], 0, 14, 12, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} + +function expectEmptyValue (result: SemanticTokens) { + const expectedNumberOfTokens = 4 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 5, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 6, 9, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} + +function expectEmptyValueCluster (result: SemanticTokens) { + const expectedNumberOfTokens = 4 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 12, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 13, 16, 
semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} + +function expectEmptyValueEpic (result: SemanticTokens) { + const expectedNumberOfTokens = 4 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[2], 1, 8, 9, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[3], 0, 10, 13, semanticTokenProvider.tokenTypes.type, semanticTokenProvider.tokenModifiers.declaration) +} + +function expectEmptyValueElicitation (result: SemanticTokens) { + const expectedNumberOfTokens = 9 + expectSemanticTokensToHaveLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + expectSemanticTokensToEqual(tokens[7], 1, 10, 11, semanticTokenProvider.tokenTypes.keyword, 0) + expectSemanticTokensToEqual(tokens[8], 0, 12, 15, semanticTokenProvider.tokenTypes.type, 0) +}