diff --git a/.github/workflows/sonar.yml b/.github/workflows/sonar.yml new file mode 100644 index 0000000..0590bd2 --- /dev/null +++ b/.github/workflows/sonar.yml @@ -0,0 +1,19 @@ +name: SonarCloud Analysis +on: + push: + branches: + - main + pull_request: + types: [opened, synchronize, reopened] +jobs: + sonarqube: + name: SonarQube + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis + - name: SonarQube Scan + uses: SonarSource/sonarqube-scan-action@v5 + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} \ No newline at end of file diff --git a/.yarn/install-state.gz b/.yarn/install-state.gz index 81b6af3..64f688c 100644 Binary files a/.yarn/install-state.gz and b/.yarn/install-state.gz differ diff --git a/langium-quickstart.md b/langium-quickstart.md index 90c1deb..5ac0d4d 100644 --- a/langium-quickstart.md +++ b/langium-quickstart.md @@ -8,8 +8,8 @@ This folder contains all necessary files for your language extension. * `src/extension/main.ts` - the main code of the extension, which is responsible for launching a language server and client. * `src/language/context-mapper-dsl.langium` - the grammar definition of your language. * `src/language/main.ts` - the entry point of the language server process. - * `src/language/context-mapper-dsl-module.ts` - the dependency injection module of your language implementation. Use this to register overridden and added services. - * `src/language/context-mapper-dsl-validator.ts` - an example validator. You should change it to reflect the semantics of your language. + * `src/language/ContextMapperDslModule.ts` - the dependency injection module of your language implementation. Use this to register overridden and added services. + * `src/language/ContextMapperDslValidator.ts` - an example validator. You should change it to reflect the semantics of your language. * `src/cli/main.ts` - the entry point of the command line interface (CLI) of your language. * `src/cli/generator.ts` - the code generator used by the CLI to write output files from DSL documents. * `src/cli/cli-util.ts` - utility code for the CLI. diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 0000000..28c714c --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,14 @@ +sonar.projectKey=lstreckeisen_context-mapper-language-server +sonar.organization=lstreckeisen + + +# This is the name and version displayed in the SonarCloud UI. +#sonar.projectName=context-mapper-language-server +#sonar.projectVersion=1.0 + + +# Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows. +#sonar.sources=. + +# Encoding of the source code. 
Default is default system encoding +#sonar.sourceEncoding=UTF-8 \ No newline at end of file diff --git a/src/language/context-mapper-dsl-module.ts b/src/language/ContextMapperDslModule.ts similarity index 98% rename from src/language/context-mapper-dsl-module.ts rename to src/language/ContextMapperDslModule.ts index bc7a855..de81b3a 100644 --- a/src/language/context-mapper-dsl-module.ts +++ b/src/language/ContextMapperDslModule.ts @@ -8,7 +8,7 @@ import { type PartialLangiumServices } from 'langium/lsp' import { ContextMapperDslGeneratedModule, ContextMapperDslGeneratedSharedModule } from './generated/module.js' -import { ContextMapperDslValidator, registerValidationChecks } from './context-mapper-dsl-validator.js' +import { ContextMapperDslValidator, registerValidationChecks } from './ContextMapperDslValidator.js' import { ContextMapperDslSemanticTokenProvider } from './semantictokens/ContextMapperDslSemanticTokenProvider.js' /** diff --git a/src/language/context-mapper-dsl-validator.ts b/src/language/ContextMapperDslValidator.ts similarity index 52% rename from src/language/context-mapper-dsl-validator.ts rename to src/language/ContextMapperDslValidator.ts index 4ef8e75..d3c9b58 100644 --- a/src/language/context-mapper-dsl-validator.ts +++ b/src/language/ContextMapperDslValidator.ts @@ -1,6 +1,8 @@ import type { ValidationChecks } from 'langium' import type { ContextMapperDslAstType } from './generated/ast.js' -import type { ContextMapperDslServices } from './context-mapper-dsl-module.js' +import type { ContextMapperDslServices } from './ContextMapperDslModule.js' +import { ContextMappingModelValidator } from './validation/ContextMappingModelValidator.js' +import { ValueValidator } from './validation/ValueValidator.js' /** * Register custom validation checks. @@ -8,13 +10,13 @@ import type { ContextMapperDslServices } from './context-mapper-dsl-module.js' export function registerValidationChecks (services: ContextMapperDslServices) { const registry = services.validation.ValidationRegistry const validator = services.validation.ContextMapperDslValidator - const checks: ValidationChecks = {} + const checks: ValidationChecks = { + ContextMappingModel: new ContextMappingModelValidator().validate, + Value: new ValueValidator().validate + } registry.register(checks, validator) } -/** - * Implementation of custom validations. - */ export class ContextMapperDslValidator { } diff --git a/src/language/context-mapper-dsl.langium b/src/language/context-mapper-dsl.langium index 897f49f..e10bc8f 100644 --- a/src/language/context-mapper-dsl.langium +++ b/src/language/context-mapper-dsl.langium @@ -1,57 +1,75 @@ grammar ContextMapperDsl -hidden terminal WS: /\s+/; terminal ID: /[_a-zA-Z][\w_]*/; terminal STRING: /"(\\.|[^"\\])*"|'(\\.|[^'\\])*'/; - terminal ML_COMMENT: /\/\*[\s\S]*?\*\//; terminal SL_COMMENT: /\/\/[^\n\r]*/; terminal OPEN: '{'; terminal CLOSE: '}'; -entry ContextMappingModel: - (topComment=SL_COMMENT | topComment=ML_COMMENT)? - (imports+=Import)* - (map=ContextMap)? 
- (boundedContexts+=BoundedContext)* - (domains+=Domain)* - (userRequirements+=UserRequirement)* - (stakeholders+=Stakeholders)* - (valueRegisters+=ValueRegister)* -; +hidden terminal WS: /\s+/; +hidden terminal HIDDEN_ML_COMMENT: ML_COMMENT; +hidden terminal HIDDEN_SL_COMMENT: SL_COMMENT; -Import: - 'import' importURI=STRING; +entry ContextMappingModel: + ( + (contextMaps+=ContextMap) | + (boundedContexts+=BoundedContext) | + (domains+=Domain) | + (userRequirements+=UserRequirement) | + (stakeholders+=Stakeholders) | + (valueRegisters+=ValueRegister) + )* +; + +/* + In Langium, elements in unordered groups are optional by default. + However, the usage of the ? cardinality is not allowed in unordered groups. + Therefore, to create a rule with optional, unordered elements, one needs to omit the ? operator and combine them with the & operator. + This behavior may change in the future. + Also, unordered groups may cause unreadable parsing errors. To resolve that, unordered groups can be replaced with a (A | B | C)* rule and enforce non-repetition of elements with a validator. + https://github.com/eclipse-langium/langium/discussions/1903 + */ ContextMap: - {infer ContextMap} // make sure there is always a context map + // {ContextMap} make sure there is always a context map 'ContextMap' (name=ID)? OPEN - ('type' ('=')? type=ContextMapType)? - ('state' ('=')? state=ContextMapState)? + ( + ('type' ('=')? type=ContextMapType) & + ('state' ('=')? state=ContextMapState) + ) ('contains' boundedContexts+=[BoundedContext] ("," boundedContexts+=[BoundedContext])*)* relationships+=Relationship* CLOSE ; BoundedContext: - (comment=ML_COMMENT | comment=SL_COMMENT)? 'BoundedContext' name=ID ( - ('implements' (implementedDomainParts+=[DomainPart]) ("," implementedDomainParts+=[DomainPart])*)? - ('realizes' (realizedBoundedContexts+=[BoundedContext]) ("," realizedBoundedContexts+=[BoundedContext])*)? - ('refines' refinedBoundedContext=[BoundedContext])? + ( + ('implements' (implementedDomainParts+=[DomainPart]) ("," implementedDomainParts+=[DomainPart])*) & + ('realizes' (realizedBoundedContexts+=[BoundedContext]) ("," realizedBoundedContexts+=[BoundedContext])*) & + ('refines' refinedBoundedContext=[BoundedContext]) + ) ) ( OPEN - ('domainVisionStatement' ('=')? domainVisionStatement=STRING)? - ('type' ('=')? type=BoundedContextType)? - (('responsibilities' ('=')? responsibilities+=STRING) ("," responsibilities+=STRING)*)? - ('implementationTechnology' ('=')? implementationTechnology=STRING)? - ('knowledgeLevel' ('=')? knowledgeLevel=KnowledgeLevel)? - ('businessModel' ('=')? businessModel=STRING)? - ('evolution' ('=')? evolution=Evolution)? - (aggregates+=Aggregate)* + ( + ('domainVisionStatement' ('=') domainVisionStatement=STRING) & + ('type' ('=') type=BoundedContextType) & + (('responsibilities' ('=')? responsibilities+=STRING) ("," responsibilities+=STRING)*) & + ('implementationTechnology' ('=') implementationTechnology=STRING) & + ('knowledgeLevel' ('=') knowledgeLevel=KnowledgeLevel) & + ('businessModel' ('=') businessModel=STRING) & + ('evolution' ('=') evolution=Evolution) + ) + ( + ( + modules+=SculptorModule | + aggregates+=Aggregate + )* + ) CLOSE )? ; @@ -74,14 +92,15 @@ Subdomain: 'Subdomain' name=ID ('supports' (supportedFeatures+=[UserRequirement]) ("," supportedFeatures+=[UserRequirement])*)? ( OPEN - (('type' ('=')? type=SubDomainType)? - ('domainVisionStatement' ('=')? domainVisionStatement=STRING)?) + ( + ('type' ('=')? type=SubDomainType) & + ('domainVisionStatement' ('=')? 
domainVisionStatement=STRING) + ) CLOSE )? ; Relationship: - (comment=ML_COMMENT | comment=SL_COMMENT)? SymmetricRelationship | UpstreamDownstreamRelationship ; @@ -128,11 +147,12 @@ UpstreamDownstreamRelationship: (downstream=[BoundedContext] ('['((downstreamRoles+=DownstreamRole) ("," downstreamRoles+=DownstreamRole)*)?']')?'Downstream-Upstream'('['((upstreamRoles+=UpstreamRole) ("," upstreamRoles+=UpstreamRole)*)?']')? upstream=[BoundedContext]) ) (':' name=ID)? - (OPEN ( - ('implementationTechnology' ('=')? implementationTechnology=STRING)? - (('exposedAggregates' ('=')? upstreamExposedAggregates+=[Aggregate]) ("," upstreamExposedAggregates+=[Aggregate])* (exposedAggregatesComment=SL_COMMENT)?)? - ('downstreamRights' ('=')? downstreamGovernanceRights=DownstreamGovernanceRights)? - ) + (OPEN + ( + ('implementationTechnology' ('=')? implementationTechnology=STRING) & + (('exposedAggregates' ('=')? upstreamExposedAggregates+=[Aggregate]) ("," upstreamExposedAggregates+=[Aggregate])*) & + ('downstreamRights' ('=')? downstreamGovernanceRights=DownstreamGovernanceRights) + ) CLOSE)? ) ; @@ -146,36 +166,36 @@ CustomerSupplierRelationship: (upstream=[BoundedContext] ('['((upstreamRoles+=UpstreamRole) ("," upstreamRoles+=UpstreamRole)*)?']')?'Supplier-Customer'('['((downstreamRoles+=DownstreamRole) ("," downstreamRoles+=DownstreamRole)*)?']')? downstream=[BoundedContext]) ) (':' name=ID)? - (OPEN ( - ('implementationTechnology' ('=')? implementationTechnology=STRING)? - (('exposedAggregates' ('=')? upstreamExposedAggregates+=[Aggregate]) ("," upstreamExposedAggregates+=[Aggregate])* (exposedAggregatesComment=SL_COMMENT)?)? - ('downstreamRights' ('=')? downstreamGovernanceRights=DownstreamGovernanceRights)? - ) + (OPEN + ( + ('implementationTechnology' ('=')? implementationTechnology=STRING) & + (('exposedAggregates' ('=')? upstreamExposedAggregates+=[Aggregate]) ("," upstreamExposedAggregates+=[Aggregate])* (exposedAggregatesComment=SL_COMMENT)?) & + ('downstreamRights' ('=')? downstreamGovernanceRights=DownstreamGovernanceRights) + ) CLOSE)? ) ; Aggregate: - (comment=ML_COMMENT)? - (doc=STRING)? + (doc=STRING)? "Aggregate" name=ID (OPEN ( - (('responsibilities' ('=')? responsibilities+=STRING) ("," responsibilities+=STRING)*)? + (('responsibilities' ('=')? responsibilities+=STRING) ("," responsibilities+=STRING)*) & ( (('useCases' ('=')? userRequirements+=[UseCase]) ("," userRequirements+=[UseCase])*) | (('userStories' ('=')? userRequirements+=[UserStory]) ("," userRequirements+=[UserStory])*) | ((('features' | 'userRequirements') ('=')? userRequirements+=[UserRequirement]) ("," userRequirements+=[UserRequirement])*) - )? - ('owner' ('=')? owner=[BoundedContext])? - ('knowledgeLevel' ('=')? knowledgeLevel=KnowledgeLevel)? - (('likelihoodForChange' | 'structuralVolatility') ('=')? likelihoodForChange=Volatility)? - ('contentVolatility' ('=')? contentVolatility=Volatility)? - ('availabilityCriticality' ('=')? availabilityCriticality=Criticality)? - ('consistencyCriticality' ('=')? consistencyCriticality=Criticality)? - ('storageSimilarity' ('=')? storageSimilarity=Similarity)? - ('securityCriticality' ('=')? securityCriticality=Criticality)? - ('securityZone' ('=')? securityZone=STRING)? - ('securityAccessGroup' ('=')? securityAccessGroup=STRING)? + ) & + ('owner' ('=')? owner=[BoundedContext]) & + ('knowledgeLevel' ('=')? knowledgeLevel=KnowledgeLevel) & + (('likelihoodForChange' | 'structuralVolatility') ('=')? likelihoodForChange=Volatility) & + ('contentVolatility' ('=')? 
contentVolatility=Volatility) & + ('availabilityCriticality' ('=')? availabilityCriticality=Criticality) & + ('consistencyCriticality' ('=')? consistencyCriticality=Criticality) & + ('storageSimilarity' ('=')? storageSimilarity=Similarity) & + ('securityCriticality' ('=')? securityCriticality=Criticality) & + ('securityZone' ('=')? securityZone=STRING) & + ('securityAccessGroup' ('=')? securityAccessGroup=STRING) ) CLOSE)? ; @@ -186,12 +206,14 @@ UserRequirement: UseCase: 'UseCase' name=ID (OPEN - (('actor' ('=')? role=STRING)? - ('secondaryActors' ('=')? secondaryActors+=STRING ("," secondaryActors+=STRING)*)? - ('interactions' ('=')? features+=Feature ("," features+=Feature)*)? - ('benefit' ('=')? benefit=STRING)? - ('scope' ('=')? scope=STRING)? - ('level' ('=')? level=STRING)?) + ( + ('actor' ('=')? role=STRING) & + ('secondaryActors' ('=')? secondaryActors+=STRING ("," secondaryActors+=STRING)*) & + ('interactions' ('=')? features+=Feature ("," features+=Feature)*) & + ('benefit' ('=')? benefit=STRING) & + ('scope' ('=')? scope=STRING) & + ('level' ('=')? level=STRING) + ) CLOSE)? ; @@ -218,12 +240,33 @@ StoryValuation: 'accepting that' harmedValues+=STRING (',' harmedValues+=STRING)* ('is' | 'are') ('reduced' | 'harmed') ; -terminal UserActivityDefaultVerb: +UserActivityDefaultVerb returns string: 'create' | 'read' | 'update' | 'delete' ; +SculptorModule: + (doc=STRING)? + 'Module' name=ID ( + OPEN + ( + (external?='external') & + ('basePackage' '=' basePackage=JavaIdentifier) & + ('hint' '=' hint=STRING) + ) + ( + (aggregates+=Aggregate) + )* + CLOSE + )? +; + +JavaIdentifier returns string: + (ID ("." ID )* ) +; + Stakeholders: - {infer Stakeholders} 'Stakeholders' ('of' (contexts+=[BoundedContext]) (',' contexts+=[BoundedContext])*)? (OPEN + // {Stakeholders} + 'Stakeholders' ('of' (contexts+=[BoundedContext]) (',' contexts+=[BoundedContext])*)? (OPEN ( (stakeholders+=AbstractStakeholder)* ) @@ -237,7 +280,7 @@ AbstractStakeholder: StakeholderGroup: 'StakeholderGroup' name=ID (OPEN ( - (stakeholders+=Stakeholder)* + (stakeholders+=Stakeholder)* ) CLOSE)? ; @@ -245,9 +288,9 @@ StakeholderGroup: Stakeholder: 'Stakeholder' name=ID (OPEN ( - ('influence' ('=')? influence=INFLUENCE)? - ('interest' ('=')? interest=INTEREST)? - ('description' ('=')? description=STRING)? + ('influence' ('=')? influence=INFLUENCE) & + ('interest' ('=')? interest=INTEREST) & + ('description' ('=')? description=STRING) ) CLOSE)? ; @@ -255,48 +298,47 @@ Stakeholder: ValueRegister: 'ValueRegister' name=ID ('for' context=[BoundedContext])? (OPEN ( - (valueClusters+=ValueCluster)* - (values+=Value)* - (valueEpics+=ValueEpic)* - (valueNarratives+=ValueNarrative)* - (valueWeightings+=ValueWeigthing)* - ) + (valueClusters+=ValueCluster) | + (values+=Value) | + (valueEpics+=ValueEpic) | + (valueNarratives+=ValueNarrative) | + (valueWeightings+=ValueWeighting) + )* CLOSE)? ; ValueCluster: 'ValueCluster' name=ID (OPEN + // required value first, then optional values + (('core' ('=')? coreValue7000=CoreValue) | ('core' ('=')? coreValue=STRING)) ( - (('core' ('=')? coreValue7000=CoreValue) | ('core' ('=')? coreValue=STRING)) - ('demonstrator' ('=')? demonstrators+=STRING)* - ('relatedValue' ('=')? relatedValues+=STRING)* - ('opposingValue' ('=')? opposingValues+=STRING)* - (values+=Value)* - (elicitations+=ValueElicitation)* - ) + ('demonstrator' ('=')? demonstrators+=STRING) | + ('relatedValue' ('=')? relatedValues+=STRING) | + ('opposingValue' ('=')? 
opposingValues+=STRING) | + (values+=Value) | + (elicitations+=ValueElicitation) + )* CLOSE)? ; Value: 'Value' name=ID (OPEN ( - (coreValue?='isCore')? - ('demonstrator' ('=')? demonstrators+=STRING)* - ('relatedValue' ('=')? relatedValues+=STRING)* - ('opposingValue' ('=')? opposingValues+=STRING)* - (elicitations+=ValueElicitation)* - ) + (coreValue+='isCore') | + ('demonstrator' ('=')? demonstrators+=STRING) | + ('relatedValue' ('=')? relatedValues+=STRING) | + ('opposingValue' ('=')? opposingValues+=STRING) | + (elicitations+=ValueElicitation) + )* CLOSE)? ; -// Q: table B.1 abbilden? if, how? - ValueElicitation: ('Stakeholder'|'Stakeholders') stakeholder=[AbstractStakeholder] (OPEN ( - ('priority' ('=')? priority=PRIORITY)? - ('impact' ('=')? impact=IMPACT)? - ('consequences' (consequences+=Consequence)+)? + ('priority' ('=')? priority=PRIORITY) & + ('impact' ('=')? impact=IMPACT) & + ('consequences' (consequences+=Consequence)+) ) CLOSE)? ; @@ -305,9 +347,11 @@ ValueElicitation: ValueEpic: 'ValueEpic' name=ID (OPEN ( - 'As a' stakeholder=[AbstractStakeholder] 'I value' value=STRING 'as demonstrated in' - (('realization of' realizedValues+=STRING)+ - ('reduction of' reducedValues+=STRING)+) + 'As a' stakeholder=[AbstractStakeholder] 'I value' value=STRING 'as demonstrated in' + ( + ('realization of' realizedValues+=STRING)+ & + ('reduction of' reducedValues+=STRING)+ + ) ) CLOSE)? ; @@ -317,19 +361,19 @@ ValueNarrative: ( 'When the SOI executes' feature=STRING ',' 'stakeholders expect it to promote, protect or create' promotedValues=STRING ',' - 'possibly degrading or prohibiting' harmedValues=STRING - 'with the following externally observable and/or internally auditable behavior:' preAndPostConditions=STRING + 'possibly degrading or prohibiting' harmedValues=STRING + 'with the following externally observable and/or internally auditable behavior:' preAndPostConditions=STRING ) CLOSE) ; -ValueWeigthing: - 'ValueWeigthing' name=ID (OPEN +ValueWeighting: + 'ValueWeighting' name=ID (OPEN ( - 'In the context of the SOI,' + 'In the context of the SOI,' 'stakeholder' stakeholder=[AbstractStakeholder] 'values' value1=STRING 'more than' value2=STRING - 'expecting benefits such as' benefits=STRING - 'running the risk of harms such as' harms=STRING + 'expecting benefits such as' benefits=STRING + 'running the risk of harms such as' harms=STRING ) CLOSE) ; @@ -343,71 +387,70 @@ Action: ; -UpstreamRole: - PUBLISHED_LANGUAGE='PL' | OPEN_HOST_SERVICE='OHS' +UpstreamRole returns string: + 'PL'| 'OHS' ; -DownstreamRole: - ANTICORRUPTION_LAYER='ACL' | CONFORMIST='CF' +DownstreamRole returns string: + 'ACL' | 'CF' ; -ContextMapState: - UNDEFINED='UNDEFINED' | AS_IS='AS_IS' | TO_BE='TO_BE' +ContextMapState returns string: + 'UNDEFINED' | 'AS_IS' | 'TO_BE' ; -ContextMapType: - UNDEFINED='UNDEFINED' | SYSTEM_LANDSCAPE='SYSTEM_LANDSCAPE' | ORGANIZATIONAL='ORGANIZATIONAL' +ContextMapType returns string: + 'UNDEFINED' | 'SYSTEM_LANDSCAPE' | 'ORGANIZATIONAL' ; -BoundedContextType: - UNDEFINED='UNDEFINED' | FEATURE='FEATURE' | APPLICATION='APPLICATION' | SYSTEM='SYSTEM' | TEAM='TEAM' +BoundedContextType returns string: + 'UNDEFINED' | 'FEATURE' | 'APPLICATION' | 'SYSTEM' | 'TEAM' ; -SubDomainType: - UNDEFINED='UNDEFINED' | CORE_DOMAIN='CORE_DOMAIN' | SUPPORTING_DOMAIN='SUPPORTING_DOMAIN' | GENERIC_SUBDOMAIN='GENERIC_SUBDOMAIN' +SubDomainType returns string: + 'UNDEFINED' | 'CORE_DOMAIN' | 'SUPPORTING_DOMAIN' | 'GENERIC_SUBDOMAIN' ; -DownstreamGovernanceRights: - INFLUENCER='INFLUENCER' | 
OPINION_LEADER='OPINION_LEADER' | VETO_RIGHT='VETO_RIGHT' | DECISION_MAKER='DECISION_MAKER' | MONOPOLIST='MONOPOLIST' +DownstreamGovernanceRights returns string: + 'INFLUENCER' | 'OPINION_LEADER' | 'VETO_RIGHT' | 'DECISION_MAKER' | 'MONOPOLIST' ; -KnowledgeLevel: - META='META' | CONCRETE='CONCRETE' +KnowledgeLevel returns string: + "META" | "CONCRETE" ; -Volatility: - UNDEFINED='UNDEFINED' | NORMAL='NORMAL'| RARELY='RARELY' | OFTEN='OFTEN' +Volatility returns string: + 'UNDEFINED' | 'NORMAL' | 'RARELY' | 'OFTEN' ; -Criticality: - UNDEFINED='UNDEFINED' | NORMAL='NORMAL' | HIGH='HIGH' | LOW='LOW' +Criticality returns string: + 'UNDEFINED' | 'NORMAL' | 'HIGH' | 'LOW' ; -Similarity: - UNDEFINED='UNDEFINED' | NORMAL='NORMAL' | HUGE='HUGE' | TINY='TINY' +Similarity returns string: + 'UNDEFINED' | 'NORMAL' | 'HUGE' | 'TINY' ; -Evolution: - UNDEFINED='UNDEFINED' | GENESIS='GENESIS' | CUSTOM_BUILT='CUSTOM_BUILT' | PRODUCT='PRODUCT' | COMMODITY='COMMODITY' +Evolution returns string: + 'UNDEFINED' | 'GENESIS' | 'CUSTOM_BUILT' | 'PRODUCT' | 'COMMODITY' ; -INFLUENCE: - UNDEFINED='UNDEFINED' | HIGH='HIGH' | MEDIUM='MEDIUM' | LOW='LOW' +INFLUENCE returns string: + 'UNDEFINED' | 'HIGH' | 'MEDIUM' | 'LOW' ; -INTEREST: - UNDEFINED='UNDEFINED' | HIGH='HIGH' | MEDIUM='MEDIUM' | LOW='LOW' +INTEREST returns string: + 'UNDEFINED' | 'HIGH' | 'MEDIUM' | 'LOW' ; -PRIORITY: - UNDEFINED='UNDEFINED' | HIGH='HIGH' | MEDIUM='MEDIUM' | LOW='LOW' +PRIORITY returns string: + 'UNDEFINED' | 'HIGH' | 'MEDIUM' | 'LOW' ; -IMPACT: - UNDEFINED='UNDEFINED' | HIGH='HIGH' | MEDIUM='MEDIUM' | LOW='LOW' +IMPACT returns string: + 'UNDEFINED' | 'HIGH' | 'MEDIUM' | 'LOW' ; -CoreValue: - UNDEFINED='UNDEFINED' | AUTONOMY='AUTONOMY' | CARE='CARE' | CONTROL='CONTROL' | FAIRNESS='FAIRNESS' | INCLUSIVENESS='INCLUSIVENESS' | INNOVATION='INNOVATION' | PERFECTION='PERFECTION' | PRIVACY='PRIVACY' | RESPECT='RESPECT' | SUSTAINABILITY='SUSTAINABILITY' | TRANSPARENCY='TRANSPARENCY' | TRUST='TRUST' +CoreValue returns string: + 'UNDEFINED' | 'AUTONOMY' | 'CARE' | 'CONTROL' | 'FAIRNESS' | 'INCLUSIVENESS' | 'INNOVATION' | 'PERFECTION' | 'PRIVACY' | 'RESPECT' | 'SUSTAINABILITY' | 'TRANSPARENCY' | 'TRUST' ; - diff --git a/src/language/main.ts b/src/language/main.ts index 500a5a3..2ea3b6b 100644 --- a/src/language/main.ts +++ b/src/language/main.ts @@ -1,8 +1,7 @@ import { startLanguageServer } from 'langium/lsp' import { NodeFileSystem } from 'langium/node' -// eslint-disable-next-line import/extensions import { createConnection, ProposedFeatures } from 'vscode-languageserver/node.js' -import { createContextMapperDslServices } from './context-mapper-dsl-module.js' +import { createContextMapperDslServices } from './ContextMapperDslModule.js' // Create a connection to the client const connection = createConnection(ProposedFeatures.all) diff --git a/src/language/semantictokens/ContextMapperDslSemanticTokenProvider.ts b/src/language/semantictokens/ContextMapperDslSemanticTokenProvider.ts index 2c4ccac..adb9fcd 100644 --- a/src/language/semantictokens/ContextMapperDslSemanticTokenProvider.ts +++ b/src/language/semantictokens/ContextMapperDslSemanticTokenProvider.ts @@ -1,15 +1,88 @@ import { AstNode } from 'langium' import { AbstractSemanticTokenProvider, SemanticTokenAcceptor } from 'langium/lsp' +import { + ContextMappingModel, + isContextMappingModel +} from '../generated/ast.js' +import { ContextMapSemanticTokenProvider } from './contextMap/ContextMapSemanticTokenProvider.js' import { SemanticTokenTypes } from 'vscode-languageserver-types' -import { 
isContextMap } from '../generated/ast.js' +import { BoundedContextSemanticTokenProvider } from './boundedContext/BoundedContextSemanticTokenProvider.js' +import { DomainSemanticTokenProvider } from './domain/DomainSemanticTokenProvider.js' +import { AggregateSemanticTokenProvider } from './boundedContext/AggregateSemanticTokenProvider.js' +import { RequirementsSemanticTokenProvider } from './requirements/RequirementsSemanticTokenProvider.js' +import { ValueSemanticTokenProvider } from './vdad/ValueSemanticTokenProvider.js' +import { ContextMapperSemanticTokenProvider } from './ContextMapperSemanticTokenProvider.js' +import { SculptorModuleSemanticTokenProvider } from './boundedContext/SculptorModuleSemanticTokenProvider.js' +import { RelationshipSemanticTokenProvider } from './contextMap/RelationshipSemanticTokenProvider.js' +import { FeatureSemanticTokenProvider } from './requirements/FeatureSemanticTokenProvider.js' +import { StoryValuationSemanticTokenProvider } from './requirements/StoryValuationSemanticTokenProvider.js' +import { AbstractStakeholderSemanticTokenProvider } from './vdad/AbstractStakeholderSemanticTokenProvider.js' +import { ActionSemanticTokenProvider } from './vdad/ActionSemanticTokenProvider.js' +import { ConsequenceSemanticTokenProvider } from './vdad/ConsequenceSemanticTokenProvider.js' +import { StakeholderSemanticTokenProvider } from './vdad/StakeholderSemanticTokenProvider.js' +import { ValueClusterSemanticTokenProvider } from './vdad/ValueClusterSemanticTokenProvider.js' +import { ValueElicitationSemanticTokenProvider } from './vdad/ValueElicitationSemanticTokenProvider.js' +import { ValueEpicSemanticTokenProvider } from './vdad/ValueEpicSemanticTokenProvider.js' +import { ValueNarrativeSemanticTokenProvider } from './vdad/ValueNarrativeSemanticTokenProvider.js' +import { ValueRegisterSemanticTokenProvider } from './vdad/ValueRegisterSemanticTokenProvider.js' +import { ValueWeightingSemanticTokenProvider } from './vdad/ValueWeightingSemanticTokenProvider.js' export class ContextMapperDslSemanticTokenProvider extends AbstractSemanticTokenProvider { + private semanticTokenProviders: ContextMapperSemanticTokenProvider[] = [ + new AggregateSemanticTokenProvider(), + new BoundedContextSemanticTokenProvider(), + new SculptorModuleSemanticTokenProvider(), + new ContextMapSemanticTokenProvider(), + new RelationshipSemanticTokenProvider(), + new DomainSemanticTokenProvider(), + new FeatureSemanticTokenProvider(), + new RequirementsSemanticTokenProvider(), + new StoryValuationSemanticTokenProvider(), + new AbstractStakeholderSemanticTokenProvider(), + new ActionSemanticTokenProvider(), + new ConsequenceSemanticTokenProvider(), + new StakeholderSemanticTokenProvider(), + new ValueClusterSemanticTokenProvider(), + new ValueElicitationSemanticTokenProvider(), + new ValueEpicSemanticTokenProvider(), + new ValueNarrativeSemanticTokenProvider(), + new ValueRegisterSemanticTokenProvider(), + new ValueSemanticTokenProvider(), + new ValueWeightingSemanticTokenProvider() + ] + protected override highlightElement (node: AstNode, acceptor: SemanticTokenAcceptor) { - if (isContextMap(node)) { + if (isContextMappingModel(node)) { + const modelNode = node as ContextMappingModel + + if (modelNode.$cstNode) { + this.highlightComments(/\/\*[\s\S]*?\*\//g, modelNode, acceptor) + this.highlightComments(/\/\/[^\n\r]*/g, modelNode, acceptor) + } + } else { + for (const provider of this.semanticTokenProviders) { + if (provider.supports(node)) { + provider.highlight(node, acceptor) + return + } 
+ } + + console.error('Uncaught node type', node.$type) + } + } + + private highlightComments (regex: RegExp, node: ContextMappingModel, acceptor: SemanticTokenAcceptor) { + const text = node.$document!!.textDocument.getText() + for (const match of text.matchAll(regex)) { + if (match == null || match.index == null) { + continue + } + const position = node.$document!!.textDocument.positionAt(match.index) acceptor({ - node, - type: SemanticTokenTypes.keyword, - keyword: 'ContextMap' + type: SemanticTokenTypes.comment, + line: position.line, + char: position.character, + length: match[0].length }) } } diff --git a/src/language/semantictokens/ContextMapperSemanticTokenProvider.ts b/src/language/semantictokens/ContextMapperSemanticTokenProvider.ts new file mode 100644 index 0000000..a3fe6ea --- /dev/null +++ b/src/language/semantictokens/ContextMapperSemanticTokenProvider.ts @@ -0,0 +1,7 @@ +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' + +export interface ContextMapperSemanticTokenProvider<T extends AstNode> { + supports(node: AstNode): node is T; + highlight(node: T, acceptor: SemanticTokenAcceptor): void +} diff --git a/src/language/semantictokens/HighlightingHelper.ts b/src/language/semantictokens/HighlightingHelper.ts new file mode 100644 index 0000000..516eca3 --- /dev/null +++ b/src/language/semantictokens/HighlightingHelper.ts @@ -0,0 +1,61 @@ +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { SemanticTokenModifiers, SemanticTokenTypes } from 'vscode-languageserver-types' + +export function highlightString (node: AstNode, acceptor: SemanticTokenAcceptor, property: string) { + acceptor({ + node, + type: SemanticTokenTypes.string, + property + }) +} + +export function highlightKeyword (node: AstNode, acceptor: SemanticTokenAcceptor, keyword: string) { + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword + }) +} + +export function highlightProperty (node: AstNode, acceptor: SemanticTokenAcceptor, property: string) { + acceptor({ + node, + type: SemanticTokenTypes.property, + property + }) +} + +export function highlightType (node: AstNode, acceptor: SemanticTokenAcceptor, property: string, modifiers: string[] = []) { + acceptor({ + node, + type: SemanticTokenTypes.type, + property, + modifier: modifiers + }) +} + +export function highlightTypeDeclaration (node: AstNode, acceptor: SemanticTokenAcceptor, keyword: string, hasName: Boolean = true) { + highlightKeyword(node, acceptor, keyword) + if (hasName) { + highlightType(node, acceptor, 'name', [SemanticTokenModifiers.declaration]) + } +} + +export function highlightMemberAttribute (node: AstNode, acceptor: SemanticTokenAcceptor, keywords: string[], property: string, type: SemanticTokenTypes = SemanticTokenTypes.property) { + keywords.forEach(keyword => highlightKeyword(node, acceptor, keyword)) + acceptor({ + node, + type, + property + }) +} + +export function highlightAttribute (node: AstNode, acceptor: SemanticTokenAcceptor, keywords: string[], property: string, type: SemanticTokenTypes = SemanticTokenTypes.type) { + keywords.forEach(keyword => highlightKeyword(node, acceptor, keyword)) + acceptor({ + node, + type, + property + }) +} diff --git a/src/language/semantictokens/boundedContext/AggregateSemanticTokenProvider.ts b/src/language/semantictokens/boundedContext/AggregateSemanticTokenProvider.ts new file mode 100644 index 0000000..a3007d7 --- /dev/null +++ b/src/language/semantictokens/boundedContext/AggregateSemanticTokenProvider.ts @@ -0,0 +1,82 @@
+import { Aggregate, isAggregate, isUseCase, isUserRequirement, isUserStory } from '../../generated/ast.js' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { SemanticTokenTypes } from 'vscode-languageserver-types' +import { highlightMemberAttribute, highlightString, highlightTypeDeclaration } from '../HighlightingHelper.js' +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { AstNode } from 'langium' + +export class AggregateSemanticTokenProvider implements ContextMapperSemanticTokenProvider<Aggregate> { + supports (node: AstNode): node is Aggregate { + return isAggregate(node) + } + + highlight (node: Aggregate, acceptor: SemanticTokenAcceptor) { + if (node.doc) { + highlightString(node, acceptor, 'doc') + } + + highlightTypeDeclaration(node, acceptor, 'Aggregate') + + if (node.responsibilities.length > 0) { + highlightMemberAttribute(node, acceptor, ['responsibilities'], 'responsibilities', SemanticTokenTypes.string) + } + + if (node.userRequirements.length > 0) { + const keywords = [] + if (isUseCase(node.userRequirements[0])) { + keywords.push('useCases') + } else if (isUserRequirement(node.userRequirements[0])) { + keywords.push('userRequirements') + keywords.push('features') + } else if (isUserStory(node.userRequirements[0])) { + keywords.push('userStories') + } + + highlightMemberAttribute(node, acceptor, keywords, 'userRequirements', SemanticTokenTypes.type) + } + + if (node.owner) { + highlightMemberAttribute(node, acceptor, ['owner'], 'owner', SemanticTokenTypes.type) + } + + if (node.knowledgeLevel) { + highlightMemberAttribute(node, acceptor, ['knowledgeLevel'], 'knowledgeLevel') + } + + if (node.likelihoodForChange) { + highlightMemberAttribute(node, acceptor, ['likelihoodForChange', 'structuralVolatility'], 'likelihoodForChange') + } + + if (node.contentVolatility) { + highlightMemberAttribute(node, acceptor, ['contentVolatility'], 'contentVolatility') + } + + if (node.availabilityCriticality) { + highlightMemberAttribute(node, acceptor, ['availabilityCriticality'], 'availabilityCriticality') + } + + if (node.consistencyCriticality) { + highlightMemberAttribute(node, acceptor, ['consistencyCriticality'], 'consistencyCriticality') + } + + if (node.storageSimilarity) { + highlightMemberAttribute(node, acceptor, ['storageSimilarity'], 'storageSimilarity') + } + + if (node.securityCriticality) { + highlightMemberAttribute(node, acceptor, ['securityCriticality'], 'securityCriticality') + } + + if (node.securityZone) { + highlightMemberAttribute(node, acceptor, ['securityZone'], 'securityZone', SemanticTokenTypes.string) + } + + if (node.securityAccessGroup) { + highlightMemberAttribute(node, acceptor, ['securityAccessGroup'], 'securityAccessGroup', SemanticTokenTypes.string) + } + } +} diff --git a/src/language/semantictokens/boundedContext/BoundedContextSemanticTokenProvider.ts b/src/language/semantictokens/boundedContext/BoundedContextSemanticTokenProvider.ts new file mode 100644 index 0000000..7320927 --- /dev/null +++ b/src/language/semantictokens/boundedContext/BoundedContextSemanticTokenProvider.ts @@ -0,0 +1,60 @@ +import { SemanticTokenAcceptor } from 'langium/lsp' +import { BoundedContext, isBoundedContext } from '../../generated/ast.js' +import { SemanticTokenTypes } from 'vscode-languageserver-types' +import { + highlightAttribute, + highlightMemberAttribute, + highlightTypeDeclaration +} from
'../HighlightingHelper.js' +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { AstNode } from 'langium' + +export class BoundedContextSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is BoundedContext { + return isBoundedContext(node) + } + + highlight (node: BoundedContext, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'BoundedContext') + + if (node.implementedDomainParts.length > 0) { + highlightAttribute(node, acceptor, ['implements'], 'implementedDomainParts') + } + + if (node.realizedBoundedContexts.length > 0) { + highlightAttribute(node, acceptor, ['realizes'], 'realizedBoundedContexts') + } + + if (node.refinedBoundedContext) { + highlightAttribute(node, acceptor, ['refines'], 'refinedBoundedContext') + } + + if (node.domainVisionStatement) { + highlightMemberAttribute(node, acceptor, ['domainVisionStatement'], 'domainVisionStatement', SemanticTokenTypes.string) + } + + if (node.type) { + highlightMemberAttribute(node, acceptor, ['type'], 'type') + } + + if (node.responsibilities) { + highlightMemberAttribute(node, acceptor, ['responsibilities'], 'responsibilities', SemanticTokenTypes.string) + } + + if (node.implementationTechnology) { + highlightMemberAttribute(node, acceptor, ['implementationTechnology'], 'implementationTechnology', SemanticTokenTypes.string) + } + + if (node.knowledgeLevel) { + highlightMemberAttribute(node, acceptor, ['knowledgeLevel'], 'knowledgeLevel') + } + + if (node.businessModel) { + highlightMemberAttribute(node, acceptor, ['businessModel'], 'businessModel', SemanticTokenTypes.string) + } + + if (node.evolution) { + highlightMemberAttribute(node, acceptor, ['evolution'], 'evolution') + } + } +} diff --git a/src/language/semantictokens/boundedContext/SculptorModuleSemanticTokenProvider.ts b/src/language/semantictokens/boundedContext/SculptorModuleSemanticTokenProvider.ts new file mode 100644 index 0000000..8cafdd4 --- /dev/null +++ b/src/language/semantictokens/boundedContext/SculptorModuleSemanticTokenProvider.ts @@ -0,0 +1,36 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { isSculptorModule, SculptorModule } from '../../generated/ast.js' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { + highlightKeyword, + highlightMemberAttribute, + highlightString, + highlightTypeDeclaration +} from '../HighlightingHelper.js' +import { SemanticTokenTypes } from 'vscode-languageserver-types' +import { AstNode } from 'langium' + +export class SculptorModuleSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is SculptorModule { + return isSculptorModule(node) + } + + highlight (node: SculptorModule, acceptor: SemanticTokenAcceptor) { + if (node.doc) { + highlightString(node, acceptor, 'doc') + } + highlightTypeDeclaration(node, acceptor, 'Module') + + if (node.external) { + highlightKeyword(node, acceptor, 'external') + } + + if (node.basePackage) { + highlightMemberAttribute(node, acceptor, ['basePackage'], 'basePackage', SemanticTokenTypes.namespace) + } + + if (node.hint) { + highlightMemberAttribute(node, acceptor, ['hint'], 'hint', SemanticTokenTypes.string) + } + } +} diff --git a/src/language/semantictokens/contextMap/ContextMapSemanticTokenProvider.ts b/src/language/semantictokens/contextMap/ContextMapSemanticTokenProvider.ts new file mode 100644 index 0000000..5a82aba --- /dev/null +++ 
b/src/language/semantictokens/contextMap/ContextMapSemanticTokenProvider.ts @@ -0,0 +1,30 @@ +import { SemanticTokenAcceptor } from 'langium/lsp' +import { + ContextMap, + isContextMap +} from '../../generated/ast.js' +import { highlightAttribute, highlightMemberAttribute, highlightTypeDeclaration } from '../HighlightingHelper.js' +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { AstNode } from 'langium' + +export class ContextMapSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is ContextMap { + return isContextMap(node) + } + + public highlight (node: ContextMap, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'ContextMap', node.name != null) + + if (node.type) { + highlightMemberAttribute(node, acceptor, ['type'], 'type') + } + + if (node.state) { + highlightMemberAttribute(node, acceptor, ['state'], 'state') + } + + if (node.boundedContexts.length > 0) { + highlightAttribute(node, acceptor, ['contains'], 'boundedContexts') + } + } +} diff --git a/src/language/semantictokens/contextMap/RelationshipSemanticTokenProvider.ts b/src/language/semantictokens/contextMap/RelationshipSemanticTokenProvider.ts new file mode 100644 index 0000000..f721174 --- /dev/null +++ b/src/language/semantictokens/contextMap/RelationshipSemanticTokenProvider.ts @@ -0,0 +1,286 @@ +import { + CustomerSupplierRelationship, + isCustomerSupplierRelationship, + isPartnership, isRelationship, isSharedKernel, + isSymmetricRelationship, + isUpstreamDownstreamRelationship, Partnership, + Relationship, SharedKernel, + SymmetricRelationship, UpstreamDownstreamRelationship +} from '../../generated/ast.js' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { SemanticTokenModifiers, SemanticTokenTypes } from 'vscode-languageserver-types' +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' + +export class RelationshipSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: Relationship): node is Relationship { + return isRelationship(node) + } + + highlight (node: Relationship, acceptor: SemanticTokenAcceptor) { + if (isSymmetricRelationship(node)) { + this.highlightSymmetricRelationship(node, acceptor) + } else if (isUpstreamDownstreamRelationship(node)) { + this.highlightUpstreamDownstreamRelationship(node, acceptor) + } + } + + private highlightSymmetricRelationship (node: SymmetricRelationship, acceptor: SemanticTokenAcceptor) { + acceptor({ + node, + type: SemanticTokenTypes.type, + property: 'participant1' + }) + acceptor({ + node, + type: SemanticTokenTypes.type, + property: 'participant2' + }) + + if (node.name) { + acceptor({ + node, + type: SemanticTokenTypes.type, + modifier: SemanticTokenModifiers.declaration, + property: 'name' + }) + } + + if (node.implementationTechnology) { + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'implementationTechnology' + }) + acceptor({ + node, + type: SemanticTokenTypes.property, + property: 'implementationTechnology' + }) + } + + if (isPartnership(node)) { + this.highlightPartnership(node, acceptor) + } else if (isSharedKernel(node)) { + this.highlightSharedKernel(node, acceptor) + } + } + + private highlightPartnership (node: Partnership, acceptor: SemanticTokenAcceptor) { + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'P' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: '<->' + }) + acceptor({ + 
node, + type: SemanticTokenTypes.keyword, + keyword: 'Partnership' + }) + } + + private highlightSharedKernel (node: SharedKernel, acceptor: SemanticTokenAcceptor) { + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'SK' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: '<->' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'Shared-Kernel' + }) + } + + private highlightUpstreamDownstreamRelationship (node: UpstreamDownstreamRelationship, acceptor: SemanticTokenAcceptor) { + if (isCustomerSupplierRelationship(node)) { + this.highlightCustomerSupplierRelationship(node, acceptor) + return + } + + if (node.name) { + acceptor({ + node, + type: SemanticTokenTypes.type, + modifier: SemanticTokenModifiers.declaration, + property: 'name' + }) + } + + acceptor({ + node, + type: SemanticTokenTypes.type, + property: 'downstream' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'D' + }) + + acceptor({ + node, + type: SemanticTokenTypes.type, + property: 'upstream' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'U' + }) + + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'Upstream-Downstream' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'Downstream-Upstream' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: '<-' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: '->' + }) + + if (node.downstreamRoles.length > 0) { + acceptor({ + node, + type: SemanticTokenTypes.property, + property: 'downstreamRoles' + }) + } + + if (node.upstreamRoles.length > 0) { + acceptor({ + node, + type: SemanticTokenTypes.property, + property: 'upstreamRoles' + }) + } + + if (node.implementationTechnology) { + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'implementationTechnology' + }) + acceptor({ + node, + type: SemanticTokenTypes.property, + property: 'implementationTechnology' + }) + } + + if (node.upstreamExposedAggregates.length > 0) { + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'exposedAggregates' + }) + acceptor({ + node, + type: SemanticTokenTypes.type, + property: 'upstreamExposedAggregates' + }) + } + + if (node.downstreamGovernanceRights) { + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'downstreamRights' + }) + acceptor({ + node, + type: SemanticTokenTypes.property, + property: 'downstreamGovernanceRights' + }) + } + } + + highlightCustomerSupplierRelationship (node: CustomerSupplierRelationship, acceptor: SemanticTokenAcceptor) { + acceptor({ + node, + type: SemanticTokenTypes.type, + property: 'upstream' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'U' + }) + if (node.upstreamRoles.length > 0) { + acceptor({ + node, + type: SemanticTokenTypes.property, + property: 'upstreamRoles' + }) + } + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: '->' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'Supplier-Customer' + }) + + acceptor({ + node, + type: SemanticTokenTypes.type, + property: 'downstream' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'D' + }) + if (node.downstreamRoles.length > 0) { + acceptor({ + node, + type: SemanticTokenTypes.property, + property: 'downstreamRoles' + }) + } + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: '<-' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'Customer-Supplier' + }) 
+ + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'C' + }) + acceptor({ + node, + type: SemanticTokenTypes.keyword, + keyword: 'S' + }) + } +} diff --git a/src/language/semantictokens/domain/DomainSemanticTokenProvider.ts b/src/language/semantictokens/domain/DomainSemanticTokenProvider.ts new file mode 100644 index 0000000..c2b0f4e --- /dev/null +++ b/src/language/semantictokens/domain/DomainSemanticTokenProvider.ts @@ -0,0 +1,42 @@ +import { DomainPart, isDomain, isDomainPart, isSubdomain, Subdomain } from '../../generated/ast.js' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { SemanticTokenTypes } from 'vscode-languageserver-types' +import { highlightAttribute, highlightMemberAttribute, highlightTypeDeclaration } from '../HighlightingHelper.js' +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { AstNode } from 'langium' + +export class DomainSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is DomainPart { + return isDomainPart(node) + } + + highlight (node: DomainPart, acceptor: SemanticTokenAcceptor) { + let keyword = null + if (isDomain(node)) { + keyword = 'Domain' + } else if (isSubdomain(node)) { + keyword = 'Subdomain' + } + if (keyword) { + highlightTypeDeclaration(node, acceptor, keyword, node.name != null) + } + + if (node.domainVisionStatement) { + highlightMemberAttribute(node, acceptor, ['domainVisionStatement'], 'domainVisionStatement', SemanticTokenTypes.string) + } + + if (isSubdomain(node)) { + this.highlightSubdomain(node, acceptor) + } + } + + private highlightSubdomain (node: Subdomain, acceptor: SemanticTokenAcceptor) { + if (node.supportedFeatures.length > 0) { + highlightAttribute(node, acceptor, ['supports'], 'supportedFeatures') + } + + if (node.type) { + highlightMemberAttribute(node, acceptor, ['type'], 'type') + } + } +} diff --git a/src/language/semantictokens/requirements/FeatureSemanticTokenProvider.ts b/src/language/semantictokens/requirements/FeatureSemanticTokenProvider.ts new file mode 100644 index 0000000..abd6101 --- /dev/null +++ b/src/language/semantictokens/requirements/FeatureSemanticTokenProvider.ts @@ -0,0 +1,53 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { Feature, isFeature, isNormalFeature, isStoryFeature, NormalFeature, StoryFeature } from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightKeyword, highlightString } from '../HighlightingHelper.js' + +export class FeatureSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is Feature { + return isFeature(node) + } + + public highlight (node: Feature, acceptor: SemanticTokenAcceptor) { + if (isNormalFeature(node)) { + this.highlightNormalFeature(node, acceptor) + } else if (isStoryFeature(node)) { + this.highlightStoryFeature(node, acceptor) + } + } + + private highlightNormalFeature (node: NormalFeature, acceptor: SemanticTokenAcceptor) { + highlightString(node, acceptor, 'verb') + + if (node.entityArticle) { + highlightKeyword(node, acceptor, node.entityArticle) + } + + highlightString(node, acceptor, 'entity') + + if (node.entityAttributesPreposition) { + highlightKeyword(node, acceptor, node.entityAttributesPreposition) + } + + if (node.entityAttributes.length > 0) { + highlightString(node, acceptor, 'entityAttributes') + } + + if 
(node.containerEntityPreposition) { + highlightKeyword(node, acceptor, node.containerEntityPreposition) + } + + if (node.containerEntityArticle) { + highlightKeyword(node, acceptor, node.containerEntityArticle) + } + + if (node.containerEntity) { + highlightString(node, acceptor, 'containerEntity') + } + } + + private highlightStoryFeature (node: StoryFeature, acceptor: SemanticTokenAcceptor) { + highlightKeyword(node, acceptor, 'I want to') + } +} diff --git a/src/language/semantictokens/requirements/RequirementsSemanticTokenProvider.ts b/src/language/semantictokens/requirements/RequirementsSemanticTokenProvider.ts new file mode 100644 index 0000000..423b039 --- /dev/null +++ b/src/language/semantictokens/requirements/RequirementsSemanticTokenProvider.ts @@ -0,0 +1,75 @@ +import { + isUseCase, isUserRequirement, + isUserStory, + UseCase, + UserRequirement, + UserStory +} from '../../generated/ast.js' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { + highlightAttribute, + highlightKeyword, + highlightMemberAttribute, + highlightTypeDeclaration +} from '../HighlightingHelper.js' +import { SemanticTokenTypes } from 'vscode-languageserver-types' +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { AstNode } from 'langium' + +export class RequirementsSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is UserRequirement { + return isUserRequirement(node) + } + + highlight (node: UserRequirement, acceptor: SemanticTokenAcceptor) { + if (isUseCase(node)) { + this.highlightUseCase(node, acceptor) + } else if (isUserStory(node)) { + this.highlightUserStory(node, acceptor) + } + } + + private highlightUseCase (node: UseCase, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'UseCase') + + if (node.role) { + highlightMemberAttribute(node, acceptor, ['actor'], 'role') + } + + if (node.secondaryActors.length > 0) { + highlightMemberAttribute(node, acceptor, ['secondaryActors'], 'secondaryActors', SemanticTokenTypes.string) + } + + if (node.features.length > 0) { + highlightKeyword(node, acceptor, 'interactions') + } + + if (node.benefit) { + highlightMemberAttribute(node, acceptor, ['benefit'], 'benefit', SemanticTokenTypes.string) + } + + if (node.scope) { + highlightMemberAttribute(node, acceptor, ['scope'], 'scope', SemanticTokenTypes.string) + } + + if (node.level) { + highlightMemberAttribute(node, acceptor, ['level'], 'level', SemanticTokenTypes.string) + } + } + + private highlightUserStory (node: UserStory, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'UserStory') + + if (node.splittingStory) { + highlightAttribute(node, acceptor, ['split by'], 'splittingStory') + } + + if (node.role) { + highlightMemberAttribute(node, acceptor, ['As a', 'As an'], 'role', SemanticTokenTypes.string) + } + + if (node.benefit) { + highlightMemberAttribute(node, acceptor, ['so that'], 'benefit', SemanticTokenTypes.string) + } + } +} diff --git a/src/language/semantictokens/requirements/StoryValuationSemanticTokenProvider.ts b/src/language/semantictokens/requirements/StoryValuationSemanticTokenProvider.ts new file mode 100644 index 0000000..8c40247 --- /dev/null +++ b/src/language/semantictokens/requirements/StoryValuationSemanticTokenProvider.ts @@ -0,0 +1,28 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { isStoryValuation, StoryValuation } from '../../generated/ast.js' +import { 
AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightKeyword, highlightString } from '../HighlightingHelper.js' + +export class StoryValuationSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is StoryValuation { + return isStoryValuation(node) + } + + highlight (node: StoryValuation, acceptor: SemanticTokenAcceptor) { + highlightKeyword(node, acceptor, 'and that') + highlightString(node, acceptor, 'promotedValues') + + highlightKeyword(node, acceptor, 'is') + highlightKeyword(node, acceptor, 'are') + highlightKeyword(node, acceptor, 'promoted') + + highlightKeyword(node, acceptor, 'accepting that') + highlightString(node, acceptor, 'harmedValues') + + highlightKeyword(node, acceptor, 'is') + highlightKeyword(node, acceptor, 'are') + highlightKeyword(node, acceptor, 'reduced') + highlightKeyword(node, acceptor, 'harmed') + } +} diff --git a/src/language/semantictokens/vdad/AbstractStakeholderSemanticTokenProvider.ts b/src/language/semantictokens/vdad/AbstractStakeholderSemanticTokenProvider.ts new file mode 100644 index 0000000..8ac05f2 --- /dev/null +++ b/src/language/semantictokens/vdad/AbstractStakeholderSemanticTokenProvider.ts @@ -0,0 +1,46 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { + AbstractStakeholder, + isAbstractStakeholder, + isStakeholder, + isStakeholderGroup, + Stakeholder, StakeholderGroup +} from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightMemberAttribute, highlightTypeDeclaration } from '../HighlightingHelper.js' +import { SemanticTokenTypes } from 'vscode-languageserver-types' + +export class AbstractStakeholderSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is AbstractStakeholder { + return isAbstractStakeholder(node) + } + + highlight (node: AbstractStakeholder, acceptor: SemanticTokenAcceptor) { + if (isStakeholder(node)) { + this.highlightStakeholder(node, acceptor) + } else if (isStakeholderGroup(node)) { + this.highlightStakeholderGroup(node, acceptor) + } + } + + private highlightStakeholder (node: Stakeholder, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'Stakeholder') + + if (node.influence) { + highlightMemberAttribute(node, acceptor, ['influence'], 'influence') + } + + if (node.interest) { + highlightMemberAttribute(node, acceptor, ['interest'], 'interest') + } + + if (node.description) { + highlightMemberAttribute(node, acceptor, ['description'], 'description', SemanticTokenTypes.string) + } + } + + private highlightStakeholderGroup (node: StakeholderGroup, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'StakeholderGroup') + } +} diff --git a/src/language/semantictokens/vdad/ActionSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ActionSemanticTokenProvider.ts new file mode 100644 index 0000000..c8b9c03 --- /dev/null +++ b/src/language/semantictokens/vdad/ActionSemanticTokenProvider.ts @@ -0,0 +1,23 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { Action, isAction } from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightKeyword, highlightString } from '../HighlightingHelper.js' + +export class ActionSemanticTokenProvider implements 
ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is Action { + return isAction(node) + } + + highlight (node: Action, acceptor: SemanticTokenAcceptor) { + highlightKeyword(node, acceptor, 'action') + highlightString(node, acceptor, 'action') + + const typeKeywords = ['ACT', 'MONITOR'] + if (typeKeywords.includes(node.type)) { + highlightKeyword(node, acceptor, node.type) + } else { + highlightString(node, acceptor, 'type') + } + } +} diff --git a/src/language/semantictokens/vdad/ConsequenceSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ConsequenceSemanticTokenProvider.ts new file mode 100644 index 0000000..cfa1785 --- /dev/null +++ b/src/language/semantictokens/vdad/ConsequenceSemanticTokenProvider.ts @@ -0,0 +1,16 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { Consequence, isConsequence } from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightKeyword, highlightString } from '../HighlightingHelper.js' + +export class ConsequenceSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is Consequence { + return isConsequence(node) + } + + highlight (node: Consequence, acceptor: SemanticTokenAcceptor) { + highlightKeyword(node, acceptor, node.type) + highlightString(node, acceptor, 'consequence') + } +} diff --git a/src/language/semantictokens/vdad/StakeholderSemanticTokenProvider.ts b/src/language/semantictokens/vdad/StakeholderSemanticTokenProvider.ts new file mode 100644 index 0000000..e102d19 --- /dev/null +++ b/src/language/semantictokens/vdad/StakeholderSemanticTokenProvider.ts @@ -0,0 +1,18 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { isStakeholders, Stakeholders } from '../../generated/ast.js' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightAttribute, highlightKeyword } from '../HighlightingHelper.js' + +export class StakeholderSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: any): node is Stakeholders { + return isStakeholders(node) + } + + highlight (node: Stakeholders, acceptor: SemanticTokenAcceptor) { + highlightKeyword(node, acceptor, 'Stakeholders') + + if (node.contexts.length > 0) { + highlightAttribute(node, acceptor, ['of'], 'contexts') + } + } +} diff --git a/src/language/semantictokens/vdad/ValueClusterSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ValueClusterSemanticTokenProvider.ts new file mode 100644 index 0000000..9f87ed9 --- /dev/null +++ b/src/language/semantictokens/vdad/ValueClusterSemanticTokenProvider.ts @@ -0,0 +1,35 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { isValueCluster, ValueCluster } from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightMemberAttribute, highlightTypeDeclaration } from '../HighlightingHelper.js' +import { SemanticTokenTypes } from 'vscode-languageserver-types' + +export class ValueClusterSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is ValueCluster { + return isValueCluster(node) + } + + highlight (node: ValueCluster, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'ValueCluster') + + if (node.coreValue) { + highlightMemberAttribute(node, acceptor, 
['core'], 'coreValue') + } + if (node.coreValue7000) { + highlightMemberAttribute(node, acceptor, ['core'], 'coreValue7000') + } + + if (node.demonstrators.length > 0) { + highlightMemberAttribute(node, acceptor, ['demonstrator'], 'demonstrators', SemanticTokenTypes.string) + } + + if (node.relatedValues.length > 0) { + highlightMemberAttribute(node, acceptor, ['relatedValue'], 'relatedValues', SemanticTokenTypes.string) + } + + if (node.opposingValues.length > 0) { + highlightMemberAttribute(node, acceptor, ['opposingValue'], 'opposingValues', SemanticTokenTypes.string) + } + } +} diff --git a/src/language/semantictokens/vdad/ValueElicitationSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ValueElicitationSemanticTokenProvider.ts new file mode 100644 index 0000000..fa0ddaa --- /dev/null +++ b/src/language/semantictokens/vdad/ValueElicitationSemanticTokenProvider.ts @@ -0,0 +1,29 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { isValueElicitation, ValueElicitation } from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightKeyword, highlightMemberAttribute, highlightType } from '../HighlightingHelper.js' + +export class ValueElicitationSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is ValueElicitation { + return isValueElicitation(node) + } + + highlight (node: ValueElicitation, acceptor: SemanticTokenAcceptor) { + highlightKeyword(node, acceptor, 'Stakeholder') + highlightKeyword(node, acceptor, 'Stakeholders') + highlightType(node, acceptor, 'stakeholder') + + if (node.priority) { + highlightMemberAttribute(node, acceptor, ['priority'], 'priority') + } + + if (node.impact) { + highlightMemberAttribute(node, acceptor, ['impact'], 'impact') + } + + if (node.consequences.length > 0) { + highlightKeyword(node, acceptor, 'consequences') + } + } +} diff --git a/src/language/semantictokens/vdad/ValueEpicSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ValueEpicSemanticTokenProvider.ts new file mode 100644 index 0000000..18c23b3 --- /dev/null +++ b/src/language/semantictokens/vdad/ValueEpicSemanticTokenProvider.ts @@ -0,0 +1,27 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { isValueEpic, ValueEpic } from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightKeyword, highlightString, highlightType, highlightTypeDeclaration } from '../HighlightingHelper.js' + +export class ValueEpicSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is ValueEpic { + return isValueEpic(node) + } + + highlight (node: ValueEpic, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'ValueEpic') + + highlightKeyword(node, acceptor, 'As a') + highlightType(node, acceptor, 'stakeholder') + highlightKeyword(node, acceptor, 'I value') + highlightString(node, acceptor, 'value') + highlightKeyword(node, acceptor, 'as demonstrated in') + + highlightKeyword(node, acceptor, 'realization of') + highlightString(node, acceptor, 'realizedValues') + + highlightKeyword(node, acceptor, 'reduction of') + highlightString(node, acceptor, 'reducedValues') + } +} diff --git a/src/language/semantictokens/vdad/ValueNarrativeSemanticTokenProvider.ts 
b/src/language/semantictokens/vdad/ValueNarrativeSemanticTokenProvider.ts new file mode 100644 index 0000000..570521a --- /dev/null +++ b/src/language/semantictokens/vdad/ValueNarrativeSemanticTokenProvider.ts @@ -0,0 +1,27 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { isValueNarrative, ValueNarrative } from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightKeyword, highlightString, highlightTypeDeclaration } from '../HighlightingHelper.js' + +export class ValueNarrativeSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is ValueNarrative { + return isValueNarrative(node) + } + + highlight (node: ValueNarrative, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'ValueNarrative') + + highlightKeyword(node, acceptor, 'When the SOI executes') + highlightString(node, acceptor, 'feature') + + highlightKeyword(node, acceptor, 'stakeholders expect it to promote, protect or create') + highlightString(node, acceptor, 'promotedValues') + + highlightKeyword(node, acceptor, 'possibly degrading or prohibiting') + highlightString(node, acceptor, 'harmedValues') + + highlightKeyword(node, acceptor, 'with the following externally observable and/or internally auditable behavior:') + highlightString(node, acceptor, 'preAndPostConditions') + } +} diff --git a/src/language/semantictokens/vdad/ValueRegisterSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ValueRegisterSemanticTokenProvider.ts new file mode 100644 index 0000000..2609f59 --- /dev/null +++ b/src/language/semantictokens/vdad/ValueRegisterSemanticTokenProvider.ts @@ -0,0 +1,19 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { isValueRegister, ValueRegister } from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightAttribute, highlightKeyword } from '../HighlightingHelper.js' + +export class ValueRegisterSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is ValueRegister { + return isValueRegister(node) + } + + highlight (node: ValueRegister, acceptor: SemanticTokenAcceptor) { + highlightKeyword(node, acceptor, 'ValueRegister') + + if (node.context) { + highlightAttribute(node, acceptor, ['of'], 'context') + } + } +} diff --git a/src/language/semantictokens/vdad/ValueSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ValueSemanticTokenProvider.ts new file mode 100644 index 0000000..3c37f2f --- /dev/null +++ b/src/language/semantictokens/vdad/ValueSemanticTokenProvider.ts @@ -0,0 +1,39 @@ +import { + isValue, + Value +} from '../../generated/ast.js' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { + highlightKeyword, + highlightMemberAttribute, + highlightTypeDeclaration +} from '../HighlightingHelper.js' +import { SemanticTokenTypes } from 'vscode-languageserver-types' +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { AstNode } from 'langium' + +export class ValueSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is Value { + return isValue(node) + } + + highlight (node: Value, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'Value') + + if (node.coreValue) { + 
highlightKeyword(node, acceptor, 'isCore') + } + + if (node.demonstrators.length > 0) { + highlightMemberAttribute(node, acceptor, ['demonstrator'], 'demonstrators', SemanticTokenTypes.string) + } + + if (node.relatedValues.length > 0) { + highlightMemberAttribute(node, acceptor, ['relatedValue'], 'relatedValues', SemanticTokenTypes.string) + } + + if (node.opposingValues.length > 0) { + highlightMemberAttribute(node, acceptor, ['opposingValue'], 'opposingValues', SemanticTokenTypes.string) + } + } +} diff --git a/src/language/semantictokens/vdad/ValueWeightingSemanticTokenProvider.ts b/src/language/semantictokens/vdad/ValueWeightingSemanticTokenProvider.ts new file mode 100644 index 0000000..c28941a --- /dev/null +++ b/src/language/semantictokens/vdad/ValueWeightingSemanticTokenProvider.ts @@ -0,0 +1,30 @@ +import { ContextMapperSemanticTokenProvider } from '../ContextMapperSemanticTokenProvider.js' +import { isValueWeighting, ValueWeighting } from '../../generated/ast.js' +import { AstNode } from 'langium' +import { SemanticTokenAcceptor } from 'langium/lsp' +import { highlightKeyword, highlightString, highlightType, highlightTypeDeclaration } from '../HighlightingHelper.js' + +export class ValueWeightingSemanticTokenProvider implements ContextMapperSemanticTokenProvider { + supports (node: AstNode): node is ValueWeighting { + return isValueWeighting(node) + } + + highlight (node: ValueWeighting, acceptor: SemanticTokenAcceptor) { + highlightTypeDeclaration(node, acceptor, 'ValueWeighting') + + highlightKeyword(node, acceptor, 'In the context of the SOI,') + + highlightKeyword(node, acceptor, 'stakeholder') + highlightType(node, acceptor, 'stakeholder') + highlightKeyword(node, acceptor, 'values') + highlightString(node, acceptor, 'value1') + highlightKeyword(node, acceptor, 'more than') + highlightString(node, acceptor, 'value2') + + highlightKeyword(node, acceptor, 'expecting benefits such as') + highlightString(node, acceptor, 'benefits') + + highlightKeyword(node, acceptor, 'running the risk of harms such as') + highlightString(node, acceptor, 'harms') + } +} diff --git a/src/language/validation/AbstractContextMapperValidator.ts b/src/language/validation/AbstractContextMapperValidator.ts new file mode 100644 index 0000000..5ff5003 --- /dev/null +++ b/src/language/validation/AbstractContextMapperValidator.ts @@ -0,0 +1,5 @@ +import { AstNode, ValidationAcceptor } from 'langium' + +export interface AbstractContextMapperValidator<T extends AstNode> { + validate (node: T, acceptor: ValidationAcceptor): void +} diff --git a/src/language/validation/ContextMappingModelValidator.ts b/src/language/validation/ContextMappingModelValidator.ts new file mode 100644 index 0000000..53d3183 --- /dev/null +++ b/src/language/validation/ContextMappingModelValidator.ts @@ -0,0 +1,18 @@ +import type { ValidationAcceptor } from 'langium' +import type { ContextMappingModel } from '../generated/ast.js' +import { AbstractContextMapperValidator } from './AbstractContextMapperValidator.js' + +export class ContextMappingModelValidator implements AbstractContextMapperValidator<ContextMappingModel> { + validate (model: ContextMappingModel, acceptor: ValidationAcceptor): void { + checkForZeroOrOneContextMap(model, acceptor) + } +} + +export function checkForZeroOrOneContextMap (model: ContextMappingModel, acceptor: ValidationAcceptor): void { + if (model.contextMaps.length > 1) { + acceptor('error', 'There must be zero or one context map', { + node: model, + property: 'contextMaps' + }) + } +} diff --git a/src/language/validation/ValueValidator.ts 
b/src/language/validation/ValueValidator.ts new file mode 100644 index 0000000..b17ac5d --- /dev/null +++ b/src/language/validation/ValueValidator.ts @@ -0,0 +1,14 @@ +import { AbstractContextMapperValidator } from './AbstractContextMapperValidator.js' +import { Value } from '../generated/ast.js' +import { ValidationAcceptor } from 'langium' + +export class ValueValidator implements AbstractContextMapperValidator<Value> { + validate (node: Value, acceptor: ValidationAcceptor): void { + if (node.coreValue.length > 1) { + acceptor('error', 'There must be zero or one isCore attribute', { + node, + property: 'coreValue' + }) + } + } +} diff --git a/test/linking/linking.test.ts b/test/linking/BoundedContextLinking.test.ts similarity index 85% rename from test/linking/linking.test.ts rename to test/linking/BoundedContextLinking.test.ts index 85bb9b6..6e58005 100644 --- a/test/linking/linking.test.ts +++ b/test/linking/BoundedContextLinking.test.ts @@ -1,7 +1,7 @@ import { afterEach, beforeAll, describe, expect, test } from 'vitest' import { EmptyFileSystem, type LangiumDocument } from 'langium' import { clearDocuments, parseHelper } from 'langium/test' -import { createContextMapperDslServices } from '../../src/language/context-mapper-dsl-module.js' +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' import { ContextMappingModel, isSharedKernel, SharedKernel } from '../../src/language/generated/ast.js' import { checkDocumentValid } from '../TestHelper.js' @@ -18,11 +18,11 @@ beforeAll(async () => { }) afterEach(async () => { - document && clearDocuments(services.shared, [document]) + document && await clearDocuments(services.shared, [document]) }) -describe('Linking tests', () => { - test('linking of greetings', async () => { +describe('Bounded context linking tests', () => { + test('linking of bounded contexts in context map', async () => { document = await parse(` ContextMap { TestContext [SK] <-> [SK] FirstContext @@ -36,7 +36,7 @@ describe('Linking tests', () => { const referencedContexts: Array<string | undefined> = [] - document.parseResult.value.map?.relationships.forEach(r => { + document.parseResult.value.contextMaps[0].relationships.forEach(r => { if (isSharedKernel(r)) { referencedContexts.push((r as SharedKernel).participant1.ref?.name) referencedContexts.push((r as SharedKernel).participant2.ref?.name) diff --git a/test/parsing/AggregateParsing.test.ts b/test/parsing/AggregateParsing.test.ts new file mode 100644 index 0000000..44c4851 --- /dev/null +++ b/test/parsing/AggregateParsing.test.ts @@ -0,0 +1,163 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { Aggregate, ContextMappingModel } from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, expect, test } from 'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType<typeof createContextMapperDslServices> +let parse: ReturnType<typeof parseHelper<ContextMappingModel>> +let document: LangiumDocument<ContextMappingModel> | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('Aggregate parsing tests', () => { + test('parse aggregate without body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + Aggregate TestAggregate + } + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + 
expect(document.parseResult.value.boundedContexts[0].aggregates).toHaveLength(1) + const aggregate = document.parseResult.value.boundedContexts[0].aggregates[0] + expectAggregateToBeEmpty(aggregate) + }) + + test('parse aggregate with empty body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + Aggregate TestAggregate { + } + } + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].aggregates).toHaveLength(1) + const aggregate = document.parseResult.value.boundedContexts[0].aggregates[0] + expectAggregateToBeEmpty(aggregate) + }) + + test('parse aggregate with full body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + Aggregate TestAggregate { + responsibilities "resp1", "resp2" + owner = TestContext + useCases TestUseCase + knowledgeLevel = META + contentVolatility = RARELY + likelihoodForChange = NORMAL + availabilityCriticality = HIGH + consistencyCriticality = HIGH + securityZone "testZone" + securityCriticality = LOW + securityAccessGroup = "testGroup" + storageSimilarity = TINY + } + } + UseCase TestUseCase + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].aggregates).toHaveLength(1) + const aggregate = document.parseResult.value.boundedContexts[0].aggregates[0] + expect(aggregate.name).toEqual('TestAggregate') + expect(aggregate.responsibilities).toHaveLength(2) + expect(aggregate.responsibilities[0]).toEqual('resp1') + expect(aggregate.responsibilities[1]).toEqual('resp2') + expect(aggregate.owner).not.toBeUndefined() + expect(aggregate.userRequirements).toHaveLength(1) + expect(aggregate.knowledgeLevel).toEqual('META') + expect(aggregate.contentVolatility).toEqual('RARELY') + expect(aggregate.likelihoodForChange).toEqual('NORMAL') + expect(aggregate.availabilityCriticality).toEqual('HIGH') + expect(aggregate.consistencyCriticality).toEqual('HIGH') + expect(aggregate.securityZone).toEqual('testZone') + expect(aggregate.securityCriticality).toEqual('LOW') + expect(aggregate.securityAccessGroup).toEqual('testGroup') + expect(aggregate.storageSimilarity).toEqual('TINY') + }) + + test('parse likelihood variation', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + Aggregate TestAggregate { + structuralVolatility = NORMAL + } + } + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].aggregates).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].aggregates[0].likelihoodForChange).toEqual('NORMAL') + }) + + test('parse userStory', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + Aggregate TestAggregate { + userStories TestStory + } + } + UserStory TestStory + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].aggregates).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].aggregates[0].userRequirements).toHaveLength(1) + }) + + test('parse features', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + Aggregate TestAggregate { + features TestStory, TestUseCase + } + } + UserStory TestStory + UseCase TestUseCase + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + 
expect(document.parseResult.value.boundedContexts[0].aggregates).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].aggregates[0].userRequirements).toHaveLength(2) + }) + + test('parse userRequirements', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + Aggregate TestAggregate { + userRequirements TestStory, TestUseCase + } + } + UserStory TestStory + UseCase TestUseCase + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].aggregates).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].aggregates[0].userRequirements).toHaveLength(2) + }) +}) + +function expectAggregateToBeEmpty (aggregate: Aggregate) { + expect(aggregate).not.toBeUndefined() + expect(aggregate.name).toEqual('TestAggregate') + expect(aggregate.responsibilities).toHaveLength(0) + expect(aggregate.userRequirements).toHaveLength(0) + expect(aggregate.owner).toBeUndefined() + expect(aggregate.knowledgeLevel).toBeUndefined() + expect(aggregate.likelihoodForChange).toBeUndefined() + expect(aggregate.contentVolatility).toBeUndefined() + expect(aggregate.availabilityCriticality).toBeUndefined() + expect(aggregate.consistencyCriticality).toBeUndefined() + expect(aggregate.storageSimilarity).toBeUndefined() + expect(aggregate.securityCriticality).toBeUndefined() + expect(aggregate.securityZone).toBeUndefined() + expect(aggregate.securityAccessGroup).toBeUndefined() +} diff --git a/test/parsing/BoundedContextParsing.test.ts b/test/parsing/BoundedContextParsing.test.ts new file mode 100644 index 0000000..09b988e --- /dev/null +++ b/test/parsing/BoundedContextParsing.test.ts @@ -0,0 +1,118 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, test, expect } from 'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('BoundedContext parsing tests', () => { + test('parse BoundedContext without body', async () => { + document = await parseValidInput(parse, ` + BoundedContext FirstContext + `) + + const contextMappingModel = document.parseResult.value + expect(contextMappingModel).not.toBeUndefined() + expect(contextMappingModel.boundedContexts.length).toEqual(1) + expect(contextMappingModel.contextMaps.length).toEqual(0) + expect(contextMappingModel.userRequirements.length).toEqual(0) + expect(contextMappingModel.domains.length).toEqual(0) + expect(contextMappingModel.stakeholders.length).toEqual(0) + expect(contextMappingModel.valueRegisters.length).toEqual(0) + + const boundedContext = contextMappingModel.boundedContexts[0] + expect(boundedContext).not.toBeUndefined() + expect(boundedContext.name).toEqual('FirstContext') + expect(boundedContext.realizedBoundedContexts.length).toEqual(0) + expect(boundedContext.refinedBoundedContext).toBeUndefined() + expect(boundedContext.implementedDomainParts.length).toEqual(0) + expect(boundedContext.domainVisionStatement).toBeUndefined() + expect(boundedContext.knowledgeLevel).toBeUndefined() + expect(boundedContext.type).toBeUndefined() + 
expect(boundedContext.responsibilities.length).toEqual(0) + expect(boundedContext.implementationTechnology).toBeUndefined() + expect(boundedContext.businessModel).toBeUndefined() + expect(boundedContext.evolution).toBeUndefined() + expect(boundedContext.aggregates.length).toEqual(0) + }) + + test('parse BoundedContext with full body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext + implements TestDomain, TestSubDomain + realizes ContextToRealize + refines ContextToRefine + { + type = UNDEFINED + responsibilities = "resp1", "resp2" + knowledgeLevel = CONCRETE + implementationTechnology = "java" + businessModel = "model" + evolution = GENESIS + domainVisionStatement = "vision" + } + + BoundedContext ContextToRefine + BoundedContext ContextToRealize + + Domain TestDomain + Domain AnotherDomain { + Subdomain TestSubDomain + } + `) + + const contextMappingModel = document.parseResult.value + expect(contextMappingModel).not.toBeUndefined() + expect(contextMappingModel.boundedContexts.length).toEqual(3) + + const boundedContext = contextMappingModel.boundedContexts[0] + expect(boundedContext).not.toBeUndefined() + expect(boundedContext.name).toEqual('TestContext') + expect(boundedContext.implementedDomainParts).toHaveLength(2) + expect(boundedContext.realizedBoundedContexts).toHaveLength(1) + expect(boundedContext.refinedBoundedContext).not.toBeUndefined() + expect(boundedContext.domainVisionStatement).toEqual('vision') + expect(boundedContext.type).toEqual('UNDEFINED') + expect(boundedContext.implementationTechnology).toEqual('java') + expect(boundedContext.responsibilities.length).toEqual(2) + expect(boundedContext.businessModel).toEqual('model') + expect(boundedContext.knowledgeLevel).toEqual('CONCRETE') + expect(boundedContext.evolution).toEqual('GENESIS') + expect(boundedContext.aggregates.length).toEqual(0) + }) + + test('parse BoundedContext with partial body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + type = FEATURE + implementationTechnology = "c#" + domainVisionStatement = "vision" + } + `) + + const contextMappingModel = document.parseResult.value + expect(contextMappingModel).not.toBeUndefined() + expect(contextMappingModel.boundedContexts.length).toEqual(1) + + const boundedContext = contextMappingModel.boundedContexts[0] + expect(boundedContext).not.toBeUndefined() + expect(boundedContext.name).toEqual('TestContext') + expect(boundedContext.domainVisionStatement).toEqual('vision') + expect(boundedContext.type).toEqual('FEATURE') + expect(boundedContext.implementationTechnology).toEqual('c#') + expect(boundedContext.responsibilities.length).toEqual(0) + expect(boundedContext.businessModel).toBeUndefined() + expect(boundedContext.knowledgeLevel).toBeUndefined() + expect(boundedContext.evolution).toBeUndefined() + expect(boundedContext.aggregates.length).toEqual(0) + }) +}) diff --git a/test/parsing/ContextMapParsing.test.ts b/test/parsing/ContextMapParsing.test.ts new file mode 100644 index 0000000..a324fd4 --- /dev/null +++ b/test/parsing/ContextMapParsing.test.ts @@ -0,0 +1,86 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, expect, test } from 'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType 
+let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('Context Map parsing tests', () => { + test('parse empty context map', async () => { + document = await parseValidInput(parse, ` + ContextMap { + } + `) + + expect(document.parseResult.value.contextMaps).toHaveLength(1) + const contextMap = document.parseResult.value.contextMaps[0] + expect(contextMap).not.toBeUndefined() + expect(contextMap.name).toBeUndefined() + expect(contextMap.boundedContexts).toHaveLength(0) + expect(contextMap.type).toBeUndefined() + expect(contextMap.state).toBeUndefined() + expect(contextMap.relationships).toHaveLength(0) + }) + + test('parse context map with name', async () => { + document = await parseValidInput(parse, ` + ContextMap TestMap { + } + `) + + expect(document.parseResult.value.contextMaps).toHaveLength(1) + const contextMap = document.parseResult.value.contextMaps[0] + expect(contextMap).not.toBeUndefined() + expect(contextMap.name).toEqual('TestMap') + }) + + test('parse context map with full body', async () => { + document = await parseValidInput(parse, ` + ContextMap TestMap { + state = AS_IS + type = ORGANIZATIONAL + contains FirstContext, SecondContext + + FirstContext [SK] <-> [SK] SecondContext + } + + BoundedContext FirstContext + BoundedContext SecondContext + `) + + expect(document.parseResult.value.contextMaps).toHaveLength(1) + const contextMap = document.parseResult.value.contextMaps[0] + expect(contextMap).not.toBeUndefined() + expect(contextMap.name).toEqual('TestMap') + expect(contextMap.state).toEqual('AS_IS') + expect(contextMap.type).toEqual('ORGANIZATIONAL') + expect(contextMap.boundedContexts).toHaveLength(2) + expect(contextMap.relationships).toHaveLength(1) + }) + + test('parse context map contains variation', async () => { + document = await parseValidInput(parse, ` + ContextMap TestMap { + contains FirstContext + contains SecondContext + } + + BoundedContext FirstContext + BoundedContext SecondContext + `) + + expect(document.parseResult.value.contextMaps).toHaveLength(1) + const contextMap = document.parseResult.value.contextMaps[0] + expect(contextMap).not.toBeUndefined() + expect(contextMap.boundedContexts).toHaveLength(2) + }) +}) diff --git a/test/parsing/ContextMappingModelParsing.test.ts b/test/parsing/ContextMappingModelParsing.test.ts new file mode 100644 index 0000000..d6ed057 --- /dev/null +++ b/test/parsing/ContextMappingModelParsing.test.ts @@ -0,0 +1,28 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, expect, test } from 'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('ContextMappingModel tests', () => { + test('parse empty file', async () => { + document = await parseValidInput(parse, '') + + expect(document.parseResult.value.valueRegisters).toHaveLength(0) + expect(document.parseResult.value.contextMaps).toHaveLength(0) + 
expect(document.parseResult.value.boundedContexts).toHaveLength(0) + expect(document.parseResult.value.domains).toHaveLength(0) + expect(document.parseResult.value.stakeholders).toHaveLength(0) + expect(document.parseResult.value.userRequirements).toHaveLength(0) + }) +}) diff --git a/test/parsing/DomainParsing.test.ts b/test/parsing/DomainParsing.test.ts new file mode 100644 index 0000000..d8981eb --- /dev/null +++ b/test/parsing/DomainParsing.test.ts @@ -0,0 +1,84 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, expect, test } from 'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('Domain parsing tests', () => { + test('parse domain without body', async () => { + document = await parseValidInput(parse, ` + Domain TestDomain + `) + + expect(document.parseResult.value.domains).toHaveLength(1) + const domain = document.parseResult.value.domains[0] + expect(domain).not.toBeUndefined() + expect(domain.name).toEqual('TestDomain') + expect(domain.domainVisionStatement).toBeUndefined() + expect(domain.subdomains).toHaveLength(0) + }) + + test('parse domain with empty body', async () => { + document = await parseValidInput(parse, ` + Domain TestDomain { + } + `) + + expect(document.parseResult.value.domains).toHaveLength(1) + const domain = document.parseResult.value.domains[0] + expect(domain).not.toBeUndefined() + expect(domain.name).toEqual('TestDomain') + expect(domain.domainVisionStatement).toBeUndefined() + expect(domain.subdomains).toHaveLength(0) + }) + + test('parse domain with full body', async () => { + document = await parseValidInput(parse, ` + Domain TestDomain { + domainVisionStatement = "vision" + Subdomain FirstSubdomain + Subdomain SecondSubdomain + } + `) + + expect(document.parseResult.value.domains).toHaveLength(1) + const domain = document.parseResult.value.domains[0] + expect(domain).not.toBeUndefined() + expect(domain.name).toEqual('TestDomain') + expect(domain.domainVisionStatement).toEqual('vision') + expect(domain.subdomains).toHaveLength(2) + }) + + test('parse subdomain', async () => { + document = await parseValidInput(parse, ` + Domain TestDomain { + Subdomain TestSubdomain + supports TestUseCase + { + domainVisionStatement "vision" + type = CORE_DOMAIN + } + } + UseCase TestUseCase + `) + + expect(document.parseResult.value.domains).toHaveLength(1) + expect(document.parseResult.value.domains[0].subdomains).toHaveLength(1) + const subdomain = document.parseResult.value.domains[0].subdomains[0] + expect(subdomain).not.toBeUndefined() + expect(subdomain.name).toEqual('TestSubdomain') + expect(subdomain.supportedFeatures).toHaveLength(1) + expect(subdomain.type).toEqual('CORE_DOMAIN') + expect(subdomain.domainVisionStatement).toEqual('vision') + }) +}) diff --git a/test/parsing/ParsingTestHelper.ts b/test/parsing/ParsingTestHelper.ts new file mode 100644 index 0000000..9caf258 --- /dev/null +++ b/test/parsing/ParsingTestHelper.ts @@ -0,0 +1,21 @@ +import { expect } from 'vitest' +import { LangiumDocument } from 'langium' +import { ContextMappingModel } 
from '../../src/language/generated/ast.js' +import { parseHelper } from 'langium/test' + +export async function parseValidInput (parse: ReturnType<typeof parseHelper<ContextMappingModel>>, input: string): Promise<LangiumDocument<ContextMappingModel>> { + const document = await parse(input) + + expectNoParsingErrors(document) + return document +} + +export async function parseInvalidInput (parse: ReturnType<typeof parseHelper<ContextMappingModel>>, input: string): Promise<void> { + const document = await parse(input) + + expect(document.parseResult.parserErrors.length).toBeGreaterThanOrEqual(1) +} + +export function expectNoParsingErrors (document: LangiumDocument) { + expect(document.parseResult.parserErrors.length).toEqual(0) +} diff --git a/test/parsing/RelationshipParsing.test.ts b/test/parsing/RelationshipParsing.test.ts new file mode 100644 index 0000000..c57b160 --- /dev/null +++ b/test/parsing/RelationshipParsing.test.ts @@ -0,0 +1,359 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { + ContextMappingModel, + CustomerSupplierRelationship, + Partnership, + SharedKernel, UpstreamDownstreamRelationship +} from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, expect, test } from 'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType<typeof createContextMapperDslServices> +let parse: ReturnType<typeof parseHelper<ContextMappingModel>> +let document: LangiumDocument<ContextMappingModel> | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('Relationship parsing tests', () => { + test('parse SharedKernel relationship properties', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext <-> FirstContext : RelName { + implementationTechnology "Java" + } + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expect(document.parseResult.value.contextMaps).toHaveLength(1) + expect(document.parseResult.value.contextMaps[0].relationships).toHaveLength(1) + const relationship = document.parseResult.value.contextMaps[0].relationships[0] as SharedKernel + expect(relationship).not.toBeUndefined() + expect(relationship.name).toEqual('RelName') + expect(relationship.implementationTechnology).toEqual('Java') + expect(relationship.participant1).not.toBeUndefined() + expect(relationship.participant2).not.toBeUndefined() + }) + + test('parse SharedKernel relationship variation 1', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [SK] <-> [SK] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + expectRelationshipType(document, 'SharedKernel') + }) + + test('parse SharedKernel relationship variation 2', async () => { + document = await parseValidInput(parse, ` + ContextMap { + [SK] TestContext <-> [SK] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'SharedKernel') + }) + + test('parse SharedKernel relationship variation 3', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [SK] <-> FirstContext [SK] + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'SharedKernel') + }) + + test('parse SharedKernel relationship variation 4', async () => { + document = await parseValidInput(parse, ` + ContextMap { + [SK] TestContext <-> FirstContext [SK] + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + 
expectRelationshipType(document, 'SharedKernel') + }) + + test('parse SharedKernel relationship variation 5', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Shared-Kernel FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'SharedKernel') + }) + + test('parse SharedKernel relationship variation 6', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext <-> FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'SharedKernel') + }) + + test('parse Participant relationship properties', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [P] <-> [P] FirstContext : RelName { + implementationTechnology "Java" + } + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expect(document.parseResult.value.contextMaps).toHaveLength(1) + expect(document.parseResult.value.contextMaps[0].relationships).toHaveLength(1) + const relationship = document.parseResult.value.contextMaps[0].relationships[0] as Partnership + expect(relationship).not.toBeUndefined() + expect(relationship.name).toEqual('RelName') + expect(relationship.implementationTechnology).toEqual('Java') + expect(relationship.participant1).not.toBeUndefined() + expect(relationship.participant2).not.toBeUndefined() + }) + + test('parse Participant relationship variation 1', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [P] <-> [P] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'Partnership') + }) + + test('parse Participant relationship variation 2', async () => { + document = await parseValidInput(parse, ` + ContextMap { + [P] TestContext <-> [P] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'Partnership') + }) + + test('parse Participant relationship variation 3', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [P] <-> FirstContext [P] + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'Partnership') + }) + + test('parse Participant relationship variation 4', async () => { + document = await parseValidInput(parse, ` + ContextMap { + [P] TestContext <-> FirstContext [P] + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'Partnership') + }) + + test('parse Participant relationship variation 5', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Partnership FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'Partnership') + }) + + test('parse CustomerSupplier relationship properties', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [S,OHS] -> [C,CF] FirstContext : RelName { + implementationTechnology "Java" + downstreamRights INFLUENCER + exposedAggregates = TestAggregate + } + } + BoundedContext FirstContext + BoundedContext TestContext { + Aggregate TestAggregate + } + `) + + expect(document.parseResult.value.contextMaps).toHaveLength(1) + expect(document.parseResult.value.contextMaps[0].relationships).toHaveLength(1) + const relationship = document.parseResult.value.contextMaps[0].relationships[0] as CustomerSupplierRelationship + 
expect(relationship).not.toBeUndefined() + expect(relationship.name).toEqual('RelName') + expect(relationship.implementationTechnology).toEqual('Java') + expect(relationship.downstreamGovernanceRights).toEqual('INFLUENCER') + expect(relationship.upstreamExposedAggregates).toHaveLength(1) + expect(relationship.upstream).not.toBeUndefined() + expect(relationship.downstream).not.toBeUndefined() + expect(relationship.upstreamRoles).toHaveLength(1) + expect(relationship.upstreamRoles[0]).toEqual('OHS') + expect(relationship.downstreamRoles).toHaveLength(1) + expect(relationship.downstreamRoles[0]).toEqual('CF') + }) + + test('parse CustomerSupplier relationship variation 1', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [S] -> [C] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'CustomerSupplierRelationship') + }) + + test('parse CustomerSupplier relationship variation 2', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [C] <- [S] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'CustomerSupplierRelationship') + }) + + test('parse CustomerSupplier relationship variation 3', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Customer-Supplier FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'CustomerSupplierRelationship') + }) + + test('parse CustomerSupplier relationship variation 4', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Supplier-Customer FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'CustomerSupplierRelationship') + }) + + test('parse UpstreamDownstream relationship properties', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [U,OHS] -> [D,CF] FirstContext : RelName { + downstreamRights INFLUENCER + exposedAggregates = TestAggregate + implementationTechnology "Java" + } + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expect(document.parseResult.value.contextMaps).toHaveLength(1) + expect(document.parseResult.value.contextMaps[0].relationships).toHaveLength(1) + const relationship = document.parseResult.value.contextMaps[0].relationships[0] as UpstreamDownstreamRelationship + expect(relationship).not.toBeUndefined() + expect(relationship.name).toEqual('RelName') + expect(relationship.downstreamGovernanceRights).toEqual('INFLUENCER') + expect(relationship.upstreamExposedAggregates).toHaveLength(1) + expect(relationship.implementationTechnology).toEqual('Java') + expect(relationship.upstream).not.toBeUndefined() + expect(relationship.upstreamRoles).toHaveLength(1) + expect(relationship.upstreamRoles[0]).toEqual('OHS') + expect(relationship.downstream).not.toBeUndefined() + expect(relationship.downstreamRoles).toHaveLength(1) + expect(relationship.downstreamRoles[0]).toEqual('CF') + }) + + test('parse UpstreamDownstream relationship variation 1', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext [U] -> [D] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'UpstreamDownstreamRelationship') + }) + + test('parse UpstreamDownstream relationship variation 2', async () => { + document = await 
parseValidInput(parse, ` + ContextMap { + TestContext [D] <- [U] FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'UpstreamDownstreamRelationship') + }) + + test('parse UpstreamDownstream relationship variation 3', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Upstream-Downstream FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'UpstreamDownstreamRelationship') + }) + + test('parse UpstreamDownstream relationship variation 4', async () => { + document = await parseValidInput(parse, ` + ContextMap { + TestContext Downstream-Upstream FirstContext + } + BoundedContext FirstContext + BoundedContext TestContext + `) + + expectRelationshipType(document, 'UpstreamDownstreamRelationship') + }) +}) + +function expectRelationshipType (document: LangiumDocument, type: string) { + expect(document.parseResult.value.contextMaps).toHaveLength(1) + expect(document.parseResult.value.contextMaps[0].relationships).toHaveLength(1) + const relationship = document.parseResult.value.contextMaps[0].relationships[0] + expect(relationship).not.toBeUndefined() + expect(relationship.$type).toEqual(type) +} diff --git a/test/parsing/SculptorModuleParsing.test.ts b/test/parsing/SculptorModuleParsing.test.ts new file mode 100644 index 0000000..0d810c4 --- /dev/null +++ b/test/parsing/SculptorModuleParsing.test.ts @@ -0,0 +1,78 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { ContextMappingModel, SculptorModule } from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, expect, test } from 'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('Sculptor module parsing tests', () => { + test('parse module without body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + Module TestModule + } + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].modules).toHaveLength(1) + const module = document.parseResult.value.boundedContexts[0].modules[0] + expectEmptyModule(module) + }) + + test('parse module with empty body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + Module TestModule { + } + } + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].modules).toHaveLength(1) + const module = document.parseResult.value.boundedContexts[0].modules[0] + expectEmptyModule(module) + }) + + test('parse module with full body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext { + "doc" + Module TestModule { + hint = "hint" + external + basePackage = base.package + Aggregate SecondAggregate + } + } + `) + + expect(document.parseResult.value.boundedContexts).toHaveLength(1) + expect(document.parseResult.value.boundedContexts[0].modules).toHaveLength(1) + const module = document.parseResult.value.boundedContexts[0].modules[0] + expect(module.doc).toEqual('doc') + 
expect(module.name).toEqual('TestModule') + expect(module.external).toEqual(true) + expect(module.basePackage).toEqual('base.package') + expect(module.hint).toEqual('hint') + expect(module.aggregates).toHaveLength(1) + }) +}) + +function expectEmptyModule (module: SculptorModule): void { + expect(module).not.toBeUndefined() + expect(module.name).toEqual('TestModule') + expect(module.doc).toBeUndefined() + expect(module.external).toEqual(false) + expect(module.basePackage).toBeUndefined() + expect(module.hint).toBeUndefined() + expect(module.aggregates).toHaveLength(0) +} diff --git a/test/parsing/StakeholdersParsing.test.ts b/test/parsing/StakeholdersParsing.test.ts new file mode 100644 index 0000000..b5227f4 --- /dev/null +++ b/test/parsing/StakeholdersParsing.test.ts @@ -0,0 +1,167 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { ContextMappingModel, Stakeholder, StakeholderGroup, Stakeholders } from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, expect, test } from 'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('Stakeholders parsing tests', () => { + test('parse stakeholders without body', async () => { + document = await parseValidInput(parse, ` + Stakeholders + `) + + expect(document.parseResult.value.stakeholders).toHaveLength(1) + const stakeholders = document.parseResult.value.stakeholders[0] + expectEmptyStakeholders(stakeholders) + }) + + test('parse stakeholders with empty body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + } + `) + + expect(document.parseResult.value.stakeholders).toHaveLength(1) + const stakeholders = document.parseResult.value.stakeholders[0] + expectEmptyStakeholders(stakeholders) + }) + + test('parse stakeholders with full body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext + + Stakeholders of TestContext { + StakeholderGroup TestGroup + Stakeholder TestStakeholder + } + `) + + expect(document.parseResult.value.stakeholders).toHaveLength(1) + const stakeholders = document.parseResult.value.stakeholders[0] + expect(stakeholders).not.toBeUndefined() + expect(stakeholders.contexts).toHaveLength(1) + expect(stakeholders.stakeholders).toHaveLength(2) + }) + + test('parse stakeholder group without body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + StakeholderGroup TestGroup + } + `) + + expect(document.parseResult.value.stakeholders).toHaveLength(1) + expect(document.parseResult.value.stakeholders[0].stakeholders).toHaveLength(1) + const group = document.parseResult.value.stakeholders[0].stakeholders[0] as StakeholderGroup + expectEmptyStakeholderGroup(group) + }) + + test('parse stakeholder group with empty body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + StakeholderGroup TestGroup { + } + } + `) + + expect(document.parseResult.value.stakeholders).toHaveLength(1) + expect(document.parseResult.value.stakeholders[0].stakeholders).toHaveLength(1) + const group = document.parseResult.value.stakeholders[0].stakeholders[0] as StakeholderGroup + expectEmptyStakeholderGroup(group) + }) 
+ + test('parse stakeholder group with full body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + StakeholderGroup TestGroup { + Stakeholder TestStakeholder + } + } + `) + + expect(document.parseResult.value.stakeholders).toHaveLength(1) + expect(document.parseResult.value.stakeholders[0].stakeholders).toHaveLength(1) + const group = document.parseResult.value.stakeholders[0].stakeholders[0] as StakeholderGroup + expect(group).not.toBeUndefined() + expect(group.name).toEqual('TestGroup') + expect(group.stakeholders).toHaveLength(1) + }) + + test('parse stakeholder without body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + `) + + expect(document.parseResult.value.stakeholders).toHaveLength(1) + expect(document.parseResult.value.stakeholders[0].stakeholders).toHaveLength(1) + const stakeholder = document.parseResult.value.stakeholders[0].stakeholders[0] as Stakeholder + expectEmptyStakeholder(stakeholder) + }) + + test('parse stakeholder with empty body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder { + } + } + `) + + expect(document.parseResult.value.stakeholders).toHaveLength(1) + expect(document.parseResult.value.stakeholders[0].stakeholders).toHaveLength(1) + const stakeholder = document.parseResult.value.stakeholders[0].stakeholders[0] as Stakeholder + expectEmptyStakeholder(stakeholder) + }) + + test('parse stakeholder with full body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder { + interest = HIGH + influence MEDIUM + description = "description" + } + } + `) + + expect(document.parseResult.value.stakeholders).toHaveLength(1) + expect(document.parseResult.value.stakeholders[0].stakeholders).toHaveLength(1) + const stakeholder = document.parseResult.value.stakeholders[0].stakeholders[0] as Stakeholder + expect(stakeholder.name).toEqual('TestStakeholder') + expect(stakeholder.interest).toEqual('HIGH') + expect(stakeholder.influence).toEqual('MEDIUM') + expect(stakeholder.description).toEqual('description') + }) +}) + +function expectEmptyStakeholders (stakeholders: Stakeholders) { + expect(stakeholders).not.toBeUndefined() + expect(stakeholders.contexts).toHaveLength(0) + expect(stakeholders.stakeholders).toHaveLength(0) +} + +function expectEmptyStakeholderGroup (group: StakeholderGroup): void { + expect(group).not.toBeUndefined() + expect(group.$type).toEqual('StakeholderGroup') + expect(group.name).toEqual('TestGroup') + expect(group.stakeholders).toHaveLength(0) +} + +function expectEmptyStakeholder (stakeholder: Stakeholder): void { + expect(stakeholder).not.toBeUndefined() + expect(stakeholder.name).toEqual('TestStakeholder') + expect(stakeholder.influence).toBeUndefined() + expect(stakeholder.interest).toBeUndefined() + expect(stakeholder.description).toBeUndefined() +} diff --git a/test/parsing/UserRequirementParsing.test.ts b/test/parsing/UserRequirementParsing.test.ts new file mode 100644 index 0000000..501cea9 --- /dev/null +++ b/test/parsing/UserRequirementParsing.test.ts @@ -0,0 +1,184 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { ContextMappingModel, NormalFeature, StoryFeature, UseCase, UserStory } from '../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, expect, test } from 
'vitest' +import { parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('User requirement parsing tests', () => { + test('parse UseCase without body', async () => { + document = await parseValidInput(parse, ` + UseCase TestUseCase + `) + + expect(document.parseResult.value.userRequirements).toHaveLength(1) + const userRequirement = document.parseResult.value.userRequirements[0] + expect(userRequirement).not.toBeUndefined() + expect(userRequirement.$type).toEqual('UseCase') + const useCase = userRequirement as UseCase + expectUseCaseToBeEmpty(useCase) + }) + + test('parse UseCase with empty body', async () => { + document = await parseValidInput(parse, ` + UseCase TestUseCase { + } + `) + + expect(document.parseResult.value.userRequirements).toHaveLength(1) + const useCase = document.parseResult.value.userRequirements[0] as UseCase + expectUseCaseToBeEmpty(useCase) + }) + + test('parse UseCase with full body', async () => { + document = await parseValidInput(parse, ` + UseCase TestUseCase { + secondaryActors = "actor1", "actor2" + actor "role" + benefit = "benefit" + level = "level" + scope = "scope" + interactions + create an "order", + "edit" an "order" + } + `) + + expect(document.parseResult.value.userRequirements).toHaveLength(1) + const useCase = document.parseResult.value.userRequirements[0] as UseCase + expect(useCase).not.toBeUndefined() + expect(useCase.name).toEqual('TestUseCase') + expect(useCase.secondaryActors).toHaveLength(2) + expect(useCase.secondaryActors[0]).toEqual('actor1') + expect(useCase.secondaryActors[1]).toEqual('actor2') + expect(useCase.role).toEqual('role') + expect(useCase.benefit).toEqual('benefit') + expect(useCase.level).toEqual('level') + expect(useCase.scope).toEqual('scope') + expect(useCase.features).toHaveLength(2) + }) + + test('parse UserStory without body', async () => { + document = await parseValidInput(parse, ` + UserStory TestUserStory + `) + + expect(document.parseResult.value.userRequirements).toHaveLength(1) + const requirement = document.parseResult.value.userRequirements[0] + expect(requirement).not.toBeUndefined() + expect(requirement.$type).toEqual('UserStory') + const userStory = requirement as UserStory + expectUserStoryToBeEmpty(userStory) + }) + + test('parse UserStory with empty body', async () => { + document = await parseValidInput(parse, ` + UserStory TestUserStory { + } + `) + + expect(document.parseResult.value.userRequirements).toHaveLength(1) + const userStory = document.parseResult.value.userRequirements[0] as UserStory + expectUserStoryToBeEmpty(userStory) + }) + + test('parse UserStory with full body', async () => { + document = await parseValidInput(parse, ` + UserStory TestUserStory + split by AnotherUserStory + { + As a "user" create an "order" so that "I can buy stuff" and that "consumption" is promoted, accepting that "savings" are reduced + } + UserStory AnotherUserStory + `) + + expect(document.parseResult.value.userRequirements).toHaveLength(2) + const userStory = document.parseResult.value.userRequirements[0] as UserStory + expect(userStory.name).toEqual('TestUserStory') + expect(userStory.splittingStory).not.toBeUndefined() + expect(userStory.role).toEqual('user') + expect(userStory.features).toHaveLength(1) + expect(userStory.benefit).toEqual('I can buy stuff') + 
expect(userStory.valuation).not.toBeUndefined() + expect(userStory.valuation?.promotedValues).toHaveLength(1) + expect(userStory.valuation?.promotedValues[0]).toEqual('consumption') + expect(userStory.valuation?.harmedValues).toHaveLength(1) + expect(userStory.valuation?.harmedValues[0]).toEqual('savings') + }) + + test('parse NormalFeature', async () => { + document = await parseValidInput(parse, ` + UseCase TestUseCase { + interactions = create an "order" with its "products", "prices" in a "cart" + } + `) + + expect(document.parseResult.value.userRequirements).toHaveLength(1) + const useCase = document.parseResult.value.userRequirements[0] as UseCase + expect(useCase.features).toHaveLength(1) + const feature = useCase.features[0] as NormalFeature + expect(feature.$type).toEqual('NormalFeature') + expect(feature.verb).toEqual('create') + expect(feature.entityArticle).toEqual('an') + expect(feature.entity).toEqual('order') + expect(feature.entityAttributesPreposition).toEqual('with its') + expect(feature.entityAttributes).toHaveLength(2) + expect(feature.entityAttributes[0]).toEqual('products') + expect(feature.entityAttributes[1]).toEqual('prices') + expect(feature.containerEntityPreposition).toEqual('in') + expect(feature.containerEntityArticle).toEqual('a') + expect(feature.containerEntity).toEqual('cart') + }) + + test('parse StoryFeature', async () => { + document = await parseValidInput(parse, ` + UseCase TestUseCase { + interactions = I want to "create" an "order" with its "products", "prices" in a "cart" + } + `) + + expect(document.parseResult.value.userRequirements).toHaveLength(1) + const useCase = document.parseResult.value.userRequirements[0] as UseCase + expect(useCase.features).toHaveLength(1) + const feature = useCase.features[0] as StoryFeature + expect(feature.$type).toEqual('StoryFeature') + expect(feature.verb).toEqual('create') + expect(feature.entityArticle).toEqual('an') + expect(feature.entity).toEqual('order') + expect(feature.entityAttributesPreposition).toEqual('with its') + expect(feature.entityAttributes).toHaveLength(2) + expect(feature.entityAttributes[0]).toEqual('products') + expect(feature.entityAttributes[1]).toEqual('prices') + expect(feature.containerEntityPreposition).toEqual('in') + expect(feature.containerEntityArticle).toEqual('a') + expect(feature.containerEntity).toEqual('cart') + }) +}) + +function expectUseCaseToBeEmpty (useCase: UseCase) { + expect(useCase.name).toEqual('TestUseCase') + expect(useCase.role).toBeUndefined() + expect(useCase.secondaryActors).toHaveLength(0) + expect(useCase.features).toHaveLength(0) + expect(useCase.benefit).toBeUndefined() + expect(useCase.scope).toBeUndefined() + expect(useCase.level).toBeUndefined() +} + +function expectUserStoryToBeEmpty (userStory: UserStory) { + expect(userStory.name).toEqual('TestUserStory') + expect(userStory.splittingStory).toBeUndefined() + expect(userStory.role).toBeUndefined() + expect(userStory.features).toHaveLength(0) + expect(userStory.benefit).toBeUndefined() + expect(userStory.valuation).toBeUndefined() +} diff --git a/test/parsing/ValueRegisterParsing.test.ts b/test/parsing/ValueRegisterParsing.test.ts new file mode 100644 index 0000000..79e22ee --- /dev/null +++ b/test/parsing/ValueRegisterParsing.test.ts @@ -0,0 +1,471 @@ +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { parseHelper } from 'langium/test' +import { + ContextMappingModel, + Value, + ValueElicitation, + ValueEpic, + ValueRegister +} from 
'../../src/language/generated/ast.js' +import { EmptyFileSystem, LangiumDocument } from 'langium' +import { beforeAll, describe, expect, test } from 'vitest' +import { parseInvalidInput, parseValidInput } from './ParsingTestHelper.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) +}) + +describe('Value cluster parsing tests', () => { + test('parse value register without body', async () => { + document = await parseValidInput(parse, ` + ValueRegister TestRegister + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + const valueRegister = document.parseResult.value.valueRegisters[0] + expectEmptyValueRegister(valueRegister) + }) + + test('parse value register with empty body', async () => { + document = await parseValidInput(parse, ` + ValueRegister TestRegister { + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + const valueRegister = document.parseResult.value.valueRegisters[0] + expectEmptyValueRegister(valueRegister) + }) + + test('parse value register with full body', async () => { + document = await parseValidInput(parse, ` + BoundedContext TestContext + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister for TestContext { + ValueCluster TestCluster + ValueNarrative TestNarrative { + When the SOI executes "feature", + stakeholders expect it to promote, protect or create "promoValue", + possibly degrading or prohibiting "harmValue" + with the following externally observable and/or internally auditable behavior: "conditions" + } + Value TestValue + ValueEpic TestEpic + ValueWeighting TestWeighting { + In the context of the SOI, + stakeholder TestStakeholder values "val1" more than "val2" + expecting benefits such as "benefit" + running the risk of harms such as "harm" + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + const valueRegister = document.parseResult.value.valueRegisters[0] + expect(valueRegister.name).toEqual('TestRegister') + expect(valueRegister.context).not.toBeUndefined() + expect(valueRegister.valueClusters).toHaveLength(1) + expect(valueRegister.values).toHaveLength(1) + expect(valueRegister.valueEpics).toHaveLength(1) + expect(valueRegister.valueNarratives).toHaveLength(1) + expect(valueRegister.valueWeightings).toHaveLength(1) + }) + + test('parse value cluster without body', async () => { + document = await parseValidInput(parse, ` + ValueRegister TestRegister { + ValueCluster TestCluster + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].valueClusters).toHaveLength(1) + const valueCluster = document.parseResult.value.valueRegisters[0].valueClusters[0] + expect(valueCluster).not.toBeUndefined() + expect(valueCluster.name).toEqual('TestCluster') + expect(valueCluster.coreValue).toBeUndefined() + expect(valueCluster.coreValue7000).toBeUndefined() + expect(valueCluster.demonstrators).toHaveLength(0) + expect(valueCluster.relatedValues).toHaveLength(0) + expect(valueCluster.opposingValues).toHaveLength(0) + expect(valueCluster.values).toHaveLength(0) + expect(valueCluster.elicitations).toHaveLength(0) + }) + + test('parse value cluster with invalid empty body', async () => { + await parseInvalidInput(parse, ` + ValueRegister TestRegister { + ValueCluster TestCluster { + } + } + `) + }) + + 
test('parse value cluster with minimal body', async () => { + document = await parseValidInput(parse, ` + ValueRegister TestRegister { + ValueCluster TestCluster { + core AUTONOMY + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].valueClusters).toHaveLength(1) + const valueCluster = document.parseResult.value.valueRegisters[0].valueClusters[0] + expect(valueCluster).not.toBeUndefined() + expect(valueCluster.name).toEqual('TestCluster') + expect(valueCluster.coreValue).toBeUndefined() + expect(valueCluster.coreValue7000).toEqual('AUTONOMY') + }) + + test('parse value cluster with full body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + ValueCluster TestCluster { + core "testCore" + relatedValue = "relVal" + demonstrator = "dem" + opposingValue "oppo" + + Stakeholder TestStakeholder + Value TestValue + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].valueClusters).toHaveLength(1) + const valueCluster = document.parseResult.value.valueRegisters[0].valueClusters[0] + expect(valueCluster.name).toEqual('TestCluster') + expect(valueCluster.coreValue).toEqual('testCore') + expect(valueCluster.coreValue7000).toBeUndefined() + expect(valueCluster.relatedValues).toHaveLength(1) + expect(valueCluster.relatedValues[0]).toEqual('relVal') + expect(valueCluster.demonstrators).toHaveLength(1) + expect(valueCluster.demonstrators[0]).toEqual('dem') + expect(valueCluster.opposingValues).toHaveLength(1) + expect(valueCluster.opposingValues[0]).toEqual('oppo') + expect(valueCluster.elicitations).toHaveLength(1) + expect(valueCluster.values).toHaveLength(1) + }) + + test('parse value without body', async () => { + document = await parseValidInput(parse, ` + ValueRegister TestRegister { + Value TestValue + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values).toHaveLength(1) + const value = document.parseResult.value.valueRegisters[0].values[0] + expectEmptyValue(value) + }) + + test('parse value with empty body', async () => { + document = await parseValidInput(parse, ` + ValueRegister TestRegister { + Value TestValue { + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values).toHaveLength(1) + const value = document.parseResult.value.valueRegisters[0].values[0] + expectEmptyValue(value) + }) + + test('parse value with full body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestValue { + relatedValue = "relVal" + isCore + opposingValue "oppo" + demonstrator = "dem" + + Stakeholder TestStakeholder + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values).toHaveLength(1) + const value = document.parseResult.value.valueRegisters[0].values[0] + expect(value.name).toEqual('TestValue') + expect(value.coreValue).toHaveLength(1) + expect(value.demonstrators).toHaveLength(1) + expect(value.demonstrators[0]).toEqual('dem') + expect(value.relatedValues).toHaveLength(1) + expect(value.relatedValues[0]).toEqual('relVal') + expect(value.opposingValues).toHaveLength(1) + 
expect(value.elicitations).toHaveLength(1) + }) + + test('parse value epic without body', async () => { + document = await parseValidInput(parse, ` + ValueRegister TestRegister { + ValueEpic TestEpic + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].valueEpics).toHaveLength(1) + const epic = document.parseResult.value.valueRegisters[0].valueEpics[0] + expectEmptyEpic(epic) + }) + + test('parse value epic with invalid empty body', async () => { + await parseInvalidInput(parse, ` + ValueRegister TestRegister { + ValueEpic TestEpic { + } + } + `) + }) + + test('parse value epic with full body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + ValueEpic TestEpic { + As a TestStakeholder I value "val" as demonstrated in + reduction of "redVal1" + reduction of "redVal2" + realization of "relVal1" + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].valueEpics).toHaveLength(1) + const epic = document.parseResult.value.valueRegisters[0].valueEpics[0] + expect(epic.name).toEqual('TestEpic') + expect(epic.stakeholder).not.toBeUndefined() + expect(epic.value).toEqual('val') + expect(epic.realizedValues).toHaveLength(1) + expect(epic.realizedValues[0]).toEqual('relVal1') + expect(epic.reducedValues).toHaveLength(2) + expect(epic.reducedValues[0]).toEqual('redVal1') + expect(epic.reducedValues[1]).toEqual('redVal2') + }) + + test('parse value narrative', async () => { + document = await parseValidInput(parse, ` + ValueRegister TestRegister { + ValueNarrative TestNarrative { + When the SOI executes "feat", + stakeholders expect it to promote, protect or create "promoValue", + possibly degrading or prohibiting "harmValue" + with the following externally observable and/or internally auditable behavior: "conditions" + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].valueNarratives).toHaveLength(1) + const narrative = document.parseResult.value.valueRegisters[0].valueNarratives[0] + expect(narrative).not.toBeUndefined() + expect(narrative.name).toEqual('TestNarrative') + expect(narrative.feature).toEqual('feat') + expect(narrative.promotedValues).toEqual('promoValue') + expect(narrative.harmedValues).toEqual('harmValue') + expect(narrative.preAndPostConditions).toEqual('conditions') + }) + + test('parse value weighting', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + ValueWeighting TestWeighting { + In the context of the SOI, + stakeholder TestStakeholder values "val1" more than "val2" + expecting benefits such as "benefits" + running the risk of harms such as "harms" + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].valueWeightings).toHaveLength(1) + const weighting = document.parseResult.value.valueRegisters[0].valueWeightings[0] + expect(weighting).not.toBeUndefined() + expect(weighting.name).toEqual('TestWeighting') + expect(weighting.stakeholder).not.toBeUndefined() + expect(weighting.value1).toEqual('val1') + expect(weighting.value2).toEqual('val2') + expect(weighting.benefits).toEqual('benefits') + expect(weighting.harms).toEqual('harms') + }) + + test('parse value elicitation 
without body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values[0].elicitations).toHaveLength(1) + const elicitation = document.parseResult.value.valueRegisters[0].values[0].elicitations[0] + expectEmptyValueElicitation(elicitation) + }) + + test('parse value elicitation with empty body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder { + } + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values[0].elicitations).toHaveLength(1) + const elicitation = document.parseResult.value.valueRegisters[0].values[0].elicitations[0] + expectEmptyValueElicitation(elicitation) + }) + + test('parse value elicitation with full body', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder { + impact = MEDIUM + consequences good "conseq" + priority = LOW + } + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values[0].elicitations).toHaveLength(1) + const elicitation = document.parseResult.value.valueRegisters[0].values[0].elicitations[0] + expect(elicitation.stakeholder).not.toBeUndefined() + expect(elicitation.priority).toEqual('LOW') + expect(elicitation.impact).toEqual('MEDIUM') + expect(elicitation.consequences).toHaveLength(1) + }) + + test('parse consequence without action', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder { + consequences good "conseq" + } + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values[0].elicitations).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values[0].elicitations[0].consequences).toHaveLength(1) + const consequence = document.parseResult.value.valueRegisters[0].values[0].elicitations[0].consequences[0] + expect(consequence.type).toEqual('good') + expect(consequence.consequence).toEqual('conseq') + expect(consequence.action).toBeUndefined() + }) + + test('parse consequence with action', async () => { + document = await parseValidInput(parse, ` + Stakeholders { + Stakeholder TestStakeholder + } + ValueRegister TestRegister { + Value TestVal { + Stakeholder TestStakeholder { + consequences good "conseq" action "act" "typ" + } + } + } + `) + + expect(document.parseResult.value.valueRegisters).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values).toHaveLength(1) + expect(document.parseResult.value.valueRegisters[0].values[0].elicitations).toHaveLength(1) + 
expect(document.parseResult.value.valueRegisters[0].values[0].elicitations[0].consequences).toHaveLength(1) + const consequence = document.parseResult.value.valueRegisters[0].values[0].elicitations[0].consequences[0] + expect(consequence).not.toBeUndefined() + const action = consequence.action + expect(action?.action).toEqual('act') + expect(action?.type).toEqual('typ') + }) +}) + +function expectEmptyValueRegister (valueRegister: ValueRegister) { + expect(valueRegister).not.toBeUndefined() + expect(valueRegister.name).toEqual('TestRegister') + expect(valueRegister.context).toBeUndefined() + expect(valueRegister.valueClusters).toHaveLength(0) + expect(valueRegister.values).toHaveLength(0) + expect(valueRegister.valueEpics).toHaveLength(0) + expect(valueRegister.valueNarratives).toHaveLength(0) + expect(valueRegister.valueWeightings).toHaveLength(0) +} + +function expectEmptyValue (value: Value) { + expect(value).not.toBeUndefined() + expect(value.name).toEqual('TestValue') + expect(value.coreValue).toHaveLength(0) + expect(value.demonstrators).toHaveLength(0) + expect(value.relatedValues).toHaveLength(0) + expect(value.opposingValues).toHaveLength(0) + expect(value.elicitations).toHaveLength(0) +} + +function expectEmptyEpic (epic: ValueEpic) { + expect(epic).not.toBeUndefined() + expect(epic.name).toEqual('TestEpic') + expect(epic.stakeholder).toBeUndefined() + expect(epic.value).toBeUndefined() + expect(epic.realizedValues).toHaveLength(0) + expect(epic.reducedValues).toHaveLength(0) +} + +function expectEmptyValueElicitation (elicitation: ValueElicitation) { + expect(elicitation).not.toBeUndefined() + expect(elicitation.stakeholder).not.toBeUndefined() + expect(elicitation.priority).toBeUndefined() + expect(elicitation.impact).toBeUndefined() + expect(elicitation.consequences).toHaveLength(0) +} diff --git a/test/parsing/parsing.test.ts b/test/parsing/parsing.test.ts deleted file mode 100644 index 0fee769..0000000 --- a/test/parsing/parsing.test.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { beforeAll, describe, expect, test } from 'vitest' -import { EmptyFileSystem, type LangiumDocument } from 'langium' -import { parseHelper } from 'langium/test' -import { createContextMapperDslServices } from '../../src/language/context-mapper-dsl-module.js' -import { ContextMappingModel } from '../../src/language/generated/ast.js' -import { checkDocumentValid } from '../TestHelper.js' - -let services: ReturnType -let parse: ReturnType> -let document: LangiumDocument | undefined - -beforeAll(async () => { - services = createContextMapperDslServices(EmptyFileSystem) - parse = parseHelper(services.ContextMapperDsl) - - // activate the following if your linking test requires elements from a built-in library, for example - // await services.shared.workspace.WorkspaceManager.initializeWorkspace([]) -}) - -describe('Parsing tests', () => { - test('parse simple model', async () => { - document = await parse(` - BoundedContext FirstContext - `) - - const errors = checkDocumentValid(document) - expect(errors == null).toBeTruthy() - - expect(document.parseResult.value?.boundedContexts?.map(b => b.name)) - .toEqual(['FirstContext']) - }) -}) diff --git a/test/semnantictokens/BoundedContextSemanticTokens.test.ts b/test/semnantictokens/BoundedContextSemanticTokens.test.ts new file mode 100644 index 0000000..91a8dac --- /dev/null +++ b/test/semnantictokens/BoundedContextSemanticTokens.test.ts @@ -0,0 +1,110 @@ +import { afterEach, beforeAll, describe, test } from 'vitest' +import { createContextMapperDslServices } 
from '../../src/language/ContextMapperDslModule.js' +import { EmptyFileSystem, type LangiumDocument } from 'langium' +import { SemanticTokenProvider } from 'langium/lsp' +import { clearDocuments, parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../src/language/generated/ast.js' +import { + assertSemanticToken, + assertSemanticTokenLength, createSemanticTokenParams, + extractSemanticTokens +} from './SemanticTokenTestHelper.js' +import { SemanticTokens } from 'vscode-languageserver-types' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined +let semanticTokenProvider: SemanticTokenProvider + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + parse = parseHelper(services.ContextMapperDsl) + semanticTokenProvider = services.ContextMapperDsl.lsp.SemanticTokenProvider!! +}) + +afterEach(async () => { + document && await clearDocuments(services.shared, [document]) +}) + +describe('BoundedContext semantic token test', () => { + test('check bounded context without body', async () => { + document = await parse('BoundedContext TestContext') + const params = createSemanticTokenParams(document) + const result = await semanticTokenProvider.semanticHighlight(document, params) + assertEmptyBoundedContext(result) + }) + + test('check bounded context with empty body', async () => { + document = await parse('BoundedContext TestContext {}') + const params = createSemanticTokenParams(document) + const result = await semanticTokenProvider.semanticHighlight(document, params) + assertEmptyBoundedContext(result) + }) + + test('check bounded context with member attributes', async () => { + document = await parse(` + BoundedContext TestContext { + type = UNDEFINED + implementationTechnology = "java" + responsibilities = "resp1", "resp2" + businessModel = "model" + domainVisionStatement = "Test" + knowledgeLevel = CONCRETE + evolution GENESIS + } + `) + const params = createSemanticTokenParams(document) + const result = await semanticTokenProvider.semanticHighlight(document, params) + + const expectedNumberOfTokens = 17 + assertSemanticTokenLength(result, expectedNumberOfTokens) + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + assertSemanticToken(tokens[2], 1, 6, 4, semanticTokenProvider.tokenTypes.keyword, 0) + assertSemanticToken(tokens[3], 0, 7, 9, semanticTokenProvider.tokenTypes.property, 0) + + assertSemanticToken(tokens[4], 1, 6, 24, semanticTokenProvider.tokenTypes.keyword, 0) + assertSemanticToken(tokens[5], 0, 27, 6, semanticTokenProvider.tokenTypes.string, 0) + + assertSemanticToken(tokens[6], 1, 6, 16, semanticTokenProvider.tokenTypes.keyword, 0) + assertSemanticToken(tokens[7], 0, 19, 7, semanticTokenProvider.tokenTypes.string, 0) + assertSemanticToken(tokens[8], 0, 9, 7, semanticTokenProvider.tokenTypes.string, 0) + + assertSemanticToken(tokens[9], 1, 6, 13, semanticTokenProvider.tokenTypes.keyword, 0) + assertSemanticToken(tokens[10], 0, 16, 7, semanticTokenProvider.tokenTypes.string, 0) + + assertSemanticToken(tokens[11], 1, 6, 21, semanticTokenProvider.tokenTypes.keyword, 0) + assertSemanticToken(tokens[12], 0, 24, 6, semanticTokenProvider.tokenTypes.string, 0) + + assertSemanticToken(tokens[13], 1, 6, 14, semanticTokenProvider.tokenTypes.keyword, 0) + assertSemanticToken(tokens[14], 0, 17, 8, semanticTokenProvider.tokenTypes.property, 0) + + assertSemanticToken(tokens[15], 1, 6, 9, semanticTokenProvider.tokenTypes.keyword, 0) + assertSemanticToken(tokens[16], 0, 10, 7, 
semanticTokenProvider.tokenTypes.property, 0) + }) +}) + +function assertEmptyBoundedContext (result: SemanticTokens) { + const expectedNumberOfTokens = 2 + assertSemanticTokenLength(result, expectedNumberOfTokens) + + const tokens = extractSemanticTokens(result, expectedNumberOfTokens) + + assertSemanticToken( + tokens[0], + 0, + 0, + 14, + semanticTokenProvider.tokenTypes.keyword, + 0 + ) + + assertSemanticToken( + tokens[1], + 0, + 15, + 11, + semanticTokenProvider.tokenTypes.type, + semanticTokenProvider.tokenModifiers.declaration + ) +} diff --git a/test/semnantictokens/SemanticTokenTestHelper.ts b/test/semnantictokens/SemanticTokenTestHelper.ts new file mode 100644 index 0000000..fba3fe8 --- /dev/null +++ b/test/semnantictokens/SemanticTokenTestHelper.ts @@ -0,0 +1,43 @@ +import { expect } from 'vitest' +import { SemanticTokens } from 'vscode-languageserver-types' +import { SemanticTokensParams } from 'vscode-languageserver' +import { LangiumDocument } from 'langium' +import { ContextMappingModel } from '../../src/language/generated/ast.js' + +export const TOKEN_DATA_LENGTH = 5 + +export function createSemanticTokenParams (document: LangiumDocument): SemanticTokensParams { + return { + textDocument: { + uri: document.uri.path + } + } +} + +/** + * A Semantic Token data array consists of a sequence of integers. + * One token corresponds to a sequence of 5 integers, representing: deltaLine, deltaStart, length, type, modifier + * Tokens are given in their relative position to each other. deltaLine and deltaStart specifies how many lines & chars the start of the two tokens are apart + */ +export function assertSemanticToken (token: number[], startLine: number, startCharacter: number, length: number, tokenType: number, tokenModifiers: number) { + expect(token.length).toEqual(TOKEN_DATA_LENGTH) + + expect(token[0]).toEqual(startLine) + expect(token[1]).toEqual(startCharacter) + expect(token[2]).toEqual(length) + expect(token[3]).toEqual(tokenType) + expect(token[4]).toEqual(tokenModifiers) +} + +export function assertSemanticTokenLength (result: SemanticTokens, expectedNumberOfTokens: number) { + expect(result).not.toBeNull() + expect(result.data.length).toEqual(expectedNumberOfTokens * TOKEN_DATA_LENGTH) +} + +export function extractSemanticTokens (result: SemanticTokens, expectedNumberOfTokens: number): number[][] { + const tokens: number[][] = [] + for (let i = 0; i < expectedNumberOfTokens; i++) { + tokens.push(result.data.slice(i * TOKEN_DATA_LENGTH, (i + 1) * TOKEN_DATA_LENGTH)) + } + return tokens +} diff --git a/test/validating/validating.test.ts b/test/validating/validating.test.ts deleted file mode 100644 index 811cc5e..0000000 --- a/test/validating/validating.test.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { beforeAll, describe, expect, test } from 'vitest' -import { EmptyFileSystem, type LangiumDocument } from 'langium' -import { parseHelper } from 'langium/test' -import type { Diagnostic } from 'vscode-languageserver-types' -import { createContextMapperDslServices } from '../../src/language/context-mapper-dsl-module.js' -import { ContextMappingModel } from '../../src/language/generated/ast.js' -import { checkDocumentValid } from '../TestHelper.js' - -let services: ReturnType -let parse: ReturnType> -let document: LangiumDocument | undefined - -beforeAll(async () => { - services = createContextMapperDslServices(EmptyFileSystem) - const doParse = parseHelper(services.ContextMapperDsl) - parse = (input: string) => doParse(input, { validation: true }) - - // activate the following 
if your linking test requires elements from a built-in library, for example - // await services.shared.workspace.WorkspaceManager.initializeWorkspace([]); -}) - -describe('Validating', () => { - test('check no errors', async () => { - document = await parse(` - BoundedContext Test - `) - - const errors = checkDocumentValid(document) - expect(errors == null).toBeTruthy() - - expect(document?.diagnostics?.map(diagnosticToString)).toHaveLength(0) - }) -}) - -function diagnosticToString (d: Diagnostic) { - return `[${d.range.start.line}:${d.range.start.character}..${d.range.end.line}:${d.range.end.character}]: ${d.message}` -} diff --git a/test/validation/ContextMappingModelValidator.test.ts b/test/validation/ContextMappingModelValidator.test.ts new file mode 100644 index 0000000..a174b17 --- /dev/null +++ b/test/validation/ContextMappingModelValidator.test.ts @@ -0,0 +1,58 @@ +import { beforeAll, describe, expect, test } from 'vitest' +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { EmptyFileSystem, type LangiumDocument } from 'langium' +import { parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../src/language/generated/ast.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + const doParse = parseHelper(services.ContextMapperDsl) + parse = (input: string) => doParse(input, { validation: true }) + + // activate the following if your linking test requires elements from a built-in library, for example + // await services.shared.workspace.WorkspaceManager.initializeWorkspace([]); +}) + +describe('ContextMappingModelValidator tests', () => { + test('accept no context map', async () => { + document = await parse(` + BoundedContext FirstContext + `) + + expect(document.diagnostics).toHaveLength(0) + }) + + test('accept one context map', async () => { + document = await parse(` + ContextMap { + FirstContext [SK] <-> [SK] SecondContext + } + BoundedContext FirstContext + BoundedContext SecondContext + `) + + expect(document.diagnostics).toHaveLength(0) + }) + + test('report multiple context maps', async () => { + document = await parse(` + ContextMap { + FirstContext [SK] <-> [SK] SecondContext + } + ContextMap { + + } + BoundedContext FirstContext + BoundedContext SecondContext + `) + + expect(document.diagnostics).not.toBeUndefined() + expect(document.diagnostics).toHaveLength(1) + const diagnostic = document.diagnostics![0] + expect(diagnostic.range.start.line).toEqual(1) + }) +}) diff --git a/test/validation/ValueValidator.test.ts b/test/validation/ValueValidator.test.ts new file mode 100644 index 0000000..352fdb5 --- /dev/null +++ b/test/validation/ValueValidator.test.ts @@ -0,0 +1,59 @@ +import { beforeAll, describe, expect, test } from 'vitest' +import { createContextMapperDslServices } from '../../src/language/ContextMapperDslModule.js' +import { EmptyFileSystem, type LangiumDocument } from 'langium' +import { parseHelper } from 'langium/test' +import { ContextMappingModel } from '../../src/language/generated/ast.js' + +let services: ReturnType +let parse: ReturnType> +let document: LangiumDocument | undefined + +beforeAll(async () => { + services = createContextMapperDslServices(EmptyFileSystem) + const doParse = parseHelper(services.ContextMapperDsl) + parse = (input: string) => doParse(input, { validation: true }) + + // activate the following if your linking test requires 
elements from a built-in library, for example
+  // await services.shared.workspace.WorkspaceManager.initializeWorkspace([]);
+})
+
+describe('ValueValidator tests', () => {
+  test('accept no isCore', async () => {
+    document = await parse(`
+      ValueRegister TestRegister {
+        Value TestValue {
+        }
+      }
+    `)
+
+    expect(document.diagnostics).toHaveLength(0)
+  })
+
+  test('accept one isCore', async () => {
+    document = await parse(`
+      ValueRegister TestRegister {
+        Value TestValue {
+          isCore
+        }
+      }
+    `)
+
+    expect(document.diagnostics).toHaveLength(0)
+  })
+
+  test('report multiple isCore', async () => {
+    document = await parse(`
+      ValueRegister TestRegister {
+        Value TestValue {
+          isCore
+          isCore
+        }
+      }
+    `)
+
+    expect(document.diagnostics).not.toBeUndefined()
+    expect(document.diagnostics).toHaveLength(1)
+    const diagnostic = document.diagnostics![0]
+    expect(diagnostic.range.start.line).toEqual(3)
+  })
+})
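
For reference, a minimal sketch of how the 5-integer LSP semantic-token encoding documented in test/semnantictokens/SemanticTokenTestHelper.ts can be decoded back into absolute positions. This is illustrative only and not part of the change set; decodeSemanticTokens and AbsoluteToken are assumed names that do not exist in the repository. Note that deltaStart is relative to the previous token's start character only when both tokens share a line; otherwise it is an absolute column.

// Illustrative sketch (not part of this diff): decode the delta-encoded data
// returned by semanticHighlight into absolute line/character positions.
import { SemanticTokens } from 'vscode-languageserver-types'

export interface AbsoluteToken {
  line: number
  startChar: number
  length: number
  tokenType: number
  tokenModifiers: number
}

export function decodeSemanticTokens (result: SemanticTokens): AbsoluteToken[] {
  const tokens: AbsoluteToken[] = []
  let line = 0
  let startChar = 0
  for (let i = 0; i < result.data.length; i += 5) {
    const [deltaLine, deltaStart, length, tokenType, tokenModifiers] = result.data.slice(i, i + 5)
    line += deltaLine
    // deltaStart is relative to the previous token only within the same line
    startChar = deltaLine === 0 ? startChar + deltaStart : deltaStart
    tokens.push({ line, startChar, length, tokenType, tokenModifiers })
  }
  return tokens
}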