diff --git a/.github/workflows/actions/download/action.yml b/.github/workflows/actions/download/action.yml index bf4778fb..25c8c8ba 100644 --- a/.github/workflows/actions/download/action.yml +++ b/.github/workflows/actions/download/action.yml @@ -27,12 +27,12 @@ runs: - name: Download Fuseki release shell: bash run: | - curl -LSfs https://archive.apache.org/dist/jena/binaries/apache-jena-fuseki-4.5.0.tar.gz -o RACK/rack-box/files/fuseki.tar.gz + curl -LSfs https://archive.apache.org/dist/jena/binaries/apache-jena-fuseki-4.6.1.tar.gz -o RACK/rack-box/files/fuseki.tar.gz - name: Download SemTK release shell: bash run: | - curl -LSfs https://github.com/ge-semtk/semtk/releases/download/v2.5.0-20220830/semtk-opensource-v2.5.0-20220830-dist.tar.gz -o RACK/rack-box/files/semtk.tar.gz + curl -LSfs https://github.com/ge-semtk/semtk/releases/download/v2.5.0-20221014/semtk-opensource-v2.5.0-20221014-dist.tar.gz -o RACK/rack-box/files/semtk.tar.gz - name: Download CSS stylesheet shell: bash diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 21f61a2d..62fcaa9c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -111,12 +111,20 @@ jobs: # Won't work since windows-latest doesn't support nested virtualization - name: Enable Hyper-V - if: matrix.builder == 'hyperv' && matrix.os == 'windows-latest' + if: matrix.builder == 'hyperv' shell: powershell run: | Enable-WindowsOptionalFeature -Online -FeatureName Microsoft-Hyper-V -All Add-WindowsFeature RSAT-Hyper-V-Tools -IncludeAllSubFeature + # Download vagrant box in order to build virtualbox image + - name: Download Vagrant Box + if: matrix.builder == 'virtualbox' + run: | + mkdir -p RACK/rack-box/focal64 + curl -LOSfs https://app.vagrantup.com/ubuntu/boxes/focal64/versions/20221021.0.0/providers/virtualbox.box + tar -xf virtualbox.box -C RACK/rack-box/focal64 + - name: Build rack-box ${{ matrix.builder }} image run: | b=${{ matrix.builder }} @@ -150,7 +158,7 @@ jobs: b=${{ 
matrix.builder }} v=${{ github.event.release.tag_name }} cd RACK/rack-box - mv output-$b-iso rack-box-$b-$v + mv output-$b-* rack-box-$b-$v zip -r rack-box-$b-$v.zip rack-box-$b-$v split -b 1500m rack-box-$b-$v.zip rack-box-$b-$v.zip ls -l rack-box-$b-$v.zip?? diff --git a/.gitignore b/.gitignore index bea05b74..f1258253 100644 --- a/.gitignore +++ b/.gitignore @@ -59,6 +59,7 @@ TODO.org debug.log *.swp cli/env/ +cli/.project .settings/ /Turnstile-Example/Turnstile-IngestionPackage/CounterApplicationImplementation/*.d /Turnstile-Example/Turnstile-IngestionPackage/CounterApplicationImplementation/*.rack diff --git a/Boeing-Ontology/ontology/Boeing.sadl b/Boeing-Ontology/ontology/Boeing.sadl index 5ffbc319..a8324ab6 100644 --- a/Boeing-Ontology/ontology/Boeing.sadl +++ b/Boeing-Ontology/ontology/Boeing.sadl @@ -68,20 +68,61 @@ SoftwareCoding is a type of CODE_DEVELOPMENT. // Support for characterizing message types, valid signals, and tests // that verify these correspondences -Message is a type of DATA_DICTIONARY_TERM. +Message is a type of INTERFACE. Signal is a type of DATA_DICTIONARY_TERM. - enumValue describes Signal with values of type string. - -MessageSignal is a type of DATA_DICTIONARY_TERM. - message describes MessageSignal with a single value of type Message. - signal describes MessageSignal with a single value of type Signal. - -IDD is a type of INTERFACE. - messageSignal describes IDD with values of type MessageSignal. - -IDD_Test is a type of TEST. - verifies of IDD_Test only has values of type MessageSignal. + message describes Signal with a single value of type Message. + +// content of IDD_Doc can be Message and other things +IDD_Doc is a type of DOCUMENT. + +// subclass from core ontology related to SBVT and IDD +SBVT_Test_Procedure is a type of TEST_PROCEDURE. + content of SBVT_Test only has values of type SBVT_Test_Step. + verifies of SBVT_Test_Procedure only has values of type {SubDD_Req or SRS_Req}. 
+ verifies of SBVT_Test_Procedure has at least 1 value. + +SBVT_Test_Step is a type of TEST_STEP. + stimulates of SBVT_Test_Step has values of type Signal. //<--how come "only has values" doesn't work here? + nextStep of SBVT_Test_Step only has values of type SBVT_Test_Step. + +IDD_Test is a type of TEST. // note "verifies" corresponds to "observes" +// verifies of IDD_Test only has values of type Signal. + observes of IDD_Test has values of type Signal. //<--how come "only has values" doesn't work here? + +//SBVT_Test_Step can contain HMI test etc. +SBVT_Test_Log is a type of TEST_LOG. + content of SBVT_Test_Log only has values of type SBVT_Test_Record. +SBVT_Test_Record is a type of TEST_RECORD. + logs of SBVT_Test_Record only has values of type SBVT_Test_Step. +IDD_Test_Result is a type of TEST_RESULT. + confirms of IDD_Test_Result only has values of type IDD_Test. + +IDDCoverageAnalysis is a type of ANALYSIS. // move it to "ToolOntology" + analysisInput of IDDCoverageAnalysis only has values of type IDD_Test. +IDDCoverageAnalysisResult is a type of TEST_RESULT. + confirms of IDDCoverageAnalysisResult only has values of type IDD_Test. + +Test_Station is a type of AGENT. + +SBVT_Test_Execution is a type of TEST_EXECUTION. + testProcedure of SBVT_Test_Execution only has values of type SBVT_Test_Procedure. + systemUnderTest describes SBVT_Test_Execution with values of type SYSTEM. + systemUnderTestBuildVersion describes SBVT_Test_Execution with values of type BuildVersion. //added + databaseVersion describes SBVT_Test_Execution with values of type Database. + databaseVersion is a type of used. +// softwareBuild describes SBVT_Test_Execution with values of type Executable. +// softwareBuild is a type of used. + testStation of SBVT_Test_Execution has a single value of type Test_Station. + testStation is a type of wasAssociatedWith. +BuildVersion is a type of ENTITY. + system describes BuildVersion with a single value of type SYSTEM. 
+ buildVersion describes BuildVersion with a single value of type Executable. +testExecution of SBVT_Test_Log has a single value of type SBVT_Test_Execution. +testExecution is a type of wasGeneratedBy. + +Database is a type of SWCOMPONENT. +Executable is a type of SWCOMPONENT. // COMMON FILE FORMATS @@ -93,6 +134,7 @@ YamlFormat (note "Yet Another Markup Language file format") is a FORMAT ide JsonFormat (note "JavaScript Object Notation file format") is a FORMAT identifier "json". TomlFormat (note "Tom's Obvious Minimal Language file format") is a FORMAT identifier "toml". IniFormat (note "Initialization configuration file format") is a FORMAT identifier "ini". +OutFormat (note "Text file output, e.g. output of test results") is a FORMAT with identifier "out". // Image formats SvgFormat (note "Scalable Vector Graphics image file format") is a FORMAT identifier "svg". diff --git a/GE-Ontology/OwlModels/ont-policy.rdf b/GE-Ontology/OwlModels/ont-policy.rdf index d69ad970..356dd505 100644 --- a/GE-Ontology/OwlModels/ont-policy.rdf +++ b/GE-Ontology/OwlModels/ont-policy.rdf @@ -2,49 +2,49 @@ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:j.0="http://jena.hpl.hp.com/schemas/2003/03/ont-manager#"> - platform:/resource/GE-Ontology/ontology/CPS.sadl - CPS - SADL + + - - + SADL + turnstile + platform:/resource/GE-Ontology/ontology/GE.sadl - - sadlimplicitmodel - SADL - - platform:/resource/GE-Ontology/ImplicitModel/SadlImplicitModel.sadl + + + SADL + sadlimplicitmodel + - sadllistmodel - SADL + platform:/resource/GE-Ontology/ontology/CPS.sadl + - - + SADL + CPS + - - builtinfunctions - SADL + + - - platform:/resource/GE-Ontology/ImplicitModel/SadlBuiltinFunctions.sadl + SADL + sadlbasemodel - platform:/resource/GE-Ontology/ontology/GE.sadl - turnstile - SADL + + - - + SADL + sadllistmodel - sadlbasemodel - SADL + platform:/resource/GE-Ontology/ImplicitModel/SadlBuiltinFunctions.sadl + - - + SADL + builtinfunctions + diff --git a/GE-Ontology/ontology/CPS.sadl 
b/GE-Ontology/ontology/CPS.sadl index cf9557ff..77f49190 100644 --- a/GE-Ontology/ontology/CPS.sadl +++ b/GE-Ontology/ontology/CPS.sadl @@ -75,7 +75,7 @@ Connection (note "An INTERFACE with specific input and output ports") is a type described by infoFlowSeverity with a single value of type int // properties that allow for mitigating of threats - described by implControl with values of type ImplControl. + described by implConnControl with values of type ImplControl. ConnectionType is a type of THING. diff --git a/RACK-Ontology/ontology/CONFIDENCE.sadl b/RACK-Ontology/ontology/CONFIDENCE.sadl index a040fd36..cc8313a9 100644 --- a/RACK-Ontology/ontology/CONFIDENCE.sadl +++ b/RACK-Ontology/ontology/CONFIDENCE.sadl @@ -22,7 +22,7 @@ uri "http://arcos.rack/CONFIDENCE" alias CONFIDENCE. import "http://arcos.rack/PROV-S". -CONFIDENCE_ASSESSMENT (note "Superclass for confidence assessments over some other data in the ontology.") is a type of THING. +CONFIDENCE_ASSESSMENT (note "Superclass for confidence assessments over some other data in the ontology.") is a type of ENTITY. assesses (note "ENTITY(s) whose confidence is assessed") describes CONFIDENCE_ASSESSMENT with values of type ENTITY. assesses describes CONFIDENCE_ASSESSMENT with at most 1 value. @@ -31,8 +31,8 @@ CONFIDENCE_ASSESSMENT (note "Superclass for confidence assessments over some oth wasGeneratedBy of CONFIDENCE_ASSESSMENT only has values of type ASSESSING_CONFIDENCE. BDU_CONFIDENCE_ASSESSMENT (note "A belief-disbelief-uncertainty confidence assessment, c.f. Subjective Logic. belief, disbelief, and uncertainty should sum to 1") is a type of CONFIDENCE_ASSESSMENT. - belief (note "belief that an assessment is true") describes CONFIDENCE_ASSESSMENT with a single value of type float. // [0,1]. - disbelief (note "belief that an assessment is false") describes CONFIDENCE_ASSESSMENT with a single value of type float. // [0,1]. 
- uncertainty (note "uncommitted belief") describes CONFIDENCE_ASSESSMENT with a single value of type float. // [0,1]. + belief (note "belief that an assessment is true") describes BDU_CONFIDENCE_ASSESSMENT with a single value of type float. // [0,1]. + disbelief (note "belief that an assessment is false") describes BDU_CONFIDENCE_ASSESSMENT with a single value of type float. // [0,1]. + uncertainty (note "uncommitted belief") describes BDU_CONFIDENCE_ASSESSMENT with a single value of type float. // [0,1]. ASSESSING_CONFIDENCE (note "ACTIVITY that establishes a CONFIDENCE_ASSESSMENT") is a type of ACTIVITY. diff --git a/README.md b/README.md index 8515debe..81a24422 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ can be found [here](https://github.com/ge-high-assurance/RACK/wiki#log4j-securit # Introducing RACK -RACK (Rapid Assurance Curation Kit) is a research-grade database that uses a structured semantic data model tuned to the domain of the DARPA ARCOS (Automated Rapid Certification Of Software) program. +RACK (Rapid Assurance Curation Kit) is a research-grade database that uses a structured semantic data model tuned to the domain of the DARPA ARCOS (Automated Rapid Certification Of Software) program. Additionally, we offer a suite of tools for data curation and assurance interpretation. RACK Overview Diagram @@ -21,7 +21,7 @@ To make RACK easy for all ARCOS performers to use, we deploy RACK as a software RACK is available as both a Linux container and a virtual machine, and is supported on Linux, Windows, and MacOS systems. To learn more and get detailed instructions on how to get started, see our [Installation Instructions](https://github.com/ge-high-assurance/RACK/wiki/Home#installation-instructions). --- -Copyright (c) 2021, General Electric Company, Galois, Inc. +Copyright (c) 2021-2022 General Electric Company, Galois, Inc. 
All Rights Reserved diff --git a/Turnstile-Example/Turnstile-IngestionPackage/manifest.yaml b/Turnstile-Example/Turnstile-IngestionPackage/manifest.yaml new file mode 100644 index 00000000..f4a08b3c --- /dev/null +++ b/Turnstile-Example/Turnstile-IngestionPackage/manifest.yaml @@ -0,0 +1,28 @@ +name: "Turnstile" + +footprint: + model-graphs: + - http://rack001/model + data-graphs: + - http://rack001/turnstiledata + - http://rack001/do-178c + +steps: + - manifest: ../../manifests/turnstile.yaml + - data: CounterApplicationUnitTesting/OwlModels/import.yaml + - data: TurnstileDevelopmentPlanData/import.yaml + - data: PlanningDocuments/import.yaml + - data: HazardAssessment/import.yaml + - data: TurnstileSystemDesign/import.yaml + - data: TurnstileSystemRequirements/import.yaml + - data: TurnstileHighLevelRequirements/import.yaml + - data: TurnstileLowLevelRequirements/import.yaml + - data: TurnstileRequirementModel/import.yaml + - data: CounterApplicationReviews/import.yaml + - data: CounterApplicationTesting/import.yaml + - data: TurnstileSystemSpec/import.yaml + - data: CounterApplicationRequirementSpec/import.yaml + - data: CounterApplicationSoftwareDes/import.yaml + - data: SystemVerificationReport/import.yaml + - data: Objectives/import.yaml + - data: TurnstileBaselines/import.yaml diff --git a/assist/bin/rack/check.pl b/assist/bin/rack/check.pl index 95d472e4..c1ebb2cb 100644 --- a/assist/bin/rack/check.pl +++ b/assist/bin/rack/check.pl @@ -117,17 +117,16 @@ print_message(error, maybe_restriction(T, I, IName, Property, VSLen))). check_target_type(Property, I, T) :- - property_extra(T, Property, _Restr), - rdf(Property, rdfs:range, TTy), - rdf_reachable(Target, rdfs:subClassOf, TTy), + property(T, Property, _), + \+ rdf_is_bnode(T), has_interesting_prefix(Property), rdf(I, Property, Val), - \+ rdf_is_literal(Val), % TODO check these as well? 
- rdf(Val, rdf:type, DefTy), - DefTy \= Target, - \+ rdf_reachable(DefTy, rdfs:subClassOf, Target), + \+ rdf_is_literal(Val), + \+ rack_instance_target(I, Property, Val), rack_instance_ident(I, IName), - print_message(error, property_value_wrong_type(T, I, IName, Property, DefTy, Val, Target)). + rdf(Val, rdf:type, ValTy), + property_range_type(T, Property, ModelTy), + print_message(error, property_value_wrong_type(T, I, IName, Property, ValTy, Val, ModelTy)). check_target_type_restrictions(Property, I, T) :- rdf(T, rdfs:subClassOf, R), @@ -228,6 +227,39 @@ check_also_has_no_rel(_, _). +% Sometimes there will be things in SADL like: +% +% FOO is a type of X. +% p of FOO only has values of type Y. +% +% and the problem is that p is not defined for X, but for (unrelated) Z instead. +% SADL will not complain and will generate a property constraint, but that +% property cannot ever exist. This checks for that situation. +check_invalid_domain(Property) :- + check_invalid_domain_class(_SrcClass, Property, _DefinedClass). + +check_invalid_domain_class(SrcClass, Property, DefinedClass) :- + rdf(SrcClass, _, B), + rack_ref(_, SrcClass), + rdf_is_bnode(B), + rdf(B, rdf:type, owl:'Restriction'), + rdf(B, owl:onProperty, Property), + rdf(Property, rdfs:domain, DefinedClass), + \+ rdf_reachable(SrcClass, rdfs:subClassOf, DefinedClass), + print_message(error, invalid_domain(SrcClass, Property, DefinedClass)). + +check_invalid_domain_class(SrcClass, Property, DefinedClass) :- + property(SrcClass, Property, _Usage), + rdf_reachable(Property, rdfs:subPropertyOf, ParentProp), + property(DefinedClass, ParentProp, _ParentUsage), + \+ rdf_reachable(SrcClass, rdfs:subClassOf, DefinedClass), + ( Property = ParentProp, + print_message(error, invalid_domain(SrcClass, Property, DefinedClass)) + ; Property \= ParentProp, + print_message(error, invalid_subclass_domain(SrcClass, Property, ParentProp, DefinedClass)) + ). + + actual_val((V^^VT),VT,(V^^VT)). 
% normal actual_val(V,VT,Val) :- rdf_equal(V, VS^^(xsd:string)), @@ -352,3 +384,9 @@ prolog:message(missing_tgt(SrcClass, SrcInst, SrcIdent, Rel, TgtClass)) --> [ '~w ~w (~w) missing the ~w target of type ~w'-[ SrcClass, SrcInst, SrcIdent, Rel, TgtClass] ]. +prolog:message(invalid_domain(SrcClass, Property, DefinedClass)) --> + [ 'Property ~w was referenced on class ~w, but that property is defined for the unrelated class ~w'-[ + Property, SrcClass, DefinedClass] ]. +prolog:message(invalid_subclass_domain(SrcClass, Property, ParentProperty, DefinedClass)) --> + [ 'Property ~w was referenced on class ~w, but that property is a sub-type of ~w, which is defined for the unrelated class ~w'-[ + Property, SrcClass, ParentProperty, DefinedClass] ]. diff --git a/assist/bin/rack/check_runner.pl b/assist/bin/rack/check_runner.pl index 3979c2c6..bf7152c2 100644 --- a/assist/bin/rack/check_runner.pl +++ b/assist/bin/rack/check_runner.pl @@ -55,6 +55,9 @@ check_each_with(check_instance_types, Num). runnable_check("instance property issues", Num) :- check_each_with(check_instance_property_violations, Num). +runnable_check("property domain issues", Num) :- + check_each_with(check_invalid_domain, Num). + runnable_check("INTERFACE issues", Num) :- check_each_with(check_INTERFACE, Num). runnable_check("SBVT issues", Num) :- check_each_with(check_SBVT, Num). runnable_check("SRS issues", Num) :- check_each_with(check_SRS, Num). 
diff --git a/assist/bin/rack/model.pl b/assist/bin/rack/model.pl index 339d8e27..341cd75f 100644 --- a/assist/bin/rack/model.pl +++ b/assist/bin/rack/model.pl @@ -44,6 +44,7 @@ property/3, property_target/4, property_extra/3, + property_range_type/3, rack_instance/2, rack_instance_assert/2, rack_property_assert/3, @@ -54,6 +55,7 @@ rack_instance_relationship/3, rack_instance_relationship/4, rack_instance_ident/2, + rack_instance_target/3, rack_ontology_node/3, rdf_literal_val_type/3, @@ -449,7 +451,8 @@ property(Class, Property, unique) :- % Property is unique to this class and directly associated - rdf(Property, rdfs:domain, Class). + rdf(Property, rdfs:domain, Class), + \+ rdf_is_bnode(Class). property(Class, Property, shared) :- % Property is shared with multiple classes, specified in a list. rdf(Property, rdfs:domain, Intermediary), @@ -513,11 +516,20 @@ property_extra(Class, Property, value_from(Cls)) :- property_restriction(Class, Property, B), rdf(B, owl:someValuesFrom, Cls). +property_extra(Class, Property, value_from(Cls)) :- + property_restriction(Class, Property, B), + rdf(B, owl:allValuesFrom, Cls). property_extra(Class, Property, normal) :- property(Class, Property, _). rdf_numeric(Value, Num) :- rdf_equal(Value, Num^^xsd:int). rdf_numeric(Value, Num) :- rdf_equal(Value, Num^^xsd:integer). +property_range_type(Class, Property, RangeType) :- + property_restriction(Class, Property, value_from(RangeType)), !. +property_range_type(_Class, Property, RangeType) :- + rdf(Property, rdfs:domain, RangeType). + + %! rdf_literal_val_type(+Literal:atom, -Value:atom, -Type:Atom) is semidet. %! rdf_literal_val_type(-Literal:atom, +Value:atom, +Type:Atom) is semidet. % @@ -643,6 +655,22 @@ rdf(I, 'http://arcos.rack/PROV-S#identifier', N), !. rack_instance_ident(_, ""). +%! 
rack_instance_target(+SrcInst:atom, -Rel:atom, -TgtInst:atom) +% +% Returns the target instance for the source instance and a specific relationship +% or all relationships, subject to any property range constraints. + +rack_instance_target(SrcInst, Rel, TgtInst) :- + rdf(SrcInst, rdf:type, SrcClass), + property_extra(SrcClass, Rel, Restriction), + instance_target(SrcInst, Rel, Restriction, TgtInst). + +instance_target(SrcInst, Rel, value_from(TgtClass), TgtInst) :- + rdf(SrcInst, Rel, TgtInst), + rdf(TgtInst, rdfs:isSubClassOf, TgtClass). +instance_target(SrcInst, Rel, Restr, TgtInst) :- + Restr \= value_from(_), + rdf(SrcInst, Rel, TgtInst). %% ---------------------------------------------------------------------- %% Loading generated data from .rack files diff --git a/cli/README.md b/cli/README.md index d8f49da0..fe5ea03c 100644 --- a/cli/README.md +++ b/cli/README.md @@ -133,7 +133,8 @@ usage: rack [-h] [--base-url BASE_URL] [--triple-store TRIPLE_STORE] [--triple-s RACK in a Box toolkit positional arguments: - {data,model,nodegroups} + {manifest,data,model,nodegroups} + manifest Ingestion package automation data Import or export CSV data model Interact with SemTK model nodegroups Interact with SemTK nodegroups @@ -149,7 +150,7 @@ optional arguments: Assign logger severity level ``` -The `rack` command is split into three subcommands: `data`, `model`, +The `rack` command is split into four subcommands: `manifest`, `data`, `model`, and `nodegroups`. Each of these subcommands offers its own help listing. For example try `rack data --help` for more information about the flags available when interacting with the data store. @@ -165,6 +166,19 @@ The `data` subcommand is used to import CSV and OWL data files using the RACK ontology as well as exporting CSV files using nodegroups stored in SemTK. +The `manifest` subcommand is used to import a complete set of CSV and OWL data +from multiple files as specified by a single top-level manifest file. 
This +subcommand subsumes the `data`, `nodegroups`, and `model` subcommands and is the +recommended way to initialize a RACK instance for use. + +The following options default to their matching ENVIRONMENT variables if they exist: +* --base-url : $BASE_URL +* --triple-store : $TRIPLE_STORE +* --log-level : $LOG_LEVEL + +For example, **ingestion warnings can be suppressed** by either using ```rack --log-level ERROR data import...``` or by executing this command in a bash script before calling 'rack':```export LOG_LEVEL=ERROR``` + + ## Data Ingestion Configuration file format  The import configuration files are YAML files that specify the target @@ -195,7 +209,7 @@ extra-data-graphs: - "http://rack001/otherdata" - "http://rack001/somedata" ingestion-steps: -- {nodegroup: "ingest_SYSTEM", csv: "SYSTEM.csv"} +- {nodegroup: "ingest_SYSTEM", csv: "SYSTEM.csv"} - {nodegroup: "ingest_INTERFACE", csv: "INTERFACE.csv"} - {class: "http://arcos.rack/HAZARD#HAZARD", csv: "HAZARD.csv"} - {owl: "example.owl"} @@ -354,6 +368,68 @@ Ingest-SoftwareComponentTestResult Node group to ingest Sof[...] [...] ``` +## Ingestion Packages (manifest) + +The bulk ingestion of multiple models, nodegroups, and data can be +automated using a manifest file. + +```yaml +name: "short name" +description: "optional long package description" +footprint: + model-graphs: + - "http://rack001/model" + data-graphs: + - "http://rack001/data" +steps: + - manifest: another.yaml + - model: model-manifest.yaml + - data: data-manifest.yaml +``` + +The `name` and `description` fields are informational and are used to +provide a nicer UI for users loading an ingestion package. + +The `footprint` section is optional. When it is provided it allows +the ingestion UI to automatically populate a connection string. In +addition these graph URIs will be cleared if the manifest is loaded +using the `--clear` flag. + +The `steps` section is required. It describes the sequential process +of loading this ingestion package. 
This section must be a list of singleton +maps. Each map should have exactly one key describing which kind of +data should be imported. These keys will point to the same kind of +file as you'd use loading this kind of data individually. For example +a `data` section uses the same configuration file as `rack data import` +and a `model` section uses the same configuration file as `rack model import`. + +All file paths are resolved relative to the location of the manifest +YAML file. + +### CLI support + +``` +usage: rack manifest import [-h] [--clear] [--default-graph] manifest + +positional arguments: + manifest Manifest YAML file + +optional arguments: + -h, --help show this help message and exit + --clear Clear footprint before import + --default-graph Load whole manifest into default graph +``` + +Manifests can be loaded using `rack manifest import`. + +To clear all graphs mentioned in the `footprint` use `--clear`. For example: +`rack manifest import --clear my-manifest.yaml` + +Fuseki happens to run faster when data is stored in the *default graph*. +To load a complete ingestion manifest into the default graph use +`--default-graph`. For example: +`rack manifest import --default-graph my-manifest.yaml` + ## Hacking See [dev/README.md](https://github.com/ge-high-assurance/RACK/tree/master/cli/dev). @@ -363,7 +439,7 @@ Don't copy below to wiki; wiki already has copyright in _Footer.md --> --- -Copyright (c) 2021, Galois, Inc. +Copyright (c) 2021-2022, Galois, Inc. 
All Rights Reserved diff --git a/cli/rack/__init__.py b/cli/rack/__init__.py index eb916f8b..9454223e 100755 --- a/cli/rack/__init__.py +++ b/cli/rack/__init__.py @@ -746,7 +746,7 @@ def get_argument_parser() -> argparse.ArgumentParser: data_import_parser.set_defaults(func=dispatch_data_import) data_export_parser.add_argument('nodegroup', type=str, help='ID of nodegroup') - data_export_parser.add_argument('--model-graph', type=str, required=True, action='append', help='Model graph URL') + data_export_parser.add_argument('--model-graph', type=str, action='append', help='Model graph URL') data_export_parser.add_argument('--data-graph', type=str, required=True, action='append', help='Data graph URL') data_export_parser.add_argument('--format', type=ExportFormat, help='Export format', choices=list(ExportFormat), default=ExportFormat.TEXT) data_export_parser.add_argument('--no-headers', action='store_true', help='Omit header row') @@ -755,7 +755,7 @@ def get_argument_parser() -> argparse.ArgumentParser: data_export_parser.set_defaults(func=dispatch_data_export) data_count_parser.add_argument('nodegroup', type=str, help='ID of nodegroup') - data_count_parser.add_argument('--model-graph', type=str, required=True, action='append', help='Data graph URL') + data_count_parser.add_argument('--model-graph', type=str, action='append', help='Data graph URL') data_count_parser.add_argument('--data-graph', type=str, required=True, action='append', help='Data graph URL') data_count_parser.add_argument('--constraint', type=str, action='append', help='Runtime constraint: key=value') data_count_parser.set_defaults(func=dispatch_data_count) diff --git a/cli/requirements.txt b/cli/requirements.txt index 4a42b63b..26a7c48d 100644 --- a/cli/requirements.txt +++ b/cli/requirements.txt @@ -12,7 +12,7 @@ PyYAML==5.4.1 requests==2.28.1 Pillow==9.0.1 plotly==5.9.0 -semtk-python3 @ git+https://github.com/ge-semtk/semtk-python3@386cd9b7bb1e00aa92749648f6163384854b5dea +semtk-python3 @ 
git+https://github.com/ge-semtk/semtk-python3@56da802d279102d83ad951cfea1b3549145525ff six==1.16.0 tabulate==0.8.10 urllib3==1.26.10 diff --git a/cli/setup-arcos.sh b/cli/setup-arcos.sh index 3a3c23e6..7cb87bb0 100755 --- a/cli/setup-arcos.sh +++ b/cli/setup-arcos.sh @@ -4,4 +4,4 @@ set -eu ./ensure-cli-in-PATH.sh -rack manifest import --clear ../manifests/arcos.yaml +rack --log-level ERROR manifest import --clear ../manifests/arcos.yaml diff --git a/cli/setup-owl.sh b/cli/setup-owl.sh index 2a8730e2..e3a6aa18 100755 --- a/cli/setup-owl.sh +++ b/cli/setup-owl.sh @@ -6,7 +6,7 @@ set -e rack_dir=$(realpath "$(dirname "$0")"/..) rack_image="gehighassurance/rack-box" -rack_tag="v10.2" +rack_tag="v11" sadl_image="sadl/sadl-eclipse" sadl_tag="v3.5.0-20211204" diff --git a/manifests/rack.yaml b/manifests/rack.yaml index 60731b25..ac3f5074 100644 --- a/manifests/rack.yaml +++ b/manifests/rack.yaml @@ -1,4 +1,5 @@ name: 'RACK ontology' +description: 'Base ontology for assurance case curation' footprint: model-graphs: diff --git a/manifests/turnstile.yaml b/manifests/turnstile.yaml index aca9c108..aa0b66ca 100644 --- a/manifests/turnstile.yaml +++ b/manifests/turnstile.yaml @@ -4,12 +4,9 @@ footprint: model-graphs: - http://rack001/model data-graphs: - - http://rack001/nist-800-53 - - http://rack001/mitre-cwe - - http://rack001/turnstiledata + - http://rack001/do-178c steps: - - manifest: arcos.yaml + - manifest: rack.yaml - model: ../GE-Ontology/OwlModels/import.yaml - - nodegroups: ../nodegroups/ingestion/arcos.turnstile - data: ../RACK-Ontology/ontology/DO-178C/import.yaml diff --git a/migration/ontology_changes/change_property_domain.py b/migration/ontology_changes/change_property_domain.py new file mode 100644 index 00000000..a90dba4f --- /dev/null +++ b/migration/ontology_changes/change_property_domain.py @@ -0,0 +1,57 @@ +# Copyright (c) 2022, Galois, Inc. 
+# +# All Rights Reserved +# +# This material is based upon work supported by the Defense Advanced Research +# Projects Agency (DARPA) under Contract No. FA8750-20-C-0203. +# +# Any opinions, findings and conclusions or recommendations expressed in this +# material are those of the author(s) and do not necessarily reflect the views +# of the Defense Advanced Research Projects Agency (DARPA). + +from dataclasses import dataclass +from typing import Callable + +import semtk +from migration_helpers.name_space import NameSpace, get_uri + +from ontology_changes.ontology_change import ( + OntologyChange, + log_additional_change, + log_apply_change, + log_change, + stylize_class, + stylize_json, + stylize_property, +) + +@dataclass +class ChangePropertyDomain(OntologyChange): + """ + Represents an ontology change from: + + P describes X with values of type A. + + to: + + P describes Y with values of type B. + """ + + prop_name_space: NameSpace + prop_name: str + + from_name_space: NameSpace + from_domain: str + + to_name_space: NameSpace + to_domain: str + + def text_description(self) -> str: + prop = stylize_property(get_uri(self.prop_name_space, self.prop_name)) + from_domain = stylize_class(get_uri(self.from_name_space, self.from_domain)) + to_domain = stylize_class(get_uri(self.to_name_space, self.to_domain)) + return f"Domain of property {prop} was changed from {from_domain} to {to_domain}." + + def migrate_json(self, json: semtk.SemTKJSON) -> None: + # TODO + pass diff --git a/migration/ontology_changes/rename_property.py b/migration/ontology_changes/rename_property.py index 6f5a459b..9e4c2141 100644 --- a/migration/ontology_changes/rename_property.py +++ b/migration/ontology_changes/rename_property.py @@ -29,9 +29,7 @@ @dataclass class RenameProperty(OntologyChange): """ - Represents an ontology change where a property has been renamed. This also - encompasses cases where a property has effectively been moved across - classes. 
+ Represents an ontology change where a property has been renamed. """ from_name_space: NameSpace diff --git a/migration/rack/commits/__init__.py b/migration/rack/commits/__init__.py index a3214475..dc66029b 100644 --- a/migration/rack/commits/__init__.py +++ b/migration/rack/commits/__init__.py @@ -20,6 +20,7 @@ commit0a89f70ff929380269a79fe2fc82f5dde346ed8c, commit10da69db606ebdc721fd3f8e003ef2099a5fdc43, commit13ed266ba5730cebe75c0c48f6ba83af69429122, + commit16f6fe3e2bb5c8c6fae59b10f400380a76863452, commit1834d0201254907fa50a32945716a3e0de985cad, commit183dbba72623c2585a0451a19ac1ddb30f8a0ea6, commit2439da7fb602f020e9a711511f84cd75e1522cdf, @@ -38,11 +39,13 @@ commit44393cc30bb0ba7482acd21b2e68576b577179f9, commit44da44c6877c881240c418d084ecb17de9443373, commit4687eafdd03e7c4ff6888691ed51c8ef388935b2, + commit4aff1ff6e25ec99d9acfc6863498c7b32241f9d4, commit4f60f85168ff8ef2513fa0e2f144c2ea5c3f87a3, commit4f9fce78e36a6dc75f1702ab50da6a4ac801dd5e, commit500958dae09d88f0b82c40faf93a634d108d360f, commit5329c949815afea87d8bae3768bf132258aad9a0, commit581f1820855eee2445d9e8bfdbb639e169e9391e, + commit5c7920fe44a3a60c76fefddd2b88cd27851f37ed, commit5db0d118642b541b811d23d32c5f3410d0507618, commit5dd1a584e19b8716f0f13dc3a2cb2ba2d409c325, commit620b89db747b9834013502061040f179da67f123, @@ -66,9 +69,11 @@ commitb25d07626e4693cd370a2070e17f6baa825a1d43, commitb6796936abe054edc9c4f9657c34bb0eadf0757a, commitb721c16f0f7420a8ccd92bda0d98a96c16dc62b8, + commitb85a66b005f4105ac5195cfd2cefec475f9e1f21, commitb865c663351f39c275f5fb9985b681a6ae432cac, commitbdfef3d7ea9b3c9fc085defa8e26256f646097d9, commitc41222325db52df0eb5c1e7cb3a091f8c62f5b57, + commitc47f4e58c0cb3d0925d7894e301e6a1f83e22580, commitc5306ce176984770b93070da829f60769cb19628, commitc6692fed3e150e7df53d4a2a8f8c84f760087420, commitcafce30763b5332106340cc8cbeb8fdac3b8132d, @@ -79,6 +84,7 @@ commite5e8a35322fab104a42cc0f46d16c27ffc10adbb, commite696969a9d85ca8f894eea12305412bdc05521b3, 
commitee148bca649a1b451085832a7e2a488ce4127de7, + commitef72564bbc4887c2d6f6654671427ba2780e0f67, commitf801242e4a8a763620571481fd83cc2af5aac2ac, commitfa603aad886439eb6a94e44e2c6f4851af16c9a3, commitff31a28051a5e348fd2474fce5360195999ddb3a, @@ -186,7 +192,15 @@ commit815f98911956aafea98b81787eec328b2833ec72.commit, # 2022 Feb 18 commit4687eafdd03e7c4ff6888691ed51c8ef388935b2.commit, # 2022 Feb 28 - # TODO # v10.0 + commitef72564bbc4887c2d6f6654671427ba2780e0f67.commit, # v10.0 + + commit5c7920fe44a3a60c76fefddd2b88cd27851f37ed.commit, # 2022 Apr 20 + commit4aff1ff6e25ec99d9acfc6863498c7b32241f9d4.commit, # 2022 Aug 03 + commitc47f4e58c0cb3d0925d7894e301e6a1f83e22580.commit, # 2022 Aug 25 + commitb85a66b005f4105ac5195cfd2cefec475f9e1f21.commit, # 2022 Oct 05 + commit16f6fe3e2bb5c8c6fae59b10f400380a76863452.commit, # 2022 Oct 05 + + # TODO: v11 # most recent (in history) ] diff --git a/migration/rack/commits/commit16f6fe3e2bb5c8c6fae59b10f400380a76863452.py b/migration/rack/commits/commit16f6fe3e2bb5c8c6fae59b10f400380a76863452.py new file mode 100644 index 00000000..ada09d74 --- /dev/null +++ b/migration/rack/commits/commit16f6fe3e2bb5c8c6fae59b10f400380a76863452.py @@ -0,0 +1,45 @@ +# Copyright (c) 2022, Galois, Inc. +# +# All Rights Reserved +# +# This material is based upon work supported by the Defense Advanced Research +# Projects Agency (DARPA) under Contract No. FA8750-20-C-0203. +# +# Any opinions, findings and conclusions or recommendations expressed in this +# material are those of the author(s) and do not necessarily reflect the views +# of the Defense Advanced Research Projects Agency (DARPA). 
+ +from ontology_changes import Commit +from ontology_changes.change_property_domain import ChangePropertyDomain +from rack.namespaces.rack_ontology import CONFIDENCE + +commit = Commit( + number="16f6fe3e2bb5c8c6fae59b10f400380a76863452", + changes=[ + # CONFIDENCE.sadl + ChangePropertyDomain( + prop_name_space=CONFIDENCE, + prop_name="belief", + from_name_space=CONFIDENCE, + from_domain="CONFIDENCE_ASSESSMENT", + to_name_space=CONFIDENCE, + to_domain="BDU_CONFIDENCE_ASSESSMENT", + ), + ChangePropertyDomain( + prop_name_space=CONFIDENCE, + prop_name="disbelief", + from_name_space=CONFIDENCE, + from_domain="CONFIDENCE_ASSESSMENT", + to_name_space=CONFIDENCE, + to_domain="BDU_CONFIDENCE_ASSESSMENT", + ), + ChangePropertyDomain( + prop_name_space=CONFIDENCE, + prop_name="uncertainty", + from_name_space=CONFIDENCE, + from_domain="CONFIDENCE_ASSESSMENT", + to_name_space=CONFIDENCE, + to_domain="BDU_CONFIDENCE_ASSESSMENT", + ), + ], +) diff --git a/migration/rack/commits/commit4aff1ff6e25ec99d9acfc6863498c7b32241f9d4.py b/migration/rack/commits/commit4aff1ff6e25ec99d9acfc6863498c7b32241f9d4.py new file mode 100644 index 00000000..8c4881e3 --- /dev/null +++ b/migration/rack/commits/commit4aff1ff6e25ec99d9acfc6863498c7b32241f9d4.py @@ -0,0 +1,19 @@ +# Copyright (c) 2022, Galois, Inc. +# +# All Rights Reserved +# +# This material is based upon work supported by the Defense Advanced Research +# Projects Agency (DARPA) under Contract No. FA8750-20-C-0203. +# +# Any opinions, findings and conclusions or recommendations expressed in this +# material are those of the author(s) and do not necessarily reflect the views +# of the Defense Advanced Research Projects Agency (DARPA). 
+ +from ontology_changes import Commit + +commit = Commit( + number="4aff1ff6e25ec99d9acfc6863498c7b32241f9d4", + changes=[ + # comment update only + ], +) diff --git a/migration/rack/commits/commit5c7920fe44a3a60c76fefddd2b88cd27851f37ed.py b/migration/rack/commits/commit5c7920fe44a3a60c76fefddd2b88cd27851f37ed.py new file mode 100644 index 00000000..9949a948 --- /dev/null +++ b/migration/rack/commits/commit5c7920fe44a3a60c76fefddd2b88cd27851f37ed.py @@ -0,0 +1,42 @@ +# Copyright (c) 2022, Galois, Inc. +# +# All Rights Reserved +# +# This material is based upon work supported by the Defense Advanced Research +# Projects Agency (DARPA) under Contract No. FA8750-20-C-0203. +# +# Any opinions, findings and conclusions or recommendations expressed in this +# material are those of the author(s) and do not necessarily reflect the views +# of the Defense Advanced Research Projects Agency (DARPA). + +from ontology_changes import Commit +from ontology_changes.create_property import CreateProperty +from ontology_changes.rename_class import RenameClass +from ontology_changes.rename_property import RenameProperty + +from rack.namespaces.rack_ontology import HARDWARE, SOFTWARE + +commit = Commit( + number="5c7920fe44a3a60c76fefddd2b88cd27851f37ed", + changes=[ + CreateProperty( + name_space=HARDWARE, + class_id="HWCOMPONENT", + property_id="partOf", + ), + RenameClass( + from_name_space=SOFTWARE, + from_name="COMPONENT_TYPE", + to_name_space=SOFTWARE, + to_name="SWCOMPONENT_TYPE", + ), + RenameProperty( + from_name_space=SOFTWARE, + from_class="SWCOMPONENT", + from_name="subcomponentOf", + to_name_space=SOFTWARE, + to_class="SWCOMPONENT", + to_name="partOf", + ), + ], +) diff --git a/migration/rack/commits/commitb85a66b005f4105ac5195cfd2cefec475f9e1f21.py b/migration/rack/commits/commitb85a66b005f4105ac5195cfd2cefec475f9e1f21.py new file mode 100644 index 00000000..0220a4da --- /dev/null +++ b/migration/rack/commits/commitb85a66b005f4105ac5195cfd2cefec475f9e1f21.py @@ -0,0 
+1,29 @@ +# Copyright (c) 2022, Galois, Inc. +# +# All Rights Reserved +# +# This material is based upon work supported by the Defense Advanced Research +# Projects Agency (DARPA) under Contract No. FA8750-20-C-0203. +# +# Any opinions, findings and conclusions or recommendations expressed in this +# material are those of the author(s) and do not necessarily reflect the views +# of the Defense Advanced Research Projects Agency (DARPA). + +from ontology_changes import Commit +from ontology_changes.change_class_is_a_type_of import ChangeClassIsATypeOf +from rack.namespaces.rack_ontology import CONFIDENCE, PROV_S + +commit = Commit( + number="b85a66b005f4105ac5195cfd2cefec475f9e1f21", + changes=[ + # CONFIDENCE.sadl + ChangeClassIsATypeOf( + name_space=CONFIDENCE, + class_id="CONFIDENCE_ASSESSMENT", + from_name_space=PROV_S, + from_class_id="THING", + to_name_space=PROV_S, + to_class_id="ENTITY", + ) + ], +) diff --git a/migration/rack/commits/commitc47f4e58c0cb3d0925d7894e301e6a1f83e22580.py b/migration/rack/commits/commitc47f4e58c0cb3d0925d7894e301e6a1f83e22580.py new file mode 100644 index 00000000..b8dede68 --- /dev/null +++ b/migration/rack/commits/commitc47f4e58c0cb3d0925d7894e301e6a1f83e22580.py @@ -0,0 +1,19 @@ +# Copyright (c) 2022, Galois, Inc. +# +# All Rights Reserved +# +# This material is based upon work supported by the Defense Advanced Research +# Projects Agency (DARPA) under Contract No. FA8750-20-C-0203. +# +# Any opinions, findings and conclusions or recommendations expressed in this +# material are those of the author(s) and do not necessarily reflect the views +# of the Defense Advanced Research Projects Agency (DARPA). 
+ +from ontology_changes import Commit + +commit = Commit( + number="c47f4e58c0cb3d0925d7894e301e6a1f83e22580", + changes=[ + # comment updates only + ], +) diff --git a/migration/rack/commits/commitef72564bbc4887c2d6f6654671427ba2780e0f67.py b/migration/rack/commits/commitef72564bbc4887c2d6f6654671427ba2780e0f67.py new file mode 100644 index 00000000..9ba83863 --- /dev/null +++ b/migration/rack/commits/commitef72564bbc4887c2d6f6654671427ba2780e0f67.py @@ -0,0 +1,20 @@ +# Copyright (c) 2022, Galois, Inc. +# +# All Rights Reserved +# +# This material is based upon work supported by the Defense Advanced Research +# Projects Agency (DARPA) under Contract No. FA8750-20-C-0203. +# +# Any opinions, findings and conclusions or recommendations expressed in this +# material are those of the author(s) and do not necessarily reflect the views +# of the Defense Advanced Research Projects Agency (DARPA). + +from ontology_changes import Commit + +commit = Commit( + number="ef72564bbc4887c2d6f6654671427ba2780e0f67", + tag="v10.0", + changes=[ + # no ontology change, just here for the tag + ], +) diff --git a/migration/rack/commits/template.py b/migration/rack/commits/template.py index 93dd22ab..f9bca1ba 100644 --- a/migration/rack/commits/template.py +++ b/migration/rack/commits/template.py @@ -1,4 +1,4 @@ -# Copyright (c) 2021, Galois, Inc. +# Copyright (c) 2022, Galois, Inc. 
# # All Rights Reserved # diff --git a/nodegroups/queries/query Requirement Review same Agent.json b/nodegroups/queries/query Requirement Review same Agent.json new file mode 100644 index 00000000..98743ca4 --- /dev/null +++ b/nodegroups/queries/query Requirement Review same Agent.json @@ -0,0 +1,192 @@ +{ + "version": 3, + "sparqlConn": { + "name": "Local", + "domain": "", + "enableOwlImports": true, + "model": [ + { + "type": "fuseki", + "url": "http://localhost:3030/RACK", + "graph": "http://rack001/model" + } + ], + "data": [ + { + "type": "fuseki", + "url": "http://localhost:3030/RACK", + "graph": "http://rack001/data" + } + ] + }, + "sNodeGroup": { + "version": 19, + "limit": 0, + "offset": 0, + "sNodeList": [ + { + "propList": [ + { + "valueTypes": [ + "string" + ], + "rangeURI": "http://www.w3.org/2001/XMLSchema#string", + "UriRelationship": "http://arcos.rack/PROV-S#identifier", + "Constraints": "", + "SparqlID": "?identifier", + "isReturned": false, + "optMinus": 0, + "isRuntimeConstrained": false, + "instanceValues": [], + "isMarkedForDeletion": false, + "binding": "?hl_identifier", + "isBindingReturned": true + } + ], + "nodeList": [], + "fullURIName": "http://arcos.rack/REQUIREMENTS#REQUIREMENT", + "SparqlID": "?REQUIREMENT_0", + "isReturned": false, + "isRuntimeConstrained": false, + "valueConstraint": "", + "instanceValue": null, + "deletionMode": "NO_DELETE", + "binding": "?HL_REQUIREMENT", + "isBindingReturned": false + }, + { + "propList": [ + { + "valueTypes": [ + "string" + ], + "rangeURI": "http://www.w3.org/2001/XMLSchema#string", + "UriRelationship": "http://arcos.rack/PROV-S#identifier", + "Constraints": "", + "SparqlID": "?ll_identifier", + "isReturned": true, + "optMinus": 0, + "isRuntimeConstrained": false, + "instanceValues": [], + "isMarkedForDeletion": false + } + ], + "nodeList": [ + { + "SnodeSparqlIDs": [ + "?REQUIREMENT_0" + ], + "OptionalMinus": [ + 0 + ], + "Qualifiers": [ + "" + ], + "DeletionMarkers": [ + false + ], + "range": 
[ + "http://arcos.rack/PROV-S#ENTITY" + ], + "ConnectBy": "satisfies", + "Connected": true, + "UriConnectBy": "http://arcos.rack/REQUIREMENTS#satisfies" + } + ], + "fullURIName": "http://arcos.rack/REQUIREMENTS#REQUIREMENT", + "SparqlID": "?REQUIREMENT", + "isReturned": false, + "isRuntimeConstrained": false, + "valueConstraint": "", + "instanceValue": null, + "deletionMode": "NO_DELETE", + "binding": "?LL_REQUIREMENT", + "isBindingReturned": false + }, + { + "propList": [ + { + "valueTypes": [ + "string" + ], + "rangeURI": "http://www.w3.org/2001/XMLSchema#string", + "UriRelationship": "http://arcos.rack/PROV-S#identifier", + "Constraints": "", + "SparqlID": "?baseline_identifier", + "isReturned": true, + "optMinus": 0, + "isRuntimeConstrained": false, + "instanceValues": [], + "isMarkedForDeletion": false + } + ], + "nodeList": [ + { + "SnodeSparqlIDs": [ + "?REQUIREMENT", + "?REQUIREMENT_0" + ], + "OptionalMinus": [ + 0, + 0 + ], + "Qualifiers": [ + "", + "" + ], + "DeletionMarkers": [ + false, + false + ], + "range": [ + "http://arcos.rack/PROV-S#ENTITY" + ], + "ConnectBy": "content", + "Connected": true, + "UriConnectBy": "http://arcos.rack/PROV-S#content" + } + ], + "fullURIName": "http://arcos.rack/BASELINE#BASELINE", + "SparqlID": "?BASELINE", + "isReturned": false, + "isRuntimeConstrained": false, + "valueConstraint": "", + "instanceValue": null, + "deletionMode": "NO_DELETE" + } + ], + "orderBy": [], + "groupBy": [], + "unionHash": {}, + "columnOrder": [] + }, + "importSpec": { + "version": "1", + "baseURI": "", + "columns": [], + "dataValidator": [], + "texts": [], + "transforms": [], + "nodes": [ + { + "sparqlID": "?BASELINE", + "type": "http://arcos.rack/BASELINE#BASELINE", + "mapping": [], + "props": [] + }, + { + "sparqlID": "?REQUIREMENT", + "type": "http://arcos.rack/REQUIREMENTS#REQUIREMENT", + "mapping": [], + "props": [] + }, + { + "sparqlID": "?REQUIREMENT_0", + "type": "http://arcos.rack/REQUIREMENTS#REQUIREMENT", + "mapping": [], + "props": 
[] + } + ] + }, + "plotSpecs": null +} \ No newline at end of file diff --git a/nodegroups/queries/store_data.csv b/nodegroups/queries/store_data.csv index 46f6453c..b5f83b44 100644 --- a/nodegroups/queries/store_data.csv +++ b/nodegroups/queries/store_data.csv @@ -30,3 +30,4 @@ query dataVer SYSTEM without partOf SYSTEM,number of SYSTEM w/o -partOf-> SYSTEM query dataVer unlinked SWCOMPONENT,Find SWCOMPONENT w/o -wasImpactedBy-> REQUIREMENT or w/o -subcomponentOf -> SWCOMPONENT,rack,query dataVer unlinked SWCOMPONENT.json,PrefabNodeGroup report data verification,Run the dataVer nodegroups,rack,report data verification.json,Report setup ARCOS Apache Phase 2,doesn't return anything; used to setup the connections with Apache datagraphs,rack,setup-arcos-ApachePhase2.json,PrefabNodeGroup +query Requirement Review same Agent,Demonstration of a nodegroup that isn't a tree structure,rack,query Requirement Review same Agent.json,PrefabNodeGroup diff --git a/rack-box/.gitignore b/rack-box/.gitignore index 82b9b51f..190f1008 100644 --- a/rack-box/.gitignore +++ b/rack-box/.gitignore @@ -6,5 +6,6 @@ crash.log # For built boxes files/ +focal64/ output-* rack-box-*-v* diff --git a/rack-box/Docker-Hub-README.md b/rack-box/Docker-Hub-README.md index d627dddf..f08f0cfd 100644 --- a/rack-box/Docker-Hub-README.md +++ b/rack-box/Docker-Hub-README.md @@ -17,7 +17,7 @@ If you do see these resource settings, make the following changes: Now you are ready to start your RACK box. Type the following command to run your RACK box on your computer: ```shell -docker run --detach -p 8080:80 -p 12050-12092:12050-12092 -p 3030:3030 gehighassurance/rack-box:v10.2 +docker run --detach -p 8080:80 -p 12050-12092:12050-12092 -p 3030:3030 gehighassurance/rack-box:v11 ``` Type "localhost:8080" in your web browser's address bar, hit Enter, and you should see your RACK box's welcome page appear in your browser. The welcome page will tell you some things you can do with your RACK box. 
diff --git a/rack-box/GitHub-Release-README.md b/rack-box/GitHub-Release-README.md index dbdfa6d3..649a8516 100644 --- a/rack-box/GitHub-Release-README.md +++ b/rack-box/GitHub-Release-README.md @@ -6,8 +6,8 @@ Here are very brief instructions how to run a RACK box container. You will find more detailed [instructions](https://github.com/ge-high-assurance/RACK/wiki/02-Run-a-RACK-Box-container) in the RACK Wiki. You will need to give your Docker Hub username to the RACK team so you can be given access to our Docker Hub repository. 1. Open a terminal window where you can run `docker`. -2. Type `docker pull gehighassurance/rack-box:v10.2` -3. Type `docker run --detach -p 8080:80 -p 12050-12092:12050-12092 -p 3030:3030 gehighassurance/rack-box:v10.2` +2. Type `docker pull gehighassurance/rack-box:v11` +3. Type `docker run --detach -p 8080:80 -p 12050-12092:12050-12092 -p 3030:3030 gehighassurance/rack-box:v11` 4. Visit in your browser to view the RACK box's welcome page. ## Run a RACK box virtual machine @@ -18,7 +18,7 @@ Here are very brief instructions how to run a RACK box virtual machine. You wil 2. Concatenate the split VirtualBox zip files together. 3. Unzip the newly concatenated zip file. 4. Start VirtualBox. -5. Import the VirtualBox VM from the rack-box-virtualbox-v10.2 folder. +5. Import the VirtualBox VM from the rack-box-virtualbox-v11 folder. 6. Open the VM's Settings. 7. Click on Network. 8. Change the first network adapter from NAT to Bridged. 
diff --git a/rack-box/README.md b/rack-box/README.md index a8f61742..cdde3150 100644 --- a/rack-box/README.md +++ b/rack-box/README.md @@ -37,8 +37,7 @@ our workflows for the most up to date way to download these files, although we will mention each file here as well: - `files/fuseki.tar.gz`: Download latest Fuseki release tarball from - and rename it (note we still are - using version 3.16.0 instead of the latest release, though) + and rename it - `files/semtk.tar.gz`: Download latest SemTK release tarball from and rename it @@ -51,11 +50,6 @@ although we will mention each file here as well: (`files/docker/systemctl3.py`) from [docker-systemd-replacement](https://github.com/gdraheim/docker-systemctl-replacement) -- `files/rack.tar.gz`: Package the RACK ontology and data (`tar cfz - RACK/rack-box/files/rack.tar.gz --exclude=.git --exclude=.github - --exclude=assist --exclude=cli --exclude=rack-box --exclude=tests - --exclude=tools RACK`) - - `files/rack-assist.tar.gz`: Package the RACK ASSIST (`tar cfz RACK/rack-box/files/rack-assist.tar.gz RACK/assist`) @@ -64,10 +58,19 @@ although we will mention each file here as well: RACK/cli/{*.sh,wheels}`), see [Build the RACK CLI](#Build-the-RACK-CLI) for build instructions first +- `files/rack-ui.tar.gz`: Package the RACK UI (`tar cfz + RACK/rack-box/files/rack-ui.tar.gz RACK/rack-ui`) + - `files/{documentation.html,index.html}`: Package the RACK documentation, see [Package RACK documentation](#Package-RACK-documentation) for instructions +- `files/rack.tar.gz`: Generate OWL/CDR files (see [instructions + below](#Generate-OWL-CDR-files)) and package the RACK ontology and + data (`tar cfz RACK/rack-box/files/rack.tar.gz --exclude=.git + --exclude=.github --exclude=assist --exclude=cli --exclude=rack-box + --exclude=tests --exclude=tools RACK`) + Once you have put these 9 files into the `files` subdirectory, skip to [Build the rack-box images](#Build-the-rack-box-images) for the next step. 
@@ -107,6 +110,19 @@ repositories, run these commands: sed -i -e 's/>NodeGroupService/ onclick="javascript:event.target.port=12058">NodeGroupService/' index.html mv documentation.html index.html RACK/rack-box/files +## Generate OWL/CDR files + +You will need a running rack-box dev image in order to generate OWL +and CDR files. Start a rack-box running in the background, then run +these commands, and finally stop the rack-box that was running in the +background once you're done: + + RACK/cli/setup-owl.sh -b + pip3 install RACK/cli/wheels/*.whl + tar xfz RACK/rack-box/files/semtk.tar.gz + semtk-opensource/standaloneExecutables/target/standaloneExecutables-jar-with-dependencies.jar + RACK/nodegroups/generate-cdrs.sh semtk-opensource/standaloneExecutables/target/standaloneExecutables-jar-with-dependencies.jar + ## Build the rack-box images You will need to install [Packer](https://www.packer.io/) if you don't @@ -126,46 +142,6 @@ VirtualBox rack-box images to new subdirectories called VirtualBox GUI program can import these subdirectories directly into newly created virtual machines. -### Troubleshooting - -### Using `act` to run CI locally - -The [act](https://github.com/nektos/act) tool can be used to run (an -approximation of) the Github Actions workflows locally: - -- Download a binary release of Packer for Ubuntu, and place the - `packer` executable in the `rack-box/` directory -- Install `act` -- Generate a Github [personal access - token](https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/creating-a-personal-access-token) -- Create a `.secrets` file containing - `GITHUB_TOKEN=` -- Run `act --secret-file .secrets -P - ghcr.io/ubuntu-20.04=catthehacker/ubuntu:act-20.04` - -The first execution of `act` takes a while because it downloads the -Docker image `ghcr.io/catthehacker/ubuntu:act-20.04` and you'll need -enough free disk space to store the image. 
- -#### "volume is in use" - -If you see a message like this: - - Error: Error response from daemon: remove act-Build-Lint-shell-scripts-and-the-RACK-CLI: volume is in use - -You can forcibly stop and remove the `act` Docker containers and their volumes: - - docker stop $(docker ps -a | grep "ubuntu-act" | awk '{print $1}') - docker rm $(docker ps -a | grep "ubuntu-act" | awk '{print $1}') - docker volume rm $(docker volume ls --filter dangling=true | grep -o -E "act-.+$") - -There may also be a more precise solution to this issue, but the above works. - -#### "permission denied while trying to connect to the Docker daemon socket" - -`act` needs to be run with enough privileges to run Docker containers. Try -`sudo -g docker act ...` (or an equivalent invocation for your OS/distro). - --- Copyright (c) 2021, General Electric Company, Galois, Inc. diff --git a/rack-box/RELEASE.md b/rack-box/RELEASE.md index 557ca106..09b0b10c 100644 --- a/rack-box/RELEASE.md +++ b/rack-box/RELEASE.md @@ -6,11 +6,13 @@ steps: 1. 
Update version numbers or instructions in the following files within the RACK and RACK.wiki repositories: - RACK/ + RACK/rack-box/ - [ ] [Docker-Hub-README.md](Docker-Hub-README.md) - [ ] [GitHub-Release-README.md](GitHub-Release-README.md) - [ ] [RELEASE.md](RELEASE.md) + + RACK/cli/ - [ ] [setup-owl.sh](../cli/setup-owl.sh) RACK.wiki/ @@ -42,7 +44,7 @@ steps: ```shell cd RACK.wiki - git tag v10.2 + git tag v11 git push --tag ``` diff --git a/rack-box/files/GE_External_Root_CA_2_1.crt b/rack-box/files/GE_External_Root_CA_2_1.crt new file mode 100644 index 00000000..ae297f6c --- /dev/null +++ b/rack-box/files/GE_External_Root_CA_2_1.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDozCCAougAwIBAgIQeO8XlqAMLhxvtCap35yktzANBgkqhkiG9w0BAQsFADBS +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYR2VuZXJhbCBFbGVjdHJpYyBDb21wYW55 +MSAwHgYDVQQDExdHRSBFeHRlcm5hbCBSb290IENBIDIuMTAeFw0xNTAzMDUwMDAw +MDBaFw0zNTAzMDQyMzU5NTlaMFIxCzAJBgNVBAYTAlVTMSEwHwYDVQQKExhHZW5l +cmFsIEVsZWN0cmljIENvbXBhbnkxIDAeBgNVBAMTF0dFIEV4dGVybmFsIFJvb3Qg +Q0EgMi4xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzCzT4wNRZtr2 +XTzoTMjppjulZfG35/nOt44q2zg47sxwgZ8o4qjcrwzIhsntoFrRQssjXSF5qXdC +zsm1G7f04qEBimuOH/X+CidWX+sudCS8VyRjXi9cyvUW4/mYKCLXv5M6HhEoIHCD +Xdo6yUr5mSrf18qRR3yUFz0HYXopa2Ls3Q6lBvEUO2Xw04vqVvmg1h7S5jYuZovC +oIbd2+4QGdoSZPgtSNpCxSR+NwtPpzYZpmqiUuDGfVpO3HU42APB0c60D91cJho6 +tZpXYHDsR/RxYGm02K/iMGefD5F4YMrtoKoHbskty6+u5FUOrUgGATJJGtxleg5X +KotQYu8P1wIDAQABo3UwczASBgNVHRMBAf8ECDAGAQH/AgECMA4GA1UdDwEB/wQE +AwIBBjAuBgNVHREEJzAlpCMwITEfMB0GA1UEAxMWR0UtUm9vdC1DT00tUlNBLTIw +NDgtMTAdBgNVHQ4EFgQU3N2mUCJBCLYgtpZyxBeBMJwNZuowDQYJKoZIhvcNAQEL +BQADggEBACF4Zsf2Nm0FpVNeADUH+sl8mFgwL7dfL7+6n7hOgH1ZXcv6pDkoNtVE +0J/ZPdHJW6ntedKEZuizG5BCclUH3IyYK4/4GxNpFXugmWnKGy2feYwVae7Puyd7 +/iKOFEGCYx4C6E2kq3aFjJqiq1vbgSS/B0agt1D3rH3i/+dXVxx8ZjhyZMuN+cgS +pZL4gnhnSXFAGissxJhKsNkYgvKdOETRNn5lEgfgVyP2iOVqEguHk2Gu0gHSouLu +5ad/qyN+Zgbjx8vEWlywmhXb78Gaf/AwSGAwQPtmQ0310a4DulGxo/kcuS78vFH1 +mwJmHm9AIFoqBi8XpuhGmQ0nvymurEk= +-----END 
CERTIFICATE----- diff --git a/rack-box/http/user-data b/rack-box/http/user-data index 7a224e99..c3869be1 100644 --- a/rack-box/http/user-data +++ b/rack-box/http/user-data @@ -2,12 +2,14 @@ autoinstall: version: 1 early-commands: - # Block inbound SSH to stop Packer trying to connect during initial install + # Block inbound SSH during initial install to prevent Packer timeout - systemctl stop ssh + # Kludge from + - sleep 60 identity: username: ubuntu hostname: rack-box - password: '$6$wdAcoXrU039hKYPd$508Qvbe7ObUnxoj15DRCkzC3qO7edjH0VV7BPNRDYK4QR8ofJaEEF2heacn0QgD.f8pO8SNp83XNdWG6tocBM1' + password: "$6$wdAcoXrU039hKYPd$508Qvbe7ObUnxoj15DRCkzC3qO7edjH0VV7BPNRDYK4QR8ofJaEEF2heacn0QgD.f8pO8SNp83XNdWG6tocBM1" storage: layout: name: direct @@ -18,6 +20,7 @@ autoinstall: ssh: install-server: true packages: + # If you change packages here, change them in rack-box/scripts/install.sh too - default-jre - linux-cloud-tools-virtual - nginx-light diff --git a/rack-box/rack-box-docker.json b/rack-box/rack-box-docker.json index 3c248e04..0d668f01 100644 --- a/rack-box/rack-box-docker.json +++ b/rack-box/rack-box-docker.json @@ -11,12 +11,13 @@ "builders": [ { "type": "docker", - "image": "ubuntu:20.04", - "pull": false, - "commit": true, + "changes": [ "ENTRYPOINT [ \"/usr/bin/python3\", \"/usr/bin/systemctl\" ]" - ] + ], + "commit": true, + "image": "ubuntu:20.04", + "pull": false } ], diff --git a/rack-box/rack-box-hyperv.json b/rack-box/rack-box-hyperv.json index 525cf83f..923f13c0 100644 --- a/rack-box/rack-box-hyperv.json +++ b/rack-box/rack-box-hyperv.json @@ -4,7 +4,7 @@ "headless": "false", "http_proxy": "{{env `http_proxy`}}", "https_proxy": "{{env `https_proxy`}}", - "memory": "4096", + "memory": "8192", "no_proxy": "{{env `no_proxy`}}", "version": "dev", "vm_name": "rack-box-{{user `version`}}" @@ -13,26 +13,7 @@ "builders": [ { "type": "hyperv-iso", - "iso_checksum": "sha256:28ccdb56450e643bad03bb7bcf7507ce3d8d90e8bf09e38f6bd9ac298a98eaad", - "iso_url": 
"http://releases.ubuntu.com/20.04/ubuntu-20.04.4-live-server-amd64.iso", - "disk_block_size": 1, - "memory": "{{user `memory`}}", - "vm_name": "{{user `vm_name`}}", - "cpus": "{{user `cpus`}}", - "enable_dynamic_memory": true, - - "http_directory": "http", - - "headless": "{{user `headless`}}", - "shutdown_command": "sudo shutdown -P now", - - "communicator": "ssh", - "ssh_username": "ubuntu", - "ssh_password": "ubuntu", - "ssh_handshake_attempts": "15", - "ssh_timeout": "15m", - "boot_wait": "2s", "boot_command": [ "", "", @@ -45,7 +26,21 @@ "", "autoinstall ds=nocloud-net;s=http://{{.HTTPIP}}:{{.HTTPPort}}/", "" - ] + ], + "boot_wait": "2s", + "communicator": "ssh", + "cpus": "{{user `cpus`}}", + "disk_block_size": 1, + "enable_dynamic_memory": true, + "headless": "{{user `headless`}}", + "http_directory": "http", + "iso_checksum": "sha256:5035be37a7e9abbdc09f0d257f3e33416c1a0fb322ba860d42d74aa75c3468d4", + "iso_url": "https://releases.ubuntu.com/focal/ubuntu-20.04.5-live-server-amd64.iso", + "memory": "{{user `memory`}}", + "shutdown_command": "sudo shutdown -P now", + "ssh_password": "ubuntu", + "ssh_username": "ubuntu", + "vm_name": "{{user `vm_name`}}" } ], diff --git a/rack-box/rack-box-virtualbox.json b/rack-box/rack-box-virtualbox.json index 289e9608..dbf69a46 100644 --- a/rack-box/rack-box-virtualbox.json +++ b/rack-box/rack-box-virtualbox.json @@ -4,7 +4,7 @@ "headless": "false", "http_proxy": "{{env `http_proxy`}}", "https_proxy": "{{env `https_proxy`}}", - "memory": "4096", + "memory": "8192", "no_proxy": "{{env `no_proxy`}}", "version": "dev", "vm_name": "rack-box-{{user `version`}}" @@ -12,45 +12,26 @@ "builders": [ { - "type": "virtualbox-iso", - "iso_checksum": "sha256:28ccdb56450e643bad03bb7bcf7507ce3d8d90e8bf09e38f6bd9ac298a98eaad", - "iso_url": "http://releases.ubuntu.com/20.04/ubuntu-20.04.4-live-server-amd64.iso", + "type": "virtualbox-ovf", + "boot_wait": "30s", + "communicator": "ssh", "guest_additions_mode": "disable", - "guest_os_type": 
"Ubuntu_64", - "vm_name": "{{user `vm_name`}}", - "cpus": "{{user `cpus`}}", - "memory": "{{user `memory`}}", - "vboxmanage": [ - ["modifyvm", "{{.Name}}", "--vram", "20"] - ], - - "http_directory": "http", - "headless": "{{user `headless`}}", + "http_directory": "http", "shutdown_command": "sudo shutdown -P now", - - "communicator": "ssh", - "ssh_username": "ubuntu", - "ssh_password": "ubuntu", - "ssh_handshake_attempts": "15", - "ssh_timeout": "30m", - - "boot_keygroup_interval": "2s", - "boot_wait": "2s", - "boot_command": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "autoinstall ds=nocloud-net;s=http://{{.HTTPIP}}:{{.HTTPPort}}/", - "" - ] + "source_path": "focal64/box.ovf", + "ssh_private_key_file": "vagrant", + "ssh_username": "vagrant", + "vboxmanage": [ + ["modifyvm", "{{.Name}}", "--audio", "none"], + ["modifyvm", "{{.Name}}", "--cpus", "{{user `cpus`}}"], + ["modifyvm", "{{.Name}}", "--graphicscontroller", "vmsvga"], + ["modifyvm", "{{.Name}}", "--memory", "{{user `memory`}}"], + ["modifyvm", "{{.Name}}", "--vram", "20"], + ["modifyvm", "{{.Name}}", "--vrde", "off"] + ], + "vm_name": "{{user `vm_name`}}" } ], diff --git a/rack-box/scripts/install.sh b/rack-box/scripts/install.sh index 58b92695..36a70c0b 100644 --- a/rack-box/scripts/install.sh +++ b/rack-box/scripts/install.sh @@ -6,33 +6,35 @@ set -eo pipefail export USER=${1:-ubuntu} cd /tmp/files +# Install necessary packages non-interactively + +export DEBIAN_FRONTEND=noninteractive +export DEBCONF_NONINTERACTIVE_SEEN=true +apt-get update -yqq +apt-get install -yqq ca-certificates software-properties-common +cp GE_External_Root_CA_2_1.crt /usr/local/share/ca-certificates +update-ca-certificates +add-apt-repository -yu ppa:swi-prolog/stable +apt-get update -yqq + +# If you change packages here, change them in rack-box/http/user-data too + +apt-get install -yqq \ + curl \ + default-jre \ + gettext-base \ + nano \ + nginx-light \ + python3 \ + python3-pip \ + strace \ + swi-prolog \ + unzip 
+ # Execute this part of the script only if we're building a Docker image if [ "${PACKER_BUILDER_TYPE}" == "docker" ]; then - # Install necessary packages non-interactively - - export DEBIAN_FRONTEND=noninteractive - export DEBCONF_NONINTERACTIVE_SEEN=true - apt-get update -yqq - apt-get install -yqq software-properties-common - add-apt-repository -yu ppa:swi-prolog/stable - - # If you change this, change packages in rack-box/http/user-data too - # Note VM image already has curl, gettext-base, nano, etc. - - apt-get install -yqq \ - curl \ - default-jre \ - gettext-base \ - nano \ - nginx-light \ - python3 \ - python3-pip \ - strace \ - swi-prolog \ - unzip - # Install docker-systemctl-replaement chmod 755 systemctl3.py diff --git a/rack-box/vagrant b/rack-box/vagrant new file mode 100644 index 00000000..7d6a0839 --- /dev/null +++ b/rack-box/vagrant @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEogIBAAKCAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr+kz4TjGYe7gHzI +w+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6IedplqoP +kcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5y2 +hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NO +Td0jMZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcW +yLbIbEgE98OHlnVYCzRdK8jlqm8tehUc9c9WhQIBIwKCAQEA4iqWPJXtzZA68mKd +ELs4jJsdyky+ewdZeNds5tjcnHU5zUYE25K+ffJED9qUWICcLZDc81TGWjHyAqD1 +Bw7XpgUwFgeUJwUlzQurAv+/ySnxiwuaGJfhFM1CaQHzfXphgVml+fZUvnJUTvzf +TK2Lg6EdbUE9TarUlBf/xPfuEhMSlIE5keb/Zz3/LUlRg8yDqz5w+QWVJ4utnKnK +iqwZN0mwpwU7YSyJhlT4YV1F3n4YjLswM5wJs2oqm0jssQu/BT0tyEXNDYBLEF4A +sClaWuSJ2kjq7KhrrYXzagqhnSei9ODYFShJu8UWVec3Ihb5ZXlzO6vdNQ1J9Xsf +4m+2ywKBgQD6qFxx/Rv9CNN96l/4rb14HKirC2o/orApiHmHDsURs5rUKDx0f9iP +cXN7S1uePXuJRK/5hsubaOCx3Owd2u9gD6Oq0CsMkE4CUSiJcYrMANtx54cGH7Rk +EjFZxK8xAv1ldELEyxrFqkbE4BKd8QOt414qjvTGyAK+OLD3M2QdCQKBgQDtx8pN +CAxR7yhHbIWT1AH66+XWN8bXq7l3RO/ukeaci98JfkbkxURZhtxV/HHuvUhnPLdX +3TwygPBYZFNo4pzVEhzWoTtnEtrFueKxyc3+LjZpuo+mBlQ6ORtfgkr9gBVphXZG 
+YEzkCD3lVdl8L4cw9BVpKrJCs1c5taGjDgdInQKBgHm/fVvv96bJxc9x1tffXAcj +3OVdUN0UgXNCSaf/3A/phbeBQe9xS+3mpc4r6qvx+iy69mNBeNZ0xOitIjpjBo2+ +dBEjSBwLk5q5tJqHmy/jKMJL4n9ROlx93XS+njxgibTvU6Fp9w+NOFD/HvxB3Tcz +6+jJF85D5BNAG3DBMKBjAoGBAOAxZvgsKN+JuENXsST7F89Tck2iTcQIT8g5rwWC +P9Vt74yboe2kDT531w8+egz7nAmRBKNM751U/95P9t88EDacDI/Z2OwnuFQHCPDF +llYOUI+SpLJ6/vURRbHSnnn8a/XG+nzedGH5JGqEJNQsz+xT2axM0/W/CRknmGaJ +kda/AoGANWrLCz708y7VYgAtW2Uf1DPOIYMdvo6fxIB5i9ZfISgcJ/bbCUkFrhoH ++vq/5CIWxCPp0f85R4qxxQ5ihxJ0YDQT9Jpx4TMss4PSavPaBH3RXow5Ohe+bYoQ +NE5OgEXk2wVfZczCZpigBKbKZHNYcelXtTt/nP3rsCuGcM4h53s= +-----END RSA PRIVATE KEY----- diff --git a/rack-ui/app.py b/rack-ui/app.py index 87548f31..da43fcb8 100644 --- a/rack-ui/app.py +++ b/rack-ui/app.py @@ -1,39 +1,17 @@ -import io -import base64 -import glob -import time -import os -import re -import uuid +""" Main application page """ + import diskcache -import traceback -from pathlib import Path -from zipfile import ZipFile -from contextlib import redirect_stdout -import urllib -from urllib.parse import urlparse -import tempfile import dash -from dash import DiskcacheManager, Input, Output, html, dcc, State +from dash import Dash, DiskcacheManager, html, dcc, callback, Input, Output import dash_bootstrap_components as dbc -import rack -from rack import Graph, Manifest, sparql_connection -import semtk3 - -TEMP_DIR = tempfile.gettempdir() - -BASE_URL = "http://localhost" -TRIPLE_STORE = "http://localhost:3030/RACK" -TRIPLE_STORE_TYPE = "fuseki" - -# name of default manifest file within ingestion package -MANIFEST_FILE_NAME = "manifest.yaml" +from pages import home, load, verify +from pages.helper import * # diskcache for non-production apps when developing locally (fine for our Docker application). 
Needed for @dash.callback with background=True -cache = diskcache.Cache(TEMP_DIR + "/cache") +cache = diskcache.Cache(get_temp_dir() + "/cache") background_callback_manager = DiskcacheManager(cache) -app = dash.Dash(external_stylesheets=[dbc.themes.BOOTSTRAP], background_callback_manager=background_callback_manager) +app = Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP], background_callback_manager=background_callback_manager) app.title = 'RACK UI' # menu @@ -42,232 +20,49 @@ html.Table([ html.Tr([ html.Td(html.Img(src=app.get_asset_url('RACK_cartoon.jpg'), height="90px")), - html.Td([dcc.Markdown("## RACK\n\nin-a-box\n\n_System manager_")]) - ]) + html.Td([dcc.Markdown("## RACK\n_System manager_")]), + ]), + html.Tr( + html.Td([ + dbc.Nav([ + dbc.NavLink("Home", href="/", active="exact"), + dbc.NavLink("Load", href="/load", active="exact"), + dbc.NavLink("Verify", href="/verify", active="exact"), + ], + vertical=True, pills=True, + ) + ], colSpan=2) + ) ]), ], className="sidebar" ) -# div showing load details/options and load/cancel buttons -load_div = html.Div( - [ - dcc.Markdown("", id="load-div-message"), - dcc.RadioItems([], value="manifest-graphs", id="load-graph-radio", labelStyle={'display': 'block'}, inputStyle={"margin-right": "10px"}), # choose to load to manifest-specified or default graphs - html.Button("Load", id="load-button", n_clicks=0), # load button - html.Button("Cancel", id="cancel-load-button", n_clicks=0) # cancel button - ], - id="load-div", - hidden=True, - style={"margin-top": "50px"}, -) - -# dialog indicating unzip error (e.g. 
no manifest) -unzip_error_dialog = dbc.Modal( - [ - dbc.ModalBody("UNZIP ERROR PLACEHOLDER", id="unzip-error-dialog-body"), # message - dbc.ModalFooter(html.Button("Close", id="unzip-error-dialog-button", n_clicks=0)), # close button - ], - id="unzip-error-dialog", - is_open=False, - backdrop=False, -) - -# dialog confirming load done -done_dialog = dbc.Modal( - [ - dbc.ModalBody("MESSAGE PLACEHOLDER", id="done-dialog-body"), # message - dbc.ModalFooter(html.Button("Close", id="done-dialog-button", n_clicks=0)), # close button - ], - id="done-dialog", - is_open=False, - backdrop=False, -) - -content = html.Div( - [ - sidebar, - dcc.Markdown("Welcome to RACK."), - html.Div([dcc.Upload( html.Button(id="run-button", children="Select ingestion package"), id='run-button-upload', accept=".zip", multiple=False)]), # upload button - load_div, - html.Div(id="status-div", className="scrollarea"), # displays ingestion status - unzip_error_dialog, - done_dialog, - ], - style = { "margin-left": "18rem", "margin-right": "2rem", "padding": "2rem 1rem" } -) - +# layout app.layout = html.Div([ - dcc.Location(id="url"), - content, - dcc.Store("status-filepath"), # stores the filename of the temp file containing status - dcc.Store("manifest-filepath"), # stores the path to the manifest file - dcc.Interval(id='status-interval', interval=0.5*1000, n_intervals=0, disabled=True), # triggers updating the status display - ]) - -####### callbacks ####### - -@dash.callback( - output=[ - Output("load-div-message", "children"), - Output("load-graph-radio", "options"), - Output("manifest-filepath", "data"), - Output("unzip-error-dialog-body", "children"), - Output("status-filepath", "data"), # store a status file path - Output("run-button-upload", "contents")], # set to None after extracting, else callback ignores re-uploaded file - inputs=Input("run-button-upload", "contents"), # triggered by user selecting an upload file - background=True, # background callback - running=[ - 
(Output("run-button", "disabled"), True, False), # disable the run button while running - ], - prevent_initial_call=True -) -def run_unzip(zip_file_contents): - """ - Extract the selected zip file - """ - try: - tmp_dir = TEMP_DIR + "/ingest_" + str(uuid.uuid4()) # temp directory to store the unzipped package - zip_str = io.BytesIO(base64.b64decode(zip_file_contents.split(',')[1])) - zip_obj = ZipFile(zip_str, 'r') - zip_obj.extractall(path=tmp_dir) # unzip the package - manifest_paths = glob.glob(tmp_dir + '/**/' + MANIFEST_FILE_NAME, recursive=True) - if len(manifest_paths) == 0: - raise Exception("Cannot load ingestion package: does not contain manifest file " + MANIFEST_FILE_NAME) - if len(manifest_paths) > 1: - raise Exception("Cannot load ingestion package: contains multiple default manifest files: " + str(manifests)) - manifest_path = manifest_paths[0] - - manifest = get_manifest(manifest_path) - manifest_graphs_option = "Load to " + str(manifest.getModelgraphsFootprint()) + " " + str(manifest.getDatagraphsFootprint()) - radio_choices = [{'label': manifest_graphs_option, 'value': 'manifest-graphs'}, {'label': 'Load to default graph (for optimized performance)', 'value': 'default-graph'}] - - # generate a file in which to capture the ingestion status - status_filepath = os.path.join(TEMP_DIR, "output_" + str(uuid.uuid4())) - - except Exception as e: - return "", [], None, get_error_trace(e), status_filepath, None - return "You have selected package '" + manifest.getName() + "'", radio_choices, manifest_path, None, status_filepath, None - - -@dash.callback( - output=Output("done-dialog-body", "children"), - inputs=Input("load-button", "n_clicks"), # triggered by user clicking load button - state=[ - State("load-graph-radio", "value"), # load to manifest or default graphs - State("status-filepath", "data"), - State("manifest-filepath", "data")], - background=True, # background callback - running=[ - (Output("run-button", "disabled"), True, False), # disable the 
run button while running - (Output("status-interval", "disabled"), False, True) # enable the interval component while running - ], - prevent_initial_call=True + dcc.Location(id='url', refresh=False), + sidebar, + html.Div(id='page-content'), # display page content + dcc.Store("last-loaded-graphs"), # stores the last-loaded graphs (used by multiple pages) +], + style = { "margin-left": "18rem", "margin-right": "2rem", "padding": "2rem 1rem" } ) -def run_ingest(load_button_clicks, manifest_or_default_graphs, status_filepath, manifest_filepath): - """ - Ingest the selected zip file - """ - try: - use_default_graph = (manifest_or_default_graphs == "default-graph") - - f = open(status_filepath, "a") - with redirect_stdout(f): # send command output to temporary file - rack.ingest_manifest_driver(Path(manifest_filepath), BASE_URL, TRIPLE_STORE, TRIPLE_STORE_TYPE, True, use_default_graph) # process the manifest - - # get connection from manifest, construct SPARQLGraph URL - manifest = get_manifest(manifest_filepath) - if use_default_graph: - conn_str = manifest.getDefaultGraphConnection() - else: - conn_str = manifest.getConnection() - sparqlgraph_url_str = semtk3.get_sparqlgraph_url("http://localhost:8080", conn_json_str=conn_str) - - time.sleep(1) - except Exception as e: - return get_error_trace(e) # show done dialog with error - return [dcc.Markdown("Loaded ingestion package."), html.A("Open in SPARQLGraph UI", href=sparqlgraph_url_str, target="_blank", style={"margin-top": "100px"})] +# validate using this layout (includes components from pages) +app.validation_layout = html.Div([app.layout, home.layout, load.layout, verify.layout]) -@app.callback(Output("status-div", "children"), - Input("status-interval", "n_intervals"), # triggered at regular interval - Input("status-filepath", "data"), # or triggered by resetting the file path (to clear out the status when selecting a new file) - prevent_initial_call=True) -def update_status(n, status_filepath): - """ - Update the 
displayed status - """ - print("update_status") # show it's running - status = "" - try: - with open(status_filepath, "r") as file: - status = file.read() - ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])') # remove ANSI escape sequences (e.g. ESC[32m, ESC[0m) from command output - return ansi_escape.sub('', status) - except: - return "" - -####### simple callbacks to show/hide components ####### - -@app.callback(Output("load-div", "hidden"), - Input("load-graph-radio", "options"), # triggered by setting load graph radio options - Input("load-button", "n_clicks"), - Input("cancel-load-button", "n_clicks"), - prevent_initial_call=True - ) -def manage_load_div(radio_options, load_clicks, cancel_clicks): - """ Show or hide the load div """ - if (get_trigger() in ["load-button.n_clicks", "cancel-load-button.n_clicks"]): - return True # load or button pressed, hide div - elif radio_options == []: - return True # no radio options provided, don't show div - else: - return False # radio options provided, show div - -@app.callback(Output("unzip-error-dialog", "is_open"), - Input("unzip-error-dialog-body", "children"), - Input("unzip-error-dialog-button", "n_clicks"), - prevent_initial_call=True - ) -def manage_unzip_error_dialog(message, n_clicks): - """ Show or hide the unzip error dialog """ - if (get_trigger() == "unzip-error-dialog-button.n_clicks"): - return False # button pressed, hide the dialog - elif message == None: - return False # no message, don't show the dialog - else: - return True # child added, show the dialog - -@app.callback(Output("done-dialog", "is_open"), - Input("done-dialog-body", "children"), - Input("done-dialog-button", "n_clicks"), - prevent_initial_call=True - ) -def manage_done_dialog(children, n_clicks): - """ Show or hide the done dialog """ - if (get_trigger() == "done-dialog-button.n_clicks"): - return False # button pressed, hide the dialog +@callback(Output('page-content', 'children'), + Input('url', 'pathname')) +def 
display_page(pathname): + if pathname == '/': + return home.layout + elif pathname == '/load': + return load.layout + elif pathname == '/verify': + return verify.layout else: - return True # child added, show the dialog - - -####### convenience functions ####### - -def get_trigger(): - """ Get the input that triggered a callback (for @app.callback only, not @dash.callback) """ - return dash.callback_context.triggered[0]['prop_id'] - -def get_error_trace(e) -> str: - """ Get error trace string """ - trace = traceback.format_exception(None, e, e.__traceback__) - return trace[-1] - -def get_manifest(manifest_filepath) -> Manifest: - """ Get manifest contents from file """ - with open(manifest_filepath, mode='r', encoding='utf-8-sig') as manifest_file: - manifest = Manifest.fromYAML(manifest_file) - return manifest + return '404' -if __name__ == "__main__": - app.run_server(host="0.0.0.0", debug=False) +if __name__ == '__main__': + app.run_server(host="0.0.0.0", debug=False) \ No newline at end of file diff --git a/rack-ui/assets/RACK_cartoon.jpg b/rack-ui/assets/RACK_cartoon.jpg index dfe55738..4cc01501 100644 Binary files a/rack-ui/assets/RACK_cartoon.jpg and b/rack-ui/assets/RACK_cartoon.jpg differ diff --git a/rack-ui/assets/style.css b/rack-ui/assets/style.css index 24532793..6041f017 100644 --- a/rack-ui/assets/style.css +++ b/rack-ui/assets/style.css @@ -14,6 +14,7 @@ button { font-size: 16px; width:200px; display: inline-block; + margin-top: 10px; margin-bottom: 10px; margin-right: 5px; height:25px; diff --git a/rack-ui/pages/helper.py b/rack-ui/pages/helper.py new file mode 100644 index 00000000..2223aa7f --- /dev/null +++ b/rack-ui/pages/helper.py @@ -0,0 +1,41 @@ + +""" Helper functions """ + +import tempfile +import traceback +import dash +import re +import os +import uuid + +# configuration +BASE_URL = "http://localhost" +SPARQLGRAPH_BASE_URL = "http://localhost:8080" +TRIPLE_STORE_BASE_URL = "http://localhost:3030" +TRIPLE_STORE = TRIPLE_STORE_BASE_URL + 
"/RACK" +TRIPLE_STORE_TYPE = "fuseki" + +def get_temp_dir() -> str: + """ Get a temp dir """ + return tempfile.gettempdir() + +def get_temp_dir_unique(prefix) -> str: + """ Get a unique subdirectory within the temp dir, e.g. /tmp/ingest_9d40551e-f31f-4530-8c90-ca3e0acc4257""" + return os.path.join(get_temp_dir(), prefix + "_" + str(uuid.uuid4())) + +def get_error_trace(e) -> str: + """ Get error trace string """ + trace = traceback.format_exception(None, e, e.__traceback__) + return trace[-1] + +def get_trigger(): + """ + Get the input that triggered a callback + Not for use with @dash.callback (in local Windows environment, gives dash.exceptions.MissingCallbackContextException) + """ + return dash.callback_context.triggered[0]['prop_id'] + +def clean_for_display(s): + """ Cleans process output to be displayed to user """ + ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])') # remove ANSI escape sequences (e.g. ESC[32m, ESC[0m) from command output + return ansi_escape.sub('', s) \ No newline at end of file diff --git a/rack-ui/pages/home.py b/rack-ui/pages/home.py new file mode 100644 index 00000000..f3eed146 --- /dev/null +++ b/rack-ui/pages/home.py @@ -0,0 +1,7 @@ +""" Content for the home page """ + +from dash import html, dcc + +layout = html.Div(children=[ + html.H2('Welcome to RACK.'), +]) \ No newline at end of file diff --git a/rack-ui/pages/load.py b/rack-ui/pages/load.py new file mode 100644 index 00000000..e5e37319 --- /dev/null +++ b/rack-ui/pages/load.py @@ -0,0 +1,248 @@ +""" Content for the "load data" page """ + +import time +import io +import base64 +import glob +from contextlib import redirect_stdout, redirect_stderr +from urllib.parse import urlparse +from pathlib import Path +from zipfile import ZipFile +from dash import html, dcc, callback, Input, Output, State +import dash_bootstrap_components as dbc +import rack +from rack import Manifest +import semtk3 +from .helper import * + +# name of default manifest file within ingestion 
package +MANIFEST_FILE_NAME = "manifest.yaml" + +# div showing load details/options and load/cancel buttons +load_div = html.Div( + [ + dcc.Markdown("", id="load-div-message"), + dcc.RadioItems([], value="manifest-graphs", id="load-graph-radio", labelStyle={'display': 'block'}, inputStyle={"margin-right": "10px"}), # choose to load to manifest-specified or default graphs + html.Button("Load", id="load-button", n_clicks=0), # load button + html.Button("Cancel", id="cancel-load-button", n_clicks=0) # cancel button + ], + id="load-div", + hidden=True, + style={"margin-top": "50px"}, +) + +# dialog indicating unzip error (e.g. no manifest) +unzip_error_dialog = dbc.Modal( + [ + dbc.ModalBody("UNZIP ERROR PLACEHOLDER", id="unzip-error-dialog-body"), # message + dbc.ModalFooter(html.Button("Close", id="unzip-error-dialog-button", n_clicks=0)), # close button + ], + id="unzip-error-dialog", + is_open=False, + backdrop=False, +) + +# dialog confirming load done +done_dialog = dbc.Modal( + [ + dbc.ModalBody("MESSAGE PLACEHOLDER", id="done-dialog-body"), # message + dbc.ModalFooter(html.Button("Close", id="done-dialog-button", n_clicks=0)), # close button + ], + id="done-dialog", + is_open=False, + backdrop=False, +) + +# page elements +layout = html.Div([ + html.H2("Load data"), + dcc.Markdown("_Load data into RACK_"), + html.Div([html.Button(id="turnstile-button", children="Load Turnstile data")]), # button to load turnstile + dbc.Tooltip("Load the Turnstile sample data provided with RACK", target="turnstile-button"), + html.Div([dcc.Upload( html.Button(id="select-button", children="Load ingestion package"), id='select-button-upload', accept=".zip", multiple=False)]), # button to show upload dialog to pick ingestion package + dbc.Tooltip("Load an ingestion package (in .zip format) from your local machine", target="select-button"), + load_div, + html.Div(id="status-div", className="scrollarea"), # displays ingestion status + unzip_error_dialog, + done_dialog, + 
dcc.Store("status-filepath"), # stores the filename of the temp file containing status + dcc.Store("manifest-filepath"), # stores the path to the manifest file + dcc.Interval(id='status-interval', interval=0.5*1000, n_intervals=0, disabled=True), # triggers updating the status display + ]) + +####### callbacks ####### + +@dash.callback( + output=[ + Output("load-div-message", "children"), + Output("load-graph-radio", "options"), + Output("manifest-filepath", "data"), + Output("unzip-error-dialog-body", "children"), + Output("status-filepath", "data"), # store a status file path + Output("select-button-upload", "contents")], # set to None after extracting, else callback ignores re-uploaded file + inputs=[ + Input("select-button-upload", "contents"), # triggered by user selecting an upload file + Input("turnstile-button", "n_clicks")], # triggered by turnstile button + background=True, # background callback + running=[ + (Output("select-button", "disabled"), True, False), # disable the button while running + (Output("turnstile-button", "disabled"), True, False), # disable the button while running + ], + prevent_initial_call=True +) +def run_unzip(zip_file_contents, turnstile_clicks): + """ + Extract the selected zip file + """ + try: + if zip_file_contents != None: + tmp_dir = get_temp_dir_unique("ingest") # temp directory to store the unzipped package + zip_str = io.BytesIO(base64.b64decode(zip_file_contents.split(',')[1])) + zip_obj = ZipFile(zip_str, 'r') + zip_obj.extractall(path=tmp_dir) # unzip the package + manifest_paths = glob.glob(tmp_dir + '/**/' + MANIFEST_FILE_NAME, recursive=True) + if len(manifest_paths) == 0: + raise Exception("Cannot load ingestion package: does not contain manifest file " + MANIFEST_FILE_NAME) + if len(manifest_paths) > 1: + raise Exception("Cannot load ingestion package: contains multiple default manifest files: " + str(manifests)) + manifest_path = manifest_paths[0] + else: + manifest_path = 
"../Turnstile-Example/Turnstile-IngestionPackage/manifest.yaml" + + manifest = get_manifest(manifest_path) + manifest_graphs_option = "Load to " + str(manifest.getModelgraphsFootprint()) + " " + str(manifest.getDatagraphsFootprint()) + radio_choices = [{'label': manifest_graphs_option, 'value': 'manifest-graphs'}, {'label': 'Load to default graph (for optimized performance)', 'value': 'default-graph'}] + + # generate a file in which to capture the ingestion status + status_filepath = get_temp_dir_unique("output") + + selected_message = "You have selected package '" + manifest.getName() + "'" + if manifest.getDescription() != None and manifest.getDescription().strip() != "": + selected_message = selected_message + " (" + manifest.getDescription() + ")" + + except Exception as e: + return "", [], None, get_error_trace(e), None, None + return selected_message, radio_choices, manifest_path, None, status_filepath, None + + +@dash.callback( + output=[Output("done-dialog-body", "children"), + Output("last-loaded-graphs", "data")], # remember graphs loaded (used in the Verify tab) NOTE this Store is from app.py layout - using it here disables prevent_initial_call=True + inputs=Input("load-button", "n_clicks"), # triggered by user clicking load button + state=[ + State("load-graph-radio", "value"), # load to manifest or default graphs + State("status-filepath", "data"), + State("manifest-filepath", "data")], + background=True, # background callback + running=[ + (Output("select-button", "disabled"), True, False), # disable button while running + (Output("turnstile-button", "disabled"), True, False), # disable button while running + (Output("status-interval", "disabled"), False, True) # enable the interval component while running + ], + prevent_initial_call=True # NOTE won't work because last-loaded-graphs is in the layout before load-button (see https://dash.plotly.com/advanced-callbacks#prevent-callback-execution-upon-initial-component-render) +) +def 
run_ingest(load_button_clicks, manifest_or_default_graphs, status_filepath, manifest_filepath): + """ + Ingest the selected zip file + """ + # this callback gets triggered when the pages is loaded - if so don't proceed + if load_button_clicks == 0: + raise dash.exceptions.PreventUpdate + + try: + # avoid a ConnectionError if SemTK services are not fully up yet + if semtk3.check_services() == False: + raise Exception("Cannot reach SemTK Services (wait for startup to complete, or check for failures)") + + use_default_graph = (manifest_or_default_graphs == "default-graph") + + f = open(status_filepath, "a") + with redirect_stdout(f), redirect_stderr(f): # send command output to temporary file + rack.logger.setLevel("ERROR") + rack.ingest_manifest_driver(Path(manifest_filepath), BASE_URL, TRIPLE_STORE, TRIPLE_STORE_TYPE, True, use_default_graph) # process the manifest + + # get connection from manifest, construct SPARQLGraph URL + manifest = get_manifest(manifest_filepath) + if use_default_graph: + conn_str = manifest.getDefaultGraphConnection() + else: + conn_str = manifest.getConnection() + sparqlgraph_url_str = semtk3.get_sparqlgraph_url(SPARQLGRAPH_BASE_URL, conn_json_str=conn_str) + + # store list of loaded graphs + last_loaded_graphs = manifest.getModelgraphsFootprint() + manifest.getDatagraphsFootprint() + + time.sleep(1) + except Exception as e: + return get_error_trace(e), [] # show done dialog with error + return [dcc.Markdown("Loaded ingestion package."), html.A("Open in SPARQLGraph UI", href=sparqlgraph_url_str, target="_blank", style={"margin-top": "100px"})], last_loaded_graphs + + +@callback(Output("status-div", "children"), + Input("status-interval", "n_intervals"), # triggered at regular interval + Input("status-filepath", "data"), # or triggered by resetting the file path (to clear out the status when selecting a new file) + prevent_initial_call=True) +def update_status(n, status_filepath): + """ + Update the displayed status + """ + status = "" + 
try: + with open(status_filepath, "r") as file: + status = file.read() + return clean_for_display(status) + except: + return "" + + +####### simple callbacks to show/hide components ####### + +@callback(Output("load-div", "hidden"), + Input("load-graph-radio", "options"), # triggered by setting load graph radio options + Input("load-button", "n_clicks"), + Input("cancel-load-button", "n_clicks"), + prevent_initial_call=True + ) +def manage_load_div(radio_options, load_clicks, cancel_clicks): + """ Show or hide the load div """ + if (get_trigger() in ["load-button.n_clicks", "cancel-load-button.n_clicks"]): + return True # load or cancel button pressed, hide div + elif radio_options == []: + return True # no radio options provided, don't show div + else: + return False # radio options provided, show div + +@callback(Output("unzip-error-dialog", "is_open"), + Input("unzip-error-dialog-body", "children"), + Input("unzip-error-dialog-button", "n_clicks"), + prevent_initial_call=True + ) +def manage_unzip_error_dialog(message, n_clicks): + """ Show or hide the unzip error dialog """ + if (get_trigger() == "unzip-error-dialog-button.n_clicks"): + return False # button pressed, hide the dialog + elif message == None: + return False # no message, don't show the dialog + else: + return True # child added, show the dialog + +@callback(Output("done-dialog", "is_open"), + Input("done-dialog-body", "children"), + Input("done-dialog-button", "n_clicks"), + prevent_initial_call=True + ) +def manage_done_dialog(children, n_clicks): + """ Show or hide the done dialog """ + if (get_trigger() == "done-dialog-button.n_clicks"): + return False # button pressed, hide the dialog + else: + return True # child added, show the dialog + + +####### convenience functions ####### + +def get_manifest(manifest_filepath) -> Manifest: + """ Get manifest contents from file """ + with open(manifest_filepath, mode='r', encoding='utf-8-sig') as manifest_file: + manifest = 
Manifest.fromYAML(manifest_file) + return manifest \ No newline at end of file diff --git a/rack-ui/pages/verify.py b/rack-ui/pages/verify.py new file mode 100644 index 00000000..f9fbf537 --- /dev/null +++ b/rack-ui/pages/verify.py @@ -0,0 +1,292 @@ +""" Content for the "verify data" page """ + +import time +import platform +import subprocess +from dash import html, dcc, callback, Input, Output, State +import dash_bootstrap_components as dbc +import semtk3 +import rack +from .helper import * + +# dialog confirming ASSIST verification done +verify_assist_done_dialog = dbc.Modal( + [ + dbc.ModalBody("MESSAGE PLACEHOLDER", id="verify-assist-done-dialog-body"), # message + dbc.ModalFooter([ + html.Button("Download results", id="verify-assist-download-button"), # download results button + html.Button("Close", id="verify-assist-done-button", n_clicks=0) # close button + ]), + dcc.Download(id="download"), + ], + id="verify-assist-done-dialog", + is_open=False, + backdrop=False, +) + +# div showing graphs list +verify_report_options_div = html.Div( + [ + dcc.Markdown("Select graphs to include in report:"), + dcc.Checklist([], [], id="verify-graph-checklist", labelStyle={'display': 'block'}, inputStyle={"margin-right": "10px"}), # choose which graphs to verify + html.Button("Continue", id="verify-report-continue-button", n_clicks=0), # button to open SPARQLgraph report + html.Button("Cancel", id="verify-report-cancel-button", n_clicks=0) # button to cancel + ], + id="verify-report-options-div", + hidden=True, + style={"margin-top": "50px"}, +) + +# dialog indicating an error generating the SPARQLgraph report (e.g. 
no graphs selected) +verify_report_error_dialog = dbc.Modal( + [ + dbc.ModalBody("MESSAGE PLACEHOLDER", id="verify-report-error-dialog-body"), # message + dbc.ModalFooter([ + html.Button("Close", id="verify-report-error-button", n_clicks=0) # close button + ]), + ], + id="verify-report-error-dialog", + is_open=False, + backdrop=False, +) + +# page elements +layout = html.Div(children=[ + html.H2('Verify Data'), + dcc.Markdown("_Run verification routines on the data loaded in RACK_"), + html.Button("Verify using ASSIST", id="verify-assist-button", n_clicks=0), # button to verify using ASSIST + html.Button("Verify using report", id="verify-report-button"), # button to verify using SPARQLgraph report + verify_report_options_div, + html.Div(id="assist-status-div", className="scrollarea"), # displays status + verify_assist_done_dialog, + verify_report_error_dialog, + dcc.Store("assist-status-filepath"), # stores the filename of the temp file containing status + dcc.Store("sparqlgraph-url"), # stores the URL of the SPARQLgraph report + dcc.Store(id="clientside-dummy-store"), # dummy store because callback needs an Output + dcc.Interval(id='assist-status-interval', interval=0.5*1000, n_intervals=0, disabled=True), # triggers updating the status display +]) + +####### callbacks for ASSIST verification ###################################### + + +@dash.callback( + output=Output("assist-status-filepath", "data"), # store a status file path + inputs=Input("verify-assist-button", "n_clicks"), # triggered by clicking ASSIST button + background=True, # background callback + running=[ + (Output("verify-report-button", "disabled"), True, False), # disable the button while running + (Output("verify-assist-button", "disabled"), True, False), # disable the button while running + ], + prevent_initial_call=True +) +def create_assist_status_filepath(n_clicks): + """ + Generate a file in which to capture the ASSIST status + """ + status_filepath = get_temp_dir_unique("output") + return 
status_filepath + + +@dash.callback( + output=[Output("verify-assist-done-dialog-body", "children"), + Output("verify-assist-download-button", "hidden")], + inputs=Input("assist-status-filepath", "data"), # triggered by creating ASSIST status filepath + background=True, # background callback + running=[ + (Output("verify-report-button", "disabled"), True, False), # disable the button while running + (Output("verify-assist-button", "disabled"), True, False), # disable the button while running + (Output("assist-status-interval", "disabled"), False, True) # enable the interval component while running + ], + prevent_initial_call=True +) +def run_assist(status_filepath): + """ + Run the ASSIST tool + """ + try: + if platform.system() == "Windows": + raise Exception("Not yet supported on Windows. (PROLOG checking is available through LINUX/Docker.)") + else: + # runs on all graphs in the triple store, minus an exclusion list of internal SemTK graphs (e.g. demo data) + subprocess.call("../assist/bin/check -v -m " + TRIPLE_STORE_BASE_URL + "/ > " + status_filepath + " 2>&1", shell=True) + time.sleep(1) + + return [dcc.Markdown("Completed ASSIST verification.")], False + except Exception as e: + return get_error_trace(e), True # show done dialog with error, hide the download button + + +@callback(Output("assist-status-div", "children"), + Input("assist-status-interval", "n_intervals"), # triggered at regular interval + Input("assist-status-filepath", "data"), # or triggered by resetting the file path (to clear out the status when selecting a new file) + prevent_initial_call=True) +def update_assist_status(n, status_filepath): + """ + Update the displayed status + """ + status = "" + try: + with open(status_filepath, "r") as file: + lines = file.readlines() + status = "...\n\n" + "".join(lines[-1 * min(20, len(lines)):]) # get the last 20 lines (or fewer if there are not 20 in the list) + return clean_for_display(status) + except Exception as e: + return "" + + +@callback( + 
Output("download", "data"), + Input("verify-assist-download-button", "n_clicks"), # triggered by user clicking download button + State("assist-status-filepath", "data"), # the name of the file to download + prevent_initial_call=True, +) +def download_assist_results(n_clicks, status_filepath): + """ + Download file when user clicks button + """ + # read contents of the result file + with open(status_filepath, 'r') as file: + file_content = file.read() + return dict(content=file_content, filename="rack_verification_results.txt") + + +####### callbacks for SPARQLgraph report verification ###################################### + + +@dash.callback( + output=[ + Output("verify-graph-checklist", "options"), # list of graphs populated in the triple store + Output("verify-graph-checklist", "value")], # list of graphs to pre-select (graphs recently loaded) + inputs=Input("verify-report-button", "n_clicks"), # triggered by user clicking button + state=State("last-loaded-graphs", "data"), # last loaded graphs + background=True, # background callback + running=[ + (Output("verify-report-button", "disabled"), True, False), # disable the run button while running + (Output("verify-assist-button", "disabled"), True, False), # disable the button while running + ], + prevent_initial_call=True +) +def show_report_options(button_clicks, last_loaded_graphs): + """ + Show list of graphs for verification report, with the last loaded graphs pre-selected + """ + # get list of graphs populated in the triple store - create checkboxes for these + conn_str = rack.sparql_connection(BASE_URL, None, None, [], TRIPLE_STORE, TRIPLE_STORE_TYPE) + graphs_list = semtk3.get_graph_names(conn_str, True) # excludes internal SemTK graphs + graphs_list.sort() + + # these are the graphs last loaded - check the checkboxes for these + if last_loaded_graphs == None: + last_loaded_graphs = [] + + return graphs_list, last_loaded_graphs + + +@dash.callback( + output=[Output("sparqlgraph-url", "data"), # output 
SPARQLgraph report URL + Output("verify-report-error-dialog-body", "children")], # output error message + inputs=Input("verify-report-continue-button", "n_clicks"), # triggered by clicking continue button + state=State("verify-graph-checklist", "value"), # the currently selected graphs + background=True, # background callback + running=[ + (Output("verify-report-button", "disabled"), True, False), # disable the button while running + (Output("verify-assist-button", "disabled"), True, False), # disable the button while running + ], + prevent_initial_call=True +) +def generate_report_link(sg_button_clicks, graphs_selected): + """ + Generate the SPARQLgraph report link + """ + # error if no graphs were selected + if len(graphs_selected) == 0: + return None, "Please select at least one graph" # return error message and no URL + + # build a connection using selected graphs (no need to differentiate model vs data) + graphs = [] + for graph in graphs_selected: + graphs.append(graph) + conn = semtk3.build_connection_str("Graphs To Verify", TRIPLE_STORE_TYPE, TRIPLE_STORE, graphs, graphs[0], graphs[1:]) # use all graphs for both model and data, to avoid either being empty + + # construct SG report URL + sparqlgraph_verify_url_str = semtk3.get_sparqlgraph_url(SPARQLGRAPH_BASE_URL, conn_json_str=str(conn), report_id="report data verification") + + # return SPARQLgraph report URL + return sparqlgraph_verify_url_str, None + + +# Open a browser tab with SPARQLgraph report (this is a clientside callback written in JavaScript: https://dash.plotly.com/clientside-callbacks) +dash.clientside_callback( + """ + function(url) { + if(url != null){ + window.open(url); + } + return "dummy value" + } + """, + Output("clientside-dummy-store", "data"), # serves no purpose, but an output is required + Input("sparqlgraph-url", "data"), + prevent_initial_call=True +) + + +####### simple callbacks to show/hide components ####### + + +@callback(Output("assist-status-div", "hidden"), + 
Input("verify-assist-button", "n_clicks"), + Input("verify-report-button", "n_clicks"), + prevent_initial_call=True + ) +def manage_assist_status_div(assist_clicks, report_clicks): + """ Show or hide the ASSIST status div """ + if (get_trigger() in ["verify-assist-button.n_clicks"]): + return False # user clicked ASSIST, show the div + else: + return True # user clicked report, hide the div + + +@callback(Output("verify-report-options-div", "hidden"), + Input("verify-graph-checklist", "options"), + Input("verify-assist-button", "n_clicks"), + Input("verify-report-cancel-button", "n_clicks"), + prevent_initial_call=True + ) +def manage_verify_report_options_div(checklist_options, continue_clicks, cancel_clicks): + """ Show or hide the graph checklist div """ + if (get_trigger() in ["verify-assist-button.n_clicks", "verify-report-cancel-button.n_clicks"]): + return True # continue or cancel button pressed, hide div + elif checklist_options == []: + return True # no checklist options provided, don't show div + else: + return False # checklist options provided, show div + + +@callback(Output("verify-assist-done-dialog", "is_open"), + Input("verify-assist-done-dialog-body", "children"), + Input("verify-assist-done-button", "n_clicks"), + prevent_initial_call=True + ) +def manage_verify_assist_done_dialog(children, n_clicks): + """ Show or hide the done dialog after running ASSIST """ + if (get_trigger() == "verify-assist-done-button.n_clicks"): + return False # button pressed, hide the dialog + else: + return True # child added, show the dialog + + +@callback(Output("verify-report-error-dialog", "is_open"), + Input("verify-report-error-dialog-body", "children"), + Input("verify-report-error-button", "n_clicks"), + prevent_initial_call=True + ) +def manage_verify_report_error_dialog(children, n_clicks): + """ Show or hide the SPARQLgraph report error dialog (e.g. 
if no graphs selected) """ + if (get_trigger() == "verify-report-error-button.n_clicks"): + return False # button pressed, hide the dialog + else: + if children == None: + return False # child added but it's None, hide the dialog + else: + return True # child added, show it diff --git a/sadl-examples/OwlModels/TurnstileSecurity.rules b/sadl-examples/OwlModels/TurnstileSecurity.rules new file mode 100644 index 00000000..072e3760 --- /dev/null +++ b/sadl-examples/OwlModels/TurnstileSecurity.rules @@ -0,0 +1,11 @@ +# Jena Rules file generated by SADL IDE -- Do not edit! Edit the SADL model and regenerate. +# Created from SADL model 'http://arcos.sadl-examples/TurnstileSecurity' + +@prefix Sec: +@prefix rdf: +@prefix CPS: +@prefix TSec: +@prefix sys: + +[Vul-CAPEC-148: (?comp rdf:type http://arcos.turnstile/CPS#Cps), (?comp http://arcos.turnstile/CPS#insideTrustedBoundary 'true'^^http://www.w3.org/2001/XMLSchema#boolean), (?conn rdf:type http://arcos.turnstile/CPS#Connection), (?conn http://arcos.rack/SYSTEM#destination ?comp), (?conn http://arcos.turnstile/CPS#connectionType http://arcos.turnstile/CPS#Untrusted) -> (http://arcos.sadl-examples/TurnstileSecurity#CAPEC-148 http://arcos.rack/SECURITY#source ?conn)] +[Mitigated-CAPEC-148: (?conn rdf:type http://arcos.turnstile/CPS#Connection), (http://arcos.sadl-examples/TurnstileSecurity#CAPEC-148 http://arcos.rack/SECURITY#source ?conn), (?conn http://arcos.turnstile/CPS#implControl http://arcos.sadl-examples/TurnstileSecurity#ic1), (?conn http://arcos.turnstile/CPS#implControl http://arcos.sadl-examples/TurnstileSecurity#ic2) -> print('CAPEC-148 (Content Spoofing) is mitigated for connection = '^^http://www.w3.org/2001/XMLSchema#string, ?conn)] diff --git a/sadl-examples/OwlModels/configuration.rdf b/sadl-examples/OwlModels/configuration.rdf new file mode 100644 index 00000000..c86269f9 --- /dev/null +++ b/sadl-examples/OwlModels/configuration.rdf @@ -0,0 +1,20 @@ + + + file:/RACK/STR-Ontology + + + + + + + 
com.ge.research.sadl.jena.reasoner.builtin.Print + print + + + + + + + diff --git a/sadl-examples/OwlModels/ont-policy.rdf b/sadl-examples/OwlModels/ont-policy.rdf new file mode 100644 index 00000000..139ada25 --- /dev/null +++ b/sadl-examples/OwlModels/ont-policy.rdf @@ -0,0 +1,109 @@ + + + platform:/resource/sadl-examples/AllOntology.sadl + All + SADL + + + + + + platform:/resource/sadl-examples/OverlayChecks.sadl + chk + SADL + + + + + + platform:/resource/sadl-examples/Example.sadl + SADL + + + + + + platform:/resource/sadl-examples/RdfsSubset.sadl + rdfs + SADL + + + + + + platform:/resource/sadl-examples/ImplicitModel/SadlBuiltinFunctions.sadl + builtinfunctions + SADL + + + + + + sadllistmodel + SADL + + + + + + sadlbasemodel + SADL + + + + + + platform:/resource/sadl-examples/ImplicitModel/SadlImplicitModel.sadl + sadlimplicitmodel + SADL + + + + + + platform:/resource/sadl-examples/RequirementAnalysisExample.sadl + SADL + + + + + + platform:/resource/sadl-examples/ShellCheckAnalysisExample.sadl + SADL + + + + + + platform:/resource/sadl-examples/BaselinesSrcRq.sadl + SADL + + + + + + platform:/resource/sadl-examples/Magic.sadl + SADL + + + + + + platform:/resource/sadl-examples/TurnstileSecurity.sadl + TSec + SADL + + + + + + platform:/resource/sadl-examples/OverlayGraphs.sadl + graph + SADL + + + + + diff --git a/sadl-examples/TurnstileSecurity.sadl b/sadl-examples/TurnstileSecurity.sadl index 2bf5d24d..e8a44788 100644 --- a/sadl-examples/TurnstileSecurity.sadl +++ b/sadl-examples/TurnstileSecurity.sadl @@ -129,8 +129,8 @@ ic1 is a ImplControl ic2 is a ImplControl with control IA-3-1 with dal 6. -inflow has implControl ic1. -inflow has implControl ic2. +inflow has implConnControl ic1. +inflow has implConnControl ic2. // Rule to identify threats Rule Vul-CAPEC-148