Showing with 14,168 additions and 282 deletions.
  1. +4 −1 .editorconfig
  2. +2 −2 .github/workflows/docker_image_linux.yml
  3. +1 −1 Directory.build.props
  4. +31 −17 README.md
  5. +15 −0 src/Spark.Engine/Core/HttpHeaderName.cs
  6. +0 −1 src/Spark.Engine/Extensions/FhirDateTimeExtensions.cs
  7. +15 −4 src/Spark.Engine/Extensions/HttpRequestFhirExtensions.cs
  8. +3 −0 src/Spark.Engine/Extensions/NetCore/IServiceCollectionExtensions.cs
  9. +44 −0 src/Spark.Engine/Filters/UnsupportedMediaTypeFilter.cs
  10. +2 −2 src/Spark.Engine/Formatters/HtmlFhirFormatter.cs
  11. +1 −0 src/Spark.Engine/Formatters/NetCore/BinaryOutputFormatter.cs
  12. +4 −4 src/Spark.Engine/Formatters/NetCore/ResourceJsonInputFormatter.cs
  13. +4 −5 src/Spark.Engine/Formatters/NetCore/ResourceXmlInputFormatter.cs
  14. +4 −23 src/Spark.Engine/Handlers/NetCore/FormatTypeHandler.cs
  15. +0 −3 src/Spark.Engine/Search/ElementIndexer.cs
  16. +17 −0 src/Spark.Mongo.Tests/Search/BsonNullBsonSerializer.cs
  17. +37 −0 src/Spark.Mongo.Tests/Search/BsonSerializationProvider.cs
  18. +110 −0 src/Spark.Mongo.Tests/Search/CriteriumQueryBuilderTests.cs
  19. +0 −39 src/Spark.Mongo.Tests/Search/CriteriumStringSearchParameterTests.cs
  20. +18 −0 src/Spark.Mongo.Tests/Search/StringBsonSerializer.cs
  21. +48 −28 src/Spark.Mongo/Search/Searcher/CriteriaMongoExtensions.cs
  22. +1 −0 src/Spark.Web/ClientApp/js/main.js
  23. +13,620 −0 src/Spark.Web/ClientApp/package-lock.json
  24. +72 −0 src/Spark.Web/ClientApp/package.json
  25. +25 −36 src/Spark.Web/Hubs/MaintenanceHub.cs
  26. +3 −0 src/Spark.Web/Startup.cs
  27. +55 −71 src/Spark.Web/Views/Admin/Maintenance.cshtml
  28. +10 −7 src/Spark.Web/Views/Shared/_Layout.cshtml
  29. +3 −1 src/Spark.Web/appsettings.Development.json
  30. +1 −1 src/Spark.Web/appsettings.json
  31. +5 −5 src/Spark.Web/wwwroot/assets/css/main.css
  32. +13 −13 src/Spark.Web/wwwroot/assets/js/main.js
  33. +0 −17 src/Spark.Web/wwwroot/assets/js/microsoft/signalr/dist/browser/signalr.min.js
  34. +0 −1 src/Spark/Spark.csproj
@@ -1,4 +1,4 @@
[*.{cs,vb}]
[*.{cs,vb,cshtml}]

dotnet_naming_rule.private_fields_start_with_underscore.symbols = private_fields
dotnet_naming_rule.private_fields_start_with_underscore.style = starts_with_underscore
@@ -17,3 +17,6 @@ dotnet_naming_symbols.private_fields.applicable_accessibilities = private

dotnet_naming_style.starts_with_underscore.capitalization = camel_case
dotnet_naming_style.starts_with_underscore.required_prefix = _

indent_style = space
indent_size = 4
@@ -19,10 +19,10 @@ jobs:
-t ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark:${{steps.vars.outputs.tag}}
-t ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark:r4-latest
- name: Push the tagged Spark Docker image
run: docker push ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark
run: docker push --all-tags ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark
- name: Build the tagged Mongo Docker image
run: docker build . --file .docker/linux/Mongo.Dockerfile
-t ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo:${{steps.vars.outputs.tag}}
-t ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo:r4-latest
- name: Push the tagged Mongo Docker image
run: docker push ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo
run: docker push --all-tags ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo
@@ -1,6 +1,6 @@
<Project>
<PropertyGroup>
<Version>1.5.4</Version>
<Version>1.5.5</Version>
</PropertyGroup>
<PropertyGroup>
<Authors>Firely, Incendi and contributors</Authors>
@@ -20,40 +20,54 @@ platform and playground for FHIR.

**DISCLAIMER: The web projects Spark.Web and Spark are meant as reference implementations and should never be used out of the box in a production environment without adding as a minimum security features.**

## Quickstart
The easiest way to test Spark FHIR server is by using Docker. Make sure you have installed [Docker](https://docs.docker.com/install/). On Linux you will need to install [Docker Compose](https://docs.docker.com/compose/install/) as well. After installing Docker you could run Spark server by running one of the following commands, found below, for your preferred FHIR Version. Remember to replace the single quotes with double quotes on Windows. The Spark FHIR Server will be available after startup at `http://localhost:5555`.
### Get Started
There are two ways to get started with Spark. Either by using the NuGet packages and following the Quickstart Tutorial, or by using the Docker Images.

#### DSTU2
`curl 'https://raw.githubusercontent.com/FirelyTeam/spark/master/.docker/docker-compose.example.yml' > docker-compose.yml && docker-compose up`
#### NuGet Packages
Read the [Quickstart Tutorial](https://firelyteam.github.io/spark/quickstart) on how to set up your own FHIR Server using the NuGet Packages. There is also an example project that accompanies the Quickstart Tutorial which you can find here: https://github.com/incendilabs/spark-example

#### STU3
`curl 'https://raw.githubusercontent.com/FirelyTeam/spark/stu3/master/.docker/docker-compose.example.yml' > docker-compose.yml && docker-compose up`
#### Docker Images
Set up the Spark FHIR server by using the Docker Images. Make sure you have installed [Docker](https://docs.docker.com/install/). On Linux you will need to install [Docker Compose](https://docs.docker.com/compose/install/) as well. After installing Docker you could run Spark server by running one of the following commands, found below, for your preferred FHIR Version. Remember to replace the single quotes with double quotes on Windows. The Spark FHIR Server will be available after startup at `http://localhost:5555`.

#### R4
`curl 'https://raw.githubusercontent.com/FirelyTeam/spark/r4/master/.docker/docker-compose.example.yml' > docker-compose.yml && docker-compose up`
```
curl 'https://raw.githubusercontent.com/FirelyTeam/spark/r4/master/.docker/docker-compose.example.yml' > docker-compose.yml
docker-compose up
```
#### STU3
```
curl 'https://raw.githubusercontent.com/FirelyTeam/spark/stu3/master/.docker/docker-compose.example.yml' > docker-compose.yml
docker-compose up
```

## Versions
#### DSTU2
```
curl 'https://raw.githubusercontent.com/FirelyTeam/spark/master/.docker/docker-compose.example.yml' > docker-compose.yml
docker-compose up
```

#### DSTU1
DSTU1 is no longer maintained by this project. The source code can be found in the branch **dstu1/master**.
## Versions

#### DSTU2
DSTU2 is no longer maintained by this project. The source code can be found in the branch **master**.
#### R4
Source code can be found in the branch **r4/master**. This is the version of Spark running at https://spark.incendi.no
FHIR Endpoint: https://spark.incendi.no/fhir

#### STU3
Source code can be found in the branch **stu3/master**, we try to keep up-to-date with the STU3 version of FHIR.
This is the version of Spark running at http://spark-stu3.incendi.no FHIR Endpoint: http://spark-stu3.incendi.no/fhir
This is the version of Spark running at https://spark-stu3.incendi.no FHIR Endpoint: https://spark-stu3.incendi.no/fhir

#### R4
Source code can be found in the branch **r4/master**. This is the version of Spark running at http://spark.incendi.no
FHIR Endpoint: http://spark.incendi.no/fhir
#### DSTU2
DSTU2 is no longer maintained by this project. The source code can be found in the branch **master**.

#### DSTU1
DSTU1 is no longer maintained by this project. The source code can be found in the branch **dstu1/master**.

## Contributing
If you want to contribute, see our [guidelines](https://github.com/furore-fhir/spark/wiki/Contributing)

### Git branching strategy
Our strategy for git branching:

Branch from the master branch which contains the DSTU2 version, unless the feature or bug fix is considered for a specific version of FHIR then branch from either stu3/master or r4/master.
Branch from the stu3/master branch, which contains the STU3 version, unless the feature or bug fix is intended for a specific version of FHIR; in that case, branch from the relevant branch, which at this point is only r4/master.

See [GitHub flow](https://guides.github.com/introduction/flow/) for more information.
@@ -0,0 +1,15 @@
namespace Spark.Engine.Core
{
    // Well-known HTTP header names used by the engine's formatters, filters and
    // handlers, centralized here so the literals are not scattered across files.
    internal static class HttpHeaderName
    {
        public const string ACCEPT = "Accept";
        public const string CONTENT_DISPOSITION = "Content-Disposition";
        public const string CONTENT_LOCATION = "Content-Location";
        public const string CONTENT_TYPE = "Content-Type";
        public const string ETAG = "ETag";
        public const string LOCATION = "Location";
        public const string LAST_MODIFIED = "Last-Modified";

        // Custom header used to preserve the original Content-Type when a raw
        // binary POST/PUT request is rewritten to application/octet-stream
        // (see FormatTypeHandler).
        public const string X_CONTENT_TYPE = "X-Content-Type";
    }
}
@@ -53,7 +53,6 @@ public static DateTimeOffset UpperBound(this FhirDateTime fdt)
FhirDateTimePrecision.Day => start.AddDays(1),
FhirDateTimePrecision.Minute => start.AddMinutes(1),
FhirDateTimePrecision.Second => start.AddSeconds(1),
_ => start,
};
return end;
}
@@ -137,6 +137,13 @@ internal static bool IsRawBinaryRequest(this OutputFormatterCanWriteContext cont
return false;
}

internal static bool IsRawBinaryRequest(this HttpRequest request)
{
var ub = new UriBuilder(request.GetRequestUri());
return ub.Path.Contains("Binary")
&& !ub.Path.EndsWith("_search");
}

internal static bool IsRawBinaryPostOrPutRequest(this HttpRequest request)
{
var ub = new UriBuilder(request.GetRequestUri());
@@ -151,17 +158,17 @@ internal static void AcquireHeaders(this HttpResponse response, FhirResponse fhi
{
if (fhirResponse.Key != null)
{
response.Headers.Add("ETag", ETag.Create(fhirResponse.Key.VersionId)?.ToString());
response.Headers.Add(HttpHeaderName.ETAG, ETag.Create(fhirResponse.Key.VersionId)?.ToString());

Uri location = fhirResponse.Key.ToUri();
response.Headers.Add("Location", location.OriginalString);
response.Headers.Add(HttpHeaderName.LOCATION, location.OriginalString);

if (response.Body != null)
{
response.Headers.Add("Content-Location", location.OriginalString);
response.Headers.Add(HttpHeaderName.CONTENT_LOCATION, location.OriginalString);
if (fhirResponse.Resource != null && fhirResponse.Resource.Meta != null)
{
response.Headers.Add("Last-Modified", fhirResponse.Resource.Meta.LastUpdated.Value.ToString("R"));
response.Headers.Add(HttpHeaderName.LAST_MODIFIED, fhirResponse.Resource.Meta.LastUpdated.Value.ToString("R"));
}
}
}
@@ -185,6 +192,10 @@ internal static void AcquireHeaders(this HttpResponseMessage response, FhirRespo
{
response.Content.Headers.LastModified = fhirResponse.Resource.Meta.LastUpdated;
}
if(fhirResponse.Resource is Binary)
{
response.Content.Headers.Add(HttpHeaderName.CONTENT_DISPOSITION, "attachment");
}
}
}
}
@@ -7,6 +7,7 @@
using Microsoft.Extensions.DependencyInjection.Extensions;
using Spark.Engine.Core;
using Spark.Engine.FhirResponseFactory;
using Spark.Engine.Filters;
using Spark.Engine.Formatters;
using Spark.Engine.Interfaces;
using Spark.Engine.Search;
@@ -101,6 +102,8 @@ public static IMvcCoreBuilder AddFhirFormatters(this IServiceCollection services

return services.AddMvcCore(options =>
{
options.Filters.Add<UnsupportedMediaTypeFilter>(-3001);

if (settings.UseAsynchronousIO)
{
options.InputFormatters.Add(new AsyncResourceJsonInputFormatter(new FhirJsonParser(settings.ParserSettings)));
@@ -0,0 +1,44 @@
#if NETSTANDARD2_0
using Microsoft.AspNetCore.Mvc.Filters;
using Spark.Core;
using Spark.Engine.Core;
using Spark.Engine.Extensions;
using System.Linq;

namespace Spark.Engine.Filters
{
    /// <summary>
    /// Action filter that rejects requests whose Accept or Content-Type headers
    /// do not contain any FHIR-supported MIME type. Raw Binary read requests are
    /// exempt, since they may legitimately carry arbitrary content types.
    /// </summary>
    internal class UnsupportedMediaTypeFilter : IActionFilter, IFilterMetadata
    {
        ///<inheritdoc/>
        public void OnActionExecuted(ActionExecutedContext context)
        {
            // No post-processing required; validation happens before the action runs.
        }

        ///<inheritdoc/>
        public void OnActionExecuting(ActionExecutingContext context)
        {
            var request = context.HttpContext.Request;

            // Raw Binary endpoints accept arbitrary media types by design.
            if (request.IsRawBinaryRequest()) return;

            // Use the shared header-name constant for consistency with the rest of the engine.
            if (request.Headers.ContainsKey(HttpHeaderName.ACCEPT))
            {
                var acceptHeader = request.Headers[HttpHeaderName.ACCEPT].ToString();
                if (!FhirMediaType.SupportedMimeTypes.Any(mimeType => acceptHeader.Contains(mimeType)))
                {
                    // 406 Not Acceptable: the client asked for a representation we cannot produce.
                    throw Error.NotAcceptable();
                }
            }

            // Reuse the local request captured above instead of re-reading it from the context.
            if (request.ContentType != null)
            {
                if (!FhirMediaType.SupportedMimeTypes.Any(mimeType => request.ContentType.Contains(mimeType)))
                {
                    // 415 Unsupported Media Type: the client sent a body we cannot parse.
                    throw Error.UnsupportedMediaType();
                }
            }
        }
    }
}
#endif
@@ -149,7 +149,7 @@ private void WriteHTMLOutput(Type type, object value, Stream writeStream)
else
writer.WriteLine(string.Format("Blank Text: {0}<br/>", item.Resource.ExtractKey().ToUriString()));
}
else
else
{
writer.WriteLine("This is not a domain resource");
}
@@ -192,4 +192,4 @@ private void WriteHTMLOutput(Type type, object value, Stream writeStream)
writer.Flush();
}
}
}
}
@@ -37,6 +37,7 @@ public override async Task WriteResponseBodyAsync(OutputFormatterWriteContext co
}
if (binary == null) return;

context.HttpContext.Response.Headers.Add(HttpHeaderName.CONTENT_DISPOSITION, "attachment");
context.HttpContext.Response.ContentType = binary.ContentType;

Stream stream = new MemoryStream(binary.Data);
@@ -48,10 +48,10 @@ public ResourceJsonInputFormatter()
SupportedEncodings.Clear();
SupportedEncodings.Add(Encoding.UTF8);

SupportedMediaTypes.Add("application/json");
SupportedMediaTypes.Add("application/fhir+json");
SupportedMediaTypes.Add("application/json+fhir");
SupportedMediaTypes.Add("text/json");
foreach (var mediaType in FhirMediaType.JsonMimeTypes)
{
SupportedMediaTypes.Add(mediaType);
}
}

protected override bool CanReadType(Type type)
@@ -42,11 +42,10 @@ public ResourceXmlInputFormatter()
SupportedEncodings.Clear();
SupportedEncodings.Add(Encoding.UTF8);

SupportedMediaTypes.Add("application/xml");
SupportedMediaTypes.Add("application/fhir+xml");
SupportedMediaTypes.Add("application/xml+fhir");
SupportedMediaTypes.Add("text/xml");
SupportedMediaTypes.Add("text/xml+fhir");
foreach (var mediaType in FhirMediaType.XmlMimeTypes)
{
SupportedMediaTypes.Add(mediaType);
}
}

protected override bool CanReadType(Type type)
@@ -2,10 +2,8 @@
using Hl7.Fhir.Rest;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Primitives;
using Spark.Core;
using Spark.Engine.Core;
using Spark.Engine.Extensions;
using System.Linq;
using System.Threading.Tasks;

namespace Spark.Engine.Handlers.NetCore
@@ -27,11 +25,11 @@ public async Task InvokeAsync(HttpContext context)
ResourceFormat accepted = ContentType.GetResourceFormatFromFormatParam(format);
if (accepted != ResourceFormat.Unknown)
{
if (context.Request.Headers.ContainsKey("Accept")) context.Request.Headers.Remove("Accept");
if (context.Request.Headers.ContainsKey(HttpHeaderName.ACCEPT)) context.Request.Headers.Remove(HttpHeaderName.ACCEPT);
if (accepted == ResourceFormat.Json)
context.Request.Headers.Add("Accept", new StringValues(ContentType.JSON_CONTENT_HEADER));
context.Request.Headers.Add(HttpHeaderName.ACCEPT, new StringValues(ContentType.JSON_CONTENT_HEADER));
else
context.Request.Headers.Add("Accept", new StringValues(ContentType.XML_CONTENT_HEADER));
context.Request.Headers.Add(HttpHeaderName.ACCEPT, new StringValues(ContentType.XML_CONTENT_HEADER));
}
}

@@ -40,28 +38,11 @@ public async Task InvokeAsync(HttpContext context)
if (!HttpRequestExtensions.IsContentTypeHeaderFhirMediaType(context.Request.ContentType))
{
string contentType = context.Request.ContentType;
context.Request.Headers.Add("X-Content-Type", contentType);
context.Request.Headers.Add(HttpHeaderName.X_CONTENT_TYPE, contentType);
context.Request.ContentType = FhirMediaType.OctetStreamMimeType;
}
}

//application/foobar
if (context.Request.Headers.ContainsKey("Accept"))
{
var acceptHeader = context.Request.Headers["Accept"].ToString();
if (!FhirMediaType.SupportedMimeTypes.Any(mimeType => acceptHeader.Contains(mimeType)))
{
throw Error.NotAcceptable();
}
}
if(context.Request.ContentType != null)
{
if (!FhirMediaType.SupportedMimeTypes.Any(mimeType => context.Request.ContentType.Contains(mimeType)))
{
throw Error.UnsupportedMediaType();
}
}

await _next(context);
}
}
@@ -8,9 +8,6 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using Expression = Spark.Search.Expression;

namespace Spark.Engine.Search
@@ -0,0 +1,17 @@
/*
 * Copyright (c) 2020, Kufu (info@kufu.no) and contributors
 * See the file CONTRIBUTORS for details.
 *
 * This file is licensed under the BSD 3-Clause license
 * available at https://raw.github.com/furore-fhir/spark/master/LICENSE
 */

using MongoDB.Bson.Serialization;
using MongoDB.Bson.Serialization.Serializers;

namespace Spark.Mongo.Tests.Search
{
    // Test-only adapter: exposes the driver's BsonNullSerializer through the
    // non-generic IBsonSerializer interface so it can be registered in
    // BsonSerializationProvider's factory dictionary.
    internal class BsonNullBsonSerializer : BsonNullSerializer, IBsonSerializer
    {
    }
}
@@ -0,0 +1,37 @@
/*
 * Copyright (c) 2020, Kufu (info@kufu.no) and contributors
 * See the file CONTRIBUTORS for details.
 *
 * This file is licensed under the BSD 3-Clause license
 * available at https://raw.github.com/furore-fhir/spark/master/LICENSE
 */

using MongoDB.Bson;
using MongoDB.Bson.Serialization;
using MongoDB.Bson.Serialization.Serializers;
using System;
using System.Collections.Generic;

namespace Spark.Mongo.Tests.Search
{
    /// <summary>
    /// Minimal test-only serialization provider that maps the handful of BSON
    /// types used by the query-builder tests to their serializers, so filters
    /// can be rendered without bootstrapping the driver's full default registry.
    /// </summary>
    internal class BsonSerializationProvider : IBsonSerializationProvider
    {
        // readonly: the registrations never change after construction.
        private readonly IDictionary<Type, Func<IBsonSerializer>> _registeredBsonSerializers = new Dictionary<Type, Func<IBsonSerializer>>
        {
            { typeof(BsonNull), () => new BsonNullSerializer() },
            { typeof(string), () => new StringBsonSerializer() },
            { typeof(BsonDocument), () => new BsonDocumentSerializer() },
            { typeof(BsonDateTime), () => new BsonDateTimeSerializer() },
        };

        /// <summary>
        /// Returns a serializer for <paramref name="type"/>, or null when the
        /// type is not one of the registered test types.
        /// </summary>
        public IBsonSerializer GetSerializer(Type type)
        {
            // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer.
            return _registeredBsonSerializers.TryGetValue(type, out var factory)
                ? factory.Invoke()
                : null;
        }
    }
}
@@ -0,0 +1,110 @@
/*
 * Copyright (c) 2020, Kufu (info@kufu.no) and contributors
 * See the file CONTRIBUTORS for details.
 *
 * This file is licensed under the BSD 3-Clause license
 * available at https://raw.github.com/furore-fhir/spark/master/LICENSE
 */

using Hl7.Fhir.Model;
using Hl7.Fhir.Utility;
using MongoDB.Bson;
using MongoDB.Bson.Serialization;
using Spark.Search;
using Spark.Search.Mongo;
using System.Linq;
using Xunit;

namespace Spark.Mongo.Tests.Search
{
    // Verifies that FHIR search criteria are translated into the expected
    // MongoDB filter documents, by rendering each filter to its JSON form and
    // comparing it to a hand-written expectation string.
    public class CriteriumQueryBuilderTests
    {
        // Token search: code/system matching, :text, :not and :missing modifiers.
        [Theory]
        [InlineData(ResourceType.Condition, "code", "code=ha125", "{ \"$or\" : [{ \"code\" : { \"$elemMatch\" : { \"code\" : \"ha125\" } } }, { \"code\" : { \"$not\" : { \"$type\" : 4 } }, \"code.code\" : \"ha125\" }, { \"$and\" : [{ \"code\" : { \"$type\" : 2 } }, { \"code\" : \"ha125\" }] }] }")]
        [InlineData(ResourceType.Condition, "code", "code=|ha125", "{ \"$or\" : [{ \"code\" : { \"$elemMatch\" : { \"code\" : \"ha125\", \"system\" : { \"$exists\" : false } } } }, { \"code\" : { \"$not\" : { \"$type\" : 4 } }, \"code.code\" : \"ha125\", \"code.system\" : { \"$exists\" : false } }, { \"$and\" : [{ \"code\" : { \"$type\" : 2 } }, { \"code\" : \"ha125\" }, { \"system\" : { \"$exists\" : false } }] }] }")]
        [InlineData(ResourceType.Condition, "code", "code:text=headache", "{ \"code.text\" : /headache/i }")]
        [InlineData(ResourceType.Patient,
            "gender",
            "gender:not=male",
            "{ \"$or\" : [{ \"gender\" : { \"$elemMatch\" : { \"gender\" : { \"$exists\" : true }, \"code\" : { \"$ne\" : \"male\" } } } }, { \"gender\" : { \"$not\" : { \"$type\" : 4 }, \"$exists\" : true }, \"gender.code\" : { \"$ne\" : \"male\" } }, { \"gender\" : { \"$type\" : 2, \"$exists\" : true, \"$ne\" : \"male\" } }] }")]
        [InlineData(ResourceType.Patient, "gender", "gender:missing=true", "{ \"gender\" : null, \"gender.text\" : null }")]
        [InlineData(ResourceType.Patient, "gender", "gender:missing=false", "{ \"$or\" : [{ \"gender\" : { \"$ne\" : null } }, { \"gender.text\" : null }] }")]
        public void Can_Build_TokenQuery_Filter(ResourceType resourceType, string searchParameter, string query, string expected)
        {
            var jsonFilter = BuildAndReturnQueryFilterAsJsonString(resourceType, searchParameter, query);

            Assert.Equal(expected, jsonFilter);
        }

        // Number search: all comparison prefixes (eq/gt/ge/lt/le/ne) plus :missing.
        [Theory]
        [InlineData(ResourceType.RiskAssessment, "probability", "probability=0.8", "{ \"probability\" : \"0.8\" }")]
        [InlineData(ResourceType.RiskAssessment, "probability", "probability=eq0.8", "{ \"probability\" : \"0.8\" }")]
        [InlineData(ResourceType.RiskAssessment, "probability", "probability=gt0.8", "{ \"probability\" : { \"$gt\" : \"0.8\" } }")]
        [InlineData(ResourceType.RiskAssessment, "probability", "probability=ge0.8", "{ \"probability\" : { \"$gte\" : \"0.8\" } }")]
        [InlineData(ResourceType.RiskAssessment, "probability", "probability=lt0.8", "{ \"probability\" : { \"$lt\" : \"0.8\" } }")]
        [InlineData(ResourceType.RiskAssessment, "probability", "probability=le0.8", "{ \"probability\" : { \"$lte\" : \"0.8\" } }")]
        [InlineData(ResourceType.RiskAssessment, "probability", "probability=ne0.8", "{ \"probability\" : { \"$ne\" : \"0.8\" } }")]
        [InlineData(ResourceType.RiskAssessment, "probability", "probability:missing=true", "{ \"$or\" : [{ \"probability\" : { \"$exists\" : false } }, { \"probability\" : null }] }")]
        [InlineData(ResourceType.RiskAssessment, "probability", "probability:missing=false", "{ \"probability\" : { \"$ne\" : null } }")]
        public void Can_Build_NumberQuery_Filter(ResourceType resourceType, string searchParameter, string query, string expected)
        {
            var jsonFilter = BuildAndReturnQueryFilterAsJsonString(resourceType, searchParameter, query);

            Assert.Equal(expected, jsonFilter);
        }

        // String search: default prefix match, :contains, :exact, :missing, and
        // an edge case where the value itself contains URL-like characters.
        [Theory]
        [InlineData(ResourceType.Patient, "name", "name=eve", "{ \"name\" : /^eve/i }")]
        [InlineData(ResourceType.Patient, "name", "name:contains=eve", "{ \"name\" : /.*eve.*/i }")]
        [InlineData(ResourceType.Patient, "name", "name:exact=Eve", "{ \"name\" : \"Eve\" }")]
        [InlineData(ResourceType.Patient, "name", "name:missing=true", "{ \"$or\" : [{ \"name\" : { \"$exists\" : false } }, { \"name\" : null }] }")]
        [InlineData(ResourceType.Patient, "name", "name:missing=false", "{ \"name\" : { \"$ne\" : null } }")]
        // Complex cases or edge cases
        [InlineData(
            ResourceType.Subscription,
            "criteria",
            "criteria=Observation?patient.identifier=http://somehost.no/fhir/Name%20Hospital|someId",
            "{ \"criteria\" : /^Observation?patient.identifier=http:\\/\\/somehost.no\\/fhir\\/Name%20Hospital|someId/i }")]
        public void Can_Build_StringQuery_Filter(ResourceType resourceType, string searchParameter, string query, string expected)
        {
            var jsonFilter = BuildAndReturnQueryFilterAsJsonString(resourceType, searchParameter, query);

            Assert.Equal(expected, jsonFilter);
        }

        // Date search: period overlap semantics (start/end bounds) for each
        // comparison prefix, plus sa/eb and :missing.
        [Theory]
        [InlineData(ResourceType.Procedure, "date", "date=2010-01-01", "{ \"date.end\" : { \"$gte\" : ISODate(\"2010-01-01T00:00:00Z\") }, \"date.start\" : { \"$lt\" : ISODate(\"2010-01-02T00:00:00Z\") } }")]
        [InlineData(ResourceType.Procedure, "date", "date=ap2010-01-01", "{ \"date.end\" : { \"$gte\" : ISODate(\"2010-01-01T00:00:00Z\") }, \"date.start\" : { \"$lt\" : ISODate(\"2010-01-02T00:00:00Z\") } }")]
        [InlineData(ResourceType.Procedure, "date", "date=eq2010-01-01", "{ \"date.end\" : { \"$gte\" : ISODate(\"2010-01-01T00:00:00Z\") }, \"date.start\" : { \"$lt\" : ISODate(\"2010-01-02T00:00:00Z\") } }")]
        [InlineData(ResourceType.Procedure, "date", "date=ne2010-01-01", "{ \"$or\" : [{ \"date.end\" : { \"$lte\" : ISODate(\"2010-01-01T00:00:00Z\") } }, { \"date.start\" : { \"$gte\" : ISODate(\"2010-01-02T00:00:00Z\") } }] }")]
        [InlineData(ResourceType.Procedure, "date", "date=gt2010-01-01", "{ \"date.start\" : { \"$gte\" : ISODate(\"2010-01-02T00:00:00Z\") } }")]
        [InlineData(ResourceType.Procedure, "date", "date=ge2010-01-01", "{ \"date.start\" : { \"$gte\" : ISODate(\"2010-01-01T00:00:00Z\") } }")]
        [InlineData(ResourceType.Procedure, "date", "date=lt2010-01-01", "{ \"date.end\" : { \"$lt\" : ISODate(\"2010-01-01T00:00:00Z\") } }")]
        [InlineData(ResourceType.Procedure, "date", "date=le2010-01-01", "{ \"date.end\" : { \"$lt\" : ISODate(\"2010-01-02T00:00:00Z\") } }")]
        [InlineData(ResourceType.Procedure, "date", "date=sa2010-01-01", "{ \"date.start\" : { \"$gte\" : ISODate(\"2010-01-02T00:00:00Z\") } }")]
        [InlineData(ResourceType.Procedure, "date", "date=eb2010-01-01", "{ \"date.end\" : { \"$lte\" : ISODate(\"2010-01-01T00:00:00Z\") } }")]
        [InlineData(ResourceType.Procedure, "date", "date:missing=true", "{ \"$or\" : [{ \"date\" : { \"$exists\" : false } }, { \"date\" : null }] }")]
        [InlineData(ResourceType.Procedure, "date", "date:missing=false", "{ \"date\" : { \"$ne\" : null } }")]
        public void Can_Build_DateQuery_Filter(ResourceType resourceType, string searchParameter, string query, string expected)
        {
            var jsonFilter = BuildAndReturnQueryFilterAsJsonString(resourceType, searchParameter, query);

            Assert.Equal(expected, jsonFilter);
        }

        // Parses the query into a Criterium, attaches the matching model-defined
        // SearchParameter, builds the Mongo filter, and renders it to JSON using
        // the minimal test serializer registry (see BsonSerializationProvider).
        private string BuildAndReturnQueryFilterAsJsonString(ResourceType resourceType, string searchParameter, string query)
        {
            var bsonSerializerRegistry = new BsonSerializerRegistry();
            bsonSerializerRegistry.RegisterSerializationProvider(new BsonSerializationProvider());

            var resourceTypeAsString = resourceType.GetLiteral();
            var criterium = Criterium.Parse(query);
            criterium.SearchParameters.AddRange(ModelInfo.SearchParameters.Where(p => p.Resource == resourceTypeAsString && p.Name == searchParameter));

            var filter = criterium.ToFilter(resourceType.GetLiteral());
            var jsonFilter = filter?.Render(null, bsonSerializerRegistry)?.ToJson();

            return jsonFilter;
        }
    }
}

This file was deleted.

@@ -0,0 +1,18 @@
/*
 * Copyright (c) 2020, Kufu (info@kufu.no) and contributors
 * See the file CONTRIBUTORS for details.
 *
 * This file is licensed under the BSD 3-Clause license
 * available at https://raw.github.com/furore-fhir/spark/master/LICENSE
 */

using MongoDB.Bson.Serialization;
using MongoDB.Bson.Serialization.Serializers;

namespace Spark.Mongo.Tests.Search
{
    // Test-only adapter: exposes the driver's StringSerializer through the
    // non-generic IBsonSerializer interface so it can be registered in
    // BsonSerializationProvider's factory dictionary.
    internal class StringBsonSerializer : StringSerializer, IBsonSerializer
    {

    }
}
@@ -198,25 +198,28 @@ private static FilterDefinition<BsonDocument> StringQuery(String parameterName,
//No modifiers allowed on number parameters, hence not in the method signature.
private static FilterDefinition<BsonDocument> NumberQuery(String parameterName, Operator optor, ValueExpression operand)
{
string typedOperand;
try
{
typedOperand = ((UntypedValue)operand).AsNumberValue().ToString();
}
catch (InvalidCastException)
string typedOperand = null;
if (operand != null)
{
try
{
typedOperand = ((UntypedValue)operand).AsNumberValue().ToString();
}
catch (InvalidCastException)
{
throw new ArgumentException(string.Format("Invalid number value {0} on number parameter {1}", operand, parameterName));
}
catch (FormatException)
{
}
catch (FormatException)
{
throw new ArgumentException(string.Format("Invalid number value {0} on number parameter {1}", operand, parameterName));
}
}

switch (optor)
{
case Operator.APPROX:
//case Operator.APPROX:
//TODO
case Operator.CHAIN:
//case Operator.CHAIN:
//Invalid in this context
case Operator.EQ:
return Builders<BsonDocument>.Filter.Eq(parameterName, typedOperand);
@@ -227,14 +230,16 @@ private static FilterDefinition<BsonDocument> NumberQuery(String parameterName,
case Operator.IN:
IEnumerable<ValueExpression> opMultiple = ((ChoiceValue)operand).Choices;
return SafeIn(parameterName, new BsonArray(opMultiple.Cast<NumberValue>().Select(nv => nv.Value)));
case Operator.ISNULL:
return Builders<BsonDocument>.Filter.Eq(parameterName, BsonNull.Value);
case Operator.LT:
return Builders<BsonDocument>.Filter.Lt(parameterName, typedOperand);
case Operator.LTE:
return Builders<BsonDocument>.Filter.Lte(parameterName, typedOperand);
case Operator.NOT_EQUAL:
return Builders<BsonDocument>.Filter.Ne(parameterName, typedOperand);
case Operator.ISNULL:
return Builders<BsonDocument>.Filter.Or(Builders<BsonDocument>.Filter.Exists(parameterName, false), Builders<BsonDocument>.Filter.Eq(parameterName, BsonNull.Value)); //With only Builders<BsonDocument>.Filter.NotExists, that would exclude resources that have this field with an explicit null in it.
case Operator.NOTNULL:
return Builders<BsonDocument>.Filter.Ne(parameterName, BsonNull.Value);
return Builders<BsonDocument>.Filter.Ne(parameterName, BsonNull.Value); //We don't use Builders<BsonDocument>.Filter.Exists, because that would include resources that have this field with an explicit null in it.
default:
throw new ArgumentException(string.Format("Invalid operator {0} on number parameter {1}", optor.ToString(), parameterName));
}
@@ -381,7 +386,7 @@ private static FilterDefinition<BsonDocument> TokenQuery(String parameterName, O
case Operator.NOTNULL:
return Builders<BsonDocument>.Filter.Or(Builders<BsonDocument>.Filter.Ne(parameterName, BsonNull.Value), Builders<BsonDocument>.Filter.Eq(textfield, BsonNull.Value)); //We don't use Builders<BsonDocument>.Filter.Exists, because that would include resources that have this field with an explicit null in it.
default:
throw new ArgumentException(String.Format("Invalid operator {0} on token parameter {1}", optor.ToString(), parameterName));
throw new ArgumentException($"Invalid operator {optor} on token parameter {parameterName}");
}
}

@@ -419,36 +424,51 @@ private static FilterDefinition<BsonDocument> DateQuery(String parameterName, Op
return Builders<BsonDocument>.Filter.Or(opMultiple.Select(choice => DateQuery(parameterName, Operator.EQ, modifier, choice)));
}

string start = parameterName + ".start";
string end = parameterName + ".end";

var fdtValue = ((UntypedValue)operand).AsDateTimeValue();
var valueLower = BsonDateTime.Create(fdtValue.LowerBound());
var valueUpper = BsonDateTime.Create(fdtValue.UpperBound());
var start = parameterName + ".start";
var end = parameterName + ".end";

BsonDateTime dateValueLower = null;
BsonDateTime dateValueUpper = null;
if (operand != null)
{
var dateValue = ((UntypedValue)operand).AsDateTimeValue();
dateValueLower = BsonDateTime.Create(dateValue.LowerBound());
dateValueUpper = BsonDateTime.Create(dateValue.UpperBound());
}

switch (optor)
{
case Operator.APPROX:
case Operator.EQ:
return
Builders<BsonDocument>.Filter.And(Builders<BsonDocument>.Filter.Gte(end, valueLower), Builders<BsonDocument>.Filter.Lt(start, valueUpper));
Builders<BsonDocument>.Filter.And(Builders<BsonDocument>.Filter.Gte(end, dateValueLower), Builders<BsonDocument>.Filter.Lt(start, dateValueUpper));
case Operator.NOT_EQUAL:
return Builders<BsonDocument>.Filter.Or(
Builders<BsonDocument>.Filter.Lte(end, dateValueLower),
Builders<BsonDocument>.Filter.Gte(start, dateValueUpper)
);
case Operator.GT:
return
Builders<BsonDocument>.Filter.Gte(start, valueUpper);
Builders<BsonDocument>.Filter.Gte(start, dateValueUpper);
case Operator.GTE:
return
Builders<BsonDocument>.Filter.Gte(start, valueLower);
Builders<BsonDocument>.Filter.Gte(start, dateValueLower);
case Operator.LT:
return
Builders<BsonDocument>.Filter.Lt(end, valueLower);
Builders<BsonDocument>.Filter.Lt(end, dateValueLower);
case Operator.LTE:
return
Builders<BsonDocument>.Filter.Lt(end, valueUpper);
Builders<BsonDocument>.Filter.Lt(end, dateValueUpper);
case Operator.STARTS_AFTER:
return Builders<BsonDocument>.Filter.Gte(start, dateValueUpper);
case Operator.ENDS_BEFORE:
return Builders<BsonDocument>.Filter.Lte(end, dateValueLower);
case Operator.ISNULL:
return Builders<BsonDocument>.Filter.Eq(parameterName, BsonNull.Value); //We don't use Builders<BsonDocument>.Filter.NotExists, because that would exclude resources that have this field with an explicit null in it.
return Builders<BsonDocument>.Filter.Or(Builders<BsonDocument>.Filter.Exists(parameterName, false), Builders<BsonDocument>.Filter.Eq(parameterName, BsonNull.Value)); //With only Builders<BsonDocument>.Filter.NotExists, that would exclude resources that have this field with an explicit null in it.
case Operator.NOTNULL:
return Builders<BsonDocument>.Filter.Ne(parameterName, BsonNull.Value); //We don't use Builders<BsonDocument>.Filter.Exists, because that would include resources that have this field with an explicit null in it.
default:
throw new ArgumentException(String.Format("Invalid operator {0} on date parameter {1}", optor.ToString(), parameterName));
throw new ArgumentException($"Invalid operator {optor} on date parameter {parameterName}");
}
}

@@ -0,0 +1 @@
import 'bootstrap';

Large diffs are not rendered by default.

@@ -0,0 +1,72 @@
{
"name": "spark-web",
"version": "1.0.0",
"description": "Front-end package for Spark FHIR server",
"config": {
"src_js": "js",
"src_css": "scss",
"src_img": "images",
"dest_js": "../wwwroot/assets/js",
"dest_css": "../wwwroot/assets/css",
"dest_img": "../wwwroot/assets/images",
"dist": "../wwwroot/assets"
},
"scripts": {
"//[ Utility ]//": "",
"test": "echo \"Error: no test specified\" && exit 1",
"clean:dist": "rm -rf $npm_package_config_dist/{js/**,css/**,images/**}",
"clean": "npm-run-all -p clean:*",
"create_dirs:dest_js": "mkdir -p $npm_package_config_dest_js",
"create_dirs:dest_css": "mkdir -p $npm_package_config_dest_css",
"create_dirs:dest_img": "mkdir -p $npm_package_config_dest_img",
"create_dirs": "npm-run-all -p create_dirs:*",
"//[ SASS ]//": "",
"rebuild:css": "npm rebuild node-sass",
"compile:css": "node-sass -o $npm_package_config_dest_css $npm_package_config_src_css",
"compile:css:dev": "npm run compile:css -- --output-style expanded --source-map true --source-map-contents true",
"compile:css:watch": "npm run compile:css -- --output-style expanded --source-map true --source-map-contents true --watch",
"compile:css:prod": "npm run compile:css -- --output-style compressed",
"compile:css:debug": "npm run compile:css -- --output-style compressed --watch",
"postcompile:css": "npm run autoprefixer && npm run customproperties",
"autoprefixer": "postcss --map 0 -u autoprefixer -r $npm_package_config_dest_css/*.css",
"customproperties": "postcss --map 0 -u postcss-custom-properties -b -r $npm_package_config_dest_css/*.css",
"//[ Javascript ]//": "",
"compile:js": "webpack-cli",
"compile:js:prod": "npm run compile:js -- -p --mode production",
"compile:js:dev": "npm run compile:js -- -d --mode development --progress",
"compile:js:watch": "npm run compile:js:dev -- --watch",
"compile:js:debug": "npm run compile:js:watch -- --display-modules",
"//[ Syncs ]//": "",
"sync:img": "rsync -rtvu --delete $npm_package_config_src_img/ $npm_package_config_dest_img/",
"sync": "npm-run-all -p sync:*",
"//[ Tasks ]//": "",
"build": "npm-run-all clean rebuild:css create_dirs sync compile:*:prod",
"build:dev": "npm-run-all clean rebuild:css create_dirs sync compile:*:dev"
},
"repository": {
"type": "git",
"url": "git+https://github.com/firelyteam/spark.git"
},
"author": "",
"bugs": {
"url": "https://github.com/FirelyTeam/Spark/issues"
},
"homepage": "https://github.com/FirelyTeam/Spark",
"dependencies": {
"@microsoft/signalr": "5.0.5",
"bootstrap": "4.6.0",
"jquery": "3.6.0",
"popper.js": "1.16.1",
"prismjs": "1.23.0"
},
"devDependencies": {
"autoprefixer": "^10.2.5",
"node-sass": "5.0.0",
"npm-run-all": "4.1.5",
"postcss-cli": "8.3.1",
"postcss-custom-properties": "11.0.0",
"rimraf": "^3.0.2",
"webpack": "4.45.0",
"webpack-cli": "3.3.10"
}
}
@@ -17,6 +17,7 @@

namespace Spark.Web.Hubs
{
//[Authorize(Policy = "RequireAdministratorRole")]
public class MaintenanceHub : Hub
{
private List<Resource> _resources = null;
@@ -73,57 +74,48 @@ public List<Resource> GetExampleData()
return list;
}

private ImportProgressMessage Message(string message, int idx)
{
var msg = new ImportProgressMessage
{
Message = message,
Progress = (int)10 + (idx + 1) * 90 / _resourceCount
};
return msg;
}

public async void ClearStore()
{
var notifier = new HubContextProgressNotifier(_hubContext, _logger);
try
{
await notifier.SendProgressUpdate(0, "Clearing the database...");
await _fhirStoreAdministration.CleanAsync().ConfigureAwait(false);
await _fhirIndex.CleanAsync().ConfigureAwait(false);
await notifier.SendProgressUpdate(100, "Database cleared");
await _hubContext.Clients.All.SendAsync("UpdateProgress", "Starting clearing database...");
await _fhirStoreAdministration.CleanAsync();

await _hubContext.Clients.All.SendAsync("UpdateProgress", "... and cleaning indexes...");
await _fhirIndex.CleanAsync();
await _hubContext.Clients.All.SendAsync("UpdateProgress", "Database cleared");
}
catch (Exception e)
{
await notifier.SendProgressUpdate(100, "ERROR CLEARING :( " + e.InnerException);
_logger.LogError(e, "Failed to clear store.");
await _hubContext.Clients.All.SendAsync("UpdateProgress", $"ERROR CLEARING :(");
}

}

public async void RebuildIndex()
{
var notifier = new HubContextProgressNotifier(_hubContext, _logger);
try
{
await _indexRebuildService.RebuildIndexAsync(notifier)
await _hubContext.Clients.All.SendAsync("UpdateProgress", "Rebuilding index...");
await _indexRebuildService.RebuildIndexAsync()
.ConfigureAwait(false);
}
catch (Exception e)
{
_logger.LogError(e, "Failed to rebuild index");

await notifier.SendProgressUpdate(100, "ERROR REBUILDING INDEX :( " + e.InnerException)
await _hubContext.Clients.All.SendAsync("UpdateProgress", "ERROR REBUILDING INDEX :( ")
.ConfigureAwait(false);
}
await _hubContext.Clients.All.SendAsync("UpdateProgress", "Index rebuilt!");
}

public async Tasks.Task LoadExamplesToStore()
public async void LoadExamplesToStore()
{
var messages = new StringBuilder();
var notifier = new HubContextProgressNotifier(_hubContext, _logger);
try
{
await notifier.SendProgressUpdate(1, "Loading examples data...");
await _hubContext.Clients.All.SendAsync("UpdateProgress", "Loading examples");
_resources = GetExampleData();

var resarray = _resources.ToArray();
@@ -132,39 +124,36 @@ public async Tasks.Task LoadExamplesToStore()
for (int x = 0; x <= _resourceCount - 1; x++)
{
var res = resarray[x];
// Sending message:
var msg = Message("Importing " + res.TypeName + " " + res.Id + "...", x);
await notifier.SendProgressUpdate(msg.Progress, msg.Message);
var msg = $"Importing {res.TypeName}, id {res.Id} ...";
await _hubContext.Clients.All.SendAsync("UpdateProgress", msg);

try
{
Key key = res.ExtractKey();

if (res.Id != null && res.Id != "")
{
await _fhirService.PutAsync(key, res).ConfigureAwait(false);
await _fhirService.PutAsync(key, res);
}
else
{
await _fhirService.CreateAsync(key, res).ConfigureAwait(false);
await _fhirService.CreateAsync(key, res);
}
}
catch (Exception e)
{
// Sending message:
var msgError = Message("ERROR Importing " + res.TypeName + " " + res.Id + "... ", x);
await Clients.All.SendAsync("Error", msg);
messages.AppendLine(msgError.Message + ": " + e.Message);
_logger.LogError(e, "Failed when loading example.");
var msgError = $"ERROR Importing {res.TypeName.ToString()}, id {res.Id}...";
await _hubContext.Clients.All.SendAsync("UpdateProgress", msgError);
}


}

await notifier.SendProgressUpdate(100, messages.ToString());
await _hubContext.Clients.All.SendAsync("UpdateProgress", "Finished loading examples");
}
catch (Exception e)
{
await notifier.Progress("Error: " + e.Message);
_logger.LogError(e, "Failed to load examples.");
await _hubContext.Clients.All.SendAsync("UpdateProgress", "Error: " + e.Message);
}
}
}
@@ -102,6 +102,9 @@ public void ConfigureServices(IServiceCollection services)
{
options.InputFormatters.RemoveType<SystemTextJsonInputFormatter>();
options.OutputFormatters.RemoveType<SystemTextJsonOutputFormatter>();
// We remove StringOutputFormatter to make Swagger happy by not
// showing text/plain in the list of available media types.
options.OutputFormatters.RemoveType<StringOutputFormatter>();
options.EnableEndpointRouting = false;
}).SetCompatibilityVersion(CompatibilityVersion.Version_3_0);

@@ -10,90 +10,74 @@
<li>Spark version: @SparkSettings.Version</li>
</ul>
<div>

<input type="button" class="btn btn-danger" id="clearButton" value="Clear database" />
<input type="button" class="btn btn-warning" id="initButton" value="Load examples" />
<input type="button" class="btn btn-success" id="reindexButton" value="Rebuild index" />

<div class="overflow-auto mt-5 h-50 border border-info" style="height: 50vh!important;">
<ul id="messagesList"></ul>
</div>

</div>
</div>
<script src="/assets/js/microsoft/signalr/dist/browser/signalr.min.js"></script>
<script>
"use strict";
document.addEventListener("DOMContentLoaded", function(){
var connection = new signalR.HubConnectionBuilder().withUrl("/maintenanceHub").build();
var progress = 0;
function addListItem(text, list) {
var li = document.createElement("li");
li.textContent = text;
document.getElementById(list).appendChild(li);
}
connection.on("UpdateProgress", function (message) {
// var output = $("#resultmessage");
var n = parseInt(message.progress);
addListItem(message.progress + '%: ' + message.message, "messagesList");
//var percetage = $("#percentage");
//percetage.html(message.Progress + "%");
//$("#pbar").css("width", n + "%").attr('aria-valuenow', n);

if (n === 100)
{
// find progress bar and .removeClass("progress-bar-striped");
}
});
connection.on("Importing", function (message) {
addListItem(message, "messagesList");
});
connection.on("Pong", function (message) {
var encodedMsg = "Server says " + message;
var li = document.createElement("li");
li.textContent = encodedMsg;
document.getElementById("messagesList").appendChild(li);
addListItem("test", "messagesList");
});
connection.start().then(function(){
document.getElementById("initButton").disabled = false;
}).catch(function (err) {
return console.error(err.toString());
});
@section scripts {
<script src="https://cdnjs.cloudflare.com/ajax/libs/microsoft-signalr/5.0.5/signalr.min.js"
integrity="sha512-jqqGgPrIDxMezbWBnn8g+VUyWas+yFogGZS+cEfw8o2tLdzvIpRYYZvrloQsguBPcqsdbE7htoABeN/R+aCBzw=="
crossorigin="anonymous"></script>
<script>
"use strict";
document.addEventListener("DOMContentLoaded", function () {
const connection = new signalR.HubConnectionBuilder()
.withUrl("/maintenanceHub")
.build();
async function start() {
try {
await connection.start();
console.log("SignalR Connected.");
} catch (err) {
console.log(err);
setTimeout(start, 5000);
}
};
connection.onclose(start);
start();
function addListItem(text, list) {
var li = document.createElement("li");
li.textContent = text;
document.getElementById(list).appendChild(li);
}
connection.on("UpdateProgress", (message) => {
addListItem(message, "messagesList");
});
document.getElementById("initButton").addEventListener("click", function (event) {
var message = "test";
connection.invoke("LoadExamplesToStore").catch(function (err) {
return console.error(err.toString());
});
event.preventDefault();
});
document.getElementById("initButton").addEventListener("click", function (event) {
connection.invoke("LoadExamplesToStore").catch(function (err) {
return console.error(err.toString());
});
event.preventDefault();
});
document.getElementById("clearButton").addEventListener("click", function (event) {
var message = "test";
connection.invoke("ClearStore").catch(function (err) {
return console.error(err.toString());
});
event.preventDefault();
});
document.getElementById("clearButton").addEventListener("click", function (event) {
connection.invoke("ClearStore").catch(function (err) {
return console.error(err.toString());
});
event.preventDefault();
});
document.getElementById("reindexButton").addEventListener("click", function (event) {
connection.invoke("RebuildIndex").catch(function (err) {
return console.error(err.toString());
document.getElementById("reindexButton").addEventListener("click", function (event) {
connection.invoke("RebuildIndex").catch(function (err) {
return console.error(err.toString());
});
event.preventDefault();
});
event.preventDefault();
});
});
</script>
</script>
}
@@ -1,20 +1,22 @@
<!DOCTYPE html>
<html>

<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>@ViewData["Title"] | Spark</title>

<link rel="stylesheet" href="~/assets/css/main.css" />
<link rel="stylesheet" href="~/assets/css/fhir-html.css" />
</head>

<body>
<header>
<nav class="navbar navbar-expand-sm navbar-toggleable-sm navbar-dark bg-dark text-light border-bottom box-shadow">
<nav
class="navbar navbar-expand-sm navbar-toggleable-sm navbar-dark bg-dark text-light border-bottom box-shadow">
<div class="container">
<a class="navbar-brand" href="/">Spark FHIR server</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target=".navbar-collapse" aria-controls="navbarSupportedContent"
aria-expanded="false" aria-label="Toggle navigation">
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target=".navbar-collapse"
aria-controls="navbarSupportedContent" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse mr-auto" id="navbarSupportedContent">
@@ -38,9 +40,9 @@
<partial name="_CookieConsentPartial" />

</div>
<main role="main" class="py-3 container">
@RenderBody()
</main>
<main role="main" class="py-3 container">
@RenderBody()
</main>

<footer class="border-top footer text-muted">
<div class="container">
@@ -52,4 +54,5 @@
@RenderSection("Scripts", required: false)

</body>

</html>
@@ -9,7 +9,9 @@
"LogLevel": {
"Default": "Debug",
"System": "Information",
"Microsoft": "Information"
"Microsoft": "Information",
"Microsoft.AspNetCore.SignalR": "Debug",
"Microsoft.AspNetCore.Http.Connections": "Debug"
}
}
}
@@ -21,4 +21,4 @@
}
},
"AllowedHosts": "*"
}
}

Large diffs are not rendered by default.

Large diffs are not rendered by default.

This file was deleted.

@@ -309,7 +309,6 @@
<Content Include="Content\bootstrap-reboot.min.css" />
<Content Include="Content\bootstrap.css" />
<Content Include="Content\bootstrap.min.css" />
<Content Include="Content\css\fhir-html.css" />
<Content Include="Content\css\site.css">
<DependentUpon>site.scss</DependentUpon>
<CopyToOutputDirectory>Always</CopyToOutputDirectory>