├── .docker
├── docker-compose.example.yml
└── linux
│ ├── Mongo.Dockerfile
│ ├── Spark.Dockerfile
│ ├── mongorestore.sh
│ └── r4.archive.gz
├── .dockerignore
├── .editorconfig
├── .gitattributes
├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
├── dependabot.yml
└── workflows
│ ├── docker_image_linux.yml
│ ├── integration_tests.yml
│ ├── nuget_deploy.yml
│ └── run_tests.yaml
├── .gitignore
├── CONTRIBUTORS.md
├── Directory.Build.props
├── Documentation
├── .gitignore
├── Architecture.md
├── Contribute.md
├── InstallingSpark.md
├── MigrateFromv1Tov2.md
├── MigrateFromv2Tov3.md
├── Performance.md
├── Quickstart.md
├── RunningSparkInDocker.md
├── UsingSpark.md
├── index.md
└── originals
│ ├── FHIR_MMSparkDeployment1-1.docx
│ ├── FHIR_MMSparkDeployment1-1.pdf
│ ├── Overview Spark Search.vsdx
│ ├── SparkComponentOverview_0-1.pdf
│ └── SparkComponentOverview_0-1.vsdx
├── LICENSE
├── README.md
├── SECURITY.md
├── Spark.sln
├── incendi-logo-128x128.png
├── scripts
├── CreateIndexes.bat
├── CreateIndexes.js
├── RemoveDuplicateId.bat
└── RemoveDuplicateId.js
├── src
├── Spark-Legacy
│ └── Examples
│ │ ├── DSTU2
│ │ └── examples.zip
│ │ ├── R4
│ │ └── examples.zip
│ │ └── STU3
│ │ └── examples.zip
├── Spark.Engine.Test
│ ├── Auxiliary
│ │ └── LimitedStreamTests.cs
│ ├── Core
│ │ ├── Builders
│ │ │ └── CapabilityStatementBuilderTests.cs
│ │ ├── ElementQueryTests.cs
│ │ └── FhirModelTests.cs
│ ├── Examples
│ │ ├── appointment-example2doctors.json
│ │ ├── careplan-example-f201-renal.json
│ │ ├── observation-example-bloodpressure.json
│ │ └── patient-example.json
│ ├── Extensions
│ │ ├── OperationOutcomeExtensionsTests.cs
│ │ ├── RegexExtensionsTests.cs
│ │ └── SearchParameterExtensionsTests.cs
│ ├── FhirVersion.cs
│ ├── Formatters
│ │ ├── FormatterTestBase.cs
│ │ ├── NonSeekableReadStream.cs
│ │ ├── ResourceJsonInputFormatterTests.cs
│ │ ├── ResourceXmlInputFormatterTests.cs
│ │ └── TestHttpRequestStreamReaderFactory.cs
│ ├── Maintenance
│ │ └── MaintenanceModeTests.cs
│ ├── Search
│ │ ├── CriteriumTests.cs
│ │ ├── ElementIndexerTests.cs
│ │ ├── ModifierTests.cs
│ │ └── ReverseIncludeTests.cs
│ ├── Service
│ │ ├── IndexServiceTests.cs
│ │ ├── IndexServiceTests2.cs
│ │ ├── IndexValueTestExtensions.cs
│ │ └── PatchServiceTests.cs
│ ├── Spark.Engine.Test.csproj
│ ├── TestData
│ │ ├── R2
│ │ │ ├── patient-example.json
│ │ │ └── patient-example.xml
│ │ ├── R3
│ │ │ ├── patient-example.json
│ │ │ └── patient-example.xml
│ │ └── R4
│ │ │ ├── patient-example.json
│ │ │ └── patient-example.xml
│ ├── TextFileHelper.cs
│ └── Utility
│ │ ├── FhirPathUtilTests.cs
│ │ └── FhirVersionUtility.cs
├── Spark.Engine
│ ├── Auxiliary
│ │ ├── LimitedStream.cs
│ │ └── ResourceVisitor.cs
│ ├── Core
│ │ ├── CapabilityStatementBuilder.cs
│ │ ├── ConditionalHeaderParameters.cs
│ │ ├── Const.cs
│ │ ├── ElementQuery.cs
│ │ ├── Error.cs
│ │ ├── FhirMediaType.cs
│ │ ├── FhirModel.cs
│ │ ├── HistoryParameters.cs
│ │ ├── HttpHeaderName.cs
│ │ ├── IFhirModel.cs
│ │ ├── IIndexService.cs
│ │ ├── ILocalhost.cs
│ │ ├── Interaction.cs
│ │ ├── Key.cs
│ │ ├── KeyKind.cs
│ │ ├── Localhost.cs
│ │ ├── LocalhostExtensions.cs
│ │ ├── MessagingComponentBuilder.cs
│ │ ├── Namespaces.cs
│ │ ├── RequiredAttributeException.cs
│ │ ├── ResourceComponentBuilder.cs
│ │ ├── Respond.cs
│ │ ├── Response.cs
│ │ ├── RestComponentBuilder.cs
│ │ ├── SearchResults.cs
│ │ ├── Snapshot.cs
│ │ ├── SparkException.cs
│ │ └── UriHelper.cs
│ ├── ExceptionHandling
│ │ └── ErrorHandler.cs
│ ├── ExportSettings.cs
│ ├── Extensions
│ │ ├── BundleExtensions.cs
│ │ ├── CodingExtensions.cs
│ │ ├── DateTimeOffsetExtensions.cs
│ │ ├── ETag.cs
│ │ ├── FhirDateTimeExtensions.cs
│ │ ├── FhirServiceDictionary.cs
│ │ ├── FhirServiceExtensionDictionary.cs
│ │ ├── FhirStoreDictionary.cs
│ │ ├── GeneratorKeyExtensions.cs
│ │ ├── HttpContextExtensions.cs
│ │ ├── HttpHeadersExtensions.cs
│ │ ├── HttpRequestFhirExtensions.cs
│ │ ├── IApplicationBuilderExtensions.cs
│ │ ├── IServiceCollectionExtensions.cs
│ │ ├── InteractionExtensions.cs
│ │ ├── KeyExtensions.cs
│ │ ├── MetaExtensions.cs
│ │ ├── OperationOutcomeExtensions.cs
│ │ ├── ParametersExtensions.cs
│ │ ├── QuantityExtensions.cs
│ │ ├── RegexExtensions.cs
│ │ ├── SearchParamDefinitionExtensions.cs
│ │ ├── SearchParameterExtensions.cs
│ │ ├── SparkOptions.cs
│ │ ├── StringExtensions.cs
│ │ ├── UriParamExtensions.cs
│ │ ├── UriUtil.cs
│ │ └── XDocumentExtensions.cs
│ ├── FhirResponseFactory
│ │ ├── ConditionalHeaderFhirResponseInterceptor.cs
│ │ ├── FhirResponseFactory.cs
│ │ ├── FhirResponseInterceptorRunner.cs
│ │ └── IFhirResponseFactory.cs
│ ├── Filters
│ │ ├── GzipCompressedContent.cs
│ │ ├── GzipContent.cs
│ │ └── UnsupportedMediaTypeFilter.cs
│ ├── Formatters
│ │ ├── AsyncResourceJsonInputFormatter.cs
│ │ ├── AsyncResourceJsonOutputFormatter.cs
│ │ ├── AsyncResourceXmlInputFormatter.cs
│ │ ├── AsyncResourceXmlOutputFormatter.cs
│ │ ├── BinaryInputFormatter.cs
│ │ ├── BinaryOutputFormatter.cs
│ │ ├── FhirOutputFormatterSelector.cs
│ │ ├── JsonArrayPool.cs
│ │ ├── ResourceJsonInputFormatter.cs
│ │ ├── ResourceJsonOutputFormatter.cs
│ │ ├── ResourceXmlInputFormatter.cs
│ │ └── ResourceXmlOutputFormatter.cs
│ ├── Handlers
│ │ └── FormatTypeHandler.cs
│ ├── IO
│ │ └── NonDisposableStream.cs
│ ├── IndexSettings.cs
│ ├── Interfaces
│ │ ├── IFhirIndex.cs
│ │ ├── IFhirResponseInterceptor.cs
│ │ ├── IFhirResponseInterceptorRunner.cs
│ │ ├── IFhirStoreAdministration.cs
│ │ └── IIdentityGenerator.cs
│ ├── Logging
│ │ └── SparkEngineEventSource.cs
│ ├── Maintenance
│ │ ├── MaintenanceLock.cs
│ │ ├── MaintenanceLockMode.cs
│ │ ├── MaintenanceMode.cs
│ │ ├── MaintenanceModeEnabledException.cs
│ │ └── MaintenanceModeHandler.cs
│ ├── Model
│ │ ├── CompartmentInfo.cs
│ │ ├── IndexEntry.cs
│ │ └── SparkModelInfo.cs
│ ├── Properties
│ │ └── AssemblyInfo.cs
│ ├── Search
│ │ ├── ElementIndexer.cs
│ │ ├── IReferenceNormalizationService.cs
│ │ ├── Model
│ │ │ ├── IndexFieldNames.cs
│ │ │ ├── Modifier.cs
│ │ │ └── ReverseInclude.cs
│ │ ├── ReferenceNormalizationService.cs
│ │ ├── ResourceResolver.cs
│ │ ├── SearchSettings.cs
│ │ ├── Support
│ │ │ ├── IPositionInfo.cs
│ │ │ ├── Logging.cs
│ │ │ └── StringExtensions.cs
│ │ └── Types
│ │ │ ├── ChoiceValue.cs
│ │ │ ├── CompositeValue.cs
│ │ │ ├── Criterium.cs
│ │ │ ├── DateTimeValue.cs
│ │ │ ├── DateValue.cs
│ │ │ ├── Expression.cs
│ │ │ ├── NumberValue.cs
│ │ │ ├── QuantityValue.cs
│ │ │ ├── ReferenceValue.cs
│ │ │ ├── StringValue.cs
│ │ │ ├── TokenValue.cs
│ │ │ ├── UntypedValue.cs
│ │ │ └── ValueExpression.cs
│ ├── Service
│ │ ├── Abstractions
│ │ │ └── FhirServiceBase.cs
│ │ ├── Export.cs
│ │ ├── FhirService.cs
│ │ ├── FhirServiceExtensions
│ │ │ ├── ConformanceBuilder.cs
│ │ │ ├── ConformanceService.cs
│ │ │ ├── DeleteManipulationOperation.cs
│ │ │ ├── ElementNavFhirExtensionsNew.cs
│ │ │ ├── GetManipulationOperation.cs
│ │ │ ├── HistoryService.cs
│ │ │ ├── ICapabilityStatementService.cs
│ │ │ ├── IFhirServiceExtension.cs
│ │ │ ├── IHistoryService.cs
│ │ │ ├── IIndexBuildProgressReporter.cs
│ │ │ ├── IIndexRebuildService.cs
│ │ │ ├── IInteractionHandler.cs
│ │ │ ├── IPagingService.cs
│ │ │ ├── IPatchService.cs
│ │ │ ├── IQueryService.cs
│ │ │ ├── IResourceStorageService.cs
│ │ │ ├── ISearchService.cs
│ │ │ ├── ISnapshotPagination.cs
│ │ │ ├── ISnapshotPaginationCalculator.cs
│ │ │ ├── ISnapshotPaginationProvider.cs
│ │ │ ├── ITransactionService.cs
│ │ │ ├── IndexRebuildService.cs
│ │ │ ├── IndexService.cs
│ │ │ ├── PagingService.cs
│ │ │ ├── PatchManipulationOperation.cs
│ │ │ ├── PatchService.cs
│ │ │ ├── PostManipulationOperation.cs
│ │ │ ├── PutManipulationOperation.cs
│ │ │ ├── ResourceManipulationOperation.cs
│ │ │ ├── ResourceManipulationOperationFactory.cs
│ │ │ ├── ResourceStorageService.cs
│ │ │ ├── SearchService.cs
│ │ │ ├── SnapshotPaginationCalculator.cs
│ │ │ ├── SnapshotPaginationProvider.cs
│ │ │ ├── SnapshotPaginationService.cs
│ │ │ └── TransactionService.cs
│ │ ├── ICompositeServiceListener.cs
│ │ ├── IFhirService.cs
│ │ ├── IServiceListener.cs
│ │ ├── ITransfer.cs
│ │ ├── Import.cs
│ │ ├── KeyMapper.cs
│ │ ├── ServiceListener.cs
│ │ ├── Transfer.cs
│ │ └── Validate.cs
│ ├── Spark.Engine.csproj
│ ├── SparkSettings.cs
│ ├── Store
│ │ ├── ExtendableWith.cs
│ │ └── Interfaces
│ │ │ ├── IExtendableWith.cs
│ │ │ ├── IFhirStore.cs
│ │ │ ├── IFhirStorePagedReader.cs
│ │ │ ├── IHistoryStore.cs
│ │ │ ├── IIndexStore.cs
│ │ │ └── ISnapshotStore.cs
│ ├── StoreSettings.cs
│ └── Utility
│ │ ├── FhirParameter.cs
│ │ └── FhirPathUtil.cs
├── Spark.Mongo.Tests
│ ├── Indexer
│ │ └── MongoIndexMapperTest.cs
│ ├── Search
│ │ ├── BsonSerializationProvider.cs
│ │ └── CriteriumQueryBuilderTests.cs
│ └── Spark.Mongo.Tests.csproj
├── Spark.Mongo
│ ├── Extensions
│ │ └── IServiceCollectionExtensions.cs
│ ├── Properties
│ │ └── AssemblyInfo.cs
│ ├── Search
│ │ ├── Common
│ │ │ ├── Arguments.cs
│ │ │ ├── Config.cs
│ │ │ ├── Definitions.cs
│ │ │ └── DefinitionsFactory.cs
│ │ ├── Indexer
│ │ │ ├── BsonIndexDocument.cs
│ │ │ ├── BsonIndexDocumentBuilder.cs
│ │ │ └── MongoIndexMapper.cs
│ │ ├── Infrastructure
│ │ │ ├── MongoDatabaseFactory.cs
│ │ │ ├── MongoFhirIndex.cs
│ │ │ └── MongoIndexStore.cs
│ │ ├── Searcher
│ │ │ ├── CriteriaMongoExtensions.cs
│ │ │ ├── ITerm.cs
│ │ │ └── MongoSearcher.cs
│ │ └── Utils
│ │ │ ├── Soundex.cs
│ │ │ └── UnitsOfMeasureHelper.cs
│ ├── Spark.Mongo.csproj
│ ├── SparkMongoEventSource.cs
│ └── Store
│ │ ├── BsonHelper.cs
│ │ ├── Constants.cs
│ │ ├── Extensions
│ │ └── HistoryStore.cs
│ │ ├── GuidIdentityGenerator.cs
│ │ ├── MongoCollectionPageResult.cs
│ │ ├── MongoFhirStore.cs
│ │ ├── MongoFhirStorePagedReader.cs
│ │ ├── MongoIdGenerator.cs
│ │ ├── MongoSnapshotStore.cs
│ │ └── MongoStoreAdministration.cs
└── Spark.Web
│ ├── .gitignore
│ ├── ClientApp
│ ├── js
│ │ └── main.js
│ ├── package-lock.json
│ └── package.json
│ ├── Controllers
│ ├── AdminController.cs
│ ├── FhirController.cs
│ ├── HomeController.cs
│ └── ResourcesController.cs
│ ├── Data
│ ├── ApplicationDbContext.cs
│ └── ApplicationDbInitializer.cs
│ ├── Examples
│ ├── DSTU2
│ │ └── .gitkeep
│ ├── R4
│ │ └── .gitkeep
│ └── STU3
│ │ └── .gitkeep
│ ├── Hubs
│ ├── HubContextProgressNotifier.cs
│ ├── ImportProgressMessage.cs
│ └── MaintenanceHub.cs
│ ├── Migrations
│ ├── 20191101194423_Init.Designer.cs
│ ├── 20191101194423_Init.cs
│ └── ApplicationDbContextModelSnapshot.cs
│ ├── Models
│ ├── Config
│ │ └── ExamplesSettings.cs
│ └── ErrorViewModel.cs
│ ├── Program.cs
│ ├── Properties
│ ├── .gitignore
│ └── launchSettings.json
│ ├── README.md
│ ├── Services
│ ├── ResourceStat.cs
│ └── ServerMetadata.cs
│ ├── Spark.Web.csproj
│ ├── Startup.cs
│ ├── Utilities
│ └── FhirFileImport.cs
│ ├── Views
│ ├── Admin
│ │ ├── Index.cshtml
│ │ └── Maintenance.cshtml
│ ├── Home
│ │ ├── Index.cshtml
│ │ └── Privacy.cshtml
│ ├── Resources
│ │ └── Index.cshtml
│ ├── Shared
│ │ ├── Error.cshtml
│ │ ├── _AdminNavbar.cshtml
│ │ ├── _CookieConsentPartial.cshtml
│ │ ├── _Layout.cshtml
│ │ ├── _LoginPartial.cshtml
│ │ ├── _ValidationScriptsPartial.cshtml
│ │ └── _ViewImports.cshtml
│ └── _ViewStart.cshtml
│ ├── appsettings.Development.json
│ ├── appsettings.json
│ ├── libman.json
│ └── wwwroot
│ └── assets
│ ├── css
│ └── main.css
│ ├── images
│ └── 150.png
│ └── js
│ └── main.js
└── tests
└── integration-tests
├── .gitignore
├── combine-test-results.sh
├── docker-compose.yml
└── readme.md
/.docker/docker-compose.example.yml:
--------------------------------------------------------------------------------
1 | version: "3"
2 | services:
3 | spark:
4 | container_name: spark
5 | restart: always
6 | image: sparkfhir/spark:r4-latest
7 | environment:
8 | - StoreSettings__ConnectionString=mongodb://root:CosmicTopSecret@mongodb:27017/spark?authSource=admin
9 | - SparkSettings__Endpoint=http://localhost:5555/fhir
10 | ports:
11 | - "5555:80"
12 | depends_on:
13 | - mongodb
14 | mongodb:
15 | container_name: mongodb
16 | image: sparkfhir/mongo:r4-latest
17 | environment:
18 | MONGO_INITDB_ROOT_USERNAME: root
19 | MONGO_INITDB_ROOT_PASSWORD: CosmicTopSecret
20 | ports:
21 | - "17017:27017"
22 | volumes:
23 | - r4-latest-develop-data-volume:/data/db
24 | volumes:
25 | r4-latest-develop-data-volume:
26 |
--------------------------------------------------------------------------------
/.docker/linux/Mongo.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mongo
2 | COPY .docker/linux/r4.archive.gz /home/
3 | COPY .docker/linux/mongorestore.sh /docker-entrypoint-initdb.d/
4 |
--------------------------------------------------------------------------------
/.docker/linux/Spark.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mcr.microsoft.com/dotnet/aspnet:9.0-alpine AS base
2 | RUN apk add --no-cache icu-libs
3 | ENV DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=false
4 | WORKDIR /app
5 | ENV ASPNETCORE_URLS=http://+:80
6 |
7 | FROM mcr.microsoft.com/dotnet/sdk:9.0-noble AS build
8 | WORKDIR /src
9 | COPY ["./Directory.Build.props", "../Directory.Build.props"]
10 | COPY ["./src/Spark.Web/Spark.Web.csproj", "Spark.Web/Spark.Web.csproj"]
11 | COPY ["./src/Spark.Engine/Spark.Engine.csproj", "Spark.Engine/Spark.Engine.csproj"]
12 | COPY ["./src/Spark.Mongo/Spark.Mongo.csproj", "Spark.Mongo/Spark.Mongo.csproj"]
13 | RUN dotnet restore "/src/Spark.Web/Spark.Web.csproj"
14 | COPY ./src .
15 | RUN dotnet build "/src/Spark.Web/Spark.Web.csproj" -c Release -o /app
16 |
17 | FROM build AS publish
18 | RUN dotnet publish "/src/Spark.Web/Spark.Web.csproj" -c Release -o /app
19 |
20 | FROM base AS final
21 | WORKDIR /app
22 | COPY --from=publish /app .
23 |
24 | ENTRYPOINT ["dotnet", "Spark.Web.dll"]
25 |
--------------------------------------------------------------------------------
/.docker/linux/mongorestore.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | mongorestore --drop --archive=/home/r4.archive.gz --gzip
3 |
--------------------------------------------------------------------------------
/.docker/linux/r4.archive.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/.docker/linux/r4.archive.gz
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .vs/
2 | .idea/
3 | .github/
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Spark version**
27 | - Version: [e.g. 1.5.9]
28 |
29 | **Operating system + Database**
30 | - OS: [e.g. Linux]
31 | - Database: [e.g. MongoDB]
32 |
33 | **Container service / Cloud infrastructure:**
34 | - Container service: [e.g. Docker swarm]
35 | - Cloud provider: [e.g. Google Cloud]
36 | - Cloud infrastructure: [e.g. VM, Web Service, etc]
37 | - Database as a service: [e.g. CosmosDB w/ MongoDB API]
38 |
39 | **Additional context**
40 | Add any other context about the problem here.
41 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: nuget
4 | directory: "/"
5 | groups:
6 | nuget-patch-updates:
7 | patterns:
8 | - "*"
9 | update-types:
10 | - "patch"
11 | nuget-minor-updates:
12 | patterns:
13 | - "*"
14 | update-types:
15 | - "minor"
16 | nuget-major-updates:
17 | patterns:
18 | - "*"
19 | update-types:
20 | - "major"
21 | schedule:
22 | interval: daily
23 | - package-ecosystem: github-actions
24 | directory: "/"
25 | schedule:
26 | interval: daily
27 |
--------------------------------------------------------------------------------
/.github/workflows/docker_image_linux.yml:
--------------------------------------------------------------------------------
1 | name: Docker Release
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | jobs:
8 | build:
9 | if: github.repository == 'FirelyTeam/spark'
10 |
11 | runs-on: ubuntu-24.04
12 | steps:
13 | - uses: actions/checkout@v4
14 | - name: Login to DockerHub Registry
15 | run: echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u ${{ secrets.DOCKERHUB_USERNAME }} --password-stdin
16 | - name: Get the version
17 | id: vars
18 | run: echo ::set-output name=tag::$(echo ${GITHUB_REF:10})
19 | - name: Build the tagged Spark Docker image
20 | run: docker build . --file .docker/linux/Spark.Dockerfile
21 | -t ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark:${{steps.vars.outputs.tag}}
22 | -t ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark:r4-latest
23 | -t ${{ secrets.DOCKERHUB_ORGANIZATION_2 }}/spark:${{steps.vars.outputs.tag}}
24 | -t ${{ secrets.DOCKERHUB_ORGANIZATION_2 }}/spark:r4-latest
25 | - name: Push the tagged Spark Docker image
26 | run: docker push --all-tags ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark
27 | - name: Build the tagged Mongo Docker image
28 | run: docker build . --file .docker/linux/Mongo.Dockerfile
29 | -t ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo:${{steps.vars.outputs.tag}}
30 | -t ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo:r4-latest
31 | -t ${{ secrets.DOCKERHUB_ORGANIZATION_2 }}/mongo:${{steps.vars.outputs.tag}}
32 | -t ${{ secrets.DOCKERHUB_ORGANIZATION_2 }}/mongo:r4-latest
33 | - name: Push the tagged Mongo Docker image
34 | run: docker push --all-tags ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo
35 |
--------------------------------------------------------------------------------
/.github/workflows/nuget_deploy.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | jobs:
8 | build:
9 | if: github.repository == 'FirelyTeam/spark'
10 |
11 | runs-on: windows-latest
12 | steps:
13 | - uses: actions/checkout@v4
14 | - name: Setup .NET Core
15 | uses: actions/setup-dotnet@v4
16 | with:
17 | dotnet-version: 9.0.x
18 | - name: Build
19 | run: |
20 | dotnet pack "./src/Spark.Engine/Spark.Engine.csproj" -c Release
21 | dotnet pack "./src/Spark.Mongo/Spark.Mongo.csproj" -c Release
22 | - name: Deploy Nuget Packages
23 | run: dotnet nuget push .\src\**\*.nupkg
24 | --api-key ${{ secrets.NUGET_API_KEY }}
25 | --skip-duplicate
26 | --source https://api.nuget.org/v3/index.json
27 | --no-symbols
28 |
--------------------------------------------------------------------------------
/.github/workflows/run_tests.yaml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'r4/master'
7 | pull_request:
8 |
9 | jobs:
10 | build:
11 | if: github.repository == 'FirelyTeam/spark'
12 |
13 | runs-on: ${{ matrix.platform }}
14 | strategy:
15 | matrix:
16 | platform: [ windows-latest, ubuntu-24.04, macos-14, macos-14-large ]
17 |
18 | steps:
19 | - uses: actions/checkout@v4
20 | - name: Setup .NET Core
21 | uses: actions/setup-dotnet@v4
22 | with:
23 | dotnet-version: 9.0.x
24 | - name: Build with dotnet
25 | run: dotnet build src/Spark.Web/Spark.Web.csproj -c Release
26 | - name: Unit tests
27 | run: |
28 | dotnet test "./src/Spark.Engine.Test/Spark.Engine.Test.csproj"
29 | dotnet test "./src/Spark.Mongo.Tests/Spark.Mongo.Tests.csproj"
30 |
--------------------------------------------------------------------------------
/CONTRIBUTORS.md:
--------------------------------------------------------------------------------
1 | Spark contributors
2 | ==================
3 | * [Ewout Kramer](https://github.com/ewoutkramer) - [Furore](https://github.com/firelyteam)
4 |
5 | * [Christiaan Knaap](https://github.com/cknaap) - [Furore](https://github.com/firelyteam)
6 |
7 | * [Martijn Harthoorn](https://github.com/mharthoorn) - [Furore](https://github.com/firelyteam)
8 |
9 | * Corina Ciocanea - [Furore](https://github.com/firelyteam)
10 |
11 | * [Tony Abell](https://github.com/TonyAbell) - CornerStone
12 |
13 | * [Richard Schneider](https://github.com/richardschneider) - Orion Health
14 | Richard added GZip support
15 |
16 | * [Brian Postlethwaite](https://github.com/brianpos)
17 | Brian made the resources look fresh in the UI.
18 |
19 | * [Kenneth Myhra](https://github.com/kennethmyhra) - [Incendi](https://github.com/incendilabs/)
20 |
21 | * [Ole Kristian Losvik](https://github.com/losolio)
22 |
23 | * [Andrew Anisimov](https://github.com/andy-a-o)
24 |
--------------------------------------------------------------------------------
/Directory.Build.props:
--------------------------------------------------------------------------------
1 |
2 |
3 | 2.3.3
4 |
5 |
6 | Firely, Incendi and contributors
7 | Firely and Incendi
8 | Copyright © Firely 2014-2018, © Incendi 2018-2025
9 | https://github.com/firelyteam/spark
10 | LICENSE
11 | images\incendi-logo-128x128.png
12 | README.md
13 | true
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/Documentation/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 |
8 | # Runtime data
9 | pids
10 | *.pid
11 | *.seed
12 | *.pid.lock
13 |
14 | # Directory for instrumented libs generated by jscoverage/JSCover
15 | lib-cov
16 |
17 | # Coverage directory used by tools like istanbul
18 | coverage
19 |
20 | # nyc test coverage
21 | .nyc_output
22 |
23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
24 | .grunt
25 |
26 | # Bower dependency directory (https://bower.io/)
27 | bower_components
28 |
29 | # node-waf configuration
30 | .lock-wscript
31 |
32 | # Compiled binary addons (http://nodejs.org/api/addons.html)
33 | build/Release
34 |
35 | # Dependency directories
36 | node_modules/
37 | jspm_packages/
38 |
39 | # Typescript v1 declaration files
40 | typings/
41 |
42 | # Optional npm cache directory
43 | .npm
44 |
45 | # Optional eslint cache
46 | .eslintcache
47 |
48 | # Optional REPL history
49 | .node_repl_history
50 |
51 | # Output of 'npm pack'
52 | *.tgz
53 |
54 | # dotenv environment variables file
55 | .env
56 |
57 | # gatsby files
58 | .cache/
59 | public
60 |
61 | # Mac files
62 | .DS_Store
63 |
64 | # Yarn
65 | yarn-error.log
66 | .pnp/
67 | .pnp.js
68 | # Yarn Integrity file
69 | .yarn-integrity
70 |
71 | # Docz
72 | .Docz
--------------------------------------------------------------------------------
/Documentation/Contribute.md:
--------------------------------------------------------------------------------
1 | # Contributions
2 | You are welcome to contribute to this project. The Spark server is used in several commercial and open source projects. Therefore we have a high quality standard and we carefully review submissions.
3 |
4 | When you want to contribute changes:
5 | - Contact us by opening an issue before starting work on a major change.
6 | - Fork and open a pull request
7 |
8 | ### Pull requests
9 | When you send us a pull request
10 | - Make sure it builds
11 | - Make sure it's tested
12 | - The pull request should cover only one change
13 | - Accept that we might reject it because it conflicts with our strategy.
14 |
15 | We do appreciate suggestions, but the Spark FHIR server code is used by us for commercial projects, so we will most probably reject substantial changes unless you coordinate them with us first.
16 |
17 | ### GIT branching strategy
18 | Branch from the `r4/master` branch which contains the R4 FHIR version, unless the feature or bug fix is considered for a specific version of FHIR then branch from the relevant branch which at this point is `stu3/master`.
19 |
20 | See [GitHub flow](https://guides.github.com/introduction/flow/) for more information.
21 |
--------------------------------------------------------------------------------
/Documentation/InstallingSpark.md:
--------------------------------------------------------------------------------
1 | # Getting Started
2 |
3 | ## Database
4 |
5 | Spark FHIR server supports MongoDB as its persistence layer. The following options are supported:
6 |
7 | - MongoDB Atlas. Sign up for a [free trial account](https://www.mongodb.com/download-center) and run your database in the cloud.
8 | - MongoDB Community Server. [Download](https://www.mongodb.com/download-center/community) and install locally
9 | - MongoDB Enterprise. [Download](https://www.mongodb.com/download-center/enterprise) and install locally.
10 | - MongoDB in docker. Check out the example [docker-compose.example.yml](../.docker/docker-compose.example.yml).
11 |
12 | ## CosmosDB with MongoDB as a database
13 | In general we do not recommend using CosmosDB. There are known installations using CosmosDB with the MongoDB API which run fairly well, but they have not been without problems.
14 |
15 | ## Install Spark
16 |
17 | The core packages `Spark.Engine` and `Spark.Mongo` target `net8.0` and `net9.0`. For the web application you may use:
18 |
19 | ## Reference Implementations
20 | The reference implementations are only meant as examples and must never be used out of the box in a production environment without, at a minimum, adding security features.
21 |
22 | - `Spark.Web`, which runs on ASP.NET Core 9.0.
23 |
--------------------------------------------------------------------------------
/Documentation/Performance.md:
--------------------------------------------------------------------------------
1 | # Performance
2 |
3 | We have no performance figures of Spark yet. Spark is already being used in production scenarios, so it is fit for real use. If you have measured performance of Spark yourself, please share the results!
4 |
5 | In the near future we will develop performance tests for Spark to give you an idea of whether, or how well, it will fit your performance needs.
6 |
7 | If you are concerned Spark will not handle your load as fast as you would like, consider the following possibilities for spreading the load.
8 |
9 | * If you are FHIR-enabling multiple source systems, you could provide every system with its own FHIR front-end, implemented by Spark, instead of feeding data from all the source systems into one instance of Spark. You will, however, need a way to route requests to the correct instance of Spark.
10 |
11 | * If there is a logical attribute in your data to split the whole set into multiple sets, you could deploy Spark multiple times, each one on a 'shard' of the data. You will need a way to route requests to the correct instance of Spark, based on the chosen attribute.
12 |
13 | * MongoDB supports sharding, as described in the [MongoDB documentation](https://docs.mongodb.com/manual/sharding/). You will have to choose a shard key based upon expected use. To use this in Spark, you will probably have to tweak the Spark Mongo implementation.
14 |
--------------------------------------------------------------------------------
/Documentation/originals/FHIR_MMSparkDeployment1-1.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/FHIR_MMSparkDeployment1-1.docx
--------------------------------------------------------------------------------
/Documentation/originals/FHIR_MMSparkDeployment1-1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/FHIR_MMSparkDeployment1-1.pdf
--------------------------------------------------------------------------------
/Documentation/originals/Overview Spark Search.vsdx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/Overview Spark Search.vsdx
--------------------------------------------------------------------------------
/Documentation/originals/SparkComponentOverview_0-1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/SparkComponentOverview_0-1.pdf
--------------------------------------------------------------------------------
/Documentation/originals/SparkComponentOverview_0-1.vsdx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/SparkComponentOverview_0-1.vsdx
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2014-2018 Firely ,
4 | Copyright (c) 2018-2025 Incendi and contributors
5 | All rights reserved.
6 |
7 | Redistribution and use in source and binary forms, with or without modification,
8 | are permitted provided that the following conditions are met:
9 |
10 | * Redistributions of source code must retain the above copyright notice, this
11 | list of conditions and the following disclaimer.
12 |
13 | * Redistributions in binary form must reproduce the above copyright notice, this
14 | list of conditions and the following disclaimer in the documentation and/or
15 | other materials provided with the distribution.
16 |
17 | * Neither the name of the copyright holders nor the names of its
18 | contributors may be used to endorse or promote products derived from
19 | this software without specific prior written permission.
20 |
21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
22 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
23 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
25 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
27 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
28 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
30 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | These versions are currently being supported with security updates.
6 |
7 | #### R4
8 | | Version | Supported |
9 | | ------- | ------------------ |
10 | | 2.0.x | :white_check_mark: |
11 | | 1.5.x | :x: |
12 | | 1.4.x | :x: |
13 | | 1.3.x | :x: |
14 |
15 | #### STU3
16 | | Version | Supported |
17 | | ------- | ------------------ |
18 | | 2.0.x | :white_check_mark: |
19 | | 1.5.x | :x: |
20 | | 1.4.x | :x: |
21 | | 1.3.x | :x: |
22 |
23 | #### DSTU2
24 | | Version | Supported |
25 | | ------- | ------------------ |
26 | | 1.5.x | :x: |
27 | | 1.4.x | :x: |
28 | | 1.3.x | :x: |
29 |
30 | ## Reporting a Vulnerability
31 |
If you want to report a vulnerability, please do not create an issue;
instead, send an e-mail to info@incendi.no.
34 |
We encourage reporters to follow the "responsible disclosure" model.
36 |
Vulnerability reports will be assessed and addressed according to how
critical the vulnerability is.
39 |
--------------------------------------------------------------------------------
/incendi-logo-128x128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/incendi-logo-128x128.png
--------------------------------------------------------------------------------
/scripts/CreateIndexes.bat:
--------------------------------------------------------------------------------
1 | mongo spark CreateIndexes.js > CreateIndexes.log
2 |
--------------------------------------------------------------------------------
/scripts/CreateIndexes.js:
--------------------------------------------------------------------------------
// Creates the MongoDB indexes Spark relies on. "background" builds avoid blocking
// other operations while the index is created (note: this option is ignored by
// MongoDB 4.2+, where all builds are effectively background).
// Fix: a few indexes previously passed "background" : "true" (a string) instead of
// the boolean true used elsewhere; normalized to the boolean for consistency.
print("indexes for just about any operation, including create / update");
printjson(db.searchindex.createIndex({ "internal_resource" : 1, "identifier.code" : 1, "identifier.system" : 1}, { "name" : "ix_resource_identifier", "background" : true}));
printjson(db.searchindex.createIndex({"internal_id":1, "internal_selflink":1},{"name":"ix_internal_id_selflink", "background":true}));
printjson(db.resources.createIndex({"@REFERENCE" : 1, "@state" : 1}, { "name" : "ix_REFERENCE_state", "background" : true }));

print("indexes for when you query by Patient.name or Patient.family a lot");
// Partial indexes: only Patient documents are indexed, keeping the index small.
printjson(db.searchindex.createIndex({"name" : 1}, { "name" : "ix_Patient_name", partialFilterExpression : { "internal_resource" : "Patient" }, "background" : true }));
printjson(db.searchindex.createIndex({"family" : 1}, { "name" : "ix_Patient_family", partialFilterExpression : { "internal_resource" : "Patient" }, "background" : true }));

print("specific index for Encounter.serviceprovider");
printjson(db.searchindex.createIndex({"internal_resource" : 1, "serviceprovider" : 1}, { "name" : "ix_Encounter_serviceProvider", partialFilterExpression : { "internal_resource" : "Encounter" }, "background" : true }));
print("specific index for references to patient, from any resources that has a 'patient' search parameter");
printjson(db.searchindex.createIndex({"internal_resource" : 1, "patient" : 1}, { "name" : "ix_patient_reference", "background" : true }));
print("specific index for Observation.code");
printjson(db.searchindex.createIndex({"code.code" : 1, "code.system" : 1}, { "name" : "ix_Observation_code", partialFilterExpression : { "internal_resource" : "Observation" }, "background" : true }));
16 |
--------------------------------------------------------------------------------
/scripts/RemoveDuplicateId.bat:
--------------------------------------------------------------------------------
REM Runs RemoveDuplicateId.js against the "spark" database with the legacy mongo shell,
REM logging its output to RemoveDuplicateId.log.
mongo spark RemoveDuplicateId.js > RemoveDuplicateId.log
2 |
--------------------------------------------------------------------------------
/scripts/RemoveDuplicateId.js:
--------------------------------------------------------------------------------
1 | print("Removing duplicate Ids");
2 | printjson(db.resources.update({}, {$unset: {Id:1}}, {multi: true}));
3 |
--------------------------------------------------------------------------------
/src/Spark-Legacy/Examples/DSTU2/examples.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark-Legacy/Examples/DSTU2/examples.zip
--------------------------------------------------------------------------------
/src/Spark-Legacy/Examples/R4/examples.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark-Legacy/Examples/R4/examples.zip
--------------------------------------------------------------------------------
/src/Spark-Legacy/Examples/STU3/examples.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark-Legacy/Examples/STU3/examples.zip
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Core/FhirModelTests.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2016-2018, Firely
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Microsoft.VisualStudio.TestTools.UnitTesting;
8 | using Spark.Engine.Core;
9 | using System.Linq;
10 | using Hl7.Fhir.Model;
11 |
12 | namespace Spark.Engine.Test.Core;
13 |
[TestClass]
public class FhirModelTests
{
    private static FhirModel sut;

    // One shared FhirModel instance is enough for every test in this class.
    [ClassInitialize]
    public static void ClassInitialize(TestContext testContext)
    {
        sut = new FhirModel();
    }

    [TestMethod]
    public void TestCompartments()
    {
        // The Patient compartment must be known to the model and expose reverse includes.
        var compartmentInfo = sut.FindCompartmentInfo(ResourceType.Patient);

        Assert.IsNotNull(compartmentInfo);
        Assert.IsTrue(compartmentInfo.ReverseIncludes.Any());
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Extensions/RegexExtensionsTests.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2016-2018, Firely
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Microsoft.VisualStudio.TestTools.UnitTesting;
8 | using System.Text.RegularExpressions;
9 | using Spark.Engine.Extensions;
10 |
11 | namespace Spark.Engine.Test.Extensions;
12 |
[TestClass]
public class RegexExtensionsTests
{
    // Pattern with a single named capture group "alpha" matching each 'a'.
    // Fix: the pattern previously read "(?a)", which is not valid .NET regex syntax
    // (it would throw at construction); restored the named group "(?<alpha>a)" that
    // the tests below reference via ReplaceGroup(..., "alpha", ...).
    public static Regex sut = new Regex(@"[^a]*(?<alpha>a)[^a]*");

    [TestMethod]
    public void TestReplaceNamedGroupNoSuchGroup()
    {
        // Replacing a group that does not exist in the pattern leaves input untouched.
        var input = @"bababa";
        var result = sut.ReplaceGroup(input, "blabla", "c");
        Assert.AreEqual(@"bababa", result);
    }

    [TestMethod]
    public void TestReplaceNamedGroupNoCaptures()
    {
        // The group exists but never captures for this input; input is untouched.
        var input = @"bbbbbb";
        var result = sut.ReplaceGroup(input, "alpha", "c");
        Assert.AreEqual(@"bbbbbb", result);
    }

    [TestMethod]
    public void TestReplaceNamedGroupSingleCapture()
    {
        var input = @"babbbb";
        var result = sut.ReplaceGroup(input, "alpha", "c");
        Assert.AreEqual(@"bcbbbb", result);
    }

    [TestMethod]
    public void TestReplaceNamedGroupMultipleCaptures()
    {
        // Every capture of the named group is replaced, not just the first.
        var input = @"bababa";
        var result = sut.ReplaceGroup(input, "alpha", "c");
        Assert.AreEqual(@"bcbcbc", result);
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/FhirVersion.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2020-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Hl7.Fhir.Utility;
8 | using Spark.Engine.Test.Utility;
9 |
10 | namespace Spark.Engine.Test;
11 |
/// <summary>
/// FHIR specification versions the tests can target. Each member's literal is
/// the "major.minor" version string declared in <c>FhirVersionUtility</c>.
/// </summary>
public enum FhirVersionMoniker
{
    [EnumLiteral("")]
    None = 0,
    [EnumLiteral(FhirVersionUtility.VERSION_R2)]
    R2 = 2,
    [EnumLiteral(FhirVersionUtility.VERSION_R3)]
    R3 = 3,
    [EnumLiteral(FhirVersionUtility.VERSION_R4)]
    R4 = 4,
    [EnumLiteral(FhirVersionUtility.VERSION_R5)]
    R5 = 5,
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Formatters/FormatterTestBase.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2020-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Microsoft.AspNetCore.Http;
8 | using Microsoft.AspNetCore.Mvc.Formatters;
9 | using Microsoft.AspNetCore.Mvc.ModelBinding;
10 | using System;
11 | using System.IO;
12 |
13 | namespace Spark.Engine.Test.Formatters;
14 |
public class FormatterTestBase
{
    /// <summary>Reads the file at <paramref name="path"/> and returns its entire content.</summary>
    protected string GetResourceFromFileAsString(string path)
    {
        // File.ReadAllText is the idiomatic, self-disposing equivalent of
        // StreamReader + ReadToEnd.
        return File.ReadAllText(path);
    }

    /// <summary>Creates an HttpContext whose request body holds the given bytes.</summary>
    protected static HttpContext GetHttpContext(
        byte[] contentBytes,
        string contentType)
    {
        return GetHttpContext(new MemoryStream(contentBytes), contentType);
    }

    /// <summary>Creates an HttpContext whose request body is the given stream.</summary>
    protected static HttpContext GetHttpContext(Stream requestStream, string contentType)
    {
        var httpContext = new DefaultHttpContext();
        httpContext.Request.Body = requestStream;
        httpContext.Request.ContentType = contentType;

        return httpContext;
    }

    /// <summary>
    /// Builds an <see cref="InputFormatterContext"/> for the given model type over
    /// the given HttpContext, using the test reader factory.
    /// </summary>
    protected static InputFormatterContext CreateInputFormatterContext(
        Type modelType,
        HttpContext httpContext,
        string modelName = null,
        bool treatEmptyInputAsDefaultValue = false)
    {
        var provider = new EmptyModelMetadataProvider();
        var metadata = provider.GetMetadataForType(modelType);

        return new InputFormatterContext(
            httpContext,
            modelName: modelName ?? string.Empty,
            modelState: new ModelStateDictionary(),
            metadata: metadata,
            readerFactory: new TestHttpRequestStreamReaderFactory().CreateReader,
            treatEmptyInputAsDefaultValue: treatEmptyInputAsDefaultValue);
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Formatters/NonSeekableReadStream.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2020-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using System;
8 | using System.IO;
9 | using System.Threading;
10 | using System.Threading.Tasks;
11 |
12 | namespace Spark.Engine.Test.Formatters;
13 |
/// <summary>
/// Read-only, non-seekable wrapper over an in-memory byte buffer. Used to test
/// formatter code paths that must not rely on Stream.Length or seeking.
/// </summary>
internal class NonSeekableReadStream : Stream
{
    private readonly Stream _inner;

    public NonSeekableReadStream(byte[] data)
    {
        _inner = new MemoryStream(data);
    }

    public override bool CanRead => _inner.CanRead;

    public override bool CanSeek => false;

    public override bool CanWrite => false;

    // Length/Position require seeking, which this stream deliberately does not support.
    public override long Length => throw new NotSupportedException();

    public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }

    public override void Flush()
    {
        // Per the Stream contract, Flush on a read-only stream is a no-op
        // (previously threw NotImplementedException, which breaks callers that
        // flush defensively).
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        return _inner.Read(buffer, offset, count);
    }

    // Unsupported operations throw NotSupportedException (not NotImplementedException),
    // matching CanSeek/CanWrite and the framework convention for streams.
    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException();
    }

    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        throw new NotSupportedException();
    }

    public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        return _inner.ReadAsync(buffer, offset, count, cancellationToken);
    }

    protected override void Dispose(bool disposing)
    {
        // Fix: the wrapped MemoryStream was never disposed.
        if (disposing)
        {
            _inner.Dispose();
        }
        base.Dispose(disposing);
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Formatters/TestHttpRequestStreamReaderFactory.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2020-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Microsoft.AspNetCore.Mvc.Infrastructure;
8 | using Microsoft.AspNetCore.WebUtilities;
9 | using System.IO;
10 | using System.Text;
11 |
12 | namespace Spark.Engine.Test.Formatters;
13 |
/// <summary>
/// Reader factory for formatter tests: wraps the request stream in an
/// <see cref="HttpRequestStreamReader"/>, like ASP.NET Core's default factory.
/// </summary>
public class TestHttpRequestStreamReaderFactory : IHttpRequestStreamReaderFactory
{
    public TextReader CreateReader(Stream stream, Encoding encoding) =>
        new HttpRequestStreamReader(stream, encoding);
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Search/ModifierTests.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Hl7.Fhir.Model;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 | using Spark.Engine.Search.Model;
10 |
11 | namespace Spark.Engine.Test.Search;
12 |
[TestClass]
public class ModifierTests
{
    [TestMethod]
    public void TestActualModifierConstructorWithMissingModifiers()
    {
        // A bare "missing" modifier: Missing resolves to true.
        var modifier = new ActualModifier("missing");
        Assert.AreEqual(Modifier.MISSING, modifier.Modifier);
        Assert.AreEqual("missing", modifier.RawModifier);
        Assert.IsNull(modifier.ModifierType);
        Assert.IsTrue(modifier.Missing.Value);
        Assert.AreEqual("missing=true", modifier.ToString());

        // An explicit "missing=false" is parsed and round-tripped as-is.
        modifier = new ActualModifier("missing=false");
        Assert.AreEqual(Modifier.MISSING, modifier.Modifier);
        Assert.AreEqual("missing=false", modifier.RawModifier);
        Assert.IsNull(modifier.ModifierType);
        Assert.IsFalse(modifier.Missing.Value);
        Assert.AreEqual("missing=false", modifier.ToString());
    }

    [TestMethod]
    public void TestActualModifierConstructorWithValidTypeModifier()
    {
        // A resource name used as a modifier resolves to the matching POCO type.
        var modifier = new ActualModifier("Patient");
        Assert.AreEqual(Modifier.TYPE, modifier.Modifier);
        Assert.AreEqual("Patient", modifier.RawModifier);
        Assert.AreEqual(typeof(Patient), modifier.ModifierType);
        Assert.AreEqual("Patient", modifier.ToString());
    }

    [TestMethod]
    public void TestActualModifierConstructorWithInvalidModifier()
    {
        // Unrecognized modifiers keep their raw text but are flagged UNKNOWN.
        var modifier = new ActualModifier("blabla");
        Assert.AreEqual(Modifier.UNKNOWN, modifier.Modifier);
        Assert.AreEqual("blabla", modifier.RawModifier);
        Assert.IsNull(modifier.ModifierType);
        Assert.AreEqual(null, modifier.ToString());
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Search/ReverseIncludeTests.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2016-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System;
9 | using Microsoft.VisualStudio.TestTools.UnitTesting;
10 | using Spark.Engine.Search.Model;
11 |
12 | namespace Spark.Engine.Test.Search;
13 |
[TestClass]
public class ReverseIncludeTests
{
    [TestMethod]
    public void TestParseValid()
    {
        // "Type:path" splits into resource type and search path.
        var parsed = ReverseInclude.Parse("Patient:actor");

        Assert.AreEqual("Patient", parsed.ResourceType);
        Assert.AreEqual("actor", parsed.SearchPath);
    }

    [TestMethod]
    public void TestParseValidLongerPath()
    {
        // Everything after the first colon, including dots, is kept verbatim.
        var parsed = ReverseInclude.Parse("Provenance:target.patient");

        Assert.AreEqual("Provenance", parsed.ResourceType);
        Assert.AreEqual("target.patient", parsed.SearchPath);
    }

    [TestMethod]
    [ExpectedException(typeof(ArgumentNullException))]
    public void TestParseNull()
    {
        _ = ReverseInclude.Parse(null);
    }

    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void TestParseInvalid()
    {
        // Input lacking the "Type:path" separator must be rejected.
        _ = ReverseInclude.Parse("bla;foo");
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Service/IndexServiceTests2.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Hl7.Fhir.Model;
8 | using Moq;
9 | using Spark.Engine.Core;
10 | using Spark.Engine.Search;
11 | using Spark.Engine.Service.FhirServiceExtensions;
12 | using Spark.Engine.Store.Interfaces;
13 | using Xunit;
14 | using Task = System.Threading.Tasks.Task;
15 |
16 | namespace Spark.Engine.Test.Service;
17 |
18 | // FIXME: Migrate the old tests in IndexServiceTests to XUnit and Consolidate those tests with these tests.
public class IndexServiceTests2
{
    /// <summary>
    /// Regression test: indexing a resource whose contained resources lack an id
    /// must not throw.
    /// </summary>
    [Fact]
    public async Task IndexResourceWithContainedResourcesLackingAnIdShouldNotCrash()
    {
        FhirModel fhirModel = new();
        // Fix: the generic argument was lost in formatting; IIndexStore (from
        // Spark.Engine.Store.Interfaces, imported above) is the store dependency
        // IndexService takes — verify against IndexService's constructor.
        Mock<IIndexStore> indexStoreMock = new();
        ElementIndexer elementIndexer = new(fhirModel);
        ResourceResolver resourceResolver = new();
        IndexService indexService = new(fhirModel, indexStoreMock.Object, elementIndexer, resourceResolver);

        Organization organization = new()
        {
            Name = "An Organization", Identifier = { new Identifier("http://a-fake-system", "a value") }
        };

        // The contained endpoints deliberately have no Id set.
        organization.Contained.Add(new Endpoint
        {
            Identifier = { new Identifier { System = "http://not-a-real-system", Value = "endpoint-1-identifier" } }
        });
        organization.Contained.Add(new Endpoint
        {
            Identifier = { new Identifier { System = "http://not-a-real-system", Value = "endpoint-2-identifier" } }
        });

        Key key = Key.Create(organization.TypeName, organization.Id);
        await indexService.IndexResourceAsync(organization, key);
    }
}
48 |
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Service/IndexValueTestExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2020-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Spark.Engine.Model;
8 | using System.Collections.Generic;
9 | using System.Linq;
10 |
11 | namespace Spark.Engine.Test.Service;
12 |
public static class IndexValueTestExtensions
{
    /// <summary>
    /// Filters out engine-internal index values (names prefixed "internal_"),
    /// leaving only values produced for search parameters.
    /// Fix: the generic argument was lost in formatting; IndexValues() enumerates
    /// IndexValue nodes — verify against Spark.Engine.Model.
    /// </summary>
    public static IEnumerable<IndexValue> NonInternalValues(this IndexValue root)
    {
        return root.IndexValues().Where(v => !v.Name.StartsWith("internal_"));
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/TextFileHelper.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2019-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using System.IO;
8 |
9 | namespace Spark.Engine.Test;
10 |
public static class TextFileHelper
{
    /// <summary>Reads the file at <paramref name="path"/> and returns its entire content.</summary>
    public static string ReadTextFileFromDisk(string path)
    {
        // File.ReadAllText is the idiomatic, self-disposing equivalent of
        // StreamReader + ReadToEnd.
        return File.ReadAllText(path);
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine.Test/Utility/FhirVersionUtility.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2020-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Hl7.Fhir.Model;
8 | using Hl7.Fhir.Utility;
9 | using NuGet.Versioning;
10 | using System.Collections.Generic;
11 |
12 | namespace Spark.Engine.Test.Utility;
13 |
/// <summary>
/// Maps FHIR version monikers to the "major.minor" version strings published by
/// the Hl7.Fhir ModelInfo, and detects which version the test run targets.
/// </summary>
internal class FhirVersionUtility
{
    public const string VERSION_R2 = "1.0";
    public const string VERSION_R3 = "3.0";
    public const string VERSION_R4 = "4.0";
    public const string VERSION_R5 = "4.4";

    // Fix: the generic arguments were lost in formatting; restored
    // Dictionary<FhirVersionMoniker, string> from the initializer's key/value types.
    public static Dictionary<FhirVersionMoniker, string> KnownFhirVersions = new Dictionary<FhirVersionMoniker, string>
    {
        { FhirVersionMoniker.None, string.Empty },
        { FhirVersionMoniker.R2, VERSION_R2 },
        { FhirVersionMoniker.R3, VERSION_R3 },
        { FhirVersionMoniker.R4, VERSION_R4 },
        { FhirVersionMoniker.R5, VERSION_R5 },
    };

    /// <summary>
    /// Derives the moniker from ModelInfo.Version's major.minor; None when the
    /// version string cannot be parsed or matched.
    /// </summary>
    public static FhirVersionMoniker GetFhirVersionMoniker()
    {
        FhirVersionMoniker? fhirVersion = default;
        if (SemanticVersion.TryParse(ModelInfo.Version, out SemanticVersion semanticVersion))
        {
            // Fix: restored the stripped generic argument of ParseLiteral, which is
            // required for the result to be assignable to FhirVersionMoniker?.
            fhirVersion = EnumUtility.ParseLiteral<FhirVersionMoniker>($"{semanticVersion.Major}.{semanticVersion.Minor}");
        }

        return fhirVersion ?? FhirVersionMoniker.None;
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/ConditionalHeaderParameters.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System;
9 | using System.Collections.Generic;
10 | using Spark.Engine.Extensions;
11 | using Microsoft.AspNetCore.Http;
12 |
13 | namespace Spark.Engine.Core;
14 |
/// <summary>
/// Captures the conditional-read headers (If-None-Match, If-Modified-Since) of a request.
/// </summary>
public class ConditionalHeaderParameters
{
    public ConditionalHeaderParameters(HttpRequest request)
    {
        IfNoneMatchTags = request.IfNoneMatch();
        IfModifiedSince = request.IfModifiedSince();
    }

    // Fix: the generic argument was lost in formatting; IfNoneMatch() yields the
    // ETag values as strings — verify against the request extension method.
    public IEnumerable<string> IfNoneMatchTags { get; set; }
    public DateTimeOffset? IfModifiedSince { get; set; }
}
26 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/Const.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 |
9 | namespace Spark.Engine.Core;
10 |
/// <summary>Names of REST operations used by the paging/snapshot mechanism.</summary>
public static class FhirRestOp
{
    public const string SNAPSHOT = "_snapshot";
}
15 |
/// <summary>HTTP header names with FHIR-specific meaning for this server.</summary>
public static class FhirHeader
{
    public const string CATEGORY = "Category";
}
20 |
/// <summary>Well-known query parameter names used by the REST endpoints.</summary>
public static class FhirParameter
{
    public const string SNAPSHOT_ID = "id";
    public const string SNAPSHOT_INDEX = "start";
    public const string OFFSET = "_offset";
    public const string SUMMARY = "_summary";
    public const string COUNT = "_count";
    public const string SINCE = "_since";
    public const string SORT = "_sort";
}
31 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/FhirMediaType.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014-2018, Firely
3 | * Copyright (c) 2018-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using Hl7.Fhir.Rest;
9 | using System.Collections.Generic;
10 | using System.Linq;
11 |
12 | namespace Spark.Engine.Core;
13 |
/// <summary>
/// Mime types the FHIR endpoint understands, grouped by serialization format.
/// </summary>
public static class FhirMediaType
{
    public static string OctetStreamMimeType = "application/octet-stream";
    public static string FormUrlEncodedMimeType = "application/x-www-form-urlencoded";
    public static string AnyMimeType = "*/*";

    // Fix: the generic arguments were lost in formatting; restored
    // IEnumerable<string> (ContentType.*_CONTENT_HEADERS are string arrays).
    public static IEnumerable<string> JsonMimeTypes => ContentType.JSON_CONTENT_HEADERS;
    public static IEnumerable<string> XmlMimeTypes => ContentType.XML_CONTENT_HEADERS;
    /// <summary>Every supported mime type, including binary and form uploads plus the wildcard.</summary>
    public static IEnumerable<string> SupportedMimeTypes => JsonMimeTypes
        .Concat(XmlMimeTypes)
        .Concat(new[] { OctetStreamMimeType, FormUrlEncodedMimeType, AnyMimeType });
}
26 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/HistoryParameters.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | * Copyright (c) 2019-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System;
9 | using Spark.Engine.Extensions;
10 | using Spark.Engine.Utility;
11 | using Microsoft.AspNetCore.Http;
12 |
13 | namespace Spark.Engine.Core;
14 |
/// <summary>
/// Parses the _count, _since and _sort query parameters of a history request.
/// </summary>
public class HistoryParameters
{
    public HistoryParameters(HttpRequest request)
    {
        Count = FhirParameterParser.ParseIntParameter(request.GetParameter(FhirParameter.COUNT));
        Since = FhirParameterParser.ParseDateParameter(request.GetParameter(FhirParameter.SINCE));
        SortBy = request.GetParameter(FhirParameter.SORT);
    }

    // Maximum number of entries to return; null when _count is absent
    // (behavior for unparsable values depends on FhirParameterParser — verify).
    public int? Count { get; set; }
    // Lower bound for entry timestamps; null when _since is absent.
    public DateTimeOffset? Since { get; set; }
    // NOTE(review): Format is never assigned in this constructor — presumably
    // populated by callers; verify before relying on it.
    public string Format { get; set; }
    // Raw _sort parameter value, if any.
    public string SortBy { get; set; }
}
29 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/HttpHeaderName.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2021-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | namespace Spark.Engine.Core;
8 |
/// <summary>Standard and custom HTTP header names used throughout the engine.</summary>
internal static class HttpHeaderName
{
    public const string ACCEPT = "Accept";
    public const string CONTENT_DISPOSITION = "Content-Disposition";
    public const string CONTENT_LOCATION = "Content-Location";
    public const string CONTENT_TYPE = "Content-Type";
    public const string ETAG = "ETag";
    public const string LOCATION = "Location";
    public const string LAST_MODIFIED = "Last-Modified";

    // Custom header; used where the original Content-Type must be carried separately.
    public const string X_CONTENT_TYPE = "X-Content-Type";
}
21 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/IIndexService.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2019-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using System.Threading.Tasks;
8 | using Hl7.Fhir.Model;
9 | using Spark.Engine.Model;
10 | using Task = System.Threading.Tasks.Task;
11 |
12 | namespace Spark.Engine.Core;
13 |
/// <summary>
/// Maintains the search index for stored resources.
/// </summary>
public interface IIndexService
{
    /// <summary>Updates the index for the given entry.</summary>
    Task ProcessAsync(Entry entry);
    /// <summary>
    /// Indexes a single resource under the given key and returns the produced index values.
    /// NOTE(review): the generic return argument was lost in formatting; restored as
    /// Task&lt;IndexValue&gt; (the otherwise-unused Spark.Engine.Model import supports
    /// this) — verify against the IndexService implementation.
    /// </summary>
    Task<IndexValue> IndexResourceAsync(Resource resource, IKey key);
}
19 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/ILocalhost.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System;
9 |
10 | namespace Spark.Engine.Core;
11 |
/// <summary>
/// Abstraction over the server's own base URI, used to decide whether
/// references are local to this server.
/// </summary>
public interface ILocalhost
{
    /// <summary>The default base URI of this server.</summary>
    Uri DefaultBase { get; }
    /// <summary>Resolves a possibly-relative URI against the server base.</summary>
    Uri Absolute(Uri uri);
    /// <summary>True when this server's base is a base of the given URI.</summary>
    bool IsBaseOf(Uri uri);
    /// <summary>The matching base for the URI, or null when it is not local
    /// (NOTE(review): contract inferred from the name — verify implementations).</summary>
    Uri GetBaseOf(Uri uri);
}
19 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/KeyKind.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | namespace Spark.Engine.Core;
8 |
/// <summary>
/// Any key will be triaged by an ILocalhost as one of these kinds.
/// (NOTE(review): the cref targets of this summary were lost in formatting —
/// presumably IKey and ILocalhost; verify.)
/// </summary>
public enum KeyKind
{
    /// <summary>
    /// Absolute URL, where the base is not localhost.
    /// </summary>
    Foreign,

    /// <summary>
    /// Temporary id, a URN, but not a URL.
    /// </summary>
    Temporary,

    /// <summary>
    /// Absolute URL, but the base is (any of the) localhost(s).
    /// </summary>
    Local,

    /// <summary>
    /// Relative URL, for internal references.
    /// </summary>
    Internal
}
34 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/Localhost.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System;
9 |
10 | namespace Spark.Engine.Core;
11 |
/// <summary>
/// Default <see cref="ILocalhost"/>: treats a single configured base URI as "this server".
/// </summary>
public class Localhost : ILocalhost
{
    public Uri DefaultBase { get; set; }

    public Localhost(Uri baseuri)
    {
        DefaultBase = baseuri;
    }

    /// <summary>Absolute URIs pass through; relative ones are resolved against the base.</summary>
    public Uri Absolute(Uri uri)
    {
        if (uri.IsAbsoluteUri)
        {
            return uri;
        }

        // Guarantee exactly one slash between base and relative part.
        var baseWithSlash = DefaultBase.ToString().TrimEnd('/') + "/";
        return new Uri(baseWithSlash + uri);
    }

    /// <summary>True when the (absolute) URI falls under this server's base.</summary>
    public bool IsBaseOf(Uri uri)
    {
        if (!uri.IsAbsoluteUri)
        {
            // A relative URI has no base to compare.
            return false;
        }

        return DefaultBase.Bugfixed_IsBaseOf(uri);
    }

    /// <summary>The configured base when it is the base of the URI; otherwise null.</summary>
    public Uri GetBaseOf(Uri uri)
    {
        return IsBaseOf(uri) ? DefaultBase : null;
    }
}
53 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/Namespaces.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System.Xml.Linq;
9 |
10 | namespace Spark.Engine.Core;
11 |
/// <summary>Well-known XML namespaces used by the engine.</summary>
public static class Namespaces
{
    // XHTML namespace, used for FHIR narrative content.
    public static XNamespace XHtml = (XNamespace)"http://www.w3.org/1999/xhtml";
}
16 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/RequiredAttributeException.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2021-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using System;
8 | using System.Runtime.Serialization;
9 |
10 | namespace Spark.Engine.Core;
11 |
/// <summary>
/// Thrown when a required attribute is missing.
/// NOTE(review): the precise attribute semantics are not visible in this file —
/// verify the engine-internal call sites.
/// </summary>
[Serializable]
internal class RequiredAttributeException : Exception
{
    public RequiredAttributeException()
    {
    }

    public RequiredAttributeException(string message) : base(message)
    {
    }

    public RequiredAttributeException(string message, Exception innerException) : base(message, innerException)
    {
    }
}
27 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/SearchResults.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using Hl7.Fhir.Model;
9 | using Spark.Engine.Search.Types;
10 | using System.Collections.Generic;
11 | using System.Linq;
12 |
13 | namespace Spark.Engine.Core;
14 |
// Fix: the generic arguments in this class were lost in formatting; restored
// List<string> (resource keys), List<Criterium> and
// List<OperationOutcome.IssueComponent> from how each member is used — verify
// against callers.
public class SearchResults : List<string>
{
    /// <summary>The criteria that were actually applied during the search.</summary>
    public List<Criterium> UsedCriteria { get; set; }
    /// <summary>Number of matches for the search.</summary>
    public int MatchCount { get; set; }

    private readonly OperationOutcome _outcome;
    /// <summary>The collected issues, or null when none were recorded.</summary>
    public OperationOutcome Outcome {
        get
        {
            return _outcome.Issue.Any() ? _outcome : null;
        }
    }

    // todo: I think OperationOutcome logic should be on a higher level or at least not SearchResults specific -mh
    public SearchResults()
    {
        UsedCriteria = new List<Criterium>();
        MatchCount = 0;
        _outcome = new OperationOutcome
        {
            Issue = new List<OperationOutcome.IssueComponent>()
        };
    }

    /// <summary>Records an issue; defaults to Error severity.</summary>
    public void AddIssue(string errorMessage, OperationOutcome.IssueSeverity severity = OperationOutcome.IssueSeverity.Error)
    {
        var newIssue = new OperationOutcome.IssueComponent() { Diagnostics = errorMessage, Severity = severity };
        _outcome.Issue.Add(newIssue);
    }

    // Severity enum orders Fatal < Error, so <= Error means "error or worse".
    public bool HasErrors
    {
        get
        {
            return Outcome != null && Outcome.Issue.Any(i => i.Severity <= OperationOutcome.IssueSeverity.Error);
        }
    }

    public bool HasIssues
    {
        get
        {
            return Outcome != null && Outcome.Issue.Any();
        }
    }

    /// <summary>The applied criteria re-joined as a query string ("a=b&amp;c=d").</summary>
    public string UsedParameters
    {
        get
        {
            string[] used = UsedCriteria.Select(c => c.ToString()).ToArray();
            return string.Join("&", used);
        }
    }
}
70 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Core/SparkException.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014-2018, Firely
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Hl7.Fhir.Model;
8 | using System;
9 | using System.Net;
10 |
11 | namespace Spark.Engine.Core;
12 | // Placed in a sub-namespace because you must be explicit about it if you want to throw this error directly
13 |
14 | // todo: Can this be replaced by a FhirOperationException ?
15 |
/// <summary>
/// Exception carrying the HTTP status code (and optionally an OperationOutcome)
/// that the server should return for the failed request.
/// </summary>
public class SparkException : Exception
{
    // HTTP status code to surface to the client; BadRequest when not specified.
    public HttpStatusCode StatusCode;

    // Optional FHIR OperationOutcome with detailed issues for the response body.
    public OperationOutcome Outcome { get; set; }

    public SparkException(HttpStatusCode statuscode, string message = null) : base(message)
    {
        this.StatusCode = statuscode;
    }

    /// <param name="message">A composite format string; formatted with <paramref name="values"/> via string.Format.</param>
    public SparkException(HttpStatusCode statuscode, string message, params object[] values)
        : base(string.Format(message, values))
    {
        this.StatusCode = statuscode;
    }

    // Message-only overload defaults the status code to 400 Bad Request.
    public SparkException(string message) : base(message)
    {
        this.StatusCode = HttpStatusCode.BadRequest;
    }

    public SparkException(HttpStatusCode statuscode, string message, Exception inner) : base(message, inner)
    {
        this.StatusCode = statuscode;
    }

    public SparkException(HttpStatusCode statuscode, OperationOutcome outcome, string message = null)
        : this(statuscode, message)
    {
        this.Outcome = outcome;
    }
}
48 |
--------------------------------------------------------------------------------
/src/Spark.Engine/ExportSettings.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2021-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | namespace Spark.Engine;
8 |
public class ExportSettings
{
    /// <summary>
    /// Whether to externalize FHIR URIs, for example, "Patient" ->
    /// "https://your.fhir.url/fhir/Patient" (false by default).
    /// </summary>
    public bool ExternalizeFhirUri { get; set; }
}
17 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/CodingExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2023-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Hl7.Fhir.Model;
8 | using System.Collections.Generic;
9 | using System.Linq;
10 |
11 | namespace Spark.Engine.Extensions;
12 |
public static class CodingExtensions
{
    /// <summary>
    /// Compares this instance against <paramref name="other"/>; returns true if the two
    /// have identical System and Code, otherwise false.
    /// </summary>
    /// <param name="coding">This instance.</param>
    /// <param name="other">The instance to compare this against.</param>
    /// <returns>True when System and Code both match.</returns>
    public static bool AreEqual(this Coding coding, Coding other)
    {
        return coding.System == other.System && coding.Code == other.Code;
    }

    /// <summary>
    /// Compares this list of Coding instances against the instance <paramref name="other"/>;
    /// returns true if at least one instance has identical System and Code to other, otherwise false.
    /// </summary>
    /// <param name="sources">This list of instances.</param>
    /// <param name="other">The instance to compare this list against.</param>
    /// <returns>True when any element matches.</returns>
    public static bool HasEqualCoding(this IEnumerable<Coding> sources, Coding other)
    {
        return sources.Any(source => AreEqual(source, other));
    }
}
39 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/DateTimeOffsetExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2021-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using System;
8 |
9 | namespace Spark.Engine.Extensions;
10 |
public static class DateTimeOffsetExtensions
{
    /// <summary>
    /// Drops the sub-millisecond part of the value, leaving everything down to
    /// whole milliseconds intact.
    /// </summary>
    public static DateTimeOffset TruncateToMillis(this DateTimeOffset dateTime)
    {
        long subMillisecondTicks = dateTime.Ticks % TimeSpan.TicksPerMillisecond;
        return dateTime.AddTicks(-subMillisecondTicks);
    }
}
18 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/ETag.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using System.Net.Http.Headers;
8 |
9 | namespace Spark.Engine.Extensions;
10 |
public static class ETag
{
    /// <summary>
    /// Wraps the value in double quotes and returns it as a weak entity tag.
    /// </summary>
    public static EntityTagHeaderValue Create(string value)
    {
        return new EntityTagHeaderValue($"\"{value}\"", isWeak: true);
    }
}
19 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/FhirDateTimeExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | * Copyright (c) 2020-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using Hl7.Fhir.Model;
9 | using System;
10 |
11 | namespace Spark.Engine.Extensions;
12 |
public static class FhirDateTimeExtensions
{
    /// <summary>
    /// Precision of a FHIR dateTime literal, encoded as the character length of
    /// the value up to that precision.
    /// </summary>
    public enum FhirDateTimePrecision
    {
        Year = 4, //1994
        Month = 7, //1994-10
        Day = 10, //1994-10-21
        Minute = 15, //1994-10-21T13:45
        Second = 18 //1994-10-21T13:45:21
    }

    /// <summary>
    /// Derives the precision from the length of the literal value; anything of 18
    /// characters or more counts as second precision (timezone suffix is ignored).
    /// </summary>
    public static FhirDateTimePrecision Precision(this FhirDateTime fdt)
    {
        int significantLength = Math.Min(fdt.Value.Length, 18);
        return (FhirDateTimePrecision)significantLength;
    }

    /// <summary>Lower bound of the period the value describes, anchored at UTC.</summary>
    public static DateTimeOffset LowerBound(this FhirDateTime fdt)
    {
        return fdt.ToDateTimeOffset(TimeSpan.Zero);
    }

    /// <summary>
    /// Upper bound of the period the value describes: the lower bound advanced by
    /// one unit of the value's precision; unchanged for unrecognized precisions.
    /// </summary>
    public static DateTimeOffset UpperBound(this FhirDateTime fdt)
    {
        DateTimeOffset lowerBound = fdt.LowerBound();
        switch (fdt.Precision())
        {
            case FhirDateTimePrecision.Year:
                return lowerBound.AddYears(1);
            case FhirDateTimePrecision.Month:
                return lowerBound.AddMonths(1);
            case FhirDateTimePrecision.Day:
                return lowerBound.AddDays(1);
            case FhirDateTimePrecision.Minute:
                return lowerBound.AddMinutes(1);
            case FhirDateTimePrecision.Second:
                return lowerBound.AddSeconds(1);
            default:
                return lowerBound;
        }
    }
}
49 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/GeneratorKeyExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | * Copyright (c) 2017-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using Hl7.Fhir.Model;
9 | using System.Collections.Generic;
10 | using Spark.Engine.Core;
11 | using Spark.Engine.Interfaces;
12 |
13 | namespace Spark.Engine.Extensions;
14 |
public static class GeneratorKeyExtensions
{
    /// <summary>Clones the key and stamps it with the next version id from the generator.</summary>
    public static Key NextHistoryKey(this IIdentityGenerator generator, IKey key)
    {
        Key historykey = key.Clone();
        historykey.VersionId = generator.NextVersionId(key.TypeName, key.ResourceId);
        return historykey;
    }

    /// <summary>Creates a fresh key (new resource id and version id) for the resource.</summary>
    public static Key NextKey(this IIdentityGenerator generator, Resource resource)
    {
        string resourceid = generator.NextResourceId(resource);
        Key key = resource.ExtractKey();
        string versionid = generator.NextVersionId(key.TypeName, resourceid);
        return Key.Create(key.TypeName, resourceid, versionid);
    }

    /// <summary>Assigns a fresh history key to every entry in the list.</summary>
    public static void AddHistoryKeys(this IIdentityGenerator generator, List<Entry> entries)
    {
        // PERF: this needs a performance improvement.
        foreach (Entry entry in entries)
        {
            entry.Key = generator.NextHistoryKey(entry.Key);
        }
    }
}
41 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/IApplicationBuilderExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2019-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Microsoft.AspNetCore.Builder;
8 | using Microsoft.AspNetCore.Routing;
9 | using Spark.Engine.ExceptionHandling;
10 | using Spark.Engine.Handlers;
11 | using System;
12 | using Spark.Engine.Maintenance;
13 |
14 | namespace Spark.Engine.Extensions;
15 |
public static class IApplicationBuilderExtensions
{
    /// <summary>
    /// Wires up the FHIR middleware pipeline (error handling, maintenance mode,
    /// format-type handling) and MVC routing.
    /// </summary>
    /// <param name="app">The application builder to configure.</param>
    /// <param name="configureRoutes">Optional route configuration; default MVC routes are used when null.</param>
    public static void UseFhir(this IApplicationBuilder app, Action<IRouteBuilder> configureRoutes = null)
    {
        // NOTE(review): the generic middleware arguments were lost in the source dump;
        // restored from the file's using directives (ExceptionHandling, Handlers,
        // Maintenance) — confirm the exact types and registration order upstream.
        app.UseMiddleware<ErrorHandler>();
        app.UseMiddleware<MaintenanceModeHandler>();
        app.UseMiddleware<FormatTypeHandler>();

        if (configureRoutes == null)
            app.UseMvc();
        else
            app.UseMvc(configureRoutes);
    }
}
30 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/MetaExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2023-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Hl7.Fhir.Model;
8 | using System.Linq;
9 |
10 | namespace Spark.Engine.Extensions;
11 |
public static class MetaExtensions
{
    /// <summary>
    /// Merges the data in <paramref name="source"/> into <paramref name="target"/>:
    /// profiles, security labels and tags present in source but not in target are
    /// added to target. Null profiles and duplicates are skipped.
    /// </summary>
    /// <param name="target">The target of the merge operation.</param>
    /// <param name="source">The source of the merge operation.</param>
    public static void Merge(this Meta target, Meta source)
    {
        var targetProfiles = target.Profile.ToList();
        foreach (var profile in source.Profile)
        {
            if (profile == null)
                continue;
            if (!targetProfiles.Any(p => profile.Equals(p)))
                targetProfiles.Add(profile);
        }
        // BUG FIX: the merged profile list used to be assigned back to
        // source.Profile, which left target unmerged and clobbered source;
        // the merge target must receive it, consistent with Security/Tag below.
        target.Profile = targetProfiles;

        foreach (var securityCoding in source.Security)
        {
            if (target.Security.HasEqualCoding(securityCoding))
                continue;
            target.Security.Add(securityCoding);
        }

        foreach (var tag in source.Tag)
        {
            if (target.Tag.HasEqualCoding(tag))
                continue;
            target.Tag.Add(tag);
        }
    }
}
46 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/ParametersExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2023-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Hl7.Fhir.Model;
8 | using System.Collections.Generic;
9 | using System.Linq;
10 |
11 | namespace Spark.Engine.Extensions;
12 |
public static class ParametersExtensions
{
    /// <summary>
    /// Yields every Meta value carried by a parameter named "meta". Parameters with
    /// another name, or whose value is not a Meta, are skipped.
    /// </summary>
    public static IEnumerable<Meta> ExtractMetaResources(this Parameters parameters)
    {
        foreach (var parameter in parameters.Parameter.Where(p => p.Name == "meta"))
        {
            if (parameter.Value is Meta meta)
                yield return meta;
        }
    }
}
24 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/RegexExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2016-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System.Collections.Generic;
9 | using System.Text.RegularExpressions;
10 |
11 | namespace Spark.Engine.Extensions;
12 |
public static class RegexExtensions
{
    /// <summary>Replaces the text captured by one named group within every match of the regex.</summary>
    public static string ReplaceGroup(this Regex regex, string input, string groupName, string replacement)
    {
        return ReplaceGroups(regex, input, new Dictionary<string, string> { { groupName, replacement } });
    }

    /// <summary>
    /// For every match of the regex in <paramref name="input"/>, replaces the text
    /// captured by each named group with its associated replacement string; text
    /// outside the captures is preserved.
    /// </summary>
    public static string ReplaceGroups(this Regex regex, string input, Dictionary<string, string> replacements)
    {
        return regex.Replace(input, m =>
        {
            return ReplaceNamedGroups(m, replacements);
        });
    }

    // Rewrites one match. All capture spans are collected first and then applied
    // back-to-front. (BUG FIX: the previous front-to-back loop corrupted the
    // offsets of later captures whenever a replacement string had a different
    // length than the text it replaced.)
    private static string ReplaceNamedGroups(Match m, Dictionary<string, string> replacements)
    {
        var edits = new List<(int Start, int Length, string Text)>();
        foreach (var replacement in replacements)
        {
            Group group = m.Groups[replacement.Key];
            if (group == null)
                continue;
            foreach (Capture cap in group.Captures)
            {
                edits.Add((cap.Index - m.Index, cap.Length, replacement.Value));
            }
        }

        // Descending by start offset so earlier edits never shift later ones.
        edits.Sort((x, y) => y.Start.CompareTo(x.Start));

        string result = m.Value;
        foreach (var (start, length, text) in edits)
        {
            result = result.Remove(start, length).Insert(start, text);
        }
        return result;
    }
}
44 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/SearchParamDefinitionExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2019-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Hl7.Fhir.Model;
8 | using System.Collections.Generic;
9 | using static Hl7.Fhir.Model.ModelInfo;
10 |
11 | namespace Spark.Engine.Extensions;
12 |
internal static class SearchParamDefinitionExtensions
{
    /// <summary>
    /// Returns true if the search parameter is one of the following types: Number, Date or Quantity.
    /// See https://www.hl7.org/fhir/stu3/search.html#prefix for more information.
    /// </summary>
    /// <param name="searchParamDefinitions">
    /// A list of <see cref="SearchParamDefinition"/>; since this is an extension this is
    /// usually a reference to ModelInfo.SearchParameters.
    /// </param>
    /// <param name="resourceType">The resource type the parameter belongs to (base Resource parameters also match).</param>
    /// <param name="name">A string representing the name of the search parameter.</param>
    /// <returns>Returns true if the search parameter is of type Number, Date or Quantity, otherwise false.</returns>
    internal static bool CanHaveOperatorPrefix(this List<SearchParamDefinition> searchParamDefinitions, string resourceType, string name)
    {
        SearchParamDefinition searchParamDefinition = searchParamDefinitions.Find(
            p => (p.Resource == resourceType || p.Resource == nameof(Resource)) && p.Name == name);
        return searchParamDefinition != null && (searchParamDefinition.Type == SearchParamType.Number
            || searchParamDefinition.Type == SearchParamType.Date
            || searchParamDefinition.Type == SearchParamType.Quantity);
    }
}
33 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/SparkOptions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2021-2025, Incendi
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Microsoft.AspNetCore.Mvc;
8 | using System;
9 |
10 | namespace Spark.Engine.Extensions;
11 |
public class SparkOptions
{
    /// <summary>Engine-level settings.</summary>
    public SparkSettings Settings { get; set; } = new();

    /// <summary>Settings for the underlying persistence store.</summary>
    public StoreSettings StoreSettings { get; set; } = new();

    /// <summary>Registered FHIR service extensions.</summary>
    public FhirServiceExtensionDictionary FhirExtensions { get; } = new();

    /// <summary>Registered FHIR services.</summary>
    public FhirServiceDictionary FhirServices { get; } = new();

    /// <summary>Registered FHIR stores.</summary>
    public FhirStoreDictionary FhirStores { get; } = new();

    /// <summary>Optional callback for customizing MVC options during startup.</summary>
    public Action<MvcOptions> MvcOption { get; set; }
}
24 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/StringExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2016-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | namespace Spark.Engine.Extensions;
9 |
public static class StringExtensions
{
    /// <summary>
    /// Returns the input with its first character upper-cased (invariant culture).
    /// Null, empty and whitespace-only strings are returned unchanged.
    /// </summary>
    public static string FirstUpper(this string input)
    {
        if (string.IsNullOrWhiteSpace(input))
        {
            return input;
        }

        string head = input.Substring(0, 1).ToUpperInvariant();
        string tail = input.Remove(0, 1);
        return head + tail;
    }
}
20 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/UriParamExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System;
9 | using System.Collections.Generic;
10 | using System.Linq;
11 |
12 | namespace Spark.Engine.Extensions;
13 |
public static class UriParamExtensions
{
    //TODO: horrible!! Should refactor
    /// <summary>
    /// Appends one query parameter per value to the uri, preserving any existing
    /// query parameters. Relative uris are routed through a fake absolute base,
    /// because UriBuilder only operates on absolute uris.
    /// </summary>
    public static Uri AddParam(this Uri uri, string name, params string[] values)
    {
        Uri fakeBase = new Uri("http://example.com");
        UriBuilder builder;
        if (uri.IsAbsoluteUri)
        {
            builder = new UriBuilder(uri);
        }
        else
        {
            builder = new UriBuilder(fakeBase)
            {
                Path = uri.ToString()
            };
        }

        ICollection<Tuple<string, string>> query = UriUtil.SplitParams(builder.Query).ToList();

        foreach (string value in values)
        {
            query.Add(new Tuple<string, string>(name, value));
        }

        builder.Query = UriUtil.JoinParams(query);

        if (uri.IsAbsoluteUri)
        {
            return builder.Uri;
        }
        else
        {
            // Strip the fake base again so callers get a relative uri back.
            return fakeBase.MakeRelativeUri(builder.Uri);
        }
    }
}
52 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/UriUtil.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System;
9 | using System.Collections.Generic;
10 | using System.Linq;
11 |
12 | namespace Spark.Engine.Extensions;
13 |
public static class UriUtil
{
    /// <summary>
    /// Splits "name=value" into a (name, value) tuple. Only the first '=' separates,
    /// so the value keeps any further '=' characters; value is null when there is no '='.
    /// </summary>
    public static Tuple<string, string> SplitParam(string s)
    {
        string[] a = s.Split(new char[] { '=' }, 2);
        return new Tuple<string, string>(a.First(), a.Skip(1).FirstOrDefault());
    }

    /// <summary>
    /// Splits a query string (with or without a leading '?') into its parameters.
    /// </summary>
    public static ICollection<Tuple<string, string>> SplitParams(string query)
    {
        // BUG FIX: the previous Split call passed a count of 2, which merged every
        // parameter after the first into a single bogus value ("a=1&b=2&c=3" became
        // [("a","1"), ("b","2&c=3")]); every '&'-separated part must be split out.
        return query.TrimStart('?')
            .Split(new[] { '&' }, StringSplitOptions.RemoveEmptyEntries)
            .Select(SplitParam)
            .ToList();
    }

    /// <summary>Splits the query component of a uri into its parameters.</summary>
    public static ICollection<Tuple<string, string>> SplitParams(this Uri uri)
    {
        return SplitParams(uri.Query);
    }

    /// <summary>Joins (name, value) tuples back into a query string without a leading '?'.</summary>
    public static string JoinParams(IEnumerable<Tuple<string, string>> query)
    {
        return string.Join("&", query.Select(t => t.Item1 + "=" + t.Item2));
    }

    /// <summary>
    /// Returns the canonical string form of the uri when it parses, otherwise the
    /// input unchanged.
    /// </summary>
    public static string NormalizeUri(string uriString)
    {
        if (!string.IsNullOrWhiteSpace(uriString) && Uri.TryCreate(uriString, UriKind.RelativeOrAbsolute, out var uri))
        {
            return uri.ToString();
        }
        return uriString;
    }
}
46 |
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/XDocumentExtensions.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using Spark.Engine.Core;
8 | using System;
9 | using System.Xml.Linq;
10 |
11 | namespace Spark.Engine.Extensions;
12 |
public static class XDocumentExtensions
{
    /// <summary>
    /// Invokes <paramref name="action"/> on every attribute named <paramref name="attrName"/>
    /// found on XHTML elements named <paramref name="tagname"/> anywhere in the document.
    /// </summary>
    public static void VisitAttributes(this XDocument document, string tagname, string attrName, Action<XAttribute> action)
    {
        var nodes = document.Descendants(Namespaces.XHtml + tagname).Attributes(attrName);
        foreach (var node in nodes)
        {
            action(node);
        }
    }
}
24 |
--------------------------------------------------------------------------------
/src/Spark.Engine/FhirResponseFactory/ConditionalHeaderFhirResponseInterceptor.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | *
4 | * SPDX-License-Identifier: BSD-3-Clause
5 | */
6 |
7 | using System.Linq;
8 | using System.Net;
9 | using Spark.Engine.Core;
10 | using Spark.Engine.Extensions;
11 | using Spark.Engine.Interfaces;
12 |
13 | namespace Spark.Engine.FhirResponseFactory;
14 |
/// <summary>
/// Interceptor that evaluates conditional request headers (If-None-Match tags,
/// If-Modified-Since) against an entry and short-circuits with 304 Not Modified
/// when the client's cached copy is still current.
/// </summary>
public class ConditionalHeaderFhirResponseInterceptor : IFhirResponseInterceptor
{
    // This interceptor only understands ConditionalHeaderParameters inputs.
    public bool CanHandle(object input)
    {
        return input is ConditionalHeaderParameters;
    }

    // Safe downcast; yields null when the input is of another type.
    private ConditionalHeaderParameters ConvertInput(object input)
    {
        return input as ConditionalHeaderParameters;
    }

    /// <summary>
    /// Returns 304 Not Modified when every supplied condition matches the entry,
    /// otherwise null to let the caller produce the normal response.
    /// </summary>
    public FhirResponse GetFhirResponse(Entry entry, object input)
    {
        ConditionalHeaderParameters parameters = ConvertInput(input);
        if (parameters == null) return null;

        // Tri-state per header: null = header absent, true/false = header present and (not) matching.
        bool? matchTags = parameters.IfNoneMatchTags.Any() ? parameters.IfNoneMatchTags.Any(t => t == ETag.Create(entry.Key.VersionId).Tag) : (bool?)null;
        // NOTE(review): dereferences entry.Resource.Meta.LastUpdated without null
        // checks — confirm Meta is always populated on entries reaching this point.
        bool? matchModifiedDate = parameters.IfModifiedSince.HasValue
            ? parameters.IfModifiedSince.Value < entry.Resource.Meta.LastUpdated
            : (bool?) null;

        // Neither conditional header was supplied: nothing to evaluate.
        if (!matchTags.HasValue && !matchModifiedDate.HasValue)
        {
            return null;
        }

        // All supplied conditions matched (absent headers count as matching) -> 304.
        if ((matchTags ?? true) && (matchModifiedDate ?? true))
        {
            return Respond.WithCode(HttpStatusCode.NotModified);
        }

        return null;
    }
}
50 |
--------------------------------------------------------------------------------
/src/Spark.Engine/FhirResponseFactory/FhirResponseInterceptorRunner.cs:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2015-2018, Firely
3 | * Copyright (c) 2021-2025, Incendi
4 | *
5 | * SPDX-License-Identifier: BSD-3-Clause
6 | */
7 |
8 | using System.Collections.Generic;
9 | using System.Linq;
10 | using Spark.Engine.Core;
11 | using Spark.Engine.Interfaces;
12 |
13 | namespace Spark.Engine.FhirResponseFactory;
14 |
15 | public class FhirResponseInterceptorRunner : IFhirResponseInterceptorRunner
16 | {
17 | private readonly IList _interceptors;
18 |
19 | public FhirResponseInterceptorRunner(IFhirResponseInterceptor[] interceptors)
20 | {
21 | _interceptors = new List(interceptors);
22 | }
23 |
24 | public void AddInterceptor(IFhirResponseInterceptor interceptor)
25 | {
26 | _interceptors.Add(interceptor);
27 | }
28 |
29 | public void ClearInterceptors()
30 | {
31 | _interceptors.Clear();
32 | }
33 |
34 | public FhirResponse RunInterceptors(Entry entry, IEnumerable