├── .docker ├── docker-compose.example.yml └── linux │ ├── Mongo.Dockerfile │ ├── Spark.Dockerfile │ ├── mongorestore.sh │ └── r4.archive.gz ├── .dockerignore ├── .editorconfig ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── dependabot.yml └── workflows │ ├── docker_image_linux.yml │ ├── integration_tests.yml │ ├── nuget_deploy.yml │ └── run_tests.yaml ├── .gitignore ├── CONTRIBUTORS.md ├── Directory.Build.props ├── Documentation ├── .gitignore ├── Architecture.md ├── Contribute.md ├── InstallingSpark.md ├── MigrateFromv1Tov2.md ├── MigrateFromv2Tov3.md ├── Performance.md ├── Quickstart.md ├── RunningSparkInDocker.md ├── UsingSpark.md ├── index.md └── originals │ ├── FHIR_MMSparkDeployment1-1.docx │ ├── FHIR_MMSparkDeployment1-1.pdf │ ├── Overview Spark Search.vsdx │ ├── SparkComponentOverview_0-1.pdf │ └── SparkComponentOverview_0-1.vsdx ├── LICENSE ├── README.md ├── SECURITY.md ├── Spark.sln ├── incendi-logo-128x128.png ├── scripts ├── CreateIndexes.bat ├── CreateIndexes.js ├── RemoveDuplicateId.bat └── RemoveDuplicateId.js ├── src ├── Spark-Legacy │ └── Examples │ │ ├── DSTU2 │ │ └── examples.zip │ │ ├── R4 │ │ └── examples.zip │ │ └── STU3 │ │ └── examples.zip ├── Spark.Engine.Test │ ├── Auxiliary │ │ └── LimitedStreamTests.cs │ ├── Core │ │ ├── Builders │ │ │ └── CapabilityStatementBuilderTests.cs │ │ ├── ElementQueryTests.cs │ │ └── FhirModelTests.cs │ ├── Examples │ │ ├── appointment-example2doctors.json │ │ ├── careplan-example-f201-renal.json │ │ ├── observation-example-bloodpressure.json │ │ └── patient-example.json │ ├── Extensions │ │ ├── OperationOutcomeExtensionsTests.cs │ │ ├── RegexExtensionsTests.cs │ │ └── SearchParameterExtensionsTests.cs │ ├── FhirVersion.cs │ ├── Formatters │ │ ├── FormatterTestBase.cs │ │ ├── NonSeekableReadStream.cs │ │ ├── ResourceJsonInputFormatterTests.cs │ │ ├── ResourceXmlInputFormatterTests.cs │ │ └── TestHttpRequestStreamReaderFactory.cs │ ├── Maintenance │ │ └── 
MaintenanceModeTests.cs │ ├── Search │ │ ├── CriteriumTests.cs │ │ ├── ElementIndexerTests.cs │ │ ├── ModifierTests.cs │ │ └── ReverseIncludeTests.cs │ ├── Service │ │ ├── IndexServiceTests.cs │ │ ├── IndexServiceTests2.cs │ │ ├── IndexValueTestExtensions.cs │ │ └── PatchServiceTests.cs │ ├── Spark.Engine.Test.csproj │ ├── TestData │ │ ├── R2 │ │ │ ├── patient-example.json │ │ │ └── patient-example.xml │ │ ├── R3 │ │ │ ├── patient-example.json │ │ │ └── patient-example.xml │ │ └── R4 │ │ │ ├── patient-example.json │ │ │ └── patient-example.xml │ ├── TextFileHelper.cs │ └── Utility │ │ ├── FhirPathUtilTests.cs │ │ └── FhirVersionUtility.cs ├── Spark.Engine │ ├── Auxiliary │ │ ├── LimitedStream.cs │ │ └── ResourceVisitor.cs │ ├── Core │ │ ├── CapabilityStatementBuilder.cs │ │ ├── ConditionalHeaderParameters.cs │ │ ├── Const.cs │ │ ├── ElementQuery.cs │ │ ├── Error.cs │ │ ├── FhirMediaType.cs │ │ ├── FhirModel.cs │ │ ├── HistoryParameters.cs │ │ ├── HttpHeaderName.cs │ │ ├── IFhirModel.cs │ │ ├── IIndexService.cs │ │ ├── ILocalhost.cs │ │ ├── Interaction.cs │ │ ├── Key.cs │ │ ├── KeyKind.cs │ │ ├── Localhost.cs │ │ ├── LocalhostExtensions.cs │ │ ├── MessagingComponentBuilder.cs │ │ ├── Namespaces.cs │ │ ├── RequiredAttributeException.cs │ │ ├── ResourceComponentBuilder.cs │ │ ├── Respond.cs │ │ ├── Response.cs │ │ ├── RestComponentBuilder.cs │ │ ├── SearchResults.cs │ │ ├── Snapshot.cs │ │ ├── SparkException.cs │ │ └── UriHelper.cs │ ├── ExceptionHandling │ │ └── ErrorHandler.cs │ ├── ExportSettings.cs │ ├── Extensions │ │ ├── BundleExtensions.cs │ │ ├── CodingExtensions.cs │ │ ├── DateTimeOffsetExtensions.cs │ │ ├── ETag.cs │ │ ├── FhirDateTimeExtensions.cs │ │ ├── FhirServiceDictionary.cs │ │ ├── FhirServiceExtensionDictionary.cs │ │ ├── FhirStoreDictionary.cs │ │ ├── GeneratorKeyExtensions.cs │ │ ├── HttpContextExtensions.cs │ │ ├── HttpHeadersExtensions.cs │ │ ├── HttpRequestFhirExtensions.cs │ │ ├── IApplicationBuilderExtensions.cs │ │ ├── 
IServiceCollectionExtensions.cs │ │ ├── InteractionExtensions.cs │ │ ├── KeyExtensions.cs │ │ ├── MetaExtensions.cs │ │ ├── OperationOutcomeExtensions.cs │ │ ├── ParametersExtensions.cs │ │ ├── QuantityExtensions.cs │ │ ├── RegexExtensions.cs │ │ ├── SearchParamDefinitionExtensions.cs │ │ ├── SearchParameterExtensions.cs │ │ ├── SparkOptions.cs │ │ ├── StringExtensions.cs │ │ ├── UriParamExtensions.cs │ │ ├── UriUtil.cs │ │ └── XDocumentExtensions.cs │ ├── FhirResponseFactory │ │ ├── ConditionalHeaderFhirResponseInterceptor.cs │ │ ├── FhirResponseFactory.cs │ │ ├── FhirResponseInterceptorRunner.cs │ │ └── IFhirResponseFactory.cs │ ├── Filters │ │ ├── GzipCompressedContent.cs │ │ ├── GzipContent.cs │ │ └── UnsupportedMediaTypeFilter.cs │ ├── Formatters │ │ ├── AsyncResourceJsonInputFormatter.cs │ │ ├── AsyncResourceJsonOutputFormatter.cs │ │ ├── AsyncResourceXmlInputFormatter.cs │ │ ├── AsyncResourceXmlOutputFormatter.cs │ │ ├── BinaryInputFormatter.cs │ │ ├── BinaryOutputFormatter.cs │ │ ├── FhirOutputFormatterSelector.cs │ │ ├── JsonArrayPool.cs │ │ ├── ResourceJsonInputFormatter.cs │ │ ├── ResourceJsonOutputFormatter.cs │ │ ├── ResourceXmlInputFormatter.cs │ │ └── ResourceXmlOutputFormatter.cs │ ├── Handlers │ │ └── FormatTypeHandler.cs │ ├── IO │ │ └── NonDisposableStream.cs │ ├── IndexSettings.cs │ ├── Interfaces │ │ ├── IFhirIndex.cs │ │ ├── IFhirResponseInterceptor.cs │ │ ├── IFhirResponseInterceptorRunner.cs │ │ ├── IFhirStoreAdministration.cs │ │ └── IIdentityGenerator.cs │ ├── Logging │ │ └── SparkEngineEventSource.cs │ ├── Maintenance │ │ ├── MaintenanceLock.cs │ │ ├── MaintenanceLockMode.cs │ │ ├── MaintenanceMode.cs │ │ ├── MaintenanceModeEnabledException.cs │ │ └── MaintenanceModeHandler.cs │ ├── Model │ │ ├── CompartmentInfo.cs │ │ ├── IndexEntry.cs │ │ └── SparkModelInfo.cs │ ├── Properties │ │ └── AssemblyInfo.cs │ ├── Search │ │ ├── ElementIndexer.cs │ │ ├── IReferenceNormalizationService.cs │ │ ├── Model │ │ │ ├── IndexFieldNames.cs │ │ │ ├── 
Modifier.cs │ │ │ └── ReverseInclude.cs │ │ ├── ReferenceNormalizationService.cs │ │ ├── ResourceResolver.cs │ │ ├── SearchSettings.cs │ │ ├── Support │ │ │ ├── IPositionInfo.cs │ │ │ ├── Logging.cs │ │ │ └── StringExtensions.cs │ │ └── Types │ │ │ ├── ChoiceValue.cs │ │ │ ├── CompositeValue.cs │ │ │ ├── Criterium.cs │ │ │ ├── DateTimeValue.cs │ │ │ ├── DateValue.cs │ │ │ ├── Expression.cs │ │ │ ├── NumberValue.cs │ │ │ ├── QuantityValue.cs │ │ │ ├── ReferenceValue.cs │ │ │ ├── StringValue.cs │ │ │ ├── TokenValue.cs │ │ │ ├── UntypedValue.cs │ │ │ └── ValueExpression.cs │ ├── Service │ │ ├── Abstractions │ │ │ └── FhirServiceBase.cs │ │ ├── Export.cs │ │ ├── FhirService.cs │ │ ├── FhirServiceExtensions │ │ │ ├── ConformanceBuilder.cs │ │ │ ├── ConformanceService.cs │ │ │ ├── DeleteManipulationOperation.cs │ │ │ ├── ElementNavFhirExtensionsNew.cs │ │ │ ├── GetManipulationOperation.cs │ │ │ ├── HistoryService.cs │ │ │ ├── ICapabilityStatementService.cs │ │ │ ├── IFhirServiceExtension.cs │ │ │ ├── IHistoryService.cs │ │ │ ├── IIndexBuildProgressReporter.cs │ │ │ ├── IIndexRebuildService.cs │ │ │ ├── IInteractionHandler.cs │ │ │ ├── IPagingService.cs │ │ │ ├── IPatchService.cs │ │ │ ├── IQueryService.cs │ │ │ ├── IResourceStorageService.cs │ │ │ ├── ISearchService.cs │ │ │ ├── ISnapshotPagination.cs │ │ │ ├── ISnapshotPaginationCalculator.cs │ │ │ ├── ISnapshotPaginationProvider.cs │ │ │ ├── ITransactionService.cs │ │ │ ├── IndexRebuildService.cs │ │ │ ├── IndexService.cs │ │ │ ├── PagingService.cs │ │ │ ├── PatchManipulationOperation.cs │ │ │ ├── PatchService.cs │ │ │ ├── PostManipulationOperation.cs │ │ │ ├── PutManipulationOperation.cs │ │ │ ├── ResourceManipulationOperation.cs │ │ │ ├── ResourceManipulationOperationFactory.cs │ │ │ ├── ResourceStorageService.cs │ │ │ ├── SearchService.cs │ │ │ ├── SnapshotPaginationCalculator.cs │ │ │ ├── SnapshotPaginationProvider.cs │ │ │ ├── SnapshotPaginationService.cs │ │ │ └── TransactionService.cs │ │ ├── 
ICompositeServiceListener.cs │ │ ├── IFhirService.cs │ │ ├── IServiceListener.cs │ │ ├── ITransfer.cs │ │ ├── Import.cs │ │ ├── KeyMapper.cs │ │ ├── ServiceListener.cs │ │ ├── Transfer.cs │ │ └── Validate.cs │ ├── Spark.Engine.csproj │ ├── SparkSettings.cs │ ├── Store │ │ ├── ExtendableWith.cs │ │ └── Interfaces │ │ │ ├── IExtendableWith.cs │ │ │ ├── IFhirStore.cs │ │ │ ├── IFhirStorePagedReader.cs │ │ │ ├── IHistoryStore.cs │ │ │ ├── IIndexStore.cs │ │ │ └── ISnapshotStore.cs │ ├── StoreSettings.cs │ └── Utility │ │ ├── FhirParameter.cs │ │ └── FhirPathUtil.cs ├── Spark.Mongo.Tests │ ├── Indexer │ │ └── MongoIndexMapperTest.cs │ ├── Search │ │ ├── BsonSerializationProvider.cs │ │ └── CriteriumQueryBuilderTests.cs │ └── Spark.Mongo.Tests.csproj ├── Spark.Mongo │ ├── Extensions │ │ └── IServiceCollectionExtensions.cs │ ├── Properties │ │ └── AssemblyInfo.cs │ ├── Search │ │ ├── Common │ │ │ ├── Arguments.cs │ │ │ ├── Config.cs │ │ │ ├── Definitions.cs │ │ │ └── DefinitionsFactory.cs │ │ ├── Indexer │ │ │ ├── BsonIndexDocument.cs │ │ │ ├── BsonIndexDocumentBuilder.cs │ │ │ └── MongoIndexMapper.cs │ │ ├── Infrastructure │ │ │ ├── MongoDatabaseFactory.cs │ │ │ ├── MongoFhirIndex.cs │ │ │ └── MongoIndexStore.cs │ │ ├── Searcher │ │ │ ├── CriteriaMongoExtensions.cs │ │ │ ├── ITerm.cs │ │ │ └── MongoSearcher.cs │ │ └── Utils │ │ │ ├── Soundex.cs │ │ │ └── UnitsOfMeasureHelper.cs │ ├── Spark.Mongo.csproj │ ├── SparkMongoEventSource.cs │ └── Store │ │ ├── BsonHelper.cs │ │ ├── Constants.cs │ │ ├── Extensions │ │ └── HistoryStore.cs │ │ ├── GuidIdentityGenerator.cs │ │ ├── MongoCollectionPageResult.cs │ │ ├── MongoFhirStore.cs │ │ ├── MongoFhirStorePagedReader.cs │ │ ├── MongoIdGenerator.cs │ │ ├── MongoSnapshotStore.cs │ │ └── MongoStoreAdministration.cs └── Spark.Web │ ├── .gitignore │ ├── ClientApp │ ├── js │ │ └── main.js │ ├── package-lock.json │ └── package.json │ ├── Controllers │ ├── AdminController.cs │ ├── FhirController.cs │ ├── HomeController.cs │ └── 
ResourcesController.cs │ ├── Data │ ├── ApplicationDbContext.cs │ └── ApplicationDbInitializer.cs │ ├── Examples │ ├── DSTU2 │ │ └── .gitkeep │ ├── R4 │ │ └── .gitkeep │ └── STU3 │ │ └── .gitkeep │ ├── Hubs │ ├── HubContextProgressNotifier.cs │ ├── ImportProgressMessage.cs │ └── MaintenanceHub.cs │ ├── Migrations │ ├── 20191101194423_Init.Designer.cs │ ├── 20191101194423_Init.cs │ └── ApplicationDbContextModelSnapshot.cs │ ├── Models │ ├── Config │ │ └── ExamplesSettings.cs │ └── ErrorViewModel.cs │ ├── Program.cs │ ├── Properties │ ├── .gitignore │ └── launchSettings.json │ ├── README.md │ ├── Services │ ├── ResourceStat.cs │ └── ServerMetadata.cs │ ├── Spark.Web.csproj │ ├── Startup.cs │ ├── Utilities │ └── FhirFileImport.cs │ ├── Views │ ├── Admin │ │ ├── Index.cshtml │ │ └── Maintenance.cshtml │ ├── Home │ │ ├── Index.cshtml │ │ └── Privacy.cshtml │ ├── Resources │ │ └── Index.cshtml │ ├── Shared │ │ ├── Error.cshtml │ │ ├── _AdminNavbar.cshtml │ │ ├── _CookieConsentPartial.cshtml │ │ ├── _Layout.cshtml │ │ ├── _LoginPartial.cshtml │ │ ├── _ValidationScriptsPartial.cshtml │ │ └── _ViewImports.cshtml │ └── _ViewStart.cshtml │ ├── appsettings.Development.json │ ├── appsettings.json │ ├── libman.json │ └── wwwroot │ └── assets │ ├── css │ └── main.css │ ├── images │ └── 150.png │ └── js │ └── main.js └── tests └── integration-tests ├── .gitignore ├── combine-test-results.sh ├── docker-compose.yml └── readme.md /.docker/docker-compose.example.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | spark: 4 | container_name: spark 5 | restart: always 6 | image: sparkfhir/spark:r4-latest 7 | environment: 8 | - StoreSettings__ConnectionString=mongodb://root:CosmicTopSecret@mongodb:27017/spark?authSource=admin 9 | - SparkSettings__Endpoint=http://localhost:5555/fhir 10 | ports: 11 | - "5555:80" 12 | depends_on: 13 | - mongodb 14 | mongodb: 15 | container_name: mongodb 16 | image: sparkfhir/mongo:r4-latest 
17 | environment: 18 | MONGO_INITDB_ROOT_USERNAME: root 19 | MONGO_INITDB_ROOT_PASSWORD: CosmicTopSecret 20 | ports: 21 | - "17017:27017" 22 | volumes: 23 | - r4-latest-develop-data-volume:/data/db 24 | volumes: 25 | r4-latest-develop-data-volume: 26 | -------------------------------------------------------------------------------- /.docker/linux/Mongo.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mongo 2 | COPY .docker/linux/r4.archive.gz /home/ 3 | COPY .docker/linux/mongorestore.sh /docker-entrypoint-initdb.d/ 4 | -------------------------------------------------------------------------------- /.docker/linux/Spark.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/dotnet/aspnet:9.0-alpine AS base 2 | RUN apk add --no-cache icu-libs 3 | ENV DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=false 4 | WORKDIR /app 5 | ENV ASPNETCORE_URLS=http://+:80 6 | 7 | FROM mcr.microsoft.com/dotnet/sdk:9.0-noble AS build 8 | WORKDIR /src 9 | COPY ["./Directory.Build.props", "../Directory.Build.props"] 10 | COPY ["./src/Spark.Web/Spark.Web.csproj", "Spark.Web/Spark.Web.csproj"] 11 | COPY ["./src/Spark.Engine/Spark.Engine.csproj", "Spark.Engine/Spark.Engine.csproj"] 12 | COPY ["./src/Spark.Mongo/Spark.Mongo.csproj", "Spark.Mongo/Spark.Mongo.csproj"] 13 | RUN dotnet restore "/src/Spark.Web/Spark.Web.csproj" 14 | COPY ./src . 15 | RUN dotnet build "/src/Spark.Web/Spark.Web.csproj" -c Release -o /app 16 | 17 | FROM build AS publish 18 | RUN dotnet publish "/src/Spark.Web/Spark.Web.csproj" -c Release -o /app 19 | 20 | FROM base AS final 21 | WORKDIR /app 22 | COPY --from=publish /app . 
23 | 24 | ENTRYPOINT ["dotnet", "Spark.Web.dll"] 25 | -------------------------------------------------------------------------------- /.docker/linux/mongorestore.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mongorestore --drop --archive=/home/r4.archive.gz --gzip 3 | -------------------------------------------------------------------------------- /.docker/linux/r4.archive.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/.docker/linux/r4.archive.gz -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .vs/ 2 | .idea/ 3 | .github/ -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Spark version** 27 | - Version: [e.g. 1.5.9] 28 | 29 | **Operating system + Database** 30 | - OS: [e.g. Linux] 31 | - Database: [e.g. MongoDB] 32 | 33 | **Container service / Cloud infrastructure:** 34 | - Container service: [e.g. Docker swarm] 35 | - Cloud provider: [e.g. Google Cloud] 36 | - Cloud infrastructure: [e.g. 
VM, Web Service, etc] 37 | - Database as a service: [e.g. CosmosDB w/ MongoDB API] 38 | 39 | **Additional context** 40 | Add any other context about the problem here. 41 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: nuget 4 | directory: "/" 5 | groups: 6 | nuget-patch-updates: 7 | patterns: 8 | - "*" 9 | update-types: 10 | - "patch" 11 | nuget-minor-updates: 12 | patterns: 13 | - "*" 14 | update-types: 15 | - "minor" 16 | nuget-major-updates: 17 | patterns: 18 | - "*" 19 | update-types: 20 | - "major" 21 | schedule: 22 | interval: daily 23 | - package-ecosystem: github-actions 24 | directory: "/" 25 | schedule: 26 | interval: daily 27 | -------------------------------------------------------------------------------- /.github/workflows/docker_image_linux.yml: -------------------------------------------------------------------------------- 1 | name: Docker Release 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | build: 9 | if: github.repository == 'FirelyTeam/spark' 10 | 11 | runs-on: ubuntu-24.04 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Login to DockerHub Registry 15 | run: echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u ${{ secrets.DOCKERHUB_USERNAME }} --password-stdin 16 | - name: Get the version 17 | id: vars 18 | run: echo ::set-output name=tag::$(echo ${GITHUB_REF:10}) 19 | - name: Build the tagged Spark Docker image 20 | run: docker build . --file .docker/linux/Spark.Dockerfile 21 | -t ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark:${{steps.vars.outputs.tag}} 22 | -t ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark:r4-latest 23 | -t ${{ secrets.DOCKERHUB_ORGANIZATION_2 }}/spark:${{steps.vars.outputs.tag}} 24 | -t ${{ secrets.DOCKERHUB_ORGANIZATION_2 }}/spark:r4-latest 25 | - name: Push the tagged Spark Docker image 26 | run: docker push --all-tags ${{ secrets.DOCKERHUB_ORGANIZATION }}/spark 27 | - name: Build the tagged Mongo Docker image 28 | run: docker build . 
--file .docker/linux/Mongo.Dockerfile 29 | -t ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo:${{steps.vars.outputs.tag}} 30 | -t ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo:r4-latest 31 | -t ${{ secrets.DOCKERHUB_ORGANIZATION_2 }}/mongo:${{steps.vars.outputs.tag}} 32 | -t ${{ secrets.DOCKERHUB_ORGANIZATION_2 }}/mongo:r4-latest 33 | - name: Push the tagged Mongo Docker image 34 | run: docker push --all-tags ${{ secrets.DOCKERHUB_ORGANIZATION }}/mongo 35 | -------------------------------------------------------------------------------- /.github/workflows/nuget_deploy.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | build: 9 | if: github.repository == 'FirelyTeam/spark' 10 | 11 | runs-on: windows-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Setup .NET Core 15 | uses: actions/setup-dotnet@v4 16 | with: 17 | dotnet-version: 9.0.x 18 | - name: Build 19 | run: | 20 | dotnet pack "./src/Spark.Engine/Spark.Engine.csproj" -c Release 21 | dotnet pack "./src/Spark.Mongo/Spark.Mongo.csproj" -c Release 22 | - name: Deploy Nuget Packages 23 | run: dotnet nuget push .\src\**\*.nupkg 24 | --api-key ${{ secrets.NUGET_API_KEY }} 25 | --skip-duplicate 26 | --source https://api.nuget.org/v3/index.json 27 | --no-symbols 28 | -------------------------------------------------------------------------------- /.github/workflows/run_tests.yaml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - 'r4/master' 7 | pull_request: 8 | 9 | jobs: 10 | build: 11 | if: github.repository == 'FirelyTeam/spark' 12 | 13 | runs-on: ${{ matrix.platform }} 14 | strategy: 15 | matrix: 16 | platform: [ windows-latest, ubuntu-24.04, macos-14, macos-14-large ] 17 | 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Setup .NET Core 21 | uses: actions/setup-dotnet@v4 22 | with: 23 | 
dotnet-version: 9.0.x 24 | - name: Build with dotnet 25 | run: dotnet build src/Spark.Web/Spark.Web.csproj -c Release 26 | - name: Unit tests 27 | run: | 28 | dotnet test "./src/Spark.Engine.Test/Spark.Engine.Test.csproj" 29 | dotnet test "./src/Spark.Mongo.Tests/Spark.Mongo.Tests.csproj" 30 | -------------------------------------------------------------------------------- /CONTRIBUTORS.md: -------------------------------------------------------------------------------- 1 | Spark contributors 2 | ================== 3 | * [Ewout Kramer](https://github.com/ewoutkramer) - [Furore](https://github.com/firelyteam) 4 | 5 | * [Christiaan Knaap](https://github.com/cknaap) - [Furore](https://github.com/firelyteam) 6 | 7 | * [Martijn Harthoorn](https://github.com/mharthoorn) - [Furore](https://github.com/firelyteam) 8 | 9 | * Corina Ciocanea - [Furore](https://github.com/firelyteam) 10 | 11 | * [Tony Abell](https://github.com/TonyAbell) - CornerStone 12 | 13 | * [Richard Schneider](https://github.com/richardschneider) - Orion Health 14 | Richard added GZip support 15 | 16 | * [Brian Postlethwaite](https://github.com/brianpos) 17 | Brian made the resources look fresh in the UI. 
18 | 19 | * [Kenneth Myhra](https://github.com/kennethmyhra) - [Incendi](https://github.com/incendilabs/) 20 | 21 | * [Ole Kristian Losvik](https://github.com/losolio) 22 | 23 | * [Andrew Anisimov](https://github.com/andy-a-o) 24 | -------------------------------------------------------------------------------- /Directory.Build.props: -------------------------------------------------------------------------------- 1 | 2 | 3 | 2.3.3 4 | 5 | 6 | Firely, Incendi and contributors 7 | Firely and Incendi 8 | Copyright © Firely 2014-2018, © Incendi 2018-2025 9 | https://github.com/firelyteam/spark 10 | LICENSE 11 | images\incendi-logo-128x128.png 12 | README.md 13 | true 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /Documentation/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (http://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # Typescript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | .eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # dotenv environment variables file 55 | .env 56 
| 57 | # gatsby files 58 | .cache/ 59 | public 60 | 61 | # Mac files 62 | .DS_Store 63 | 64 | # Yarn 65 | yarn-error.log 66 | .pnp/ 67 | .pnp.js 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # Docz 72 | .Docz -------------------------------------------------------------------------------- /Documentation/Contribute.md: -------------------------------------------------------------------------------- 1 | # Contributions 2 | You are welcome to contribute to this project. The Spark server is used in several commercial and open source projects. Therefore we have a high quality standard and we carefully review submissions. 3 | 4 | When you want to contribute changes: 5 | - Contact us by opening an issue before you start working on a major change. 6 | - Fork and open a pull request 7 | 8 | ### Pull requests 9 | When you send us a pull request 10 | - Make sure it builds 11 | - Make sure it's tested 12 | - The pull request should cover only one change 13 | - Accept that we might reject it because it conflicts with our strategy. 14 | 15 | We do appreciate suggestions, but the Spark FHIR server code is used by us for commercial projects, so we will most probably reject substantial changes unless you coordinate them with us first. 16 | 17 | ### GIT branching strategy 18 | Branch from the `r4/master` branch which contains the R4 FHIR version, unless the feature or bug fix is considered for a specific version of FHIR, in which case branch from the relevant branch which at this point is `stu3/master`. 19 | 20 | See [GitHub flow](https://guides.github.com/introduction/flow/) for more information. 21 | -------------------------------------------------------------------------------- /Documentation/InstallingSpark.md: -------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | ## Database 4 | 5 | Spark FHIR server supports MongoDB as its persistence layer. The following options are supported: 6 | 7 | - MongoDB Atlas. 
Sign up for a [free trial account](https://www.mongodb.com/download-center) and run your database in the cloud. 8 | - MongoDB Community Server. [Download](https://www.mongodb.com/download-center/community) and install locally 9 | - MongoDB Enterprise. [Download](https://www.mongodb.com/download-center/enterprise) and install locally. 10 | - MongoDB in docker. Check out the example [docker-compose.example.yml](../.docker/docker-compose.example.yml). 11 | 12 | ## CosmosDB with MongoDB as a database 13 | In general we do not recommend using CosmosDB. There are known installations using CosmosDB with MongoDB API which run fairly well, but it has not been without problems. 14 | 15 | ## Install Spark 16 | 17 | The core packages `Spark.Engine` and `Spark.Mongo` target `net8.0` and `net9.0`. For the web application you may choose between: 18 | 19 | ## Reference Implementations 20 | The reference implementations are only meant as examples and must never be used out of the box in a production environment without adding, at a minimum, security features. 21 | 22 | - `Spark.Web` which runs on ASP.Net 9.0. 23 | -------------------------------------------------------------------------------- /Documentation/Performance.md: -------------------------------------------------------------------------------- 1 | # Performance 2 | 3 | We have no performance figures of Spark yet. Spark is already being used in production scenarios, so it is fit for real use. If you have measured performance of Spark yourself, please share the results! 4 | 5 | In the near future we will develop performance tests for Spark to give you an idea whether or how it will fit your performance needs. 6 | 7 | If you are concerned Spark will not handle your load as fast as you would like, consider the following possibilities for spreading the load. 8 | 9 | * If you are FHIR-enabling multiple source systems, you could provide every system with its own FHIR front-end, implemented by Spark. 
Instead of feeding data from all the source systems into one instance of Spark. You will however need a way to route requests to the correct instance of Spark. 10 | 11 | * If there is a logical attribute in your data to split the whole set into multiple sets, you could deploy Spark multiple times, each one on a 'shard' of the data. You will need a way to route requests to the correct instance of Spark, based on the chosen attribute. 12 | 13 | * MongoDB supports sharding, as described in the [MongoDB documentation](https://docs.mongodb.com/manual/sharding/). You will have to choose a shard key based upon expected use. To use this in Spark, you will probably have to tweak the Spark Mongo implementation. 14 | -------------------------------------------------------------------------------- /Documentation/originals/FHIR_MMSparkDeployment1-1.docx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/FHIR_MMSparkDeployment1-1.docx -------------------------------------------------------------------------------- /Documentation/originals/FHIR_MMSparkDeployment1-1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/FHIR_MMSparkDeployment1-1.pdf -------------------------------------------------------------------------------- /Documentation/originals/Overview Spark Search.vsdx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/Overview Spark Search.vsdx -------------------------------------------------------------------------------- /Documentation/originals/SparkComponentOverview_0-1.pdf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/SparkComponentOverview_0-1.pdf -------------------------------------------------------------------------------- /Documentation/originals/SparkComponentOverview_0-1.vsdx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/Documentation/originals/SparkComponentOverview_0-1.vsdx -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2014-2018 Firely , 4 | Copyright (c) 2018-2025 Incendi and contributors 5 | All rights reserved. 6 | 7 | Redistribution and use in source and binary forms, with or without modification, 8 | are permitted provided that the following conditions are met: 9 | 10 | * Redistributions of source code must retain the above copyright notice, this 11 | list of conditions and the following disclaimer. 12 | 13 | * Redistributions in binary form must reproduce the above copyright notice, this 14 | list of conditions and the following disclaimer in the documentation and/or 15 | other materials provided with the distribution. 16 | 17 | * Neither the name of the copyright holders nor the names of its 18 | contributors may be used to endorse or promote products derived from 19 | this software without specific prior written permission. 20 | 21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 22 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 23 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 24 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 25 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 26 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 27 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 28 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 30 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 31 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | These versions are currently being supported with security updates. 6 | 7 | #### R4 8 | | Version | Supported | 9 | | ------- | ------------------ | 10 | | 2.0.x | :white_check_mark: | 11 | | 1.5.x | :x: | 12 | | 1.4.x | :x: | 13 | | 1.3.x | :x: | 14 | 15 | #### STU3 16 | | Version | Supported | 17 | | ------- | ------------------ | 18 | | 2.0.x | :white_check_mark: | 19 | | 1.5.x | :x: | 20 | | 1.4.x | :x: | 21 | | 1.3.x | :x: | 22 | 23 | #### DSTU2 24 | | Version | Supported | 25 | | ------- | ------------------ | 26 | | 1.5.x | :x: | 27 | | 1.4.x | :x: | 28 | | 1.3.x | :x: | 29 | 30 | ## Reporting a Vulnerability 31 | 32 | If you want to report a vulnerability please do not create an issue, 33 | rather send an e-mail to info@incendi.no. 34 | 35 | We encourage reporters to follow the disclosure model "responsible disclosure" 36 | 37 | Vulnerability reports will be considered and applied according to how 38 | critical the vulnerability is considered. 
39 | -------------------------------------------------------------------------------- /incendi-logo-128x128.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/incendi-logo-128x128.png -------------------------------------------------------------------------------- /scripts/CreateIndexes.bat: -------------------------------------------------------------------------------- 1 | mongo spark CreateIndexes.js > CreateIndexes.log 2 | -------------------------------------------------------------------------------- /scripts/CreateIndexes.js: -------------------------------------------------------------------------------- 1 | print("indexes for just about any operation, including create / update"); 2 | printjson(db.searchindex.createIndex({ "internal_resource" : 1, "identifier.code" : 1, "identifier.system" : 1}, { "name" : "ix_resource_identifier", "background" : true})); 3 | printjson(db.searchindex.createIndex({"internal_id":1, "internal_selflink":1},{"name":"ix_internal_id_selflink", "background":true})); 4 | printjson(db.resources.createIndex({"@REFERENCE" : 1, "@state" : 1}, { "name" : "ix_REFERENCE_state", "background" : "true" })); 5 | 6 | print("indexes for when you query by Patient.name or Patient.family a lot"); 7 | printjson(db.searchindex.createIndex({"name" : 1}, { "name" : "ix_Patient_name", partialFilterExpression : { "internal_resource" : "Patient" }, "background" : "true" })); 8 | printjson(db.searchindex.createIndex({"family" : 1}, { "name" : "ix_Patient_family", partialFilterExpression : { "internal_resource" : "Patient" }, "background" : "true" })); 9 | 10 | print("specific index for Encounter.serviceprovider"); 11 | printjson(db.searchindex.createIndex({"internal_resource" : 1, "serviceprovider" : 1}, { "name" : "ix_Encounter_serviceProvider", partialFilterExpression : { "internal_resource" : "Encounter" }, "background" : "true" 
})); 12 | print("specific index for references to patient, from any resources that has a 'patient' search parameter"); 13 | printjson(db.searchindex.createIndex({"internal_resource" : 1, "patient" : 1}, { "name" : "ix_patient_reference", "background" : "true" })); 14 | print("specific index for Observation.code"); 15 | printjson(db.searchindex.createIndex({"code.code" : 1, "code.system" : 1}, { "name" : "ix_Observation_code", partialFilterExpression : { "internal_resource" : "Observation" }, "background" : "true" })); 16 | -------------------------------------------------------------------------------- /scripts/RemoveDuplicateId.bat: -------------------------------------------------------------------------------- 1 | mongo spark RemoveDuplicateId.js > RemoveDuplicateId.log 2 | -------------------------------------------------------------------------------- /scripts/RemoveDuplicateId.js: -------------------------------------------------------------------------------- 1 | print("Removing duplicate Ids"); 2 | printjson(db.resources.update({}, {$unset: {Id:1}}, {multi: true})); 3 | -------------------------------------------------------------------------------- /src/Spark-Legacy/Examples/DSTU2/examples.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark-Legacy/Examples/DSTU2/examples.zip -------------------------------------------------------------------------------- /src/Spark-Legacy/Examples/R4/examples.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark-Legacy/Examples/R4/examples.zip -------------------------------------------------------------------------------- /src/Spark-Legacy/Examples/STU3/examples.zip: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark-Legacy/Examples/STU3/examples.zip -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Core/FhirModelTests.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Firely 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.VisualStudio.TestTools.UnitTesting; 8 | using Spark.Engine.Core; 9 | using System.Linq; 10 | using Hl7.Fhir.Model; 11 | 12 | namespace Spark.Engine.Test.Core; 13 | 14 | [TestClass] 15 | public class FhirModelTests 16 | { 17 | private static FhirModel sut; 18 | 19 | [ClassInitialize] 20 | public static void ClassInitialize(TestContext testContext) 21 | { 22 | sut = new FhirModel(); 23 | } 24 | 25 | [TestMethod] 26 | public void TestCompartments() 27 | { 28 | var actual = sut.FindCompartmentInfo(ResourceType.Patient); 29 | 30 | Assert.IsNotNull(actual); 31 | Assert.IsTrue(actual.ReverseIncludes.Any()); 32 | } 33 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Extensions/RegexExtensionsTests.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Firely 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.VisualStudio.TestTools.UnitTesting; 8 | using System.Text.RegularExpressions; 9 | using Spark.Engine.Extensions; 10 | 11 | namespace Spark.Engine.Test.Extensions; 12 | 13 | [TestClass] 14 | public class RegexExtensionsTests 15 | { 16 | public static Regex sut = new Regex(@"[^a]*(?a)[^a]*"); 17 | 18 | [TestMethod] 19 | public void TestReplaceNamedGroupNoSuchGroup() 20 | { 21 | var input = @"bababa"; 22 | var result = sut.ReplaceGroup(input, "blabla", "c"); 23 | Assert.AreEqual(@"bababa", result); 24 | } 25 | 26 | [TestMethod] 27 | public void 
TestReplaceNamedGroupNoCaptures() 28 | { 29 | var input = @"bbbbbb"; 30 | var result = sut.ReplaceGroup(input, "alpha", "c"); 31 | Assert.AreEqual(@"bbbbbb", result); 32 | } 33 | 34 | [TestMethod] 35 | public void TestReplaceNamedGroupSingleCapture() 36 | { 37 | var input = @"babbbb"; 38 | var result = sut.ReplaceGroup(input, "alpha", "c"); 39 | Assert.AreEqual(@"bcbbbb", result); 40 | } 41 | 42 | [TestMethod] 43 | public void TestReplaceNamedGroupMultipleCaptures() 44 | { 45 | var input = @"bababa"; 46 | var result = sut.ReplaceGroup(input, "alpha", "c"); 47 | Assert.AreEqual(@"bcbcbc", result); 48 | } 49 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/FhirVersion.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2020-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Utility; 8 | using Spark.Engine.Test.Utility; 9 | 10 | namespace Spark.Engine.Test; 11 | 12 | public enum FhirVersionMoniker 13 | { 14 | [EnumLiteral("")] 15 | None = 0, 16 | [EnumLiteral(FhirVersionUtility.VERSION_R2)] 17 | R2 = 2, 18 | [EnumLiteral(FhirVersionUtility.VERSION_R3)] 19 | R3 = 3, 20 | [EnumLiteral(FhirVersionUtility.VERSION_R4)] 21 | R4 = 4, 22 | [EnumLiteral(FhirVersionUtility.VERSION_R5)] 23 | R5 = 5, 24 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Formatters/FormatterTestBase.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2020-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Http; 8 | using Microsoft.AspNetCore.Mvc.Formatters; 9 | using Microsoft.AspNetCore.Mvc.ModelBinding; 10 | using System; 11 | using System.IO; 12 | 13 | namespace Spark.Engine.Test.Formatters; 14 | 15 | public class FormatterTestBase 16 | { 17 | protected string 
GetResourceFromFileAsString(string path) 18 | { 19 | using TextReader reader = new StreamReader(path); 20 | return reader.ReadToEnd(); 21 | } 22 | 23 | protected static HttpContext GetHttpContext( 24 | byte[] contentBytes, 25 | string contentType) 26 | { 27 | return GetHttpContext(new MemoryStream(contentBytes), contentType); 28 | } 29 | 30 | protected static HttpContext GetHttpContext(Stream requestStream, string contentType) 31 | { 32 | var httpContext = new DefaultHttpContext(); 33 | httpContext.Request.Body = requestStream; 34 | httpContext.Request.ContentType = contentType; 35 | 36 | return httpContext; 37 | } 38 | 39 | protected static InputFormatterContext CreateInputFormatterContext( 40 | Type modelType, 41 | HttpContext httpContext, 42 | string modelName = null, 43 | bool treatEmptyInputAsDefaultValue = false) 44 | { 45 | var provider = new EmptyModelMetadataProvider(); 46 | var metadata = provider.GetMetadataForType(modelType); 47 | 48 | return new InputFormatterContext( 49 | httpContext, 50 | modelName: modelName ?? 
string.Empty, 51 | modelState: new ModelStateDictionary(), 52 | metadata: metadata, 53 | readerFactory: new TestHttpRequestStreamReaderFactory().CreateReader, 54 | treatEmptyInputAsDefaultValue: treatEmptyInputAsDefaultValue); 55 | } 56 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Formatters/NonSeekableReadStream.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2020-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using System; 8 | using System.IO; 9 | using System.Threading; 10 | using System.Threading.Tasks; 11 | 12 | namespace Spark.Engine.Test.Formatters; 13 | 14 | internal class NonSeekableReadStream : Stream 15 | { 16 | private Stream _inner; 17 | 18 | public NonSeekableReadStream(byte[] data) 19 | { 20 | _inner = new MemoryStream(data); 21 | } 22 | 23 | public override bool CanRead => _inner.CanRead; 24 | 25 | public override bool CanSeek => false; 26 | 27 | public override bool CanWrite => false; 28 | 29 | public override long Length => throw new NotSupportedException(); 30 | 31 | public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); } 32 | 33 | public override void Flush() 34 | { 35 | throw new NotImplementedException(); 36 | } 37 | 38 | public override int Read(byte[] buffer, int offset, int count) 39 | { 40 | return _inner.Read(buffer, offset, count); 41 | } 42 | 43 | public override long Seek(long offset, SeekOrigin origin) 44 | { 45 | throw new NotImplementedException(); 46 | } 47 | 48 | public override void SetLength(long value) 49 | { 50 | throw new NotImplementedException(); 51 | } 52 | 53 | public override void Write(byte[] buffer, int offset, int count) 54 | { 55 | throw new NotImplementedException(); 56 | } 57 | 58 | public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) 59 | { 60 | 
return _inner.ReadAsync(buffer, offset, count, cancellationToken); 61 | } 62 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Formatters/TestHttpRequestStreamReaderFactory.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2020-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Mvc.Infrastructure; 8 | using Microsoft.AspNetCore.WebUtilities; 9 | using System.IO; 10 | using System.Text; 11 | 12 | namespace Spark.Engine.Test.Formatters; 13 | 14 | public class TestHttpRequestStreamReaderFactory : IHttpRequestStreamReaderFactory 15 | { 16 | public TextReader CreateReader(Stream stream, Encoding encoding) 17 | { 18 | return new HttpRequestStreamReader(stream, encoding); 19 | } 20 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Search/ModifierTests.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | using Microsoft.VisualStudio.TestTools.UnitTesting; 9 | using Spark.Engine.Search.Model; 10 | 11 | namespace Spark.Engine.Test.Search; 12 | 13 | [TestClass] 14 | public class ModifierTests 15 | { 16 | [TestMethod] 17 | public void TestActualModifierConstructorWithMissingModifiers() 18 | { 19 | var am = new ActualModifier("missing"); 20 | Assert.AreEqual(Modifier.MISSING, am.Modifier); 21 | Assert.AreEqual("missing", am.RawModifier); 22 | Assert.IsNull(am.ModifierType); 23 | Assert.IsTrue(am.Missing.Value); 24 | Assert.AreEqual("missing=true", am.ToString()); 25 | 26 | am = new ActualModifier("missing=false"); 27 | Assert.AreEqual(Modifier.MISSING, am.Modifier); 28 | Assert.AreEqual("missing=false", am.RawModifier); 29 | Assert.IsNull(am.ModifierType); 30 | 
Assert.IsFalse(am.Missing.Value); 31 | Assert.AreEqual("missing=false", am.ToString()); 32 | } 33 | 34 | [TestMethod] 35 | public void TestActualModifierConstructorWithValidTypeModifier() 36 | { 37 | var am = new ActualModifier("Patient"); 38 | Assert.AreEqual(Modifier.TYPE, am.Modifier); 39 | Assert.AreEqual("Patient", am.RawModifier); 40 | Assert.AreEqual(typeof(Patient), am.ModifierType); 41 | Assert.AreEqual("Patient", am.ToString()); 42 | } 43 | 44 | [TestMethod] 45 | public void TestActualModifierConstructorWithInvalidModifier() 46 | { 47 | var am = new ActualModifier("blabla"); 48 | Assert.AreEqual(Modifier.UNKNOWN, am.Modifier); 49 | Assert.AreEqual("blabla", am.RawModifier); 50 | Assert.IsNull(am.ModifierType); 51 | Assert.AreEqual(null, am.ToString()); 52 | } 53 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Search/ReverseIncludeTests.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System; 9 | using Microsoft.VisualStudio.TestTools.UnitTesting; 10 | using Spark.Engine.Search.Model; 11 | 12 | namespace Spark.Engine.Test.Search; 13 | 14 | [TestClass] 15 | public class ReverseIncludeTests 16 | { 17 | [TestMethod] 18 | public void TestParseValid() 19 | { 20 | ReverseInclude sut = ReverseInclude.Parse("Patient:actor"); 21 | 22 | Assert.AreEqual("Patient", sut.ResourceType); 23 | Assert.AreEqual("actor", sut.SearchPath); 24 | } 25 | [TestMethod] 26 | public void TestParseValidLongerPath() 27 | { 28 | ReverseInclude sut = ReverseInclude.Parse("Provenance:target.patient"); 29 | 30 | Assert.AreEqual("Provenance", sut.ResourceType); 31 | Assert.AreEqual("target.patient", sut.SearchPath); 32 | } 33 | [TestMethod] 34 | [ExpectedException(typeof(ArgumentNullException))] 35 | public void TestParseNull() 36 | { 37 | _ = 
ReverseInclude.Parse(null); 38 | } 39 | 40 | [TestMethod] 41 | [ExpectedException(typeof(ArgumentException))] 42 | public void TestParseInvalid() 43 | { 44 | _ = ReverseInclude.Parse("bla;foo"); 45 | } 46 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Service/IndexServiceTests2.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | using Moq; 9 | using Spark.Engine.Core; 10 | using Spark.Engine.Search; 11 | using Spark.Engine.Service.FhirServiceExtensions; 12 | using Spark.Engine.Store.Interfaces; 13 | using Xunit; 14 | using Task = System.Threading.Tasks.Task; 15 | 16 | namespace Spark.Engine.Test.Service; 17 | 18 | // FIXME: Migrate the old tests in IndexServiceTests to XUnit and Consolidate those tests with these tests. 19 | public class IndexServiceTests2 20 | { 21 | [Fact] 22 | public async Task IndexResourceWithContainedResourcesLackingAnIdShouldNotCrash() 23 | { 24 | FhirModel fhirModel = new(); 25 | Mock indexStoreMock = new(); 26 | ElementIndexer elementIndexer = new(fhirModel); 27 | ResourceResolver resourceResolver = new(); 28 | IndexService indexService = new(fhirModel, indexStoreMock.Object, elementIndexer, resourceResolver); 29 | 30 | Organization organization = new() 31 | { 32 | Name = "An Organization", Identifier = { new Identifier("http://a-fake-system", "a value") } 33 | }; 34 | 35 | organization.Contained.Add(new Endpoint 36 | { 37 | Identifier = { new Identifier { System = "http://not-a-real-system", Value = "endpoint-1-identifier" } } 38 | }); 39 | organization.Contained.Add(new Endpoint 40 | { 41 | Identifier = { new Identifier { System = "http://not-a-real-system", Value = "endpoint-2-identifier" } } 42 | }); 43 | 44 | Key key = Key.Create(organization.TypeName, organization.Id); 45 | await 
indexService.IndexResourceAsync(organization, key); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Service/IndexValueTestExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2020-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Spark.Engine.Model; 8 | using System.Collections.Generic; 9 | using System.Linq; 10 | 11 | namespace Spark.Engine.Test.Service; 12 | 13 | public static class IndexValueTestExtensions 14 | { 15 | public static IEnumerable NonInternalValues(this IndexValue root) 16 | { 17 | return root.IndexValues().Where(v => !v.Name.StartsWith("internal_")); 18 | } 19 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/TextFileHelper.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using System.IO; 8 | 9 | namespace Spark.Engine.Test; 10 | 11 | public static class TextFileHelper 12 | { 13 | public static string ReadTextFileFromDisk(string path) 14 | { 15 | using TextReader reader = new StreamReader(path); 16 | return reader.ReadToEnd(); 17 | } 18 | } -------------------------------------------------------------------------------- /src/Spark.Engine.Test/Utility/FhirVersionUtility.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2020-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | using Hl7.Fhir.Utility; 9 | using NuGet.Versioning; 10 | using System.Collections.Generic; 11 | 12 | namespace Spark.Engine.Test.Utility; 13 | 14 | internal class FhirVersionUtility 15 | { 16 | public const string VERSION_R2 = "1.0"; 17 | public const string VERSION_R3 = "3.0"; 18 | 
public const string VERSION_R4 = "4.0"; 19 | public const string VERSION_R5 = "4.4"; 20 | 21 | public static Dictionary KnownFhirVersions = new Dictionary 22 | { 23 | { FhirVersionMoniker.None, string.Empty }, 24 | { FhirVersionMoniker.R2, VERSION_R2 }, 25 | { FhirVersionMoniker.R3, VERSION_R3 }, 26 | { FhirVersionMoniker.R4, VERSION_R4 }, 27 | { FhirVersionMoniker.R5, VERSION_R5 }, 28 | }; 29 | 30 | public static FhirVersionMoniker GetFhirVersionMoniker() 31 | { 32 | FhirVersionMoniker? fhirVersion = default; 33 | if (SemanticVersion.TryParse(ModelInfo.Version, out SemanticVersion semanticVersion)) 34 | { 35 | fhirVersion = EnumUtility.ParseLiteral($"{semanticVersion.Major}.{semanticVersion.Minor}"); 36 | } 37 | 38 | return fhirVersion ?? FhirVersionMoniker.None; 39 | } 40 | } -------------------------------------------------------------------------------- /src/Spark.Engine/Core/ConditionalHeaderParameters.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System; 9 | using System.Collections.Generic; 10 | using Spark.Engine.Extensions; 11 | using Microsoft.AspNetCore.Http; 12 | 13 | namespace Spark.Engine.Core; 14 | 15 | public class ConditionalHeaderParameters 16 | { 17 | public ConditionalHeaderParameters(HttpRequest request) 18 | { 19 | IfNoneMatchTags = request.IfNoneMatch(); 20 | IfModifiedSince = request.IfModifiedSince(); 21 | } 22 | 23 | public IEnumerable IfNoneMatchTags { get; set; } 24 | public DateTimeOffset? 
IfModifiedSince { get; set; } 25 | } 26 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/Const.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | 9 | namespace Spark.Engine.Core; 10 | 11 | public static class FhirRestOp 12 | { 13 | public const string SNAPSHOT = "_snapshot"; 14 | } 15 | 16 | public static class FhirHeader 17 | { 18 | public const string CATEGORY = "Category"; 19 | } 20 | 21 | public static class FhirParameter 22 | { 23 | public const string SNAPSHOT_ID = "id"; 24 | public const string SNAPSHOT_INDEX = "start"; 25 | public const string OFFSET = "_offset"; 26 | public const string SUMMARY = "_summary"; 27 | public const string COUNT = "_count"; 28 | public const string SINCE = "_since"; 29 | public const string SORT = "_sort"; 30 | } 31 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/FhirMediaType.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014-2018, Firely 3 | * Copyright (c) 2018-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using Hl7.Fhir.Rest; 9 | using System.Collections.Generic; 10 | using System.Linq; 11 | 12 | namespace Spark.Engine.Core; 13 | 14 | public static class FhirMediaType 15 | { 16 | public static string OctetStreamMimeType = "application/octet-stream"; 17 | public static string FormUrlEncodedMimeType = "application/x-www-form-urlencoded"; 18 | public static string AnyMimeType = "*/*"; 19 | 20 | public static IEnumerable JsonMimeTypes => ContentType.JSON_CONTENT_HEADERS; 21 | public static IEnumerable XmlMimeTypes => ContentType.XML_CONTENT_HEADERS; 22 | public static IEnumerable SupportedMimeTypes => JsonMimeTypes 23 | 
.Concat(XmlMimeTypes) 24 | .Concat(new[] { OctetStreamMimeType, FormUrlEncodedMimeType, AnyMimeType }); 25 | } 26 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/HistoryParameters.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * Copyright (c) 2019-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System; 9 | using Spark.Engine.Extensions; 10 | using Spark.Engine.Utility; 11 | using Microsoft.AspNetCore.Http; 12 | 13 | namespace Spark.Engine.Core; 14 | 15 | public class HistoryParameters 16 | { 17 | public HistoryParameters(HttpRequest request) 18 | { 19 | Count = FhirParameterParser.ParseIntParameter(request.GetParameter(FhirParameter.COUNT)); 20 | Since = FhirParameterParser.ParseDateParameter(request.GetParameter(FhirParameter.SINCE)); 21 | SortBy = request.GetParameter(FhirParameter.SORT); 22 | } 23 | 24 | public int? Count { get; set; } 25 | public DateTimeOffset? 
Since { get; set; } 26 | public string Format { get; set; } 27 | public string SortBy { get; set; } 28 | } 29 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/HttpHeaderName.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2021-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | namespace Spark.Engine.Core; 8 | 9 | internal static class HttpHeaderName 10 | { 11 | public const string ACCEPT = "Accept"; 12 | public const string CONTENT_DISPOSITION = "Content-Disposition"; 13 | public const string CONTENT_LOCATION = "Content-Location"; 14 | public const string CONTENT_TYPE = "Content-Type"; 15 | public const string ETAG = "ETag"; 16 | public const string LOCATION = "Location"; 17 | public const string LAST_MODIFIED = "Last-Modified"; 18 | 19 | public const string X_CONTENT_TYPE = "X-Content-Type"; 20 | } 21 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/IIndexService.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using System.Threading.Tasks; 8 | using Hl7.Fhir.Model; 9 | using Spark.Engine.Model; 10 | using Task = System.Threading.Tasks.Task; 11 | 12 | namespace Spark.Engine.Core; 13 | 14 | public interface IIndexService 15 | { 16 | Task ProcessAsync(Entry entry); 17 | Task IndexResourceAsync(Resource resource, IKey key); 18 | } 19 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/ILocalhost.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System; 9 | 10 | namespace 
Spark.Engine.Core; 11 | 12 | public interface ILocalhost 13 | { 14 | Uri DefaultBase { get; } 15 | Uri Absolute(Uri uri); 16 | bool IsBaseOf(Uri uri); 17 | Uri GetBaseOf(Uri uri); 18 | } 19 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/KeyKind.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | namespace Spark.Engine.Core; 8 | 9 | /// 10 | /// Any will be triaged by an as one of these. 11 | /// 12 | public enum KeyKind 13 | { 14 | /// 15 | /// absolute url, where base is not localhost 16 | /// 17 | Foreign, 18 | 19 | /// 20 | /// temporary id, URN, but not a URL. 21 | /// 22 | Temporary, 23 | 24 | /// 25 | /// absolute url, but base is (any of the) localhost(s) 26 | /// 27 | Local, 28 | 29 | /// 30 | /// Relative url, for internal references 31 | /// 32 | Internal 33 | } 34 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/Localhost.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System; 9 | 10 | namespace Spark.Engine.Core; 11 | 12 | public class Localhost : ILocalhost 13 | { 14 | public Uri DefaultBase { get; set; } 15 | 16 | public Localhost(Uri baseuri) 17 | { 18 | DefaultBase = baseuri; 19 | } 20 | 21 | public Uri Absolute(Uri uri) 22 | { 23 | if (uri.IsAbsoluteUri) 24 | { 25 | return uri; 26 | } 27 | else 28 | { 29 | string _base = DefaultBase.ToString().TrimEnd('/') + "/"; 30 | return new Uri(_base + uri); 31 | } 32 | } 33 | 34 | public bool IsBaseOf(Uri uri) 35 | { 36 | if (uri.IsAbsoluteUri) 37 | { 38 | bool isbase = DefaultBase.Bugfixed_IsBaseOf(uri); 39 | return isbase; 40 | } 41 | else 42 | { 43 | return false; 
44 | } 45 | 46 | } 47 | 48 | public Uri GetBaseOf(Uri uri) 49 | { 50 | return (IsBaseOf(uri)) ? DefaultBase : null; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/Namespaces.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System.Xml.Linq; 9 | 10 | namespace Spark.Engine.Core; 11 | 12 | public static class Namespaces 13 | { 14 | public static XNamespace XHtml = XNamespace.Get("http://www.w3.org/1999/xhtml"); 15 | } 16 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/RequiredAttributeException.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2021-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using System; 8 | using System.Runtime.Serialization; 9 | 10 | namespace Spark.Engine.Core; 11 | 12 | [Serializable] 13 | internal class RequiredAttributeException : Exception 14 | { 15 | public RequiredAttributeException() 16 | { 17 | } 18 | 19 | public RequiredAttributeException(string message) : base(message) 20 | { 21 | } 22 | 23 | public RequiredAttributeException(string message, Exception innerException) : base(message, innerException) 24 | { 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/SearchResults.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using Hl7.Fhir.Model; 9 | using Spark.Engine.Search.Types; 10 | using System.Collections.Generic; 11 | using System.Linq; 12 | 13 | namespace 
Spark.Engine.Core; 14 | 15 | public class SearchResults : List 16 | { 17 | public List UsedCriteria { get; set; } 18 | public int MatchCount { get; set; } 19 | 20 | private readonly OperationOutcome _outcome; 21 | public OperationOutcome Outcome { 22 | get 23 | { 24 | return _outcome.Issue.Any() ? _outcome : null; 25 | } 26 | } 27 | 28 | // todo: I think OperationOutcome logic should be on a higher level or at least not SearchResults specific -mh 29 | public SearchResults() 30 | { 31 | UsedCriteria = new List(); 32 | MatchCount = 0; 33 | _outcome = new OperationOutcome 34 | { 35 | Issue = new List() 36 | }; 37 | } 38 | 39 | public void AddIssue(string errorMessage, OperationOutcome.IssueSeverity severity = OperationOutcome.IssueSeverity.Error) 40 | { 41 | var newIssue = new OperationOutcome.IssueComponent() { Diagnostics = errorMessage, Severity = severity }; 42 | _outcome.Issue.Add(newIssue); 43 | } 44 | 45 | public bool HasErrors 46 | { 47 | get 48 | { 49 | return Outcome != null && Outcome.Issue.Any(i => i.Severity <= OperationOutcome.IssueSeverity.Error); 50 | } 51 | } 52 | 53 | public bool HasIssues 54 | { 55 | get 56 | { 57 | return Outcome != null && Outcome.Issue.Any(); 58 | } 59 | } 60 | 61 | public string UsedParameters 62 | { 63 | get 64 | { 65 | string[] used = UsedCriteria.Select(c => c.ToString()).ToArray(); 66 | return string.Join("&", used); 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/Spark.Engine/Core/SparkException.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014-2018, Firely 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | using System; 9 | using System.Net; 10 | 11 | namespace Spark.Engine.Core; 12 | // Placed in a sub-namespace because you must be explicit about it if you want to throw this error directly 13 | 14 | // todo: Can this be replaced by a 
FhirOperationException ? 15 | 16 | public class SparkException : Exception 17 | { 18 | public HttpStatusCode StatusCode; 19 | public OperationOutcome Outcome { get; set; } 20 | 21 | public SparkException(HttpStatusCode statuscode, string message = null) : base(message) 22 | { 23 | this.StatusCode = statuscode; 24 | } 25 | 26 | public SparkException(HttpStatusCode statuscode, string message, params object[] values) 27 | : base(string.Format(message, values)) 28 | { 29 | this.StatusCode = statuscode; 30 | } 31 | 32 | public SparkException(string message) : base(message) 33 | { 34 | this.StatusCode = HttpStatusCode.BadRequest; 35 | } 36 | 37 | public SparkException(HttpStatusCode statuscode, string message, Exception inner) : base(message, inner) 38 | { 39 | this.StatusCode = statuscode; 40 | } 41 | 42 | public SparkException(HttpStatusCode statuscode, OperationOutcome outcome, string message = null) 43 | : this(statuscode, message) 44 | { 45 | this.Outcome = outcome; 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/Spark.Engine/ExportSettings.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2021-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | namespace Spark.Engine; 8 | 9 | public class ExportSettings 10 | { 11 | /// 12 | /// Whether to externalize FHIR URIs, for example, "Patient" -> 13 | /// "https://your.fhir.url/fhir/Patient" (false by default). 
14 | /// 15 | public bool ExternalizeFhirUri { get; set; } 16 | } 17 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/CodingExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2023-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | using System.Collections.Generic; 9 | using System.Linq; 10 | 11 | namespace Spark.Engine.Extensions; 12 | 13 | public static class CodingExtensions 14 | { 15 | /// 16 | /// Compares this instance against other, returns true if the two have identical System and 17 | /// Code, otherwise false. 18 | /// 19 | /// This instance. 20 | /// The instance to compare this against. 21 | /// 22 | public static bool AreEqual(this Coding coding, Coding other) 23 | { 24 | return coding.System == other.System && coding.Code == other.Code; 25 | } 26 | 27 | /// 28 | /// Compares this list of Coding instances against the instance other, 29 | /// returns true if at least one instance have identical System and Code to other, otherwise false. 30 | /// 31 | /// This list of instances. 32 | /// The instance to compare this list against. 
33 | /// 34 | public static bool HasEqualCoding(this IEnumerable sources, Coding other) 35 | { 36 | return sources.Any(source => AreEqual(source, other)); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/DateTimeOffsetExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2021-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using System; 8 | 9 | namespace Spark.Engine.Extensions; 10 | 11 | public static class DateTimeOffsetExtensions 12 | { 13 | public static DateTimeOffset TruncateToMillis(this DateTimeOffset dateTime) 14 | { 15 | return dateTime.AddTicks(-(dateTime.Ticks % TimeSpan.TicksPerMillisecond)); 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/ETag.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using System.Net.Http.Headers; 8 | 9 | namespace Spark.Engine.Extensions; 10 | 11 | public static class ETag 12 | { 13 | public static EntityTagHeaderValue Create(string value) 14 | { 15 | string tag = "\"" + value + "\""; 16 | return new EntityTagHeaderValue(tag, true); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/FhirDateTimeExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * Copyright (c) 2020-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using Hl7.Fhir.Model; 9 | using System; 10 | 11 | namespace Spark.Engine.Extensions; 12 | 13 | public static class FhirDateTimeExtensions 14 | { 15 | public enum FhirDateTimePrecision 16 | { 17 | Year = 4, //1994 18 | 
Month = 7, //1994-10 19 | Day = 10, //1994-10-21 20 | Minute = 15, //1994-10-21T13:45 21 | Second = 18 //1994-10-21T13:45:21 22 | } 23 | 24 | public static FhirDateTimePrecision Precision(this FhirDateTime fdt) 25 | { 26 | return (FhirDateTimePrecision)Math.Min(fdt.Value.Length, 18); //Ignore timezone for stating precision. 27 | } 28 | 29 | public static DateTimeOffset LowerBound(this FhirDateTime fdt) 30 | { 31 | return fdt.ToDateTimeOffset(TimeSpan.Zero); 32 | } 33 | 34 | public static DateTimeOffset UpperBound(this FhirDateTime fdt) 35 | { 36 | var start = fdt.LowerBound(); 37 | var end = (fdt.Precision()) switch 38 | { 39 | FhirDateTimePrecision.Year => start.AddYears(1), 40 | FhirDateTimePrecision.Month => start.AddMonths(1), 41 | FhirDateTimePrecision.Day => start.AddDays(1), 42 | FhirDateTimePrecision.Minute => start.AddMinutes(1), 43 | FhirDateTimePrecision.Second => start.AddSeconds(1), 44 | _ => start 45 | }; 46 | return end; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/GeneratorKeyExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * Copyright (c) 2017-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using Hl7.Fhir.Model; 9 | using System.Collections.Generic; 10 | using Spark.Engine.Core; 11 | using Spark.Engine.Interfaces; 12 | 13 | namespace Spark.Engine.Extensions; 14 | 15 | public static class GeneratorKeyExtensions 16 | { 17 | public static Key NextHistoryKey(this IIdentityGenerator generator, IKey key) 18 | { 19 | Key historykey = key.Clone(); 20 | historykey.VersionId = generator.NextVersionId(key.TypeName, key.ResourceId); 21 | return historykey; 22 | } 23 | 24 | public static Key NextKey(this IIdentityGenerator generator, Resource resource) 25 | { 26 | string resourceid = generator.NextResourceId(resource); 27 | Key key = 
resource.ExtractKey(); 28 | string versionid = generator.NextVersionId(key.TypeName, resourceid); 29 | return Key.Create(key.TypeName, resourceid, versionid); 30 | } 31 | 32 | public static void AddHistoryKeys(this IIdentityGenerator generator, List entries) 33 | { 34 | // PERF: this needs a performance improvement. 35 | foreach (Entry entry in entries) 36 | { 37 | entry.Key = generator.NextHistoryKey(entry.Key); 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/IApplicationBuilderExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Builder; 8 | using Microsoft.AspNetCore.Routing; 9 | using Spark.Engine.ExceptionHandling; 10 | using Spark.Engine.Handlers; 11 | using System; 12 | using Spark.Engine.Maintenance; 13 | 14 | namespace Spark.Engine.Extensions; 15 | 16 | public static class IApplicationBuilderExtensions 17 | { 18 | public static void UseFhir(this IApplicationBuilder app, Action configureRoutes = null) 19 | { 20 | app.UseMiddleware(); 21 | app.UseMiddleware(); 22 | app.UseMiddleware(); 23 | 24 | if (configureRoutes == null) 25 | app.UseMvc(); 26 | else 27 | app.UseMvc(configureRoutes); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/MetaExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2023-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | using System.Linq; 9 | 10 | namespace Spark.Engine.Extensions; 11 | 12 | public static class MetaExtensions 13 | { 14 | /// 15 | /// Merges the data in source into target. 16 | /// 17 | /// The target of the merge operation. 
18 | /// The source of the merge operation. 19 | public static void Merge(this Meta target, Meta source) 20 | { 21 | var targetProfiles = target.Profile.ToList(); 22 | foreach (var profile in source.Profile) 23 | { 24 | if (profile == null) 25 | continue; 26 | if (!targetProfiles.Any(p => profile.Equals(p))) 27 | targetProfiles.Add(profile); 28 | } 29 | source.Profile = targetProfiles; 30 | 31 | foreach (var securityCoding in source.Security) 32 | { 33 | if (target.Security.HasEqualCoding(securityCoding)) 34 | continue; 35 | target.Security.Add(securityCoding); 36 | } 37 | 38 | foreach (var tag in source.Tag) 39 | { 40 | if (target.Tag.HasEqualCoding(tag)) 41 | continue; 42 | target.Tag.Add(tag); 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/ParametersExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2023-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | using System.Collections.Generic; 9 | using System.Linq; 10 | 11 | namespace Spark.Engine.Extensions; 12 | 13 | public static class ParametersExtensions 14 | { 15 | public static IEnumerable ExtractMetaResources(this Parameters parameters) 16 | { 17 | foreach(var parameter in parameters.Parameter.Where(p => p.Name == "meta")) 18 | { 19 | if (parameter.Value is Meta meta) 20 | yield return meta; 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/RegexExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System.Collections.Generic; 9 | using System.Text.RegularExpressions; 10 | 11 | namespace Spark.Engine.Extensions; 12 | 13 | 
public static class RegexExtensions 14 | { 15 | public static string ReplaceGroup(this Regex regex, string input, string groupName, string replacement) 16 | { 17 | return ReplaceGroups(regex, input, new Dictionary { { groupName, replacement } }); 18 | } 19 | 20 | public static string ReplaceGroups(this Regex regex, string input, Dictionary replacements) 21 | { 22 | return regex.Replace(input, m => 23 | { 24 | return ReplaceNamedGroups(m, replacements); 25 | }); 26 | } 27 | 28 | private static string ReplaceNamedGroups(Match m, Dictionary replacements) 29 | { 30 | string result = m.Value; 31 | foreach (var replacement in replacements) 32 | { 33 | var groupName = replacement.Key; 34 | var replaceWith = replacement.Value; 35 | foreach (Capture cap in m.Groups[groupName]?.Captures) 36 | { 37 | result = result.Remove(cap.Index - m.Index, cap.Length); 38 | result = result.Insert(cap.Index - m.Index, replaceWith); 39 | } 40 | } 41 | return result; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/SearchParamDefinitionExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | using System.Collections.Generic; 9 | using static Hl7.Fhir.Model.ModelInfo; 10 | 11 | namespace Spark.Engine.Extensions; 12 | 13 | internal static class SearchParamDefinitionExtensions 14 | { 15 | /// 16 | /// Returns true if the search parameter is one of the following types: Number, Date or Quantity. 17 | /// See https://www.hl7.org/fhir/stu3/search.html#prefix for more information. 18 | /// 19 | /// 20 | /// A List of , since this is an extension this is usually a reference 21 | /// to ModelInfo.SearcParameters. 22 | /// 23 | /// A representing the name of the search parameter. 
24 | /// Returns true if the search parameter is of type Number, Date or Quanity, otherwise false. 25 | internal static bool CanHaveOperatorPrefix(this List searchParamDefinitions, string resourceType, string name) 26 | { 27 | SearchParamDefinition searchParamDefinition = searchParamDefinitions.Find(p => (p.Resource == resourceType || p.Resource == nameof(Resource)) && p.Name == name); 28 | return searchParamDefinition != null && (searchParamDefinition.Type == SearchParamType.Number 29 | || searchParamDefinition.Type == SearchParamType.Date 30 | || searchParamDefinition.Type == SearchParamType.Quantity); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/SparkOptions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2021-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Mvc; 8 | using System; 9 | 10 | namespace Spark.Engine.Extensions; 11 | 12 | public class SparkOptions 13 | { 14 | public SparkSettings Settings { get; set; } = new(); 15 | public StoreSettings StoreSettings { get; set; } = new(); 16 | public FhirServiceExtensionDictionary FhirExtensions { get; } = new(); 17 | 18 | public FhirServiceDictionary FhirServices { get; } = new(); 19 | 20 | public FhirStoreDictionary FhirStores { get; } = new(); 21 | 22 | public Action MvcOption { get; set; } 23 | } 24 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/StringExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | namespace Spark.Engine.Extensions; 9 | 10 | public static class StringExtensions 11 | { 12 | public static string FirstUpper(this string input) 13 | { 14 | if 
(string.IsNullOrWhiteSpace(input)) 15 | return input; 16 | 17 | return string.Concat(input.Substring(0, 1).ToUpperInvariant(), input.Remove(0, 1)); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/UriParamExtensions.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System; 9 | using System.Collections.Generic; 10 | using System.Linq; 11 | 12 | namespace Spark.Engine.Extensions; 13 | 14 | public static class UriParamExtensions 15 | { 16 | //TODO: horrible!! Should refactor 17 | public static Uri AddParam(this Uri uri, string name, params string[] values) 18 | { 19 | Uri fakeBase = new Uri("http://example.com"); 20 | UriBuilder builder; 21 | if (uri.IsAbsoluteUri) 22 | { 23 | builder = new UriBuilder(uri); 24 | } 25 | else 26 | { 27 | builder = new UriBuilder(fakeBase) 28 | { 29 | Path = uri.ToString() 30 | }; 31 | } 32 | 33 | ICollection> query = UriUtil.SplitParams(builder.Query).ToList(); 34 | 35 | foreach (string value in values) 36 | { 37 | query.Add(new Tuple(name, value)); 38 | } 39 | 40 | builder.Query = UriUtil.JoinParams(query); 41 | 42 | if (uri.IsAbsoluteUri) 43 | { 44 | return builder.Uri; 45 | } 46 | else 47 | { 48 | return fakeBase.MakeRelativeUri(builder.Uri); 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/Spark.Engine/Extensions/UriUtil.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System; 9 | using System.Collections.Generic; 10 | using System.Linq; 11 | 12 | namespace Spark.Engine.Extensions; 13 | 14 | public static class UriUtil 
{
    /// <summary>
    /// Splits "name=value" on the first '=' only (the value may itself contain '=');
    /// the value is null when no '=' is present.
    /// </summary>
    public static Tuple<string, string> SplitParam(string s)
    {
        string[] a = s.Split(new char[] { '=' }, 2);
        return new Tuple<string, string>(a.First(), a.Skip(1).FirstOrDefault());
    }

    /// <summary>Splits a query string such as "?a=1&amp;b=2" into name/value tuples.</summary>
    public static ICollection<Tuple<string, string>> SplitParams(string query)
    {
        // Fixed: the extracted text carried a max-count of 2 on the '&' split, which
        // would silently fold every parameter after the second into one value.
        return query.TrimStart('?').Split(new[] { '&' }, StringSplitOptions.RemoveEmptyEntries).Select(SplitParam).ToList();
    }

    public static ICollection<Tuple<string, string>> SplitParams(this Uri uri)
    {
        return SplitParams(uri.Query);
    }

    /// <summary>Joins tuples back into "a=1&amp;b=2" form; no escaping is performed.</summary>
    public static string JoinParams(IEnumerable<Tuple<string, string>> query)
    {
        return string.Join("&", query.Select(t => t.Item1 + "=" + t.Item2));
    }

    /// <summary>
    /// Normalizes the string by round-tripping it through Uri; unparsable or blank
    /// input is returned unchanged.
    /// </summary>
    public static string NormalizeUri(string uriString)
    {
        if (!string.IsNullOrWhiteSpace(uriString) && Uri.TryCreate(uriString, UriKind.RelativeOrAbsolute, out var uri))
        {
            return uri.ToString();
        }
        return uriString;
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine/Extensions/XDocumentExtensions.cs:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2015-2018, Firely
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Spark.Engine.Core;
using System;
using System.Xml.Linq;

namespace Spark.Engine.Extensions;

public static class XDocumentExtensions
{
    /// <summary>
    /// Invokes action on every attribute named attrName of every XHTML element named
    /// tagname in the document.
    /// </summary>
    public static void VisitAttributes(this XDocument document, string tagname, string attrName, Action<XAttribute> action)
    {
        var nodes = document.Descendants(Namespaces.XHtml + tagname).Attributes(attrName);
        foreach (var node in nodes)
        {
            action(node);
        }
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine/FhirResponseFactory/ConditionalHeaderFhirResponseInterceptor.cs:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2015-2018, Firely
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Linq;
using System.Net;
using Spark.Engine.Core;
using Spark.Engine.Extensions;
using Spark.Engine.Interfaces;

namespace Spark.Engine.FhirResponseFactory;

public class ConditionalHeaderFhirResponseInterceptor : IFhirResponseInterceptor
{
    public bool CanHandle(object input)
    {
        return input is ConditionalHeaderParameters;
    }

    private ConditionalHeaderParameters ConvertInput(object input)
    {
        return input as ConditionalHeaderParameters;
    }

    /// <summary>
    /// Returns a 304 Not Modified response when the conditional request headers apply
    /// and match; null when this interceptor is not applicable.
    /// </summary>
    public FhirResponse GetFhirResponse(Entry entry, object input)
    {
        ConditionalHeaderParameters parameters = ConvertInput(input);
        if (parameters == null) return null;

        // null = header absent; true/false = header present and (not) matched.
        bool? matchTags = parameters.IfNoneMatchTags.Any() ? parameters.IfNoneMatchTags.Any(t => t == ETag.Create(entry.Key.VersionId).Tag) : (bool?)null;
        // NOTE(review): 'true' here means the resource WAS modified after the
        // If-Modified-Since instant, yet below it contributes to answering 304 — that
        // looks inverted w.r.t. RFC 7232 semantics; confirm intended behavior.
        bool? matchModifiedDate = parameters.IfModifiedSince.HasValue
            ? parameters.IfModifiedSince.Value < entry.Resource.Meta.LastUpdated
            : (bool?)null;

        // Neither conditional header was supplied: nothing to decide.
        if (!matchTags.HasValue && !matchModifiedDate.HasValue)
        {
            return null;
        }

        // An absent header does not veto the 304 (?? true).
        if ((matchTags ?? true) && (matchModifiedDate ?? true))
        {
            return Respond.WithCode(HttpStatusCode.NotModified);
        }

        return null;
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine/FhirResponseFactory/FhirResponseInterceptorRunner.cs:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2015-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Collections.Generic;
using System.Linq;
using Spark.Engine.Core;
using Spark.Engine.Interfaces;

namespace Spark.Engine.FhirResponseFactory;

public class FhirResponseInterceptorRunner : IFhirResponseInterceptorRunner
{
    private readonly IList<IFhirResponseInterceptor> _interceptors;

    public FhirResponseInterceptorRunner(IFhirResponseInterceptor[] interceptors)
    {
        _interceptors = new List<IFhirResponseInterceptor>(interceptors);
    }

    public void AddInterceptor(IFhirResponseInterceptor interceptor)
    {
        _interceptors.Add(interceptor);
    }

    public void ClearInterceptors()
    {
        _interceptors.Clear();
    }

    /// <summary>
    /// Runs the interceptors for each parameter in turn; the first non-null response
    /// wins. Returns null when no interceptor produced a response.
    /// </summary>
    public FhirResponse RunInterceptors(Entry entry, IEnumerable<object> parameters)
    {
        FhirResponse response = null;
        // FirstOrDefault is (ab)used for short-circuiting; the result is captured by
        // the assignment inside the predicate.
        parameters.FirstOrDefault(p => (response = RunInterceptors(entry, p)) != null);
        return response;
    }

    private FhirResponse RunInterceptors(Entry entry, object input)
    {
        FhirResponse response = null;
        GetResponseInterceptors(input).FirstOrDefault(f => (response = f.GetFhirResponse(entry, input)) != null);
        return response;
    }

    private IEnumerable<IFhirResponseInterceptor> GetResponseInterceptors(object input)
    {
        return _interceptors.Where(i => i.CanHandle(input));
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine/FhirResponseFactory/IFhirResponseFactory.cs:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2019-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Hl7.Fhir.Model;
using Spark.Engine.Core;
using System;
using System.Collections.Generic;

namespace Spark.Engine.FhirResponseFactory;

public interface IFhirResponseFactory
{
    // NOTE(review): the generic arity below was restored from the surviving
    // 'where T : Resource' clauses after the text extraction stripped all angle
    // brackets; confirm the exact signatures against the upstream sources.
    FhirResponse GetFhirResponse<T>(Entry entry, IKey key = null, IEnumerable<object> parameters = null)
        where T : Resource;
    FhirResponse GetFhirResponse(Entry entry, IKey key = null, IEnumerable<object> parameters = null);
    FhirResponse GetFhirResponse<T>(Entry entry, IKey key = null, params object[] parameters)
        where T : Resource;
    FhirResponse GetFhirResponse(Entry entry, IKey key = null, params object[] parameters);
    FhirResponse GetMetadataResponse(Entry entry, IKey key = null);
    FhirResponse GetFhirResponse(IList<Entry> interactions, Bundle.BundleType bundleType);
    FhirResponse GetFhirResponse(Bundle bundle);
    FhirResponse GetFhirResponse(IEnumerable<Tuple<Entry, FhirResponse>> responses, Bundle.BundleType bundleType);
}
--------------------------------------------------------------------------------
/src/Spark.Engine/Filters/GzipContent.cs:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2014-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.IO;
using System.IO.Compression;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;

namespace Spark.Engine.Filters;

/// <summary>
/// GZip encoded HttpContent.
/// </summary>
public class GZipContent : HttpContent
{
    private readonly HttpContent _content;

    /// <summary>
    /// Creates a new instance of the GZipContent from the specified HttpContent.
    /// </summary>
    /// <param name="content">The unencoded HttpContent.</param>
    /// <remarks>
    /// All headers from the original content are copied and the
    /// "Content-Encoding: gzip" header is added.
    /// </remarks>
    public GZipContent(HttpContent content)
    {
        _content = content;
        foreach (var header in content.Headers)
        {
            Headers.TryAddWithoutValidation(header.Key, header.Value);
        }
        Headers.ContentEncoding.Add("gzip");
    }

    /// <inheritdoc />
    protected override bool TryComputeLength(out long length)
    {
        // Compressed length is unknown until serialization; force chunked transfer.
        length = -1;
        return false;
    }

    /// <inheritdoc />
    protected async override Task SerializeToStreamAsync(Stream stream, TransportContext context)
    {
        // leaveOpen: the response stream is owned by the caller and must survive
        // disposal of the GZipStream; the wrapped content is disposed here.
        using (_content)
        using (var compressedStream = new GZipStream(stream, CompressionMode.Compress, leaveOpen: true))
        {
            await _content.CopyToAsync(compressedStream);
        }
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine/Filters/UnsupportedMediaTypeFilter.cs:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Microsoft.AspNetCore.Mvc.Filters;
using Spark.Engine.Core;
using Spark.Engine.Extensions;
using System.Linq;

namespace Spark.Engine.Filters;

/// <summary>
/// Rejects requests whose Accept or Content-Type header names no supported FHIR
/// mime type (406 / 415); raw binary requests are exempt.
/// </summary>
internal class UnsupportedMediaTypeFilter : IActionFilter
{
    /// <inheritdoc />
    public void OnActionExecuted(ActionExecutedContext context)
    {

    }

    /// <inheritdoc />
    public void OnActionExecuting(ActionExecutingContext context)
    {
        var request = context.HttpContext.Request;

        if (request.IsRawBinaryRequest()) return;

        if (request.Headers.ContainsKey("Accept"))
        {
            var acceptHeader = request.Headers["Accept"].ToString();
            if (!FhirMediaType.SupportedMimeTypes.Any(mimeType => acceptHeader.Contains(mimeType)))
            {
                throw Error.NotAcceptable();
            }
        }

        if (context.HttpContext.Request.ContentType != null)
        {
            if (!FhirMediaType.SupportedMimeTypes.Any(mimeType => context.HttpContext.Request.ContentType.Contains(mimeType)))
            {
                throw Error.UnsupportedMediaType();
            }
        }
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine/Formatters/BinaryInputFormatter.cs:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2019-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Hl7.Fhir.Model;
using Microsoft.AspNetCore.Mvc.Formatters;
using Microsoft.Extensions.Primitives;
using Spark.Engine.Core;
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;

namespace Spark.Engine.Formatters;

/// <summary>
/// Reads an application/octet-stream request body into a FHIR Binary resource.
/// The original Content-Type is expected in the X-Content-Type header (put there
/// by FormatTypeHandler before the body reaches this formatter).
/// </summary>
public class BinaryInputFormatter : InputFormatter
{
    public BinaryInputFormatter()
    {
        SupportedMediaTypes.Add(FhirMediaType.OctetStreamMimeType);
    }

    protected override bool CanReadType(Type type)
    {
        return type == typeof(Resource);
    }

    public override async Task<InputFormatterResult> ReadRequestBodyAsync(InputFormatterContext context)
    {
        if (!context.HttpContext.Request.Headers.TryGetValue("X-Content-Type", out StringValues contentTypeHeaderValues))
            throw Error.BadRequest("Binary POST and PUT must provide a Content-Type header.");

        string contentType = contentTypeHeaderValues.FirstOrDefault();
        MemoryStream memoryStream = new MemoryStream();
        await context.HttpContext.Request.Body.CopyToAsync(memoryStream);
        Binary binary = new Binary
        {
            ContentType = contentType,
            Data = memoryStream.ToArray()
        };

        return await InputFormatterResult.SuccessAsync(binary);
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine/Formatters/BinaryOutputFormatter.cs:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2019-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Microsoft.AspNetCore.Http;
using FhirModel = Hl7.Fhir.Model;
using Microsoft.AspNetCore.Mvc.Formatters;
using Spark.Engine.Core;
using Spark.Engine.Extensions;
using System;
using System.IO;
using System.Threading.Tasks;

namespace Spark.Engine.Formatters;

/// <summary>
/// Writes a FHIR Binary resource (possibly wrapped in a FhirResponse) back to the
/// client as a raw attachment with the Binary's own content type.
/// </summary>
public class BinaryOutputFormatter : OutputFormatter
{
    public BinaryOutputFormatter()
    {
        SupportedMediaTypes.Add(FhirMediaType.OctetStreamMimeType);
    }

    protected override bool CanWriteType(Type type)
    {
        return typeof(FhirModel.Binary).IsAssignableFrom(type) || typeof(FhirResponse).IsAssignableFrom(type);
    }

    public override async Task WriteResponseBodyAsync(OutputFormatterWriteContext context)
    {
        if (typeof(FhirModel.Binary).IsAssignableFrom(context.ObjectType) || typeof(FhirResponse).IsAssignableFrom(context.ObjectType))
        {
            FhirModel.Binary binary = null;
            if (typeof(FhirResponse).IsAssignableFrom(context.ObjectType))
            {
                FhirResponse response = (FhirResponse)context.Object;

                // Copy status code and headers from the wrapped response first.
                context.HttpContext.Response.AcquireHeaders(response);
                context.HttpContext.Response.StatusCode = (int)response.StatusCode;

                binary = response.Resource as FhirModel.Binary;
            }
            // A FhirResponse that does not carry a Binary produces an empty body.
            if (binary == null) return;

            context.HttpContext.Response.Headers.Append(HttpHeaderName.CONTENT_DISPOSITION, "attachment");
            context.HttpContext.Response.ContentType = binary.ContentType;

            Stream stream = new MemoryStream(binary.Data);
            await stream.CopyToAsync(context.HttpContext.Response.Body);
        }
    }
}
--------------------------------------------------------------------------------
/src/Spark.Engine/Formatters/FhirOutputFormatterSelector.cs:
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Mvc; 8 | using Microsoft.AspNetCore.Mvc.Formatters; 9 | using Microsoft.AspNetCore.Mvc.Infrastructure; 10 | using Microsoft.Extensions.Logging; 11 | using Microsoft.Extensions.Options; 12 | using Spark.Engine.Extensions; 13 | using System.Collections.Generic; 14 | using System.Linq; 15 | 16 | namespace Spark.Engine.Formatters; 17 | 18 | internal class FhirOutputFormatterSelector : DefaultOutputFormatterSelector 19 | { 20 | private IOptions _options; 21 | public FhirOutputFormatterSelector(IOptions options, ILoggerFactory loggerFactory) : base(options, loggerFactory) 22 | { 23 | _options = options; 24 | } 25 | 26 | public override IOutputFormatter SelectFormatter(OutputFormatterCanWriteContext context, IList formatters, MediaTypeCollection contentTypes) 27 | { 28 | if(context.IsRawBinaryRequest(context.ObjectType)) 29 | { 30 | IOutputFormatter formatter = formatters.Where(f => f is BinaryOutputFormatter).SingleOrDefault(); 31 | if (formatter != null) return formatter; 32 | formatter = _options.Value.OutputFormatters.Where(f => f is BinaryOutputFormatter).SingleOrDefault(); 33 | if (formatter != null) return formatter; 34 | } 35 | 36 | return base.SelectFormatter(context, formatters, contentTypes); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/Spark.Engine/Formatters/JsonArrayPool.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2020-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Newtonsoft.Json; 8 | using System; 9 | using System.Buffers; 10 | 11 | namespace Spark.Engine.Formatters; 12 | 13 | internal class JsonArrayPool : IArrayPool 14 | { 15 | private readonly ArrayPool _inner; 16 | 17 | 
/// <summary>
/// Adapts the BCL <see cref="ArrayPool{T}"/> of char to Newtonsoft.Json's
/// IArrayPool&lt;char&gt; so the serializer can rent buffers instead of allocating.
/// </summary>
internal class JsonArrayPool : IArrayPool<char>
{
    private readonly ArrayPool<char> _inner;

    public JsonArrayPool(ArrayPool<char> inner)
    {
        _inner = inner ?? throw new ArgumentNullException(nameof(inner));
    }

    /// <summary>Rents a buffer of at least <paramref name="minimumLength"/> chars.</summary>
    public char[] Rent(int minimumLength)
    {
        return _inner.Rent(minimumLength);
    }

    /// <summary>Returns a previously rented buffer; null is rejected explicitly.</summary>
    public void Return(char[] array)
    {
        if (array == null) throw new ArgumentNullException(nameof(array));

        _inner.Return(array);
    }
}
/// <summary>
/// ASP.NET Core middleware that (1) translates the FHIR "_format" query parameter
/// into an Accept header, and (2) coerces raw binary POST/PUT bodies to
/// application/octet-stream while preserving the original content type in a
/// custom header.
/// </summary>
public class FormatTypeHandler
{
    private readonly RequestDelegate _next;

    public FormatTypeHandler(RequestDelegate next)
    {
        _next = next;
    }

    public async Task InvokeAsync(HttpContext context)
    {
        HttpRequest request = context.Request;

        string formatParameter = request.GetParameter("_format");
        if (!string.IsNullOrEmpty(formatParameter))
        {
            ResourceFormat requestedFormat = ContentType.GetResourceFormatFromFormatParam(formatParameter);
            if (requestedFormat != ResourceFormat.Unknown)
            {
                // "_format" wins over whatever Accept header the client sent.
                if (request.Headers.ContainsKey(HttpHeaderName.ACCEPT))
                {
                    request.Headers.Remove(HttpHeaderName.ACCEPT);
                }

                string acceptValue = requestedFormat == ResourceFormat.Json
                    ? ContentType.JSON_CONTENT_HEADER
                    : ContentType.XML_CONTENT_HEADER;
                request.Headers.Append(HttpHeaderName.ACCEPT, new StringValues(acceptValue));
            }
        }

        if (request.IsRawBinaryPostOrPutRequest()
            && !HttpRequestExtensions.IsContentTypeHeaderFhirMediaType(request.ContentType))
        {
            // Stash the caller's content type before overwriting it, so the
            // binary handler downstream can still see what was actually sent.
            request.Headers.Append(HttpHeaderName.X_CONTENT_TYPE, request.ContentType);
            request.ContentType = FhirMediaType.OctetStreamMimeType;
        }

        await _next(context);
    }
}
/// <summary>
/// Abstraction over the search index: querying, reverse includes, and wiping.
/// NOTE(review): generic type arguments were stripped during extraction; the
/// Task&lt;T&gt;/IList&lt;T&gt; parameters below are reconstructed from the Spark.Engine
/// core types (SearchResults, Key, IKey) — confirm against the repository.
/// </summary>
public interface IFhirIndex
{
    /// <summary>Removes all entries from the index.</summary>
    Task CleanAsync();

    /// <summary>Runs a search for the given resource type and parameters.</summary>
    Task<SearchResults> SearchAsync(string resource, SearchParams searchCommand);

    /// <summary>Like SearchAsync, but expects and returns exactly one matching key.</summary>
    Task<Key> FindSingleAsync(string resource, SearchParams searchCommand);

    /// <summary>Resolves _revinclude expressions against the given keys.</summary>
    Task<SearchResults> GetReverseIncludesAsync(IList<IKey> keys, IList<string> revIncludes);
}
/// <summary>
/// Supplies new logical ids and version ids for resources being stored.
/// </summary>
public interface IIdentityGenerator
{
    /// <summary>Returns the id to assign to the given newly created resource.</summary>
    string NextResourceId(Resource resource);

    /// <summary>Returns the next version id for the resource with the given identifier.</summary>
    string NextVersionId(string resourceIdentifier);

    /// <summary>Returns the next version id, disambiguated by resource type.</summary>
    string NextVersionId(string resourceType, string resourceIdentifier);
}
/// <summary>
/// ETW event source for Spark engine diagnostics (service calls, unsupported
/// features, invalid elements). Singleton, accessed via <see cref="Log"/>.
/// </summary>
[EventSource(Name = "Spark-Engine")]
public sealed class SparkEngineEventSource : EventSource
{
    // FIX: the Lazy<T> type argument was lost in extraction; restored so the
    // lazily-created singleton compiles.
    private static readonly Lazy<SparkEngineEventSource> _instance =
        new Lazy<SparkEngineEventSource>(() => new SparkEngineEventSource());

    public class Keywords
    {
        public const EventKeywords ServiceMethod = (EventKeywords)1;
        public const EventKeywords Invalid = (EventKeywords)2;
        public const EventKeywords Unsupported = (EventKeywords)4;
        public const EventKeywords Tracing = (EventKeywords)8;
    }

    public class Tasks
    {
        public const EventTask ServiceMethod = (EventTask)1;
    }

    private SparkEngineEventSource() { }

    public static SparkEngineEventSource Log { get { return _instance.Value; } }

    [Event(1, Message = "Service call: {0}",
        Level = EventLevel.Verbose, Keywords = Keywords.ServiceMethod)]
    internal void ServiceMethodCalled(string methodName)
    {
        WriteEvent(1, methodName);
    }

    // NOTE(review): the arguments are passed as (feature, methodName), so the
    // message renders "Not supported: <feature> in <methodName>" — presumably
    // intentional, but the parameter order reads backwards; confirm.
    [Event(2, Message = "Not supported: {0} in {1}",
        Level = EventLevel.Verbose, Keywords = Keywords.Unsupported)]
    internal void UnsupportedFeature(string methodName, string feature)
    {
        WriteEvent(2, feature, methodName);
    }

    [Event(4, Message = "Invalid Element",
        Level = EventLevel.Verbose, Keywords = Keywords.Unsupported)]
    internal void InvalidElement(string resourceID, string element, string message)
    {
        WriteEvent(4, message, resourceID, element);
    }
}
/// <summary>
/// Thrown when an operation is attempted while maintenance mode is enabled;
/// carries HTTP 503 Service Unavailable so the error handler maps it directly.
/// </summary>
internal class MaintenanceModeEnabledException : SparkException
{
    public MaintenanceModeEnabledException() : base(HttpStatusCode.ServiceUnavailable)
    {
    }
}
/// <summary>
/// Holds the information present in a CompartmentDefinition resource.
/// This is a (hopefully) temporary solution, since the Hl7.Fhir api does not
/// contain CompartmentDefinition yet.
/// </summary>
public class CompartmentInfo
{
    /// <summary>The resource type this compartment is defined for.</summary>
    public ResourceType ResourceType { get; set; }

    // FIX: restored the List<string> type arguments lost in extraction; the
    // backing list is also readonly since it is only ever mutated in place.
    private readonly List<string> _revIncludes = new List<string>();

    /// <summary>Reverse-include expressions registered for this compartment.</summary>
    public List<string> ReverseIncludes { get { return _revIncludes; } }

    public CompartmentInfo(ResourceType resourceType)
    {
        ResourceType = resourceType;
    }

    /// <summary>Registers a single reverse-include expression.</summary>
    public void AddReverseInclude(string revInclude)
    {
        _revIncludes.Add(revInclude);
    }

    /// <summary>Registers a batch of reverse-include expressions.</summary>
    public void AddReverseIncludes(IEnumerable<string> revIncludes)
    {
        _revIncludes.AddRange(revIncludes);
    }
}
/// <summary>
/// A named node in the index tree; its values may be leaf expressions or
/// nested IndexValues.
/// </summary>
public class IndexValue : ValueExpression
{
    // FIX: restored the List<Expression> type arguments lost in extraction.
    private readonly List<Expression> _values;

    public IndexValue()
    {
        _values = new List<Expression>();
    }

    public IndexValue(string name) : this()
    {
        Name = name;
    }

    public IndexValue(string name, List<Expression> values) : this(name)
    {
        Values = values;
    }

    public IndexValue(string name, params Expression[] values) : this(name)
    {
        Values = values.ToList();
    }

    public string Name { get; set; }

    public List<Expression> Values
    {
        get { return _values; }
        // NOTE(review): assignment APPENDS to the existing list instead of
        // replacing it. Callers appear to rely on this accumulate-on-assign
        // behavior, so it is deliberately preserved — confirm before changing.
        set { _values.AddRange(value); }
    }

    /// <summary>Appends a single value.</summary>
    public void AddValue(Expression value)
    {
        _values.Add(value);
    }
}

public static class IndexValueExtensions
{
    /// <summary>Returns only the nested IndexValue children of this node.</summary>
    public static IEnumerable<IndexValue> IndexValues(this IndexValue root)
    {
        return root.Values.OfType<IndexValue>();
    }
}
/// <summary>
/// Names of the internal (non-search-parameter) fields stored with every
/// index entry. The "internal_" prefix keeps them from colliding with
/// FHIR-defined search parameter names.
/// </summary>
public static class IndexFieldNames
{
    public const string
        // Internally stored search fields
        ID = "internal_id",
        JUSTID = "internal_justid",
        SELFLINK = "internal_selflink",
        CONTAINER = "internal_container",
        RESOURCE = "internal_resource",
        LEVEL = "internal_level",
        TAG = "internal_tag",
        TAGSCHEME = "scheme",
        TAGTERM = "term",
        TAGLABEL = "label",
        LASTUPDATED = "lastupdated";

    // Note: the tag sub-fields (scheme/term/label) are intentionally absent here.
    public static string[] All = { ID, JUSTID, SELFLINK, CONTAINER, RESOURCE, LEVEL, TAG, LASTUPDATED };
}
/// <summary>
/// Parsed form of a _revinclude expression: the resource type before the first
/// dot and the search path after it.
/// </summary>
public class ReverseInclude
{
    // FIX: the named capture groups were mangled in the source; restored from the
    // documented format "ResourceType.searchParameter[.searchParameter]*"
    // (split on the first dot).
    private static readonly Regex _pattern = new Regex(@"(?<resourcetype>[^\.]+)\.(?<searchpath>.*)");

    public string ResourceType { get; set; }
    public string SearchPath { get; set; }

    /// <summary>
    /// Expected format: ResourceType.searchParameter[.searchParameter]*
    /// </summary>
    /// <returns>
    /// ReverseInclude instance with ResourceType is everything before the first dot,
    /// and SearchPath everything after it.
    /// </returns>
    public static ReverseInclude Parse(string reverseInclude)
    {
        if (reverseInclude == null)
        {
            // FIX: the message string was being passed as the paramName argument.
            throw new ArgumentNullException(nameof(reverseInclude));
        }

        Match match = _pattern.Match(reverseInclude);
        // FIX: Groups.Count reflects the pattern's group count regardless of
        // whether the match succeeded, so the old "Count < 2" test never fired
        // and a bad input crashed later with an index error.
        if (!match.Success)
        {
            throw new ArgumentException(string.Format("reverseInclude '{0}' does not adhere to the format 'ResourceType.searchParameter[.searchParameter]*'", reverseInclude));
        }

        return new ReverseInclude
        {
            ResourceType = match.Groups["resourcetype"].Captures[0].Value,
            SearchPath = match.Groups["searchpath"].Captures[0].Value,
        };
    }
}
/// <summary>
/// String helpers for the search-parameter parser.
/// </summary>
public static class StringExtensions
{
    /// <summary>
    /// Splits on <paramref name="separator"/> except where the separator is
    /// preceded by a backslash escape (the backslash is kept in the output).
    /// </summary>
    public static string[] SplitNotEscaped(this string str, char separator)
    {
        var values = new List<string>();
        // FIX: accumulate into a StringBuilder instead of repeated string
        // concatenation, which was O(n^2) over the input length.
        var current = new System.Text.StringBuilder();
        bool seenEscape = false;

        foreach (var ch in str)
        {
            if (ch == '\\')
            {
            // Defer the backslash; note consecutive backslashes collapse to
            // one pending escape, matching the original behavior.
                seenEscape = true;
                continue;
            }

            if (ch == separator && !seenEscape)
            {
                values.Add(current.ToString());
                current.Clear();
                continue;
            }

            if (seenEscape)
            {
                current.Append('\\');
                seenEscape = false;
            }

            current.Append(ch);
        }

        values.Add(current.ToString());

        return values.ToArray();
    }

    /// <summary>
    /// Splits on the FIRST occurrence of <paramref name="separator"/>;
    /// Item2 is null when the separator does not occur.
    /// </summary>
    public static Tuple<string, string> SplitLeft(this string str, char separator)
    {
        var position = str.IndexOf(separator);

        if (position == -1)
            // Nothing to split.
            return Tuple.Create<string, string>(str, null);

        var key = str[..position];
        var value = str[(position + 1)..];
        return Tuple.Create(key, value);
    }
}
/// <summary>
/// A comma-separated choice of values in a search parameter
/// (e.g. ?status=a,b means "a or b").
/// </summary>
public class ChoiceValue : ValueExpression
{
    private const char VALUE_SEPARATOR = ',';

    public ChoiceValue(ValueExpression[] choices)
    {
        Choices = choices ?? throw Error.ArgumentNull("choices");
    }

    public ChoiceValue(IEnumerable<ValueExpression> choices)
    {
        Choices = choices == null
            ? throw Error.ArgumentNull("choices")
            : choices.ToArray();
    }

    public ValueExpression[] Choices { get; }

    public override string ToString()
    {
        IEnumerable<string> values = Choices.Select(v => v.ToString());
        return string.Join(VALUE_SEPARATOR.ToString(), values);
    }

    /// <summary>Parses a comma-separated (escape-aware) list of values.</summary>
    public static ChoiceValue Parse(string text)
    {
        // FIX: the null-check built the exception but never threw it, so a null
        // input fell through to a NullReferenceException in SplitNotEscaped.
        if (text == null)
            throw Error.ArgumentNull("text");

        string[] values = text.SplitNotEscaped(VALUE_SEPARATOR);

        return new ChoiceValue(values.Select(SplitIntoComposite));
    }

    private static ValueExpression SplitIntoComposite(string text)
    {
        CompositeValue composite = CompositeValue.Parse(text);

        // If there's only one component, this really was a single value
        return composite.Components.Length == 1
            ? composite.Components[0]
            : composite;
    }
}
/// <summary>
/// A '$'-separated composite search value (e.g. component-code$component-value).
/// </summary>
public class CompositeValue : ValueExpression
{
    private const char TUPLE_SEPARATOR = '$';

    public CompositeValue(ValueExpression[] components)
    {
        Components = components ?? throw Error.ArgumentNull("components");
    }

    public CompositeValue(IEnumerable<ValueExpression> components)
    {
        // FIX: the null check was performed twice (a separate if-throw followed
        // by the same check in the conditional); one check suffices.
        Components = components == null
            ? throw Error.ArgumentNull("components")
            : components.ToArray();
    }

    public ValueExpression[] Components { get; }

    public override string ToString()
    {
        IEnumerable<string> values = Components.Select(v => v.ToString());
        return string.Join(TUPLE_SEPARATOR.ToString(), values);
    }

    /// <summary>Parses a '$'-separated (escape-aware) composite value.</summary>
    public static CompositeValue Parse(string text)
    {
        if (text == null) throw Error.ArgumentNull("text");

        string[] values = text.SplitNotEscaped(TUPLE_SEPARATOR);

        return new CompositeValue(values.Select(v => new UntypedValue(v)));
    }
}
/// <summary>
/// DateTimeValue is always specified up to the second.
/// Spark uses it for the boundaries of a period. So fuzzy dates as in FhirDateTime
/// (just year + month for example) get translated in an upper- and lowerbound in
/// DateTimeValues. These are used for indexing.
/// </summary>
public class DateTimeValue : ValueExpression
{
    public DateTimeValue(DateTimeOffset value)
    {
        // The full instant is stored as-is; unlike DateValue, the time
        // component is deliberately kept. (An earlier comment here claimed the
        // time was stripped — it never was.)
        Value = value;
    }

    public DateTimeValue(string datetime)
    {
        if (!FhirDateTime.IsValidValue(datetime))
            throw Error.Argument("datetime", $"The string [{datetime}] cannot be translated to a DateTimeValue");

        // Missing offsets are interpreted as UTC (TimeSpan.Zero).
        FhirDateTime fdt = new(datetime);
        Value = fdt.ToDateTimeOffset(TimeSpan.Zero);
    }

    public DateTimeOffset Value { get; }

    public override string ToString() => new FhirDateTime(Value).ToString();

    public static DateTimeValue Parse(string text) => new(text);
}
/// <summary>
/// A decimal search value, serialized via the FHIR primitive converter so the
/// textual form matches FHIR's decimal representation.
/// </summary>
public class NumberValue : ValueExpression
{
    public NumberValue(decimal value)
    {
        Value = value;
    }

    public decimal Value { get; }

    // FIX: restored the ConvertTo<T> type arguments lost in extraction.
    public override string ToString() => PrimitiveTypeConverter.ConvertTo<string>(Value);

    public static NumberValue Parse(string text) => new(PrimitiveTypeConverter.ConvertTo<decimal>(text));
}
/// <summary>
/// A quantity search value in the form number|namespace|unit, with '|' and
/// other separators escape-aware.
/// </summary>
public class QuantityValue : ValueExpression
{
    public QuantityValue(decimal number, string unit)
    {
        Number = number;
        Unit = unit;
    }

    public QuantityValue(decimal number, string ns, string unit)
    {
        Number = number;
        Unit = unit;
        Namespace = ns;
    }

    public decimal Number { get; }

    public string Namespace { get; }

    public string Unit { get; }

    public override string ToString()
    {
        string ns = Namespace ?? string.Empty;
        // FIX: restored the ConvertTo<T> type argument lost in extraction.
        return
            $"{PrimitiveTypeConverter.ConvertTo<string>(Number)}|{StringValue.EscapeString(ns)}|{StringValue.EscapeString(Unit)}";
    }

    /// <summary>
    /// Parses "number|namespace|unit". Number and unit are required; the
    /// namespace may be empty (yielding null).
    /// </summary>
    public static QuantityValue Parse(string text)
    {
        if (text == null) throw Error.ArgumentNull("text");

        string[] triple = text.SplitNotEscaped('|');

        if (triple.Length != 3)
            throw Error.Argument("text", "Quantity needs to have three parts separated by '|'");

        if (triple[0] == string.Empty)
            throw new FormatException("Quantity needs to specify a number");

        decimal number = PrimitiveTypeConverter.ConvertTo<decimal>(triple[0]);
        string ns = triple[1] != string.Empty ? StringValue.UnescapeString(triple[1]) : null;

        if (triple[2] == string.Empty)
            throw new FormatException("Quantity needs to specify a unit");

        string unit = StringValue.UnescapeString(triple[2]);

        return new QuantityValue(number, ns, unit);
    }
}
/// <summary>
/// A reference search value: either a plain FHIR id or an absolute URL.
/// </summary>
public class ReferenceValue : ValueExpression
{
    public ReferenceValue(string value)
    {
        if (!Uri.IsWellFormedUriString(value, UriKind.Absolute) &&
            !Id.IsValidValue(value))
            // FIX: the argument name said "text" but this constructor's
            // parameter is "value".
            throw Error.Argument(nameof(value), "Reference is not a valid Id nor a valid absolute Url");

        Value = value;
    }

    public string Value { get; }

    public override string ToString() => StringValue.EscapeString(Value);

    public static ReferenceValue Parse(string text) => new(StringValue.UnescapeString(text));
}
/// <summary>
/// A string search value; separator characters ('$', ',', '|') and the
/// backslash itself are backslash-escaped in the serialized form.
/// </summary>
public class StringValue : ValueExpression
{
    public StringValue(string value)
    {
        Value = value;
    }

    public string Value { get; }

    public override string ToString() => EscapeString(Value);

    public static StringValue Parse(string text) => new(UnescapeString(text));

    /// <summary>Escapes backslash and separator characters; null passes through.</summary>
    public static string EscapeString(string value)
    {
        // Backslash must be handled first so the escapes added for the other
        // characters are not themselves re-escaped.
        return value?
            .Replace(@"\", @"\\")
            .Replace(@"$", @"\$")
            .Replace(@",", @"\,")
            .Replace(@"|", @"\|");
    }

    /// <summary>Reverses EscapeString; null passes through.</summary>
    public static string UnescapeString(string value)
    {
        // Mirror of EscapeString: the backslash escape is resolved last.
        return value?
            .Replace(@"\|", @"|")
            .Replace(@"\,", @",")
            .Replace(@"\$", @"$")
            .Replace(@"\\", @"\");
    }
}
string.Empty; 26 | return $"{StringValue.EscapeString(ns)}|{StringValue.EscapeString(Value)}"; 27 | } 28 | 29 | public static TokenValue Parse(string text) 30 | { 31 | if (text == null) throw Error.ArgumentNull("text"); 32 | 33 | string[] pair = text.SplitNotEscaped('|'); 34 | 35 | if (pair.Length > 2) 36 | throw Error.Argument("text", "Token cannot have more than two parts separated by '|'"); 37 | 38 | bool hasNamespace = pair.Length == 2; 39 | 40 | string pair0 = StringValue.UnescapeString(pair[0]); 41 | 42 | if (!hasNamespace) 43 | return new TokenValue { Value = pair0, AnyNamespace = true }; 44 | 45 | string pair1 = StringValue.UnescapeString(pair[1]); 46 | 47 | if (string.IsNullOrEmpty(pair0)) 48 | return new TokenValue { Value = pair1, AnyNamespace = false }; 49 | 50 | return string.IsNullOrEmpty(pair1) 51 | ? new TokenValue { Namespace = pair0, AnyNamespace = false } 52 | : new TokenValue { Namespace = pair0, Value = pair1, AnyNamespace = false }; 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/Spark.Engine/Search/Types/UntypedValue.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | 9 | namespace Spark.Engine.Search.Types; 10 | 11 | public class UntypedValue : ValueExpression 12 | { 13 | public UntypedValue(string value) 14 | { 15 | Value = value; 16 | } 17 | 18 | public string Value { get; } 19 | 20 | public override string ToString() => Value; 21 | 22 | public NumberValue AsNumberValue() => NumberValue.Parse(Value); 23 | 24 | public DateValue AsDateValue() => DateValue.Parse(Value); 25 | 26 | public FhirDateTime AsDateTimeValue() => new(Value); 27 | 28 | public StringValue AsStringValue() => StringValue.Parse(Value); 29 | 30 | public TokenValue AsTokenValue() => TokenValue.Parse(Value); 31 | 32 | public QuantityValue 
AsQuantityValue() => QuantityValue.Parse(Value); 33 | 34 | public ReferenceValue AsReferenceValue() => ReferenceValue.Parse(Value); 35 | } 36 | -------------------------------------------------------------------------------- /src/Spark.Engine/Search/Types/ValueExpression.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * Copyright (c) 2020-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | namespace Spark.Engine.Search.Types; 9 | 10 | public abstract class ValueExpression : Expression 11 | { 12 | public string ToUnescapedString() 13 | { 14 | ValueExpression value = this; 15 | if (value is not UntypedValue untyped) 16 | return value.ToString(); 17 | 18 | value = untyped.AsStringValue(); 19 | return StringValue.UnescapeString(value.ToString()); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/ConformanceService.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Firely 3 | * Copyright (c) 2018-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using Hl7.Fhir.Model; 9 | using Spark.Engine.Core; 10 | 11 | namespace Spark.Engine.Service.FhirServiceExtensions; 12 | 13 | public class CapabilityStatementService : ICapabilityStatementService 14 | { 15 | private readonly ILocalhost _localhost; 16 | 17 | public CapabilityStatementService(ILocalhost localhost) 18 | { 19 | _localhost = localhost; 20 | } 21 | 22 | public CapabilityStatement GetSparkCapabilityStatement(string sparkVersion) 23 | { 24 | return CapabilityStatementBuilder.GetSparkCapabilityStatement(sparkVersion, _localhost); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/DeleteManipulationOperation.cs: 
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2020-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System;
using System.Collections.Generic;
using Hl7.Fhir.Model;
using Hl7.Fhir.Rest;
using Spark.Engine.Core;

namespace Spark.Engine.Service.FhirServiceExtensions;

public static partial class ResourceManipulationOperationFactory
{
    /// <summary>
    /// Turns a (possibly conditional) delete request into one DELETE entry per matched key.
    /// </summary>
    private class DeleteManipulationOperation : ResourceManipulationOperation
    {
        public DeleteManipulationOperation(Resource resource, IKey operationKey, SearchResults searchResults, SearchParams searchCommand = null)
            : base(resource, operationKey, searchResults, searchCommand)
        {
        }

        /// <summary>Reads the search Uri from a bundle entry's request, or null when there is none.</summary>
        public static Uri ReadSearchUri(Bundle.EntryComponent entry)
        {
            return entry.Request != null
                ? new Uri(entry.Request.Url, UriKind.RelativeOrAbsolute)
                : null;
        }

        // NOTE(review): element type reconstructed as Entry — the flattened export
        // dropped generic arguments; the yields below produce Entry instances.
        protected override IEnumerable<Entry> ComputeEntries()
        {
            if (SearchResults != null)
            {
                // Conditional delete: one DELETE per matched key literal.
                foreach (var localKeyValue in SearchResults)
                {
                    yield return Entry.DELETE(Key.ParseOperationPath(localKeyValue), DateTimeOffset.UtcNow);
                }
            }
            else
            {
                yield return Entry.DELETE(OperationKey, DateTimeOffset.UtcNow);
            }
        }
    }
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/GetManipulationOperation.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Hl7.Fhir.Model;
using Hl7.Fhir.Rest;
using Spark.Engine.Core;
using System;
using System.Collections.Generic;

namespace Spark.Engine.Service.FhirServiceExtensions;

/// <summary>
/// Turns a (possibly conditional) read request into one GET entry per matched key.
/// </summary>
public class GetManipulationOperation : ResourceManipulationOperation
{
    public GetManipulationOperation(Resource resource, IKey operationKey, SearchResults searchResults, SearchParams searchCommand = null)
        : base(resource, operationKey, searchResults, searchCommand)
    {
    }

    /// <summary>Reads the search Uri from a bundle entry's request, or null when there is none.</summary>
    public static Uri ReadSearchUri(Bundle.EntryComponent entry)
    {
        return entry.Request != null
            ? new Uri(entry.Request.Url, UriKind.RelativeOrAbsolute)
            : null;
    }

    // NOTE(review): element type reconstructed as Entry — see DeleteManipulationOperation.
    protected override IEnumerable<Entry> ComputeEntries()
    {
        if (SearchResults != null)
        {
            foreach (string localKeyLiteral in SearchResults)
            {
                yield return Entry.Create(Bundle.HTTPVerb.GET, Key.ParseOperationPath(localKeyLiteral));
            }
        }
        else
        {
            yield return Entry.Create(Bundle.HTTPVerb.GET, OperationKey);
        }
    }
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/HistoryService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2018-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Threading.Tasks;
using Spark.Engine.Core;
using Spark.Engine.Store.Interfaces;

namespace Spark.Engine.Service.FhirServiceExtensions;

/// <summary>
/// Thin delegation layer over <see cref="IHistoryStore"/> for the _history interactions.
/// </summary>
public class HistoryService : IHistoryService
{
    // readonly: the store is assigned once in the constructor and never replaced.
    private readonly IHistoryStore _historyStore;

    public HistoryService(IHistoryStore historyStore)
    {
        _historyStore = historyStore;
    }

    // NOTE(review): Task<Snapshot> reconstructed — the flattened export dropped generic
    // arguments; 'return await' of the store call implies a value-returning Task.
    public async Task<Snapshot> HistoryAsync(string typename, HistoryParameters parameters)
    {
        return await _historyStore.HistoryAsync(typename, parameters).ConfigureAwait(false);
    }

    public async Task<Snapshot> HistoryAsync(IKey key, HistoryParameters parameters)
    {
        return await _historyStore.HistoryAsync(key, parameters).ConfigureAwait(false);
    }

    public async Task<Snapshot> HistoryAsync(HistoryParameters parameters)
    {
        return await _historyStore.HistoryAsync(parameters).ConfigureAwait(false);
    }
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/ICapabilityStatementService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2018-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Hl7.Fhir.Model;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface ICapabilityStatementService : IFhirServiceExtension
{
    /// <summary>Builds the server's CapabilityStatement for the given Spark version.</summary>
    CapabilityStatement GetSparkCapabilityStatement(string sparkVersion);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/IFhirServiceExtension.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

namespace Spark.Engine.Service.FhirServiceExtensions;

/// <summary>Marker interface for pluggable FHIR service extensions.</summary>
public interface IFhirServiceExtension
{
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/IHistoryService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Threading.Tasks;
using Spark.Engine.Core;

namespace Spark.Engine.Service.FhirServiceExtensions;

internal interface IHistoryService : IFhirServiceExtension
{
    // NOTE(review): Task<Snapshot> reconstructed from the HistoryService implementation;
    // the flattened export dropped generic arguments.
    Task<Snapshot> HistoryAsync(string typename, HistoryParameters parameters);
    Task<Snapshot> HistoryAsync(IKey key, HistoryParameters parameters);
    Task<Snapshot> HistoryAsync(HistoryParameters parameters);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/IIndexBuildProgressReporter.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2020-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Threading.Tasks;

namespace Spark.Engine.Service.FhirServiceExtensions;

/// <summary>Receives progress callbacks while a search index is being rebuilt.</summary>
public interface IIndexBuildProgressReporter
{
    Task ReportProgressAsync(int progress, string message);

    Task ReportErrorAsync(string message);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/IIndexRebuildService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2020-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Threading.Tasks;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface IIndexRebuildService
{
    /// <summary>Rebuilds the search index, optionally reporting progress to <paramref name="reporter"/>.</summary>
    Task RebuildIndexAsync(IIndexBuildProgressReporter reporter = null);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/IInteractionHandler.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Spark.Engine.Core;
using System.Threading.Tasks;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface IInteractionHandler
{
    // NOTE(review): the flattened export dropped generic arguments; this may originally
    // have been Task<FhirResponse> — verify against the upstream file before compiling.
    Task HandleInteractionAsync(Entry interaction);
}
--------------------------------------------------------------------------------
/src/Spark.Engine/Service/FhirServiceExtensions/IPagingService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Threading.Tasks;
using Spark.Engine.Core;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface IPagingService : IFhirServiceExtension
{
    // NOTE(review): Task<ISnapshotPagination> reconstructed from the PagingService
    // implementation; the flattened export dropped generic arguments.
    Task<ISnapshotPagination> StartPaginationAsync(Snapshot snapshot);
    Task<ISnapshotPagination> StartPaginationAsync(string snapshotKey);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/IPatchService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Hl7.Fhir.Model;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface IPatchService : IFhirServiceExtension
{
    /// <summary>Applies a FHIRPath Patch (Parameters resource) to a resource and returns the result.</summary>
    Resource Apply(Resource resource, Parameters patch);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/IQueryService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Hl7.Fhir.Rest;
using Spark.Engine.Core;
using System.Collections.Generic;

namespace Spark.Engine.Service.FhirServiceExtensions;

/// <summary>
/// Use this interface at your own risk. This interface is highly likely to have breaking
/// changes until version 2.0 of Spark.
/// </summary>
public interface IQueryService : IFhirServiceExtension
{
    // NOTE(review): the element type was lost in the flattened export; FhirResponse is the
    // only plausible candidate importable from the file's usings — confirm against upstream.
    IAsyncEnumerable<FhirResponse> GetAsync(string type, SearchParams searchParams);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/IResourceStorageService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Collections.Generic;
using System.Threading.Tasks;
using Spark.Engine.Core;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface IResourceStorageService : IFhirServiceExtension
{
    // NOTE(review): generic arguments reconstructed from the ResourceStorageService
    // implementation; the flattened export dropped them.
    Task<Entry> GetAsync(IKey key);
    Task<Entry> AddAsync(Entry entry);
    Task<IList<Entry>> GetAsync(IEnumerable<string> localIdentifiers, string sortby = null);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/ISearchService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Threading.Tasks;
using Hl7.Fhir.Rest;
using Spark.Engine.Core;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface ISearchService : IFhirServiceExtension
{
    // NOTE(review): return types reconstructed — the flattened export dropped generic
    // arguments. Snapshot/SearchResults follow from the method names and sibling services;
    // the FindSingle* results are assumed to be IKey — verify against upstream.
    Task<Snapshot> GetSnapshotAsync(string type, SearchParams searchCommand);
    Task<Snapshot> GetSnapshotForEverythingAsync(IKey key);
    Task<IKey> FindSingleAsync(string type, SearchParams searchCommand);
    Task<IKey> FindSingleOrDefaultAsync(string type, SearchParams searchCommand);
    Task<SearchResults> GetSearchResultsAsync(string type, SearchParams searchCommand);
}
--------------------------------------------------------------------------------
/src/Spark.Engine/Service/FhirServiceExtensions/ISnapshotPagination.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System;
using System.Threading.Tasks;
using Hl7.Fhir.Model;
using Spark.Engine.Core;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface ISnapshotPagination
{
    // NOTE(review): generic arguments reconstructed as Task<Bundle> / Action<Entry> — the
    // flattened export dropped them; both types are the only candidates from this file's
    // usings. Verify against upstream before compiling.
    Task<Bundle> GetPageAsync(int? index = null, Action<Entry> transformElement = null);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/ISnapshotPaginationCalculator.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Collections.Generic;
using Spark.Engine.Core;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface ISnapshotPaginationCalculator
{
    // NOTE(review): IEnumerable<IKey> reconstructed from the SnapshotPaginationCalculator
    // implementation, which materializes a list of IKey.
    IEnumerable<IKey> GetKeysForPage(Snapshot snapshot, int? start = null);
    int GetIndexForLastPage(Snapshot snapshot);
    int? GetIndexForNextPage(Snapshot snapshot, int? start = null);
    int? GetIndexForPreviousPage(Snapshot snapshot, int? start = null);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/ISnapshotPaginationProvider.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Spark.Engine.Core;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface ISnapshotPaginationProvider
{
    /// <summary>Creates a pagination session over the given snapshot.</summary>
    ISnapshotPagination StartPagination(Snapshot snapshot);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/ITransactionService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2018-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Hl7.Fhir.Model;
using Spark.Engine.Core;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace Spark.Engine.Service.FhirServiceExtensions;

public interface ITransactionService : IFhirServiceExtension
{
    // NOTE(review): generic arguments reconstructed — the flattened export dropped them.
    // The Tuple-of-(Entry, response) shape follows from "Task>>" residue plus this file's
    // usings (System for Tuple); the response type is assumed to be FhirResponse from
    // Spark.Engine.Core. Verify against upstream before compiling.
    Task<FhirResponse> HandleTransactionAsync(ResourceManipulationOperation operation, IInteractionHandler interactionHandler);
    Task<IList<Tuple<Entry, FhirResponse>>> HandleTransactionAsync(Bundle bundle, IInteractionHandler interactionHandler);
    Task<IList<Tuple<Entry, FhirResponse>>> HandleTransactionAsync(IList<Entry> interactions, IInteractionHandler interactionHandler);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/PagingService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System;
using System.Threading.Tasks;
using Spark.Engine.Core;
using Spark.Engine.Store.Interfaces;

namespace Spark.Engine.Service.FhirServiceExtensions;

/// <summary>
/// Starts pagination sessions over snapshots, persisting them when a snapshot store
/// is available (stateless pagination otherwise).
/// </summary>
public class PagingService : IPagingService
{
    private readonly ISnapshotStore _snapshotstore;
    private readonly ISnapshotPaginationProvider _paginationProvider;

    public PagingService(ISnapshotStore snapshotstore, ISnapshotPaginationProvider paginationProvider)
    {
        _snapshotstore = snapshotstore;
        _paginationProvider = paginationProvider;
    }

    public async Task<ISnapshotPagination> StartPaginationAsync(Snapshot snapshot)
    {
        if (_snapshotstore != null)
        {
            await _snapshotstore.AddSnapshotAsync(snapshot).ConfigureAwait(false);
        }
        else
        {
            // Without a store the snapshot cannot be retrieved later; drop its id so
            // no stateful paging links are generated.
            snapshot.Id = null;
        }

        return _paginationProvider.StartPagination(snapshot);
    }

    public async Task<ISnapshotPagination> StartPaginationAsync(string snapshotkey)
    {
        if (_snapshotstore == null)
        {
            throw new NotSupportedException("Stateful pagination is not currently supported.");
        }
        return _paginationProvider.StartPagination(await _snapshotstore.GetSnapshotAsync(snapshotkey).ConfigureAwait(false));
    }
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/ResourceStorageService.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Spark.Engine.Core;
using Spark.Engine.Store.Interfaces;

namespace Spark.Engine.Service.FhirServiceExtensions;

/// <summary>
/// Wraps the FHIR store with internalize/externalize key translation on every read and write.
/// </summary>
public class ResourceStorageService : IResourceStorageService
{
    private readonly ITransfer _transfer;
    private readonly IFhirStore _fhirStore;

    public ResourceStorageService(ITransfer transfer, IFhirStore fhirStore)
    {
        _transfer = transfer;
        _fhirStore = fhirStore;
    }

    public async Task<Entry> GetAsync(IKey key)
    {
        var entry = await _fhirStore.GetAsync(key).ConfigureAwait(false);
        if (entry != null)
        {
            _transfer.Externalize(entry);
        }
        return entry;
    }

    public async Task<Entry> AddAsync(Entry entry)
    {
        if (entry.State != EntryState.Internal)
        {
            _transfer.Internalize(entry);
        }
        await _fhirStore.AddAsync(entry).ConfigureAwait(false);

        // Deletes have no stored representation to read back; echo the entry itself.
        Entry result = entry.IsDelete
            ? entry
            : await _fhirStore.GetAsync(entry.Key).ConfigureAwait(false);
        _transfer.Externalize(result);

        return result;
    }

    // NOTE(review): 'sortby' is accepted but currently ignored — results come back in
    // store order. Confirm whether sorting should be applied here or removed from the API.
    public async Task<IList<Entry>> GetAsync(IEnumerable<string> localIdentifiers, string sortby = null)
    {
        IList<Entry> results = await _fhirStore.GetAsync(localIdentifiers.Select(k => (IKey)Key.ParseOperationPath(k))).ConfigureAwait(false);
        _transfer.Externalize(results);
        return results;
    }
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/SnapshotPaginationCalculator.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System;
using System.Collections.Generic;
using System.Linq;
using Spark.Engine.Core;

namespace Spark.Engine.Service.FhirServiceExtensions;

/// <summary>
/// Pure page arithmetic over a snapshot: which keys belong to a page and where the
/// first/next/previous/last pages start.
/// </summary>
public class SnapshotPaginationCalculator : ISnapshotPaginationCalculator
{
    /// <summary>Page size used when the snapshot carries no explicit _count.</summary>
    public const int DEFAULT_PAGE_SIZE = 20;

    public IEnumerable<IKey> GetKeysForPage(Snapshot snapshot, int? start = null)
    {
        IEnumerable<string> keysInBundle = snapshot.Keys;
        if (start.HasValue)
        {
            keysInBundle = keysInBundle.Skip(start.Value);
        }
        return keysInBundle
            .Take(snapshot.CountParam ?? DEFAULT_PAGE_SIZE)
            .Select(k => (IKey)Key.ParseOperationPath(k))
            .ToList();
    }

    public int GetIndexForLastPage(Snapshot snapshot)
    {
        int countParam = snapshot.CountParam ?? DEFAULT_PAGE_SIZE;
        if (snapshot.Count <= countParam)
            return 0;

        int numberOfPages = snapshot.Count / countParam;
        // An exact multiple means the last page is the (n-1)th full page;
        // otherwise it is the trailing partial page.
        int lastPageIndex = (snapshot.Count % countParam == 0) ? numberOfPages - 1 : numberOfPages;
        return lastPageIndex * countParam;
    }

    public int? GetIndexForNextPage(Snapshot snapshot, int? start = null)
    {
        int countParam = snapshot.CountParam ?? DEFAULT_PAGE_SIZE;

        if (((start ?? 0) + countParam) >= snapshot.Count)
            return null;
        return (start ?? 0) + countParam;
    }

    public int? GetIndexForPreviousPage(Snapshot snapshot, int? start = null)
    {
        int countParam = snapshot.CountParam ?? DEFAULT_PAGE_SIZE;
        if (start.HasValue == false || start.Value == 0)
            return null;
        return Math.Max(0, start.Value - countParam);
    }
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/FhirServiceExtensions/SnapshotPaginationProvider.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2018-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Spark.Engine.Core;
using Spark.Engine.Interfaces;
using Spark.Engine.Store.Interfaces;

namespace Spark.Engine.Service.FhirServiceExtensions;

public class SnapshotPaginationProvider : ISnapshotPaginationProvider
{
    // FIX: _fhirIndex and _fhirStore were the only non-readonly fields; all five are
    // constructor-assigned and never mutated, so all are readonly now.
    private readonly IFhirIndex _fhirIndex;
    private readonly IFhirStore _fhirStore;
    private readonly ITransfer _transfer;
    private readonly ILocalhost _localhost;
    private readonly ISnapshotPaginationCalculator _snapshotPaginationCalculator;

    public SnapshotPaginationProvider(IFhirIndex fhirIndex, IFhirStore fhirStore, ITransfer transfer, ILocalhost localhost, ISnapshotPaginationCalculator snapshotPaginationCalculator)
    {
        _fhirIndex = fhirIndex;
        _fhirStore = fhirStore;
        _transfer = transfer;
        _localhost = localhost;
        _snapshotPaginationCalculator = snapshotPaginationCalculator;
    }

    public ISnapshotPagination StartPagination(Snapshot snapshot)
    {
        return new SnapshotPaginationService(_fhirIndex, _fhirStore, _transfer, _localhost, _snapshotPaginationCalculator, snapshot);
    }
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/ICompositeServiceListener.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Spark.Engine.Core;
using System.Threading.Tasks;

namespace Spark.Engine.Service;

/// <summary>A service listener that fans events out to a mutable set of listeners.</summary>
public interface ICompositeServiceListener : IServiceListener
{
    void Add(IServiceListener listener);
    void Clear();
    Task InformAsync(Entry interaction);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/IServiceListener.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2015-2018, Firely
 * Copyright (c) 2021-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Spark.Engine.Core;
using System;
using System.Threading.Tasks;

namespace Spark.Engine.Service;

public interface IServiceListener
{
    /// <summary>Notifies the listener of an interaction at the given absolute location.</summary>
    Task InformAsync(Uri location, Entry interaction);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/ITransfer.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2016-2018, Firely
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using Spark.Engine.Core;
using System.Collections.Generic;

namespace Spark.Engine.Service;

public interface ITransfer
{
    // NOTE(review): IEnumerable<Entry> reconstructed; the Mapper parameter's generic
    // arguments were lost in the flattened export (Mapper is declared generic in
    // KeyMapper.cs) — restore them from the original file before compiling.
    void Externalize(IEnumerable<Entry> interactions);
    void Externalize(Entry interaction);
    void Internalize(IEnumerable<Entry> interactions, Mapper mapper);
    void Internalize(Entry entry);
}
-------------------------------------------------------------------------------- /src/Spark.Engine/Service/KeyMapper.cs: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2014-2018, Firely
 * Copyright (c) 2017-2025, Incendi
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

using System;
using System.Collections.Generic;

/// <summary>
/// A simple key-to-value map used to remember how identifiers were remapped,
/// with a merge operation that rejects conflicting mappings.
/// </summary>
public class Mapper<TKey, TValue>
{
    private readonly Dictionary<TKey, TValue> _mapping = new Dictionary<TKey, TValue>();

    public Mapper() { }

    /// <summary>Removes all mappings.</summary>
    public void Clear()
    {
        _mapping.Clear();
    }

    /// <summary>Returns the value mapped to <paramref name="key"/>, or default when absent.</summary>
    public TValue TryGet(TKey key)
    {
        return _mapping.TryGetValue(key, out TValue value) ? value : default;
    }

    /// <summary>True when a mapping for <paramref name="key"/> exists.</summary>
    public bool Exists(TKey key)
    {
        // O(1) hash lookup; previously this iterated every pair (O(n)) calling Equals.
        return _mapping.ContainsKey(key);
    }

    /// <summary>Adds or overwrites the mapping for <paramref name="key"/>; returns <paramref name="value"/>.</summary>
    public TValue Remap(TKey key, TValue value)
    {
        // The indexer is add-or-update in one lookup; previously Exists + Add did two.
        _mapping[key] = value;
        return value;
    }

    /// <summary>
    /// Copies all mappings from <paramref name="mapper"/> into this one.
    /// Throws <see cref="InvalidOperationException"/> when both contain the same key
    /// mapped to different values.
    /// </summary>
    public void Merge(Mapper<TKey, TValue> mapper)
    {
        foreach (KeyValuePair<TKey, TValue> pair in mapper._mapping)
        {
            if (_mapping.TryGetValue(pair.Key, out TValue existing))
            {
                if (existing.Equals(pair.Value) == false)
                {
                    throw new InvalidOperationException("Incompatible mappings");
                }
            }
            else
            {
                _mapping.Add(pair.Key, pair.Value);
            }
        }
    }
}
/// <summary>
/// Composite listener: forwards each interaction, with its absolute location,
/// to every registered <see cref="IServiceListener"/>.
/// </summary>
public class ServiceListener : IServiceListener, ICompositeServiceListener
{
    private readonly ILocalhost _localhost;
    private readonly List<IServiceListener> _listeners;

    public ServiceListener(ILocalhost localhost, IServiceListener[] listeners = null)
    {
        _localhost = localhost;
        // BUG FIX: the list used to stay null when no listeners were supplied, so
        // Add/Clear/InformAsync threw NullReferenceException. Always initialize it.
        _listeners = listeners != null
            ? new List<IServiceListener>(listeners)
            : new List<IServiceListener>();
    }

    /// <summary>Registers an additional listener.</summary>
    public void Add(IServiceListener listener)
    {
        _listeners.Add(listener);
    }

    private static async Task InformAsync(IServiceListener listener, Uri location, Entry entry)
    {
        await listener.InformAsync(location, entry).ConfigureAwait(false);
    }

    /// <summary>Removes all registered listeners.</summary>
    public void Clear()
    {
        _listeners.Clear();
    }

    /// <summary>Notifies every listener, deriving the location from the interaction's key.</summary>
    public async Task InformAsync(Entry interaction)
    {
        // Hoisted out of the loop: the location does not depend on the listener.
        Uri location = _localhost.GetAbsoluteUri(interaction.Key);
        foreach (IServiceListener listener in _listeners)
        {
            await InformAsync(listener, location, interaction).ConfigureAwait(false);
        }
    }

    /// <summary>Notifies every listener with an explicitly supplied location.</summary>
    public async Task InformAsync(Uri location, Entry interaction)
    {
        foreach (var listener in _listeners)
        {
            await listener.InformAsync(location, interaction).ConfigureAwait(false);
        }
    }
}
/// <summary>
/// Maps between local ids/references and absolute ids/references on incoming or
/// outgoing interactions, delegating the actual work to Import (incoming) and
/// Export (outgoing).
/// </summary>
public class Transfer : ITransfer
{
    private readonly ILocalhost _localhost;
    private readonly IIdentityGenerator _generator;
    private readonly SparkSettings _settings;

    public Transfer(IIdentityGenerator generator, ILocalhost localhost, SparkSettings settings = null)
    {
        _generator = generator;
        _localhost = localhost;
        _settings = settings;
    }

    // One place to construct an Import with the shared dependencies.
    private Import CreateImport() => new Import(_localhost, _generator);

    // One place to construct an Export; falls back to default export settings.
    private Export CreateExport() => new Export(_localhost, _settings?.ExportSettings ?? new ExportSettings());

    /// <summary>Internalizes a single entry.</summary>
    public void Internalize(Entry entry)
    {
        var import = CreateImport();
        import.Add(entry);
        import.Internalize();
    }

    /// <summary>Internalizes a batch of entries, optionally seeded with existing key mappings.</summary>
    public void Internalize(IEnumerable<Entry> interactions, Mapper<string, IKey> mapper = null)
    {
        var import = CreateImport();
        if (mapper != null)
        {
            import.AddMappings(mapper);
        }
        import.Add(interactions);
        import.Internalize();
    }

    /// <summary>Externalizes a single interaction.</summary>
    public void Externalize(Entry interaction)
    {
        var export = CreateExport();
        export.Add(interaction);
        export.Externalize();
    }

    /// <summary>Externalizes a batch of interactions.</summary>
    public void Externalize(IEnumerable<Entry> interactions)
    {
        var export = CreateExport();
        export.Add(interactions);
        export.Externalize();
    }
}
/// <summary>
/// Top-level configuration for the Spark server: endpoint, serialization,
/// export/index/search settings, plus derived version information.
/// </summary>
public class SparkSettings
{
    public Uri Endpoint { get; set; }
    public bool UseAsynchronousIO { get; set; }
    public ParserSettings ParserSettings { get; set; }
    public SerializerSettings SerializerSettings { get; set; }
    public ExportSettings ExportSettings { get; set; }
    public IndexSettings IndexSettings { get; set; }
    public SearchSettings Search { get; set; }

    /// <summary>The FHIR specification release this build supports (from ModelInfo).</summary>
    public string FhirRelease => ModelInfo.Version;

    /// <summary>Product version (major.minor.build) read from the executing assembly.</summary>
    public string Version
    {
        get
        {
            var assembly = Assembly.GetExecutingAssembly();
            var info = FileVersionInfo.GetVersionInfo(assembly.Location);
            return $"{info.ProductMajorPart}.{info.ProductMinorPart}.{info.ProductBuildPart}";
        }
    }
}
/// <summary>
/// Holds a set of extensions, registering each one under every T-derived
/// interface it implements so it can be found by interface type later.
/// </summary>
public class ExtendableWith<T> : IExtendableWith<T>, IEnumerable<T>
{
    private readonly Dictionary<Type, T> _extensions;

    public ExtendableWith()
    {
        _extensions = new Dictionary<Type, T>();
    }

    /// <summary>Registers the extension under every T-derived interface it implements.</summary>
    public void AddExtension<TV>(TV extension) where TV : T
    {
        var assignableInterfaces = extension.GetType().GetInterfaces()
            .Where(candidate => typeof(T).IsAssignableFrom(candidate));
        foreach (var interfaceType in assignableInterfaces)
        {
            _extensions[interfaceType] = extension;
        }
    }

    /// <summary>Unregisters the extension stored under TV, if any.</summary>
    public void RemoveExtension<TV>() where TV : T
    {
        _extensions.Remove(typeof(TV));
    }

    /// <summary>Unregisters the extension stored under the given type, if any.</summary>
    public void RemoveExtension(Type type)
    {
        _extensions.Remove(type);
    }

    /// <summary>
    /// Finds the extension whose registered key is assignable to <paramref name="type"/>,
    /// or default when absent. Throws when more than one key matches.
    /// </summary>
    public T FindExtension(Type type)
    {
        var matchingKey = _extensions.Keys.SingleOrDefault(key => type.IsAssignableFrom(key));
        return matchingKey != null ? _extensions[matchingKey] : default;
    }

    /// <summary>Finds the extension registered under exactly TV, or default when absent.</summary>
    public TV FindExtension<TV>() where TV : T
    {
        return _extensions.TryGetValue(typeof(TV), out T found) ? (TV)found : default;
    }

    public IEnumerator<T> GetEnumerator() => _extensions.Values.GetEnumerator();

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
}
/// <summary>
/// Contract for a container that can be extended with objects implementing
/// T-derived interfaces. (Generic parameters restored; they were stripped
/// from this copy of the source.)
/// </summary>
public interface IExtendableWith<T>
{
    /// <summary>Registers an extension under the interfaces it implements.</summary>
    void AddExtension<TV>(TV extension) where TV : T;
    /// <summary>Removes the extension registered under TV.</summary>
    void RemoveExtension<TV>() where TV : T;
    /// <summary>Returns the extension registered under TV, or default.</summary>
    TV FindExtension<TV>() where TV : T;
}

/// <summary>Optional callback for extensions that want a reference to their host.</summary>
public interface IExtension<T>
{
    void OnExtensionAdded(T extensibleObject);
}
/// <summary>
/// Read access to resource version history at three scopes: per type, per
/// resource, and system-wide.
/// </summary>
public interface IHistoryStore
{
    /// <summary>History for all resources of the given type name.</summary>
    Task HistoryAsync(string typename, HistoryParameters parameters);
    /// <summary>History for the single resource identified by <paramref name="key"/>.</summary>
    Task HistoryAsync(IKey key, HistoryParameters parameters);
    /// <summary>System-wide history across all resources.</summary>
    Task HistoryAsync(HistoryParameters parameters);
}
/// <summary>
/// Lenient parsing of FHIR request parameter values into nullable primitives.
/// </summary>
public static class FhirParameterParser
{
    /// <summary>Parses a date parameter; returns null when the value cannot be parsed.</summary>
    public static DateTimeOffset? ParseDateParameter(string value)
    {
        if (DateTimeOffset.TryParse(value, out var parsed))
        {
            return parsed;
        }
        return null;
    }

    /// <summary>Parses an integer parameter; returns null when the value cannot be parsed.</summary>
    public static int? ParseIntParameter(string value)
    {
        if (int.TryParse(value, out var parsed))
        {
            return parsed;
        }
        return null;
    }
}
/// <summary>
/// Supplies BSON serializers for the handful of types the tests exercise;
/// returns null for anything unregistered so the driver falls back to defaults.
/// </summary>
internal class BsonSerializationProvider : IBsonSerializationProvider
{
    // Factories rather than instances so each request gets a fresh serializer.
    // Made readonly: the map is never reassigned after construction.
    private readonly IDictionary<Type, Func<IBsonSerializer>> _registeredBsonSerializers =
        new Dictionary<Type, Func<IBsonSerializer>>
        {
            { typeof(BsonNull), () => new BsonNullSerializer() },
            { typeof(string), () => new StringSerializer() },
            { typeof(BsonDocument), () => new BsonDocumentSerializer() },
            { typeof(BsonDateTime), () => new BsonDateTimeSerializer() },
        };

    /// <summary>Returns a serializer for <paramref name="type"/>, or null when none is registered.</summary>
    public IBsonSerializer GetSerializer(System.Type type)
    {
        // TryGetValue: one hash lookup instead of ContainsKey followed by the indexer.
        return _registeredBsonSerializers.TryGetValue(type, out var factory)
            ? factory.Invoke()
            : null;
    }
}
/// <summary>
/// Query parameters that control search behavior rather than match content.
/// </summary>
public static class MetaField
{
    public const string
        COUNT = "_count",
        INCLUDE = "_include",
        LIMIT = "_limit"; // Limit is not part of the FHIR standard

    // All meta parameter names, used to separate them from ordinary search criteria.
    public static string[] All = { COUNT, INCLUDE, LIMIT };
}
/*
    Translated from the original Dutch note: this class was separated from
    SearchParamDefinition so that Definition can exist independently of Spark;
    a converter takes its place. -mh
*/

/// <summary>
/// A search parameter definition decoupled from the FHIR model: which resource,
/// which parameter, how to query it, and how to interpret arguments.
/// </summary>
public class Definition
{
    public Argument Argument { get; set; }
    public string Resource { get; set; }
    public string ParamName { get; set; }
    public string Description { get; set; }
    public SearchParamType ParamType { get; set; }
    public ElementQuery Query { get; set; }

    /// <summary>Renders as "resource.paramname->query" for diagnostics.</summary>
    public override string ToString()
    {
        // The previous discarded "_ = Query.ToString();" call was dead code; removed.
        return $"{Resource.ToLower()}.{ParamName.ToLower()}->{Query.ToString()}";
    }
}

/// <summary>A mutable collection of Definition entries with replace-by-identity support.</summary>
public class Definitions
{
    // readonly: the list itself is never reassigned.
    private readonly List<Definition> _definitions = new List<Definition>();

    /// <summary>Adds a definition.</summary>
    public void Add(Definition definition)
    {
        _definitions.Add(definition);
    }

    /// <summary>Replaces any definition with the same Resource and ParamName.</summary>
    public void Replace(Definition definition)
    {
        _definitions.RemoveAll(d => (d.Resource == definition.Resource) && (d.ParamName == definition.ParamName));
        _definitions.Add(definition);
    }
}
/// <summary>
/// Builds Spark search Definitions from the FHIR model's search parameter metadata.
/// </summary>
public static class DefinitionsFactory
{
    /// <summary>Converts one FHIR search parameter definition into a Spark Definition.</summary>
    public static Definition CreateDefinition(ModelInfo.SearchParamDefinition paramdef)
    {
        return new Definition
        {
            Argument = ArgumentFactory.Create(paramdef.Type),
            Resource = paramdef.Resource,
            ParamName = paramdef.Name,
            Query = new ElementQuery(paramdef.Path),
            ParamType = paramdef.Type,
            Description = paramdef.Description?.Value
        };
    }

    /// <summary>Builds Definitions for every search parameter that has at least one path.</summary>
    public static Definitions Generate(IEnumerable<ModelInfo.SearchParamDefinition> searchparameters)
    {
        var definitions = new Definitions();

        foreach (var param in searchparameters)
        {
            // Any() instead of Count() > 0: short-circuits on the first element.
            if (param.Path != null && param.Path.Any())
            {
                definitions.Add(CreateDefinition(param));
            }
        }
        ManualCorrectDefinitions(definitions);
        return definitions;
    }

    private static void ManualCorrectDefinitions(Definitions items)
    {
        // These overrides are for those cases where the current meta-data does not help or is incorrect.
        items.Replace(new Definition() { Resource = "Patient", ParamName = "phonetic", Query = new ElementQuery("Patient.Name.Family", "Patient.Name.Given"), Argument = new FuzzyArgument() });
        items.Replace(new Definition() { Resource = "Practitioner", ParamName = "phonetic", Query = new ElementQuery("Practitioner.Name.Family", "Practitioner.Name.Given"), Argument = new FuzzyArgument() });
    }
}
/// <summary>
/// Helpers for building search index documents: Append stores nulls as BsonNull,
/// Write accumulates repeated writes to the same field into a BsonArray.
/// </summary>
public static class BsonDocumentExtensions
{
    /// <summary>Adds <paramref name="value"/> under <paramref name="name"/>, storing BsonNull for null.</summary>
    public static void Append(this BsonDocument document, string name, BsonValue value)
    {
        document.Add(name, value ?? BsonNull.Value);
    }

    /// <summary>
    /// Writes a value into the index document. Null values are ignored; field names
    /// starting with '_' are prefixed to avoid clashing with MongoDB meta fields;
    /// repeated writes to one field collapse into a BsonArray.
    /// </summary>
    public static void Write(this BsonDocument document, string field, BsonValue value)
    {
        if (value == null) return;

        if (field.StartsWith("_")) field = "PREFIX" + field;

        if (document.TryGetElement(field, out BsonElement element))
        {
            if (element.Value.BsonType == BsonType.Array)
            {
                element.Value.AsBsonArray.Add(value);
            }
            else
            {
                // Promote the existing scalar to an array holding both values.
                // value is known non-null after the guard above, so the former
                // "value ?? BsonNull.Value" coalescing was dead code.
                document.Remove(field);
                document.Append(field, new BsonArray() { element.Value, value });
            }
        }
        else
        {
            if (value.BsonType == BsonType.Document)
                document.Append(field, new BsonArray() { value });
            else
                document.Append(field, value);
        }
    }
}
/// <summary>
/// Caches one IMongoDatabase handle per connection url so the MongoClient
/// (and its connection pool) is reused across the application.
/// </summary>
public static class MongoDatabaseFactory
{
    // Guards lazy initialization and mutation of the cache: GetMongoDatabase may be
    // called concurrently at startup, and the previous check-then-add on an
    // unsynchronized Dictionary was a race condition.
    private static readonly object _lock = new object();
    private static Dictionary<string, IMongoDatabase> _instances;

    /// <summary>Returns the cached database for <paramref name="url"/>, creating it on first use.</summary>
    public static IMongoDatabase GetMongoDatabase(string url)
    {
        lock (_lock)
        {
            _instances ??= new Dictionary<string, IMongoDatabase>();

            // TryGetValue replaces the former Where(...).Count() scan + First lookup:
            // one O(1) probe instead of two O(n) passes.
            if (!_instances.TryGetValue(url, out var database))
            {
                database = CreateMongoDatabase(url);
                _instances.Add(url, database);
            }
            return database;
        }
    }

    private static IMongoDatabase CreateMongoDatabase(string url)
    {
        var mongoUrl = new MongoUrl(url);
        var client = new MongoClient(mongoUrl);
        return client.GetDatabase(mongoUrl.DatabaseName);
    }
}
/// <summary>
/// Produces a four-character Soundex phonetic code for a word, used for fuzzy
/// name matching. Vowels and unknown characters code to '0' and are stripped.
/// </summary>
public static class Soundex
{
    public static string For(string word)
    {
        const int codeLength = 4;

        // Uppercase and strip everything except word characters and whitespace.
        var cleaned = Regex.Replace(word == null ? string.Empty : word.ToUpper(), @"[^\w\s]", string.Empty);

        if (string.IsNullOrEmpty(cleaned))
            return string.Empty.PadRight(codeLength, '0');

        var code = new StringBuilder();
        code.Append(cleaned[0]);
        var lastAppendedAfterHOrW = false;

        for (var position = 1; position < cleaned.Length; position++)
        {
            var digit = DigitFor(cleaned[position]);

            // Skip the second letter when it codes the same as the first letter.
            if (position == 1 && digit == DigitFor(code[0]))
                continue;

            // Letters separated by H or W that code alike are treated as one.
            if (code.Length > 2 && lastAppendedAfterHOrW && digit == code[code.Length - 2])
                continue;

            // Adjacent letters with the same code collapse into one.
            if (code.Length > 0 && digit == code[code.Length - 1])
                continue;

            code.Append(digit);
            lastAppendedAfterHOrW = "HW".Contains(cleaned[position]);
        }

        return code
            .Replace("0", string.Empty).ToString()
            .PadRight(codeLength, '0')
            .Substring(0, codeLength);
    }

    // Classic Soundex letter groups; anything else (vowels, H, W, digits) is '0'.
    private static char DigitFor(char letter)
    {
        if ("BFPV".Contains(letter)) return '1';
        if ("CGJKQSXZ".Contains(letter)) return '2';
        if ("DT".Contains(letter)) return '3';
        if (letter == 'L') return '4';
        if ("MN".Contains(letter)) return '5';
        if (letter == 'R') return '6';

        return '0';
    }
}
public sealed class SparkMongoEventSource : EventSource 14 | { 15 | public class Keywords 16 | { 17 | public const EventKeywords Tracing = (EventKeywords)1; 18 | public const EventKeywords Unsupported = (EventKeywords)2; 19 | } 20 | 21 | private static readonly Lazy _instance = new Lazy(() => new SparkMongoEventSource()); 22 | 23 | private SparkMongoEventSource() { } 24 | 25 | public static SparkMongoEventSource Log { get { return _instance.Value; } } 26 | 27 | [Event(1, Message = "Method call: {0}", 28 | Level = EventLevel.Verbose, Keywords = Keywords.Tracing)] 29 | internal void ServiceMethodCalled(string methodName) 30 | { 31 | WriteEvent(1, methodName); 32 | } 33 | } -------------------------------------------------------------------------------- /src/Spark.Mongo/Store/Constants.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015-2018, Firely 3 | * Copyright (c) 2021-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | namespace Spark.Store.Mongo; 9 | 10 | public static class Collection 11 | { 12 | public const string RESOURCE = "resources"; 13 | public const string COUNTERS = "counters"; 14 | public const string SNAPSHOT = "snapshots"; 15 | } 16 | 17 | public static class Field 18 | { 19 | // The id field is an actual field in the resource, so this const can't be changed. 
20 | public const string RESOURCEID = "id"; // and it is a lowercase value 21 | public const string RESOURCETYPE = "resourceType"; 22 | public const string COUNTERVALUE = "last"; 23 | public const string CATEGORY = "category"; 24 | 25 | // Meta fields 26 | public const string PRIMARYKEY = "_id"; 27 | 28 | // The current key is TYPENAME/ID for example: Patient/1 29 | // This is to be able to batch supercede a bundle of different resource types 30 | public const string REFERENCE = "@REFERENCE"; 31 | 32 | public const string STATE = "@state"; 33 | public const string WHEN = "@when"; 34 | public const string METHOD = "@method"; // Present / Gone 35 | public const string TYPENAME = "@typename"; // Patient, Organization, etc. 36 | public const string VERSIONID = "@VersionId"; // The resource versionid is in Resource.Meta. This is a administrative copy 37 | 38 | internal const string TRANSACTION = "@transaction"; 39 | } 40 | 41 | public static class Value 42 | { 43 | public const string CURRENT = "current"; 44 | public const string SUPERCEDED = "superceded"; 45 | } -------------------------------------------------------------------------------- /src/Spark.Mongo/Store/GuidIdentityGenerator.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2023-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Hl7.Fhir.Model; 8 | using MongoDB.Bson; 9 | using MongoDB.Driver; 10 | using Spark.Engine.Interfaces; 11 | using Spark.Store.Mongo; 12 | using System; 13 | 14 | namespace Spark.Mongo.Store; 15 | 16 | public class GuidIdentityGenerator : IIdentityGenerator 17 | { 18 | private readonly IMongoDatabase _database; 19 | private readonly string _formatSpecifier; 20 | 21 | public GuidIdentityGenerator(string mongoUrl, string formatSpecifier = "D") 22 | { 23 | _database = MongoDatabaseFactory.GetMongoDatabase(mongoUrl); 24 | _formatSpecifier = formatSpecifier; 25 | } 26 | public string 
NextResourceId(Resource resource) 27 | { 28 | var id = Guid.NewGuid().ToString(_formatSpecifier); 29 | return id; 30 | } 31 | 32 | public string NextVersionId(string resourceIdentifier) => throw new NotImplementedException(); 33 | 34 | public string NextVersionId(string resourceType, string resourceIdentifier) 35 | { 36 | var name = resourceType + "_history_" + resourceIdentifier; 37 | var versionId = Next(name); 38 | return versionId; 39 | } 40 | 41 | private string Next(string name) 42 | { 43 | var query = Builders.Filter.Eq(Field.PRIMARYKEY, name); 44 | var update = Builders.Update.Inc(Field.COUNTERVALUE, 1); 45 | var options = new FindOneAndUpdateOptions 46 | { 47 | IsUpsert = true, 48 | ReturnDocument = ReturnDocument.After, 49 | Projection = Builders.Projection.Include(Field.COUNTERVALUE) 50 | }; 51 | 52 | var collection = _database.GetCollection(Collection.COUNTERS); 53 | var document = collection.FindOneAndUpdate(query, update, options); 54 | 55 | return document[Field.COUNTERVALUE].AsInt32.ToString(); 56 | } 57 | } -------------------------------------------------------------------------------- /src/Spark.Mongo/Store/MongoCollectionPageResult.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2020-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using System; 8 | using System.Collections.Generic; 9 | using System.Linq; 10 | using System.Threading.Tasks; 11 | using MongoDB.Bson; 12 | using MongoDB.Driver; 13 | using Spark.Engine.Store.Interfaces; 14 | using Spark.Store.Mongo; 15 | 16 | namespace Spark.Mongo.Store; 17 | 18 | internal class MongoCollectionPageResult : IPageResult 19 | { 20 | public long TotalRecords { get; } 21 | 22 | public long TotalPages => (long)Math.Ceiling(TotalRecords / (double)_pageSize); 23 | 24 | private readonly IMongoCollection _collection; 25 | private readonly FilterDefinition _filter; 26 | private readonly int _pageSize; 27 | private 
readonly Func _transformFunc; 28 | 29 | public MongoCollectionPageResult( 30 | IMongoCollection collection, 31 | FilterDefinition filter, 32 | int pageSize, 33 | long totalRecords, 34 | Func transformFunc) 35 | { 36 | _collection = collection; 37 | _filter = filter; 38 | _pageSize = pageSize; 39 | _transformFunc = transformFunc; 40 | TotalRecords = totalRecords; 41 | } 42 | 43 | public async Task IterateAllPagesAsync(Func, Task> callback) 44 | { 45 | if (callback == null) 46 | { 47 | throw new ArgumentNullException(nameof(callback)); 48 | } 49 | 50 | for (var offset = 0; offset < TotalRecords; offset += _pageSize) 51 | { 52 | var data = await _collection.Find(_filter) 53 | .Sort(Builders.Sort.Ascending(Field.PRIMARYKEY)) 54 | .Skip(offset) 55 | .Limit(_pageSize) 56 | .ToListAsync() 57 | .ConfigureAwait(false); 58 | 59 | await callback(data.Select(d => _transformFunc(d)).ToList()) 60 | .ConfigureAwait(false); 61 | } 62 | } 63 | } -------------------------------------------------------------------------------- /src/Spark.Mongo/Store/MongoFhirStorePagedReader.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2020-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using MongoDB.Bson; 8 | using MongoDB.Driver; 9 | using Spark.Engine.Core; 10 | using Spark.Engine.Store.Interfaces; 11 | using Spark.Store.Mongo; 12 | using System.Threading.Tasks; 13 | 14 | namespace Spark.Mongo.Store; 15 | 16 | public class MongoFhirStorePagedReader : IFhirStorePagedReader 17 | { 18 | private readonly IMongoCollection _collection; 19 | 20 | public MongoFhirStorePagedReader(string mongoUrl) 21 | { 22 | var database = MongoDatabaseFactory.GetMongoDatabase(mongoUrl); 23 | _collection = database.GetCollection(Collection.RESOURCE); 24 | } 25 | 26 | public async Task> ReadAsync(FhirStorePageReaderOptions options) 27 | { 28 | options = options ?? 
new FhirStorePageReaderOptions(); 29 | 30 | var filter = Builders.Filter.Eq(Field.STATE, Value.CURRENT); 31 | 32 | var totalRecords = await _collection.CountDocumentsAsync(filter) 33 | .ConfigureAwait(false); 34 | 35 | return new MongoCollectionPageResult(_collection, filter, 36 | options.PageSize, totalRecords, 37 | document => document.ToEntry()); 38 | } 39 | } -------------------------------------------------------------------------------- /src/Spark.Mongo/Store/MongoSnapshotStore.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Firely 3 | * Copyright (c) 2020-2025, Incendi 4 | * 5 | * SPDX-License-Identifier: BSD-3-Clause 6 | */ 7 | 8 | using System.Threading.Tasks; 9 | using MongoDB.Driver; 10 | using Spark.Engine.Core; 11 | using Spark.Engine.Store.Interfaces; 12 | using Spark.Store.Mongo; 13 | 14 | namespace Spark.Mongo.Store; 15 | 16 | public class MongoSnapshotStore : ISnapshotStore 17 | { 18 | private readonly IMongoDatabase _database; 19 | 20 | public MongoSnapshotStore(string mongoUrl) 21 | { 22 | _database = MongoDatabaseFactory.GetMongoDatabase(mongoUrl); 23 | } 24 | 25 | public async Task AddSnapshotAsync(Snapshot snapshot) 26 | { 27 | var collection = _database.GetCollection(Collection.SNAPSHOT); 28 | await collection.InsertOneAsync(snapshot).ConfigureAwait(false); 29 | } 30 | 31 | public async Task GetSnapshotAsync(string snapshotId) 32 | { 33 | var collection = _database.GetCollection(Collection.SNAPSHOT); 34 | return await collection.Find(s => s.Id == snapshotId) 35 | .FirstOrDefaultAsync(); 36 | } 37 | } -------------------------------------------------------------------------------- /src/Spark.Web/.gitignore: -------------------------------------------------------------------------------- 1 | database.db -------------------------------------------------------------------------------- /src/Spark.Web/ClientApp/js/main.js: 
-------------------------------------------------------------------------------- 1 | import 'bootstrap'; 2 | -------------------------------------------------------------------------------- /src/Spark.Web/Controllers/AdminController.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Authorization; 8 | using Microsoft.AspNetCore.Mvc; 9 | 10 | namespace Spark.Web.Controllers; 11 | 12 | [Authorize(Roles = "Admin")] 13 | public class AdminController : Controller 14 | { 15 | [HttpGet] 16 | public IActionResult Index() 17 | { 18 | return View(); 19 | } 20 | 21 | [HttpGet] 22 | public IActionResult Maintenance() 23 | { 24 | return View(); 25 | } 26 | } -------------------------------------------------------------------------------- /src/Spark.Web/Controllers/HomeController.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Mvc; 8 | 9 | namespace Spark.Web.Controllers; 10 | 11 | public class HomeController : Controller 12 | { 13 | public IActionResult Index() 14 | { 15 | return View(); 16 | } 17 | 18 | public IActionResult Privacy() 19 | { 20 | return View(); 21 | } 22 | } -------------------------------------------------------------------------------- /src/Spark.Web/Controllers/ResourcesController.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Mvc; 8 | 9 | namespace Spark.Web.Controllers; 10 | 11 | public class ResourcesController : Controller 12 | { 13 | public IActionResult Index() 14 | { 15 | return View(); 16 | } 17 | } 
-------------------------------------------------------------------------------- /src/Spark.Web/Data/ApplicationDbContext.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Identity; 8 | using Microsoft.AspNetCore.Identity.EntityFrameworkCore; 9 | using Microsoft.EntityFrameworkCore; 10 | 11 | namespace Spark.Web.Data; 12 | 13 | public class ApplicationDbContext : IdentityDbContext 14 | { 15 | public ApplicationDbContext(DbContextOptions options) 16 | : base(options) 17 | { 18 | } 19 | 20 | protected override void OnModelCreating(ModelBuilder modelBuilder) 21 | { 22 | base.OnModelCreating(modelBuilder); 23 | // Add stuff here: 24 | modelBuilder.Entity().HasData(new IdentityRole { Name = "Admin", NormalizedName = "Admin".ToUpper() }); 25 | } 26 | } -------------------------------------------------------------------------------- /src/Spark.Web/Data/ApplicationDbInitializer.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.Identity; 8 | using Microsoft.EntityFrameworkCore; 9 | using Microsoft.Extensions.Configuration; 10 | 11 | namespace Spark.Web.Data; 12 | 13 | public static class ApplicationDbInitializer 14 | { 15 | public static void SeedAdmin(ApplicationDbContext context, UserManager userManager, IConfiguration config) 16 | { 17 | context.Database.Migrate(); 18 | 19 | string admin_email = config.GetValue("Admin:Email"); 20 | string admin_password = config.GetValue("Admin:Password"); 21 | 22 | if (userManager.FindByEmailAsync(admin_email).Result == null) 23 | { 24 | IdentityUser user = new IdentityUser 25 | { 26 | UserName = admin_email, 27 | Email = admin_email 28 | }; 29 | 30 | IdentityResult result = 
userManager.CreateAsync(user, admin_password).Result; 31 | 32 | if (result.Succeeded) 33 | { 34 | userManager.AddToRoleAsync(user, "Admin").Wait(); 35 | } 36 | } 37 | } 38 | } -------------------------------------------------------------------------------- /src/Spark.Web/Examples/DSTU2/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark.Web/Examples/DSTU2/.gitkeep -------------------------------------------------------------------------------- /src/Spark.Web/Examples/R4/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark.Web/Examples/R4/.gitkeep -------------------------------------------------------------------------------- /src/Spark.Web/Examples/STU3/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark.Web/Examples/STU3/.gitkeep -------------------------------------------------------------------------------- /src/Spark.Web/Hubs/HubContextProgressNotifier.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2021-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using Microsoft.AspNetCore.SignalR; 8 | using Tasks = System.Threading.Tasks; 9 | using Microsoft.Extensions.Logging; 10 | using Spark.Engine.Service.FhirServiceExtensions; 11 | 12 | namespace Spark.Web.Hubs; 13 | 14 | /// 15 | /// SignalR hub is a short-living object while 16 | /// hub context lives longer and can be used for 17 | /// accessing Clients collection between requests. 
18 | /// 19 | internal class HubContextProgressNotifier : IIndexBuildProgressReporter 20 | { 21 | private readonly IHubContext _hubContext; 22 | private readonly ILogger _logger; 23 | 24 | private int _progress; 25 | 26 | public HubContextProgressNotifier( 27 | IHubContext hubContext, 28 | ILogger logger) 29 | { 30 | _hubContext = hubContext; 31 | _logger = logger; 32 | } 33 | 34 | public async Tasks.Task SendProgressUpdate(int progress, string message) 35 | { 36 | _logger.LogInformation($"[{progress}%] {message}"); 37 | 38 | _progress = progress; 39 | 40 | var msg = new ImportProgressMessage 41 | { 42 | Message = message, 43 | Progress = progress 44 | }; 45 | 46 | await _hubContext.Clients.All.SendAsync("UpdateProgress", msg); 47 | } 48 | 49 | public async Tasks.Task Progress(string message) 50 | { 51 | await SendProgressUpdate(_progress, message); 52 | } 53 | 54 | public async Tasks.Task ReportProgressAsync(int progress, string message) 55 | { 56 | await SendProgressUpdate(progress, message) 57 | .ConfigureAwait(false); 58 | } 59 | 60 | public async Tasks.Task ReportErrorAsync(string message) 61 | { 62 | await Progress(message) 63 | .ConfigureAwait(false); 64 | } 65 | } -------------------------------------------------------------------------------- /src/Spark.Web/Hubs/ImportProgressMessage.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2021-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | namespace Spark.Web.Hubs; 8 | 9 | internal class ImportProgressMessage 10 | { 11 | public int Progress; 12 | public string Message; 13 | } -------------------------------------------------------------------------------- /src/Spark.Web/Models/Config/ExamplesSettings.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | namespace 
Spark.Web.Models.Config; 8 | 9 | public class ExamplesSettings 10 | { 11 | public string FilePath { get; set; } 12 | } -------------------------------------------------------------------------------- /src/Spark.Web/Models/ErrorViewModel.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | namespace Spark.Web.Models; 8 | 9 | public class ErrorViewModel 10 | { 11 | public string RequestId { get; set; } 12 | 13 | public bool ShowRequestId => !string.IsNullOrEmpty(RequestId); 14 | } -------------------------------------------------------------------------------- /src/Spark.Web/Program.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using System; 8 | using System.IO; 9 | using Microsoft.AspNetCore; 10 | using Microsoft.AspNetCore.Hosting; 11 | using Microsoft.AspNetCore.Identity; 12 | using Microsoft.Extensions.Configuration; 13 | using Microsoft.Extensions.DependencyInjection; 14 | using Microsoft.Extensions.Logging; 15 | using Spark.Web.Data; 16 | 17 | namespace Spark.Web; 18 | 19 | public class Program 20 | { 21 | public static void Main(string[] args) 22 | { 23 | var host = CreateWebHostBuilder(args).Build(); 24 | var config = new ConfigurationBuilder() 25 | .SetBasePath(Directory.GetCurrentDirectory()) 26 | .AddJsonFile("appsettings.json", optional: false) 27 | .AddEnvironmentVariables() 28 | .AddUserSecrets() 29 | .Build(); 30 | 31 | using (var scope = host.Services.CreateScope()) 32 | { 33 | var services = scope.ServiceProvider; 34 | try 35 | { 36 | var context = services.GetRequiredService(); 37 | var userManager = services.GetRequiredService>(); 38 | ApplicationDbInitializer.SeedAdmin(context, userManager, config); 39 | } 40 | catch (Exception ex) 41 | { 42 | var logger = 
services.GetRequiredService>(); 43 | logger.LogError(ex, "An error occurred while seeding the database."); 44 | } 45 | } 46 | 47 | host.Run(); 48 | } 49 | 50 | public static IWebHostBuilder CreateWebHostBuilder(string[] args) => 51 | WebHost.CreateDefaultBuilder(args) 52 | .UseStartup() 53 | .ConfigureLogging(logging => 54 | { 55 | logging.AddConsole(); 56 | }); 57 | } -------------------------------------------------------------------------------- /src/Spark.Web/Properties/.gitignore: -------------------------------------------------------------------------------- 1 | serviceDependencies.json 2 | serviceDependencies.local.json -------------------------------------------------------------------------------- /src/Spark.Web/Properties/launchSettings.json: -------------------------------------------------------------------------------- 1 | { 2 | "iisSettings": { 3 | "windowsAuthentication": false, 4 | "anonymousAuthentication": true, 5 | "iisExpress": { 6 | "applicationUrl": "http://localhost:54460", 7 | "sslPort": 44305 8 | } 9 | }, 10 | "$schema": "http://json.schemastore.org/launchsettings.json", 11 | "profiles": { 12 | "IIS Express": { 13 | "commandName": "IISExpress", 14 | "launchBrowser": true, 15 | "environmentVariables": { 16 | "ASPNETCORE_ENVIRONMENT": "Development" 17 | } 18 | }, 19 | "Spark.Web": { 20 | "commandName": "Project", 21 | "launchBrowser": true, 22 | "launchUrl": "", 23 | "environmentVariables": { 24 | "ASPNETCORE_ENVIRONMENT": "Development" 25 | }, 26 | "applicationUrl": "https://localhost:5001;http://localhost:5000" 27 | } 28 | } 29 | } -------------------------------------------------------------------------------- /src/Spark.Web/README.md: -------------------------------------------------------------------------------- 1 | # Spark.Web 2 | 3 | This is the front-end for Spark FHIR server. 
4 | 5 | **DISCLAIMER: This is meant as a reference web server for local testing, and should never be used as is in a production environment.** 6 | 7 | ## Build front-end assets 8 | 9 | All source files for frontend are found in the `client` folder. To update front end assets cd into the folder, run `npm install` and `npm run build`. 10 | 11 | 12 | ## Admin area 13 | 14 | When running `Spark.Web` the solution will check if any admin user exists. If none exists, it will create an admin user with credentials read from `appsettings.json`. It is strongly recommended to change this password. The default credentials are: 15 | 16 | ``` 17 | Username: admin@email.com 18 | Password: Str0ngPa$$word 19 | ``` 20 | 21 | 22 | ## Load examples 23 | 24 | Visit `localhost:5555/admin/maintenance` to load sample data. -------------------------------------------------------------------------------- /src/Spark.Web/Services/ResourceStat.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2021-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | namespace Spark.Web.Services; 8 | 9 | public partial class ServerMetadata 10 | { 11 | public class ResourceStat 12 | { 13 | public string ResourceName { get; set; } 14 | public long Count { get; set; } 15 | } 16 | } -------------------------------------------------------------------------------- /src/Spark.Web/Services/ServerMetadata.cs: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2019-2025, Incendi 3 | * 4 | * SPDX-License-Identifier: BSD-3-Clause 5 | */ 6 | 7 | using System.Collections.Generic; 8 | using System.Threading.Tasks; 9 | using Hl7.Fhir.Model; 10 | using Hl7.Fhir.Rest; 11 | using Spark.Engine.Service.FhirServiceExtensions; 12 | 13 | namespace Spark.Web.Services; 14 | 15 | public partial class ServerMetadata 16 | { 17 | private readonly ISearchService _searchService; 18 | 19 | public
ServerMetadata(ISearchService searchService) 20 | { 21 | _searchService = searchService; 22 | } 23 | 24 | public async Task> GetResourceStatsAsync() 25 | { 26 | var stats = new List(); 27 | List names = ModelInfo.SupportedResources; 28 | 29 | foreach (string name in names) 30 | { 31 | var search = await _searchService.GetSnapshotAsync(name, new SearchParams { Summary = SummaryType.Count }); 32 | stats.Add(new ResourceStat() { ResourceName = name, Count = search.Count }); 33 | } 34 | 35 | return stats; 36 | } 37 | } -------------------------------------------------------------------------------- /src/Spark.Web/Spark.Web.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | net9.0 5 | AspNetCoreModuleV2 6 | a4d3c2a3-5edd-47d1-8407-62489d5568c5 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /src/Spark.Web/Views/Admin/Index.cshtml: -------------------------------------------------------------------------------- 1 | @{ 2 | ViewData["Title"] = "Spark Admin"; 3 | } 4 | 5 |
6 |

Admin

7 |

8 | Database maintenance 9 |

10 |
-------------------------------------------------------------------------------- /src/Spark.Web/Views/Home/Index.cshtml: -------------------------------------------------------------------------------- 1 | @inject Spark.Engine.SparkSettings Settings 2 | 3 |

Spark Fhir Server

4 | 5 |

Endpoint: @Settings.Endpoint 6 |

7 | -------------------------------------------------------------------------------- /src/Spark.Web/Views/Home/Privacy.cshtml: -------------------------------------------------------------------------------- 1 | @{ 2 | ViewData["Title"] = "Privacy Policy"; 3 | } 4 | 5 |
6 |

@ViewData["Title"]

7 |

This is a demo server. Don´t ever put real things into it.

8 |
9 | -------------------------------------------------------------------------------- /src/Spark.Web/Views/Resources/Index.cshtml: -------------------------------------------------------------------------------- 1 | @inject Spark.Web.Services.ServerMetadata ServerMeta 2 | 3 |

Resources

4 | 5 |
    6 | @foreach (var item in await ServerMeta.GetResourceStatsAsync()) 7 | { 8 |
  • 9 | @item.ResourceName 10 | @item.Count 11 |
  • 12 | } 13 |
14 | -------------------------------------------------------------------------------- /src/Spark.Web/Views/Shared/Error.cshtml: -------------------------------------------------------------------------------- 1 | @model Spark.Web.Models.ErrorViewModel 2 | 3 | @{ 4 | ViewData["Title"] = "Error"; 5 | } 6 | 7 |

Error.

8 |

An error occurred while processing your request.

9 | 10 | @if (Model.ShowRequestId) 11 | { 12 |

13 | Request ID: @Model.RequestId 14 |

15 | } 16 | 17 |

Development Mode

18 |

19 | Swapping to Development environment will display more detailed information about the error that occurred. 20 |

21 |

22 | The Development environment shouldn't be enabled for deployed applications. 23 | It can result in displaying sensitive information from exceptions to end users. 24 | For local debugging, enable the Development environment by setting the ASPNETCORE_ENVIRONMENT environment variable to Development 25 | and restarting the app. 26 |

27 | -------------------------------------------------------------------------------- /src/Spark.Web/Views/Shared/_AdminNavbar.cshtml: -------------------------------------------------------------------------------- 1 | @using Microsoft.AspNetCore.Identity 2 | @inject SignInManager SignInManager 3 | 4 | @if (SignInManager.IsSignedIn(User)) 5 | { 6 | // is the current user authorized to see the next part of the page? 7 | if (User.IsInRole("Admin") || User.IsInRole("SuperAdmin") ) 8 | { 9 | Admin 10 | } 11 | } -------------------------------------------------------------------------------- /src/Spark.Web/Views/Shared/_CookieConsentPartial.cshtml: -------------------------------------------------------------------------------- 1 | @using Microsoft.AspNetCore.Http.Features 2 | 3 | @{ 4 | var consentFeature = Context.Features.Get(); 5 | var showBanner = !consentFeature?.CanTrack ?? false; 6 | var cookieString = consentFeature?.CreateConsentCookie(); 7 | } 8 | 9 | @if (showBanner) 10 | { 11 | 17 | 25 | } 26 | -------------------------------------------------------------------------------- /src/Spark.Web/Views/Shared/_LoginPartial.cshtml: -------------------------------------------------------------------------------- 1 | @using Microsoft.AspNetCore.Identity 2 | @inject SignInManager SignInManager 3 | @inject UserManager UserManager 4 | 5 | 27 | -------------------------------------------------------------------------------- /src/Spark.Web/Views/Shared/_ValidationScriptsPartial.cshtml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 12 | 18 | 19 | -------------------------------------------------------------------------------- /src/Spark.Web/Views/Shared/_ViewImports.cshtml: -------------------------------------------------------------------------------- 1 | @using Spark.Web 2 | @using Spark.Web.Models 3 | @addTagHelper *, Microsoft.AspNetCore.Mvc.TagHelpers 4 | @inject Spark.Engine.SparkSettings Settings; 
-------------------------------------------------------------------------------- /src/Spark.Web/Views/_ViewStart.cshtml: -------------------------------------------------------------------------------- 1 | @{ 2 | Layout = "_Layout"; 3 | } 4 | -------------------------------------------------------------------------------- /src/Spark.Web/appsettings.Development.json: -------------------------------------------------------------------------------- 1 | { 2 | "StoreSettings": { 3 | "ConnectionString": "mongodb://localhost/spark" 4 | }, 5 | "ExamplesSettings": { 6 | "FilePath": "Examples/R4/examples.zip" 7 | }, 8 | "Logging": { 9 | "LogLevel": { 10 | "Default": "Debug", 11 | "System": "Information", 12 | "Microsoft": "Information", 13 | "Microsoft.AspNetCore.SignalR": "Debug", 14 | "Microsoft.AspNetCore.Http.Connections": "Debug" 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/Spark.Web/appsettings.json: -------------------------------------------------------------------------------- 1 | { 2 | "ConnectionStrings": { 3 | "DefaultConnection": "Data Source=database.db" 4 | }, 5 | "StoreSettings": { 6 | "ConnectionString": "mongodb://localhost:27017/spark" 7 | }, 8 | "SparkSettings": { 9 | "Endpoint": "https://localhost:5001/fhir" 10 | }, 11 | "Admin": { 12 | "Email": "admin@email.com", 13 | "Password": "Str0ngPa$$word" 14 | }, 15 | "ExamplesSettings": { 16 | "FilePath": "Examples/R4/examples.zip" 17 | }, 18 | "Logging": { 19 | "LogLevel": { 20 | "Default": "Warning" 21 | } 22 | }, 23 | "AllowedHosts": "*" 24 | } -------------------------------------------------------------------------------- /src/Spark.Web/libman.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0", 3 | "defaultProvider": "cdnjs", 4 | "libraries": [ 5 | { 6 | "provider": "unpkg", 7 | "library": "@microsoft/signalr@latest", 8 | "destination": "wwwroot/assets/js/microsoft/signalr/", 9 
| "files": [ 10 | "dist/browser/signalr.min.js" 11 | ] 12 | } 13 | ] 14 | } -------------------------------------------------------------------------------- /src/Spark.Web/wwwroot/assets/images/150.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirelyTeam/spark/0b93a6e52381ffef564860231a79927cbdde868a/src/Spark.Web/wwwroot/assets/images/150.png -------------------------------------------------------------------------------- /tests/integration-tests/.gitignore: -------------------------------------------------------------------------------- 1 | json_results 2 | annotations.json -------------------------------------------------------------------------------- /tests/integration-tests/combine-test-results.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | JSON_PATH=$1 6 | ANNOTATIONS_FILE=$2 7 | 8 | function usage() { 9 | me=`basename "$0"` 10 | if [ ! -z "$1" ]; then 11 | echo $1 12 | fi 13 | echo "Usage: ./${me} path/to/input/json_results path/to/output/annotations.json" 14 | exit 1 15 | } 16 | 17 | [ $# -eq 2 ] || usage 18 | 19 | [ ! -z "${JSON_PATH}" ] || usage 20 | 21 | [ -d "${JSON_PATH}" ] || usage "${JSON_PATH} must be a directory" 22 | 23 | touch $ANNOTATIONS_FILE || "$ANNOTATIONS_FILE is not writable" 24 | 25 | DIR=$(pwd) 26 | 27 | cd ${JSON_PATH} 28 | 29 | # Output summary to stdout 30 | 31 | ls _summary*.json | xargs jq -r '. | "PASS: \(.pass // 0) 32 | FAIL: \(.fail // 0) 33 | ERROR: \(.error // 0) 34 | SKIP: \(.skip // 0)" 35 | ' 36 | 37 | # Have to use warning annotation level, notice isn't working anymore (but could be in future). 38 | 39 | SUMMARY=$(ls _summary*.json | xargs jq '[ . 
40 | | { "file": ".github/workflow/integration_tests.yml", "line": 1, "message": ("PASS: \(.pass // 0)\nFAIL: \(.fail // 0)\nERROR: \(.error // 0)\nSKIP: \(.skip // 0)"), "annotation_level": "warning" } 41 | ]') 42 | 43 | FAILURES=$(ls -I '_summary*.json' | xargs -I '{}' jq '[ .[] 44 | | select(((.status == "skip") and (.message | contains("TODO") | not)) 45 | or .status == "fail" 46 | or .status == "error") 47 | | .id as $id 48 | | .status as $status 49 | | .message as $message 50 | | .description as $description 51 | | .test_method as $method 52 | | input_filename as $file 53 | | { "file": ".github/workflow/integration_tests.yml", "line": 1, "message": ($id + ": " + $message), "annotation_level": "failure" } 54 | ]' '{}') 55 | 56 | cd $DIR 57 | 58 | jq 'reduce inputs as $i (.; . += $i)' <(echo "${SUMMARY}") <(echo "${FAILURES}") > ${ANNOTATIONS_FILE} 59 | -------------------------------------------------------------------------------- /tests/integration-tests/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | spark: 4 | container_name: spark 5 | restart: always 6 | image: sparkfhir/spark:r4-latest 7 | environment: 8 | - StoreSettings__ConnectionString=mongodb://root:secret@mongodb:27017/spark?authSource=admin 9 | - SparkSettings__Endpoint=http://spark:8080/fhir 10 | - ASPNETCORE_URLS=http://+:8080 11 | - ASPNETCORE_HTTP_PORT=8080 12 | ports: 13 | - "8000:8080" 14 | - "8001:8081" 15 | depends_on: 16 | - mongodb 17 | mongodb: 18 | container_name: mongodb 19 | image: sparkfhir/mongo:r4-latest 20 | environment: 21 | MONGO_INITDB_ROOT_USERNAME: root 22 | MONGO_INITDB_ROOT_PASSWORD: secret 23 | ports: 24 | - "17017:27017" 25 | plan_executor: 26 | container_name: plan_executor 27 | image: incendi/plan_executor:latest 28 | depends_on: 29 | - spark 30 | volumes: 31 | - ./logs:/app/logs:rw 32 | - ./html_summaries:/app/html_summaries:rw 33 | - ./json_results:/app/json_results:rw 34 | 
-------------------------------------------------------------------------------- /tests/integration-tests/readme.md: -------------------------------------------------------------------------------- 1 | # Spark FHIR Integration Tests 2 | 3 | ## Setup 4 | 5 | 1. Build the latest Spark Docker image. 6 | 2. Run Spark service: 7 | 8 | ``` 9 | mkdir -p logs html_summaries 10 | docker-compose up -d spark 11 | ``` 12 | 13 | ## Running integration tests 14 | 15 | ### Listing all available tests 16 | 17 | ``` 18 | docker-compose run --rm --no-deps plan_executor ./list_all.sh r4 19 | ``` 20 | 21 | ### Running particular test 22 | 23 | ``` 24 | docker-compose run --rm --no-deps plan_executor ./execute_test.sh http://spark.url/fhir r4 FormatTest 25 | ``` 26 | 27 | ### Running all tests 28 | 29 | ``` 30 | docker-compose run --rm --no-deps plan_executor ./execute_all.sh http://spark.url/fhir r4 31 | ``` 32 | 33 | ## Test results 34 | 35 | Test results are stored in HTML format in `html_summaries` directory. 36 | Each test result is stored in a separate subdir. 37 | 38 | ## Test logs 39 | 40 | Test logs can be found in `logs` directory. 41 | 42 | --------------------------------------------------------------------------------