├── .codecov.yml
├── .github
├── CODEOWNERS
├── PULL_REQUEST_TEMPLATE.md
└── workflows
│ ├── CI.yml
│ ├── add-backport-label.yml.disabled
│ ├── add-untriaged.yml
│ ├── backport.yml
│ ├── changelog_verifier.yml
│ ├── publish-snapshots.yml
│ ├── test-api-consistency.yml
│ ├── test_bwc.yml
│ ├── test_security.yml
│ └── wrapper.yml
├── .gitignore
├── .whitesource
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEVELOPER_GUIDE.md
├── LICENSE
├── MAINTAINERS.md
├── NOTICE
├── README.md
├── SECURITY.md
├── build.gradle
├── config
└── checkstyle
│ ├── checkstyle.xml
│ └── checkstyle_suppressions.xml
├── formatter
├── formatterConfig.xml
├── formatting.gradle
└── license-header.txt
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── release-notes
├── opensearch-flow-framework.release-notes-2.12.0.0.md
├── opensearch-flow-framework.release-notes-2.13.0.0.md
├── opensearch-flow-framework.release-notes-2.14.0.0.md
├── opensearch-flow-framework.release-notes-2.15.0.0.md
├── opensearch-flow-framework.release-notes-2.16.0.0.md
├── opensearch-flow-framework.release-notes-2.17.0.0.md
├── opensearch-flow-framework.release-notes-2.17.1.0.md
├── opensearch-flow-framework.release-notes-2.18.0.0.md
├── opensearch-flow-framework.release-notes-2.19.0.0.md
├── opensearch-flow-framework.release-notes-2.19.2.0.md
├── opensearch-flow-framework.release-notes-3.0.0.0-alpha1.md
├── opensearch-flow-framework.release-notes-3.0.0.0-beta1.md
└── opensearch-flow-framework.release-notes-3.0.0.0.md
├── sample-templates
├── README.md
├── alert-summary-agent-claude-tested.json
├── alert-summary-agent-claude-tested.yml
├── alert-summary-log-pattern-agent.json
├── alert-summary-log-pattern-agent.yml
├── anomaly-detector-suggestion-agent-claude.json
├── anomaly-detector-suggestion-agent-claude.yml
├── create-knowledge-base-alert-agent.json
├── create-knowledge-base-alert-agent.yml
├── deploy-bedrock-claude-model.json
├── deploy-bedrock-claude-model.yml
├── deploy-openai-model.json
├── deploy-openai-model.yml
├── deploy-sagemaker-mistral-model.json
├── deploy-sagemaker-mistral-model.yml
├── observability-chat-agent-openai-untested.json
├── observability-chat-agent-openai-untested.yml
├── query-assist-agent-claude-tested.json
├── query-assist-agent-claude-tested.yml
├── query-assist-data-summary-agent-claude-tested.json
├── query-assist-data-summary-agent-claude-tested.yml
├── query-assist-data-summary-with-log-pattern-agent-claude-tested.json
├── query-assist-data-summary-with-log-pattern-agent-claude-tested.yml
├── text-to-visualization-claude.json
└── text-to-visualization-claude.yml
├── settings.gradle
└── src
├── main
├── java
│ └── org
│ │ └── opensearch
│ │ └── flowframework
│ │ ├── FlowFrameworkPlugin.java
│ │ ├── common
│ │ ├── CommonValue.java
│ │ ├── DefaultUseCases.java
│ │ ├── FlowFrameworkSettings.java
│ │ ├── ThrowingSupplier.java
│ │ ├── ThrowingSupplierWrapper.java
│ │ └── WorkflowResources.java
│ │ ├── exception
│ │ ├── ApiSpecParseException.java
│ │ ├── FlowFrameworkException.java
│ │ └── WorkflowStepException.java
│ │ ├── indices
│ │ ├── FlowFrameworkIndex.java
│ │ └── FlowFrameworkIndicesHandler.java
│ │ ├── model
│ │ ├── Config.java
│ │ ├── PipelineProcessor.java
│ │ ├── ProvisioningProgress.java
│ │ ├── ResourceCreated.java
│ │ ├── State.java
│ │ ├── Template.java
│ │ ├── Workflow.java
│ │ ├── WorkflowEdge.java
│ │ ├── WorkflowNode.java
│ │ ├── WorkflowState.java
│ │ ├── WorkflowStepValidator.java
│ │ └── WorkflowValidator.java
│ │ ├── rest
│ │ ├── AbstractSearchWorkflowAction.java
│ │ ├── RestCreateWorkflowAction.java
│ │ ├── RestDeleteWorkflowAction.java
│ │ ├── RestDeprovisionWorkflowAction.java
│ │ ├── RestGetWorkflowAction.java
│ │ ├── RestGetWorkflowStateAction.java
│ │ ├── RestGetWorkflowStepAction.java
│ │ ├── RestProvisionWorkflowAction.java
│ │ ├── RestSearchWorkflowAction.java
│ │ └── RestSearchWorkflowStateAction.java
│ │ ├── transport
│ │ ├── CreateWorkflowAction.java
│ │ ├── CreateWorkflowTransportAction.java
│ │ ├── DeleteWorkflowAction.java
│ │ ├── DeleteWorkflowTransportAction.java
│ │ ├── DeprovisionWorkflowAction.java
│ │ ├── DeprovisionWorkflowTransportAction.java
│ │ ├── GetWorkflowAction.java
│ │ ├── GetWorkflowResponse.java
│ │ ├── GetWorkflowStateAction.java
│ │ ├── GetWorkflowStateRequest.java
│ │ ├── GetWorkflowStateResponse.java
│ │ ├── GetWorkflowStateTransportAction.java
│ │ ├── GetWorkflowStepAction.java
│ │ ├── GetWorkflowStepResponse.java
│ │ ├── GetWorkflowStepTransportAction.java
│ │ ├── GetWorkflowTransportAction.java
│ │ ├── ProvisionWorkflowAction.java
│ │ ├── ProvisionWorkflowTransportAction.java
│ │ ├── ReprovisionWorkflowAction.java
│ │ ├── ReprovisionWorkflowRequest.java
│ │ ├── ReprovisionWorkflowTransportAction.java
│ │ ├── SearchWorkflowAction.java
│ │ ├── SearchWorkflowStateAction.java
│ │ ├── SearchWorkflowStateTransportAction.java
│ │ ├── SearchWorkflowTransportAction.java
│ │ ├── WorkflowRequest.java
│ │ ├── WorkflowResponse.java
│ │ └── handler
│ │ │ └── SearchHandler.java
│ │ ├── util
│ │ ├── ApiSpecFetcher.java
│ │ ├── EncryptorUtils.java
│ │ ├── ParseUtils.java
│ │ ├── RestHandlerUtils.java
│ │ ├── TenantAwareHelper.java
│ │ └── WorkflowTimeoutUtility.java
│ │ └── workflow
│ │ ├── AbstractCreatePipelineStep.java
│ │ ├── AbstractRegisterLocalModelStep.java
│ │ ├── AbstractRetryableWorkflowStep.java
│ │ ├── AbstractUpdatePipelineStep.java
│ │ ├── CreateConnectorStep.java
│ │ ├── CreateIndexStep.java
│ │ ├── CreateIngestPipelineStep.java
│ │ ├── CreateSearchPipelineStep.java
│ │ ├── DeleteAgentStep.java
│ │ ├── DeleteConnectorStep.java
│ │ ├── DeleteIndexStep.java
│ │ ├── DeleteIngestPipelineStep.java
│ │ ├── DeleteModelStep.java
│ │ ├── DeleteSearchPipelineStep.java
│ │ ├── DeployModelStep.java
│ │ ├── NoOpStep.java
│ │ ├── ProcessNode.java
│ │ ├── RegisterAgentStep.java
│ │ ├── RegisterLocalCustomModelStep.java
│ │ ├── RegisterLocalPretrainedModelStep.java
│ │ ├── RegisterLocalSparseEncodingModelStep.java
│ │ ├── RegisterModelGroupStep.java
│ │ ├── RegisterRemoteModelStep.java
│ │ ├── ReindexStep.java
│ │ ├── ToolStep.java
│ │ ├── UndeployModelStep.java
│ │ ├── UpdateIndexStep.java
│ │ ├── UpdateIngestPipelineStep.java
│ │ ├── UpdateSearchPipelineStep.java
│ │ ├── WorkflowData.java
│ │ ├── WorkflowDataStep.java
│ │ ├── WorkflowProcessSorter.java
│ │ ├── WorkflowStep.java
│ │ └── WorkflowStepFactory.java
├── plugin-metadata
│ └── plugin-security.policy
└── resources
│ ├── defaults
│ ├── bedrock-titan-embedding-defaults.json
│ ├── bedrock-titan-multimodal-defaults.json
│ ├── cohere-chat-defaults.json
│ ├── cohere-embedding-defaults.json
│ ├── cohere-embedding-semantic-search-defaults.json
│ ├── cohere-embedding-semantic-search-with-query-enricher-defaults.json
│ ├── conversational-search-defaults.json
│ ├── conversational-search-rag-tool-defaults.json
│ ├── hybrid-search-defaults.json
│ ├── hybrid-search-with-local-model-defaults.json
│ ├── local-sparse-search-biencoder-defaults.json
│ ├── multi-modal-search-defaults.json
│ ├── multimodal-search-bedrock-titan-defaults.json
│ ├── openai-chat-defaults.json
│ ├── openai-embedding-defaults.json
│ ├── semantic-search-defaults.json
│ ├── semantic-search-query-enricher-defaults.json
│ ├── semantic-search-with-local-model-defaults.json
│ └── semantic-search-with-reindex-defaults.json
│ ├── log4j2.xml
│ ├── mappings
│ ├── config.json
│ ├── global-context.json
│ └── workflow-state.json
│ └── substitutionTemplates
│ ├── conversational-search-with-bedrock-rag-tool-template.json
│ ├── conversational-search-with-cohere-model-template.json
│ ├── deploy-remote-bedrock-model-template.json
│ ├── deploy-remote-model-chat-template.json
│ ├── deploy-remote-model-extra-params-template.json
│ ├── deploy-remote-model-template.json
│ ├── hybrid-search-template.json
│ ├── hybrid-search-with-local-model-template.json
│ ├── multi-modal-search-template.json
│ ├── multi-modal-search-with-bedrock-titan-template.json
│ ├── neural-sparse-local-biencoder-template.json
│ ├── semantic-search-template.json
│ ├── semantic-search-with-local-model-template.json
│ ├── semantic-search-with-model-and-query-enricher-template.json
│ ├── semantic-search-with-model-template.json
│ ├── semantic-search-with-query-enricher-template.json
│ └── semantic-search-with-reindex-template.json
├── test
├── java
│ └── org
│ │ └── opensearch
│ │ └── flowframework
│ │ ├── FlowFrameworkPluginIT.java
│ │ ├── FlowFrameworkPluginTests.java
│ │ ├── FlowFrameworkRestTestCase.java
│ │ ├── FlowFrameworkTenantAwareRestTestCase.java
│ │ ├── TestHelpers.java
│ │ ├── bwc
│ │ └── FlowFrameworkBackwardsCompatibilityIT.java
│ │ ├── common
│ │ ├── DefaultUseCasesTests.java
│ │ └── FlowFrameworkSettingsTests.java
│ │ ├── exception
│ │ ├── ApiSpecParseExceptionTests.java
│ │ └── FlowFrameworkExceptionTests.java
│ │ ├── indices
│ │ └── FlowFrameworkIndicesHandlerTests.java
│ │ ├── model
│ │ ├── ConfigTests.java
│ │ ├── PipelineProcessorTests.java
│ │ ├── ResourceCreatedTests.java
│ │ ├── TemplateTestJsonUtil.java
│ │ ├── TemplateTests.java
│ │ ├── WorkflowEdgeTests.java
│ │ ├── WorkflowNodeTests.java
│ │ ├── WorkflowStateTests.java
│ │ ├── WorkflowStepValidatorTests.java
│ │ ├── WorkflowTests.java
│ │ └── WorkflowValidatorTests.java
│ │ ├── rest
│ │ ├── FlowFrameworkRestApiIT.java
│ │ ├── FlowFrameworkSecureRestApiIT.java
│ │ ├── RestCreateWorkflowActionTests.java
│ │ ├── RestDeleteWorkflowActionTests.java
│ │ ├── RestDeprovisionWorkflowActionTests.java
│ │ ├── RestGetWorkflowActionTests.java
│ │ ├── RestGetWorkflowStateActionTests.java
│ │ ├── RestGetWorkflowStepActionTests.java
│ │ ├── RestProvisionWorkflowActionTests.java
│ │ ├── RestSearchWorkflowActionTests.java
│ │ ├── RestSearchWorkflowStateActionTests.java
│ │ ├── RestWorkflowProvisionTenantAwareIT.java
│ │ ├── RestWorkflowStateTenantAwareIT.java
│ │ └── RestWorkflowTenantAwareIT.java
│ │ ├── transport
│ │ ├── CreateWorkflowTransportActionTests.java
│ │ ├── DeleteWorkflowTransportActionTests.java
│ │ ├── DeprovisionWorkflowTransportActionTests.java
│ │ ├── GetWorkflowStateTransportActionTests.java
│ │ ├── GetWorkflowStepTransportActionTests.java
│ │ ├── GetWorkflowTransportActionTests.java
│ │ ├── ProvisionWorkflowTransportActionTests.java
│ │ ├── ReprovisionWorkflowRequestTests.java
│ │ ├── ReprovisionWorkflowTransportActionTests.java
│ │ ├── SearchWorkflowStateTransportActionTests.java
│ │ ├── SearchWorkflowTransportActionTests.java
│ │ ├── WorkflowRequestResponseTests.java
│ │ └── handler
│ │ │ └── SearchHandlerTests.java
│ │ ├── util
│ │ ├── ApiSpecFetcherTests.java
│ │ ├── EncryptorUtilsTests.java
│ │ ├── ParseUtilsTests.java
│ │ ├── RestHandlerUtilsTests.java
│ │ ├── TenantAwareHelperTests.java
│ │ └── WorkflowTimeoutUtilityTests.java
│ │ └── workflow
│ │ ├── CreateConnectorStepTests.java
│ │ ├── CreateIndexStepTests.java
│ │ ├── CreateIngestPipelineStepTests.java
│ │ ├── CreateSearchPipelineStepTests.java
│ │ ├── DeleteAgentStepTests.java
│ │ ├── DeleteConnectorStepTests.java
│ │ ├── DeleteIndexStepTests.java
│ │ ├── DeleteIngestPipelineStepTests.java
│ │ ├── DeleteModelStepTests.java
│ │ ├── DeleteSearchPipelineStepTests.java
│ │ ├── DeployModelStepTests.java
│ │ ├── NoOpStepTests.java
│ │ ├── ProcessNodeTests.java
│ │ ├── RegisterAgentTests.java
│ │ ├── RegisterLocalCustomModelStepTests.java
│ │ ├── RegisterLocalPretrainedModelStepTests.java
│ │ ├── RegisterLocalSparseEncodingModelStepTests.java
│ │ ├── RegisterModelGroupStepTests.java
│ │ ├── RegisterRemoteModelStepTests.java
│ │ ├── ReindexStepTests.java
│ │ ├── ToolStepTests.java
│ │ ├── UndeployModelStepTests.java
│ │ ├── UpdateIndexStepTests.java
│ │ ├── UpdateIngestPipelineStepTests.java
│ │ ├── UpdateSearchPipelineStepTests.java
│ │ ├── WorkflowDataStepTests.java
│ │ ├── WorkflowDataTests.java
│ │ └── WorkflowProcessSorterTests.java
└── resources
│ └── template
│ ├── agent-framework.json
│ ├── createconnector-createconnectortool-createflowagent.json
│ ├── createconnector-registerremotemodel-deploymodel.json
│ ├── finaltemplate.json
│ ├── ingest-search-pipeline-template.json
│ ├── noop.json
│ ├── register-deploylocalsparseencodingmodel.json
│ ├── registerremotemodel-createindex.json
│ ├── registerremotemodel-ingestpipeline-createindex.json
│ ├── registerremotemodel-ingestpipeline-updateindex.json
│ ├── registerremotemodel-ingestpipeline.json
│ └── registerremotemodel.json
└── yamlRestTest
├── java
└── org
│ └── opensearch
│ └── flowframework
│ └── FlowFrameworkPluginYamlTestSuiteIT.java
└── resources
└── rest-api-spec
└── test
└── 10_basic.yml
/.codecov.yml:
--------------------------------------------------------------------------------
1 | codecov:
2 | require_ci_to_pass: true
3 |
4 | coverage:
5 | precision: 2
6 | round: down
7 | range: "70...100"
8 | status:
9 | project:
10 | default:
11 | target: auto
12 | threshold: 2% # project coverage can drop
13 | patch:
14 | default:
15 | target: 70% # required diff coverage value
16 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @dbwiddis @owaiskazi19 @joshpalis @ohltyler @amitgalitz @jackiehanyang @junweid62
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ### Description
2 | [Describe what this change achieves]
3 |
4 | ### Related Issues
5 | Resolves #[Issue number to be closed when this PR is merged]
6 |
7 |
8 | ### Check List
9 | - [ ] New functionality includes testing.
10 | - [ ] New functionality has been documented.
11 | - [ ] API changes companion pull request [created](https://github.com/opensearch-project/opensearch-api-specification/blob/main/DEVELOPER_GUIDE.md).
12 | - [ ] Commits are signed per the DCO using `--signoff`.
13 | - [ ] Public documentation issue/PR [created](https://github.com/opensearch-project/documentation-website/issues/new/choose).
14 |
15 | By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
16 | For more information on following Developer Certificate of Origin and signing off your commits, please check [here](https://github.com/opensearch-project/flow-framework/blob/main/CONTRIBUTING.md#developer-certificate-of-origin).
17 |
--------------------------------------------------------------------------------
/.github/workflows/add-backport-label.yml.disabled:
--------------------------------------------------------------------------------
1 | name: Add Backport Label
2 | on:
3 | pull_request_target:
4 | branches: main
5 | types: opened
6 |
7 | jobs:
8 | add_labels:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v4
12 | - uses: actions-ecosystem/action-add-labels@v1
13 | with:
14 | labels: backport 2.x
15 |
--------------------------------------------------------------------------------
/.github/workflows/add-untriaged.yml:
--------------------------------------------------------------------------------
1 | name: Apply 'untriaged' label during issue lifecycle
2 |
3 | on:
4 | issues:
5 | types: [opened, reopened, transferred]
6 |
7 | jobs:
8 | apply-label:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/github-script@v7
12 | with:
13 | script: |
14 | github.rest.issues.addLabels({
15 | issue_number: context.issue.number,
16 | owner: context.repo.owner,
17 | repo: context.repo.repo,
18 | labels: ['untriaged']
19 | })
20 |
--------------------------------------------------------------------------------
/.github/workflows/backport.yml:
--------------------------------------------------------------------------------
1 | name: Backport
2 | on:
3 | pull_request_target:
4 | types:
5 | - closed
6 | - labeled
7 |
8 | jobs:
9 | backport:
10 | name: Backport
11 | runs-on: ubuntu-latest
12 | # Only react to merged PRs for security reasons.
13 | # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
14 | if: >
15 | github.event.pull_request.merged
16 | && (
17 | github.event.action == 'closed'
18 | || (
19 | github.event.action == 'labeled'
20 | && contains(github.event.label.name, 'backport')
21 | )
22 | )
23 | permissions:
24 | contents: write
25 | pull-requests: write
26 | steps:
27 | - name: GitHub App token
28 | id: github_app_token
29 | uses: tibdex/github-app-token@v2
30 | with:
31 | app_id: ${{ secrets.APP_ID }}
32 | private_key: ${{ secrets.APP_PRIVATE_KEY }}
33 | installation_id: 22958780
34 |
35 | - name: Backport
36 | uses: VachaShah/backport@v2.2.0
37 | with:
38 | github_token: ${{ steps.github_app_token.outputs.token }}
39 | head_template: backport/backport-<%= number %>-to-<%= base %>
40 | failure_labels: backport-failed
41 |
--------------------------------------------------------------------------------
/.github/workflows/changelog_verifier.yml:
--------------------------------------------------------------------------------
1 | name: "Changelog Verifier"
2 | on:
3 | push:
4 | branches-ignore:
5 | - 'whitesource-remediate/**'
6 | - 'backport/**'
7 | pull_request:
8 | branches: main
9 | types: [opened, synchronize, reopened, ready_for_review, labeled, unlabeled]
10 |
11 | jobs:
12 | # Enforces the update of a changelog file on every pull request
13 | verify-changelog:
14 | if: github.repository == 'opensearch-project/flow-framework'
15 | runs-on: ubuntu-latest
16 | steps:
17 | - uses: actions/checkout@v4
18 | with:
19 | token: ${{ secrets.GITHUB_TOKEN }}
20 | ref: ${{ github.event.pull_request.head.sha }}
21 |
22 | - uses: dangoslen/changelog-enforcer@v3
23 | with:
24 | skipLabels: "autocut, skip-changelog"
25 |
--------------------------------------------------------------------------------
/.github/workflows/publish-snapshots.yml:
--------------------------------------------------------------------------------
1 | name: Publish snapshots to maven
2 |
3 | on:
4 | workflow_dispatch:
5 | push:
6 | branches:
7 | - 'main'
8 | - '[0-9]+.[0-9]+'
9 | - '[0-9]+.x'
10 |
11 | jobs:
12 | build-and-publish-snapshots:
13 | strategy:
14 | fail-fast: false
15 | if: github.repository == 'opensearch-project/flow-framework'
16 | runs-on: ubuntu-latest
17 |
18 | permissions:
19 | id-token: write
20 | contents: write
21 |
22 | steps:
23 | - uses: actions/setup-java@v4
24 | with:
 25 |         distribution: temurin # Temurin is a distribution of Adoptium
26 | java-version: 21
27 | - uses: actions/checkout@v4
28 | - uses: aws-actions/configure-aws-credentials@v4.2.1
29 | with:
30 | role-to-assume: ${{ secrets.PUBLISH_SNAPSHOTS_ROLE }}
31 | aws-region: us-east-1
32 | - name: publish snapshots to maven
33 | run: |
34 | export SONATYPE_USERNAME=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-username --query SecretString --output text)
35 | export SONATYPE_PASSWORD=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-password --query SecretString --output text)
36 | echo "::add-mask::$SONATYPE_USERNAME"
37 | echo "::add-mask::$SONATYPE_PASSWORD"
38 | ./gradlew publishPluginZipPublicationToSnapshotsRepository
39 |
--------------------------------------------------------------------------------
/.github/workflows/test-api-consistency.yml:
--------------------------------------------------------------------------------
1 | name: Daily API Consistency Test
2 |
3 | on:
4 | schedule:
5 | - cron: '0 8 * * *' # Runs daily at 8 AM UTC
6 | workflow_dispatch:
7 |
8 | jobs:
9 | API-consistency-test:
10 | if: github.repository == 'opensearch-project/flow-framework'
11 | runs-on: ubuntu-latest
12 | strategy:
13 | matrix:
14 | java: [21]
15 |
16 | steps:
17 | - name: Checkout Flow Framework
18 | uses: actions/checkout@v3
19 |
20 | - name: Setup Java ${{ matrix.java }}
21 | uses: actions/setup-java@v3
22 | with:
23 | distribution: 'temurin'
24 | java-version: ${{ matrix.java }}
25 |
26 | - name: Run API Consistency Tests
27 | run: ./gradlew test --tests "org.opensearch.flowframework.workflow.*"
28 |
--------------------------------------------------------------------------------
/.github/workflows/test_bwc.yml:
--------------------------------------------------------------------------------
1 | name: BWC
2 | on:
3 | workflow_dispatch:
4 | push:
5 | branches-ignore:
6 | - 'whitesource-remediate/**'
7 | - 'backport/**'
8 | - 'create-pull-request/**'
9 | pull_request:
10 | types: [opened, synchronize, reopened]
11 |
12 | jobs:
13 | Build-ff-linux:
14 | strategy:
15 | matrix:
16 | java: [21]
17 | fail-fast: false
18 |
19 | name: Test Flow Framework BWC
20 | runs-on: ubuntu-latest
21 |
22 | steps:
23 | - name: Setup Java ${{ matrix.java }}
24 | uses: actions/setup-java@v4
25 | with:
26 | distribution: 'temurin'
27 | java-version: ${{ matrix.java }}
28 |
29 | - name: Checkout Flow Framework
30 | uses: actions/checkout@v4
31 |
32 | - name: Assemble Flow Framework
33 | run: |
34 | plugin_version=`./gradlew properties -q | grep "opensearch_build:" | awk '{print $2}'`
35 | echo plugin_version $plugin_version
36 | ./gradlew assemble
37 | echo "Creating ./src/test/resources/org/opensearch/flowframework/bwc/flow-framework/$plugin_version ..."
38 | mkdir -p ./src/test/resources/org/opensearch/flowframework/bwc/flow-framework/$plugin_version
39 | echo "Copying ./build/distributions/*.zip to ./src/test/resources/org/opensearch/flowframework/bwc/flow-framework/$plugin_version ..."
40 | ls ./build/distributions/
41 | cp ./build/distributions/*.zip ./src/test/resources/org/opensearch/flowframework/bwc/flow-framework/$plugin_version
42 | echo "Copied ./build/distributions/*.zip to ./src/test/resources/org/opensearch/flowframework/bwc/flow-framework/$plugin_version ..."
43 | ls ./src/test/resources/org/opensearch/flowframework/bwc/flow-framework/$plugin_version
44 | - name: Run Flow Framework Backwards Compatibility Tests
45 | run: |
46 | echo "Running backwards compatibility tests ..."
47 | ./gradlew bwcTestSuite -Dtests.security.manager=false
48 |
--------------------------------------------------------------------------------
/.github/workflows/test_security.yml:
--------------------------------------------------------------------------------
1 | name: Security test workflow for Flow Framework
2 | on:
3 | workflow_dispatch:
4 | push:
5 | branches-ignore:
6 | - 'whitesource-remediate/**'
7 | - 'backport/**'
8 | - 'create-pull-request/**'
9 | pull_request:
10 | types: [opened, synchronize, reopened]
11 |
12 |
13 | jobs:
14 | Get-CI-Image-Tag:
15 | uses: opensearch-project/opensearch-build/.github/workflows/get-ci-image-tag.yml@main
16 | with:
17 | product: opensearch
18 |
19 | integ-test-with-security-linux:
20 | strategy:
21 | matrix:
22 | java: [21]
23 |
24 | name: Run Security Integration Tests on Linux
25 | runs-on: ubuntu-latest
26 | needs: Get-CI-Image-Tag
27 | container:
 28 |         # using the same image which is used by the opensearch-build team to build the OpenSearch Distribution
29 | # this image tag is subject to change as more dependencies and updates will arrive over time
30 | image: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-version-linux }}
 31 |         # need to switch to root so that github actions can install the runner binary on the container without permission issues.
32 | options: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-start-options }}
33 |
34 | steps:
35 | - name: Run start commands
36 | run: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-start-command }}
37 | - name: Checkout Flow Framework
38 | uses: actions/checkout@v4
39 | - name: Setup Java ${{ matrix.java }}
40 | uses: actions/setup-java@v4
41 | with:
42 | distribution: 'temurin'
43 | java-version: ${{ matrix.java }}
44 | - name: Run tests
45 | # switching the user, as OpenSearch cluster can only be started as root/Administrator on linux-deb/linux-rpm/windows-zip.
46 | run: |
47 | chown -R 1000:1000 `pwd`
48 | su `id -un 1000` -c "whoami && java -version && ./gradlew integTest -Dsecurity.enabled=true"
49 |
--------------------------------------------------------------------------------
/.github/workflows/wrapper.yml:
--------------------------------------------------------------------------------
1 | name: Validate Gradle Wrapper
2 | on:
3 | push:
4 | branches-ignore:
5 | - 'whitesource-remediate/**'
6 | - 'backport/**'
7 | pull_request:
8 | types: [opened, synchronize, reopened]
9 |
10 | jobs:
11 | validate:
12 | name: Validate
13 | if: github.repository == 'opensearch-project/flow-framework'
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@v4
17 | - uses: gradle/actions/wrapper-validation@v4
18 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # intellij files
2 | .idea/
3 | *.iml
4 | *.ipr
5 | *.iws
6 | *.log
7 | build-idea/
8 | out/
9 |
10 | # eclipse files
11 | .classpath
12 | .project
13 | .settings
14 |
15 | # gradle stuff
16 | .gradle/
17 | build/
18 | bin/
19 |
20 | # vscode stuff
21 | .vscode/
22 |
23 | # osx stuff
24 | .DS_Store
25 |
--------------------------------------------------------------------------------
/.whitesource:
--------------------------------------------------------------------------------
1 | {
2 | "scanSettings": {
3 | "configMode": "AUTO",
4 | "configExternalURL": "",
5 | "projectToken": "",
6 | "baseBranches": []
7 | },
8 | "scanSettingsSAST": {
9 | "enableScan": false,
10 | "scanPullRequests": false,
11 | "incrementalScan": true,
12 | "baseBranches": [],
13 | "snippetSize": 10
14 | },
15 | "checkRunSettings": {
16 | "vulnerableCheckRunConclusionLevel": "failure",
17 | "displayMode": "diff",
18 | "useMendCheckNames": true
19 | },
20 | "checkRunSettingsSAST": {
21 | "checkRunConclusionLevel": "failure",
22 | "severityThreshold": "high"
23 | },
24 | "issueSettings": {
25 | "minSeverityLevel": "LOW",
26 | "issueType": "DEPENDENCY"
27 | },
28 | "remediateSettings": {
29 | "enableRenovate": true,
30 | "extends": [
31 | "config:base",
32 | ":gitSignOff",
33 | "github>whitesource/merge-confidence:beta"
34 | ],
35 | "addLabels": ["skip-changelog"],
36 | "workflowRules": {
37 | "enabled": true
38 | }
39 | }
40 | }
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # CHANGELOG
2 | All notable changes to this project are documented in this file.
3 |
4 | Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.1.0/)
5 |
6 | ## [Unreleased 3.1](https://github.com/opensearch-project/flow-framework/compare/3.0...HEAD)
7 | ### Features
8 | ### Enhancements
9 | - Make thread pool sizes configurable ([#1139](https://github.com/opensearch-project/flow-framework/issues/1139))
10 |
11 | ### Bug Fixes
12 | - Fixing llm field processing in RegisterAgentStep ([#1151](https://github.com/opensearch-project/flow-framework/pull/1151))
13 | - Include exception type in WorkflowState error field even if no cause ([#1154](https://github.com/opensearch-project/flow-framework/pull/1154))
14 | - Pass llm spec params to builder ([#1155](https://github.com/opensearch-project/flow-framework/pull/1155))
15 |
16 | ### Infrastructure
17 | - Conditionally include ddb-client dependency only if env variable set ([#1141](https://github.com/opensearch-project/flow-framework/issues/1141))
18 |
19 | ### Documentation
20 | - Feat: add data summary with log pattern agent template ([#1137](https://github.com/opensearch-project/flow-framework/pull/1137))
21 | ### Maintenance
22 | ### Refactoring
23 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
6 | This code of conduct applies to all spaces provided by the OpenSource project including in code, documentation, issue trackers, mailing lists, chat channels, wikis, blogs, social media and any other communication channels used by the project.
7 |
8 | **Our open source communities endeavor to:**
9 |
10 | * Be Inclusive: We are committed to being a community where everyone can join and contribute. This means using inclusive and welcoming language.
11 | * Be Welcoming: We are committed to maintaining a safe space for everyone to be able to contribute.
12 | * Be Respectful: We are committed to encouraging differing viewpoints, accepting constructive criticism and working collaboratively towards decisions that help the project grow. Disrespectful and unacceptable behavior will not be tolerated.
13 | * Be Collaborative: We are committed to supporting what is best for our community and users. When we build anything for the benefit of the project, we should document the work we do and communicate to others on how this affects their work.
14 |
15 | **Our Responsibility. As contributors, members, or bystanders we each individually have the responsibility to behave professionally and respectfully at all times. Disrespectful and unacceptable behaviors include, but are not limited to:**
16 |
17 | * The use of violent threats, abusive, discriminatory, or derogatory language;
18 | * Offensive comments related to gender, gender identity and expression, sexual orientation, disability, mental illness, race, political or religious affiliation;
19 | * Posting of sexually explicit or violent content;
20 | * The use of sexualized language and unwelcome sexual attention or advances;
21 | * Public or private harassment of any kind;
22 | * Publishing private information, such as physical or electronic address, without permission;
23 | * Other conduct which could reasonably be considered inappropriate in a professional setting;
24 | * Advocating for or encouraging any of the above behaviors.
25 |
26 | **Enforcement and Reporting Code of Conduct Issues:**
27 |
28 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported. [Contact us](mailto:opensource-codeofconduct@amazon.com). All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
29 |
--------------------------------------------------------------------------------
/MAINTAINERS.md:
--------------------------------------------------------------------------------
1 | ## Overview
2 |
3 | This document contains a list of maintainers in this repo. See [opensearch-project/.github/RESPONSIBILITIES.md](https://github.com/opensearch-project/.github/blob/main/RESPONSIBILITIES.md#maintainer-responsibilities) that explains what the role of maintainer means, what maintainers do in this and other repos, and how they should be doing it. If you're interested in contributing, and becoming a maintainer, see [CONTRIBUTING](CONTRIBUTING.md).
4 |
5 | ## Current Maintainers
6 |
7 | | Maintainer | GitHub ID | Affiliation |
8 | | ----------------- | ------------------------------------------------------- | ----------- |
9 | | Junwei Dai | [junweid62](https://github.com/junweid62) | Amazon |
10 | | Amit Galitzky | [amitgalitz](https://github.com/amitgalitz) | Amazon |
11 | | Jackie Han | [jackiehanyang](https://github.com/jackiehanyang) | Amazon |
12 | | Owais Kazi | [owaiskazi19](https://github.com/owaiskazi19) | Amazon |
13 | | Tyler Ohlsen | [ohltyler](https://github.com/ohltyler) | Amazon |
14 | | Josh Palis | [joshpalis](https://github.com/joshpalis) | Amazon |
15 | | Dan Widdis | [dbwiddis](https://github.com/dbwiddis) | Amazon |
16 |
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | OpenSearch (https://opensearch.org/)
2 | Copyright OpenSearch Contributors
3 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## OpenSearch Flow Framework
2 |
3 | This project is an OpenSearch plugin that enables builders to innovate AI applications on OpenSearch.
4 |
5 | The current process of using ML offerings in OpenSearch, such as Semantic Search, requires users to handle complex setup and pre-processing tasks, and send verbose user queries, both of which can be time-consuming and error-prone.
6 |
7 | We want to introduce our customers to a new no-code/low-code builder experience ([Backend RFC](https://github.com/opensearch-project/OpenSearch/issues/9213) and [Frontend RFC](https://github.com/opensearch-project/OpenSearch-Dashboards/issues/4755)) that empowers users to compose AI-augmented query and ingestion flows, integrate ML models supported by ML-Commons, and streamline the OpenSearch app development experience through a drag-and-drop designer. The front end will help users create use case templates, which provide a compact description of configuration steps for automated workflows such as Retrieval Augmented Generation (RAG), AI connectors and other components that prime OpenSearch as a backend to leverage generative models. Once primed, builders can query OpenSearch directly without building middleware logic to stitch together data flows and ML models.
8 |
9 | While the initial development has focused on ML use cases, the framework is generic and can be adapted to other use cases.
10 |
11 | See the [Development Plan](https://github.com/opensearch-project/flow-framework/issues/475) to view or comment on current incremental development priorities.
12 |
13 | ## Security
14 |
15 | See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information.
16 |
17 | ## License
18 |
19 | This project is licensed under the Apache-2.0 License.
20 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | ## Reporting a Vulnerability
2 |
3 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](https://aws.amazon.com/security/vulnerability-reporting/) or directly via email to aws-security@amazon.com. Please do **not** create a public GitHub issue.
4 |
--------------------------------------------------------------------------------
/config/checkstyle/checkstyle.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
13 |
14 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/config/checkstyle/checkstyle_suppressions.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/formatter/formatting.gradle:
--------------------------------------------------------------------------------
1 | allprojects {
2 | spotless {
3 | java {
4 | // Normally this isn't necessary, but we have Java sources in
5 | // non-standard places
6 | target '**/*.java'
7 |
8 | removeUnusedImports()
9 | importOrder('de.thetaphi',
10 | 'com.carrotsearch',
11 | 'com.fasterxml',
12 | 'com.avast',
13 | 'com.sun',
14 | 'com.maxmind|com.github|com.networknt|groovy|nebula',
15 | 'org.antlr',
16 | 'software.amazon',
17 | 'com.azure|com.microsoft|com.ibm|com.google|joptsimple|org.apache|org.bouncycastle|org.codehaus|org.opensearch|org.objectweb|org.joda|org.hamcrest|org.openjdk|org.gradle|org.junit',
18 | 'javax',
19 | 'java',
20 | '',
21 | '\\#java|\\#org.opensearch|\\#org.hamcrest|\\#')
22 | eclipse().configFile rootProject.file('formatter/formatterConfig.xml')
23 | trimTrailingWhitespace()
24 | endWithNewline()
25 |
26 | // See DEVELOPER_GUIDE.md for details of when to enable this.
27 | if (System.getProperty('spotless.paddedcell') != null) {
28 | paddedCell()
29 | }
30 | }
31 | format 'misc', {
32 | target '*.md', '*.gradle', '**/*.json', '**/*.yaml', '**/*.yml', '**/*.svg'
33 |
34 | trimTrailingWhitespace()
35 | endWithNewline()
36 | leadingTabsToSpaces()
37 | }
38 | format("license", {
39 | licenseHeaderFile("${rootProject.file("formatter/license-header.txt")}", "package ")
40 | target("src/*/java/**/*.java")
41 | })
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/formatter/license-header.txt:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opensearch-project/flow-framework/deb02e7bf3354a845e80cd87292346ea6a261cf0/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionSha256Sum=61ad310d3c7d3e5da131b76bbf22b5a4c0786e9d892dae8c1658d4b484de3caa
4 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.14-bin.zip
5 | networkTimeout=10000
6 | validateDistributionUrl=true
7 | zipStoreBase=GRADLE_USER_HOME
8 | zipStorePath=wrapper/dists
9 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.12.0.0.md:
--------------------------------------------------------------------------------
1 | ## 2024-02-20 Version 2.12.0.0
2 |
3 | Compatible with OpenSearch 2.12.0
4 |
5 | ### Features
6 | * Initial release of Flow Framework
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.13.0.0.md:
--------------------------------------------------------------------------------
1 | ## 2024-03-18 Version 2.13.0.0
2 |
3 | Compatible with OpenSearch 2.13.0
4 |
5 | ### Features
6 | * Added create ingest pipeline step ([#558](https://github.com/opensearch-project/flow-framework/pull/558))
7 | * Added create search pipeline step ([#569](https://github.com/opensearch-project/flow-framework/pull/569))
8 | * Added create index step ([#574](https://github.com/opensearch-project/flow-framework/pull/574))
9 | * Added default use cases ([#583](https://github.com/opensearch-project/flow-framework/pull/583))
10 |
11 | ### Enhancements
12 | * Substitute REST path or body parameters in Workflow Steps ([#525](https://github.com/opensearch-project/flow-framework/pull/525))
13 | * Added an optional workflow_step param to the get workflow steps API ([#538](https://github.com/opensearch-project/flow-framework/pull/538))
14 | * Add created, updated, and provisioned timestamps to saved template ([#551](https://github.com/opensearch-project/flow-framework/pull/551))
15 | * Enable Flow Framework by default ([#553](https://github.com/opensearch-project/flow-framework/pull/553))
16 | * Adding new exception type for workflow step failures ([#577](https://github.com/opensearch-project/flow-framework/pull/577))
17 |
18 | ### Refactoring
19 | * Moved workflow-steps.json to Enum ([#523](https://github.com/opensearch-project/flow-framework/pull/523))
20 | * Refactored logging for consistency ([#524](https://github.com/opensearch-project/flow-framework/pull/524))
21 |
22 | ### Bug Fixes
23 | * Fixing create index and use case input parsing bugs ([#600](https://github.com/opensearch-project/flow-framework/pull/600))
24 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.14.0.0.md:
--------------------------------------------------------------------------------
1 | ## 2024-04-30 Version 2.14.0.0
2 |
3 | Compatible with OpenSearch 2.14.0
4 |
5 | ### Enhancements
6 | - Add guardrails to default use case params ([#658](https://github.com/opensearch-project/flow-framework/pull/658))
7 | - Allow strings for boolean workflow step parameters ([#671](https://github.com/opensearch-project/flow-framework/pull/671))
8 | - Add optional delay parameter to no-op step ([#674](https://github.com/opensearch-project/flow-framework/pull/674))
9 | - Add model interface support for remote and local custom models ([#701](https://github.com/opensearch-project/flow-framework/pull/701))
10 |
11 | ### Bug Fixes
12 | - Reset workflow state to initial state after successful deprovision ([#635](https://github.com/opensearch-project/flow-framework/pull/635))
13 | - Silently ignore content on APIs that don't require it ([#639](https://github.com/opensearch-project/flow-framework/pull/639))
14 | - Hide user and credential field from search response ([#680](https://github.com/opensearch-project/flow-framework/pull/680))
15 | - Throw the correct error message in status API for WorkflowSteps ([#676](https://github.com/opensearch-project/flow-framework/pull/676))
16 | - Delete workflow state when template is deleted and no resources exist ([#689](https://github.com/opensearch-project/flow-framework/pull/689))
17 | - Fixing model group parsing and restoring context ([#695](https://github.com/opensearch-project/flow-framework/pull/695))
18 |
19 |
20 | ### Infrastructure
21 | - Switch macOS runner to macos-13 from macos-latest since macos-latest is now arm64 ([#686](https://github.com/opensearch-project/flow-framework/pull/686))
22 |
23 | ### Refactoring
24 | - Improve error messages for workflow states other than NOT_STARTED ([#642](https://github.com/opensearch-project/flow-framework/pull/642))
25 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.15.0.0.md:
--------------------------------------------------------------------------------
1 | ## Version 2.15.0.0
2 |
3 | Compatible with OpenSearch 2.15.0
4 |
5 | ### Enhancements
6 | - Add Workflow Step for Reindex from source index to destination ([#718](https://github.com/opensearch-project/flow-framework/pull/718))
7 | - Add param to delete workflow API to clear status even if resources exist ([#719](https://github.com/opensearch-project/flow-framework/pull/719))
8 | - Add additional default use cases ([#731](https://github.com/opensearch-project/flow-framework/pull/731))
9 | - Add conversation search default use case with RAG tool ([#732](https://github.com/opensearch-project/flow-framework/pull/732))
10 |
11 | ### Bug Fixes
12 | - Add user mapping to Workflow State index ([#705](https://github.com/opensearch-project/flow-framework/pull/705))
13 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.16.0.0.md:
--------------------------------------------------------------------------------
1 | ## Version 2.16.0.0
2 |
3 | Compatible with OpenSearch 2.16.0
4 |
5 | ### Enhancements
6 | - Register system index descriptors through SystemIndexPlugin.getSystemIndexDescriptors ([#750](https://github.com/opensearch-project/flow-framework/pull/750))
7 | - Support editing of certain workflow fields on a provisioned workflow ([#757](https://github.com/opensearch-project/flow-framework/pull/757))
8 | - Add allow_delete parameter to Deprovision API ([#763](https://github.com/opensearch-project/flow-framework/pull/763))
9 | - Improve Template and WorkflowState builders ([#778](https://github.com/opensearch-project/flow-framework/pull/778))
10 |
11 | ### Bug Fixes
12 | - Handle Not Found deprovision exceptions as successful deletions ([#805](https://github.com/opensearch-project/flow-framework/pull/805))
13 | - Wrap CreateIndexRequest mappings in _doc key as required ([#809](https://github.com/opensearch-project/flow-framework/pull/809))
14 | - Have FlowFrameworkException status recognized by ExceptionsHelper ([#811](https://github.com/opensearch-project/flow-framework/pull/811))
15 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.17.0.0.md:
--------------------------------------------------------------------------------
1 | ## Version 2.17.0.0
2 |
3 | Compatible with OpenSearch 2.17.0
4 |
5 | ### Features
6 | - Adds reprovision API to support updating search pipelines, ingest pipelines, and index settings ([#804](https://github.com/opensearch-project/flow-framework/pull/804))
7 | - Adds user level access control based on backend roles ([#838](https://github.com/opensearch-project/flow-framework/pull/838))
8 | - Support parsing connector_id when creating tools ([#846](https://github.com/opensearch-project/flow-framework/pull/846))
9 |
10 | ### Refactoring
11 | - Refactor workflow step resource updates to eliminate duplication ([#796](https://github.com/opensearch-project/flow-framework/pull/796))
12 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.17.1.0.md:
--------------------------------------------------------------------------------
1 | ## Version 2.17.1.0
2 |
3 | Compatible with OpenSearch 2.17.1
4 |
5 | ### Maintenance
6 | - Fix flaky integ test reprovisioning before template update ([#880](https://github.com/opensearch-project/flow-framework/pull/880))
7 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.18.0.0.md:
--------------------------------------------------------------------------------
1 | ## Version 2.18.0.0
2 |
3 | Compatible with OpenSearch 2.18.0
4 |
5 | ### Features
6 | - Add ApiSpecFetcher for Fetching and Comparing API Specifications ([#651](https://github.com/opensearch-project/flow-framework/issues/651))
7 | - Add optional config field to tool step ([#899](https://github.com/opensearch-project/flow-framework/pull/899))
8 |
9 | ### Enhancements
10 | - Incrementally remove resources from workflow state during deprovisioning ([#898](https://github.com/opensearch-project/flow-framework/pull/898))
11 |
12 | ### Bug Fixes
13 | - Fixed Template Update Location and Improved Logger Statements in ReprovisionWorkflowTransportAction ([#918](https://github.com/opensearch-project/flow-framework/pull/918))
14 |
15 | ### Documentation
16 | - Add query assist data summary agent into sample templates ([#875](https://github.com/opensearch-project/flow-framework/pull/875))
17 |
18 | ### Refactoring
19 | - Update workflow state without using painless script ([#894](https://github.com/opensearch-project/flow-framework/pull/894))
20 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.19.0.0.md:
--------------------------------------------------------------------------------
1 | ## Version 2.19.0.0
2 |
3 | Compatible with OpenSearch 2.19.0
4 |
5 | ### Features
6 | - Add multitenant remote metadata client ([#980](https://github.com/opensearch-project/flow-framework/pull/980))
7 | - Add synchronous execution option to workflow provisioning ([#990](https://github.com/opensearch-project/flow-framework/pull/990))
8 |
9 | ### Bug Fixes
10 | - Remove useCase and defaultParams field in WorkflowRequest ([#758](https://github.com/opensearch-project/flow-framework/pull/758))
11 | - Fix RBAC fetching from workflow state when template is not present ([#998](https://github.com/opensearch-project/flow-framework/pull/998))
12 |
13 | ### Refactoring
14 | - Replace String concatenation with Log4j ParameterizedMessage for readability ([#943](https://github.com/opensearch-project/flow-framework/pull/943))
15 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-2.19.2.0.md:
--------------------------------------------------------------------------------
1 | ## Version 2.19.2.0
2 |
3 | Compatible with OpenSearch 2.19.2
4 |
5 | ### Bug Fixes
6 | - Fix Config parser does not handle tenant_id field ([#1096](https://github.com/opensearch-project/flow-framework/pull/1096))
7 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-3.0.0.0-alpha1.md:
--------------------------------------------------------------------------------
1 | ## Version 3.0.0.0-alpha1 Release Notes
2 |
3 | Compatible with OpenSearch 3.0.0-alpha1
4 |
5 | ### Infrastructure
6 | - Set Java target compatibility to JDK 21 ([#730](https://github.com/opensearch-project/flow-framework/pull/730))
7 |
8 | ### Documentation
9 | - Add text to visualization agent template ([#936](https://github.com/opensearch-project/flow-framework/pull/936))
10 |
11 | ### Maintenance
12 | - Fix breaking changes for 3.0.0 release ([#1026](https://github.com/opensearch-project/flow-framework/pull/1026))
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-3.0.0.0-beta1.md:
--------------------------------------------------------------------------------
1 | ## Version 3.0.0.0-beta1 Release Notes
2 |
3 | Compatible with OpenSearch 3.0.0-beta1
4 |
5 | ### Features
6 | - Add per-tenant provisioning throttling ([#1074](https://github.com/opensearch-project/flow-framework/pull/1074))
7 |
8 | ### Bug Fixes
9 | - Change REST status codes for RBAC and provisioning ([#1083](https://github.com/opensearch-project/flow-framework/pull/1083))
10 | - Fix Config parser does not handle tenant_id field ([#1096](https://github.com/opensearch-project/flow-framework/pull/1096))
11 | - Complete action listener on failed synchronous workflow provisioning ([#1098](https://github.com/opensearch-project/flow-framework/pull/1098))
12 |
13 | ### Maintenance
14 | - Migrate from BC to BCFIPS libraries ([#1087](https://github.com/opensearch-project/flow-framework/pull/1087))
15 |
--------------------------------------------------------------------------------
/release-notes/opensearch-flow-framework.release-notes-3.0.0.0.md:
--------------------------------------------------------------------------------
1 | ## Version 3.0.0.0 Release Notes
2 |
3 | Compatible with OpenSearch 3.0.0
4 |
5 | ### Features
6 | - Add per-tenant provisioning throttling ([#1074](https://github.com/opensearch-project/flow-framework/pull/1074))
7 |
8 | ### Bug Fixes
9 | - Change REST status codes for RBAC and provisioning ([#1083](https://github.com/opensearch-project/flow-framework/pull/1083))
10 | - Fix Config parser does not handle tenant_id field ([#1096](https://github.com/opensearch-project/flow-framework/pull/1096))
11 | - Complete action listener on failed synchronous workflow provisioning ([#1098](https://github.com/opensearch-project/flow-framework/pull/1098))
12 | - Add new attributes field to ToolStep ([#1113](https://github.com/opensearch-project/flow-framework/pull/1113))
13 | - Fix bug handleReprovision missing wait_for_completion_timeout response ([#1107](https://github.com/opensearch-project/flow-framework/pull/1107))
14 |
15 | ### Maintenance
16 | - Fix breaking changes for 3.0.0 release ([#1026](https://github.com/opensearch-project/flow-framework/pull/1026))
17 | - Migrate from BC to BCFIPS libraries ([#1087](https://github.com/opensearch-project/flow-framework/pull/1087))
18 |
19 | ### Infrastructure
20 | - Set Java target compatibility to JDK 21 ([#730](https://github.com/opensearch-project/flow-framework/pull/730))
21 | - Use java-agent Gradle plugin to support phasing off SecurityManager usage in favor of Java Agent ([#1108](https://github.com/opensearch-project/flow-framework/pull/1108))
22 |
23 |
24 | ### Documentation
25 | - Add text to visualization agent template ([#936](https://github.com/opensearch-project/flow-framework/pull/936))
26 |
--------------------------------------------------------------------------------
/sample-templates/README.md:
--------------------------------------------------------------------------------
1 | ## Flow Framework Sample Templates
2 |
3 | This folder contains sample workflow templates that can be used with Flow Framework.
4 |
5 | Each template is provided in both YAML and JSON format with identical functionality.
6 | The YAML templates include comments which give more insight into the template's usage.
7 | Use the corresponding `Content-Type` (`application/yaml` or `application/json`) when providing them as the body of a REST request.
8 |
9 | **Note:** Several of the templates use both the single quote (`'`) and double quote (`"`) characters which may create issues using the templates with `curl` on the command line and the template in `--data`. Escaping the single quotes or reading the template from a file is needed to work around this.
10 |
11 | You will need to update the `credentials` field with appropriate API keys.
12 |
13 | To create a workflow and provision the resources:
14 |
15 | ```
16 | POST /_plugins/_flow_framework/workflow?provision=true
17 | { template as body }
18 | ```
19 |
20 | This will return a `workflow_id`. To get the IDs of created resources, call the workflow status API.
21 |
22 | ```
23 | GET /_plugins/_flow_framework/workflow/<workflow_id>/_status
24 | ```
25 |
26 | For the Query Assist Agent API, the `agent_id` of the `root_agent` can be used to query it.
27 |
28 | ```
29 | POST /_plugins/_ml/agents/<agent_id>/_execute
30 | {
31 | "parameters": {
32 | "question": "How many 5xx logs do I have?"
33 | }
34 | }
35 | ```
--------------------------------------------------------------------------------
/sample-templates/alert-summary-agent-claude-tested.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Alert Summary Agent
3 | description: Create Alert Summary Agent using Claude on BedRock
4 | use_case: REGISTER_AGENT
5 | version:
6 | template: 1.0.0
7 | compatibility:
8 | - 2.17.0
9 | - 3.0.0
10 | workflows:
11 | provision:
12 | user_params: {}
13 | nodes:
14 | - id: create_claude_connector
15 | type: create_connector
16 | previous_node_inputs: {}
17 | user_inputs:
18 | version: '1'
19 | name: Claude instant runtime Connector
20 | protocol: aws_sigv4
21 | description: The connector to BedRock service for Claude model
22 | actions:
23 | - headers:
24 | x-amz-content-sha256: required
25 | content-type: application/json
26 | method: POST
27 | request_body: '{"prompt":"\n\nHuman: ${parameters.prompt}\n\nAssistant:", "max_tokens_to_sample":${parameters.max_tokens_to_sample},
28 | "temperature":${parameters.temperature}, "anthropic_version":"${parameters.anthropic_version}"
29 | }'
30 | action_type: predict
31 | url: https://bedrock-runtime.us-west-2.amazonaws.com/model/anthropic.claude-instant-v1/invoke
32 | credential:
33 | access_key: ""
34 | secret_key: ""
35 | session_token: ""
36 | parameters:
37 | region: us-west-2
38 | endpoint: bedrock-runtime.us-west-2.amazonaws.com
39 | content_type: application/json
40 | auth: Sig_V4
41 | max_tokens_to_sample: '8000'
42 | service_name: bedrock
43 | temperature: '0.0001'
44 | response_filter: "$.completion"
45 | anthropic_version: bedrock-2023-05-31
46 | - id: register_claude_model
47 | type: register_remote_model
48 | previous_node_inputs:
49 | create_claude_connector: connector_id
50 | user_inputs:
51 | description: Claude model
52 | deploy: true
53 | name: claude-instant
54 | - id: create_alert_summary_ml_model_tool
55 | type: create_tool
56 | previous_node_inputs:
57 | register_claude_model: model_id
58 | user_inputs:
59 | parameters:
60 | prompt: "You are an OpenSearch Alert Assistant to help summarize the alerts.\n Here is the detail of alert: ${parameters.context};\n The question is: ${parameters.question}."
61 | name: MLModelTool
62 | type: MLModelTool
63 | - id: create_alert_summary_agent
64 | type: register_agent
65 | previous_node_inputs:
66 | create_alert_summary_ml_model_tool: tools
67 | user_inputs:
68 | parameters: {}
69 | type: flow
70 | name: Alert Summary Agent
71 | description: this is an alert summary agent
72 |
--------------------------------------------------------------------------------
/sample-templates/deploy-bedrock-claude-model.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Deploy Claude Model",
3 | "description": "Deploy a model using a connector to Claude",
4 | "use_case": "PROVISION",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_claude_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "Claude Instant Runtime Connector",
20 | "version": "1",
21 | "protocol": "aws_sigv4",
22 | "description": "The connector to BedRock service for Claude model",
23 | "actions": [
24 | {
25 | "headers": {
26 | "x-amz-content-sha256": "required",
27 | "content-type": "application/json"
28 | },
29 | "method": "POST",
30 | "request_body": "{ \"prompt\":\"${parameters.prompt}\", \"max_tokens_to_sample\":${parameters.max_tokens_to_sample}, \"temperature\":${parameters.temperature}, \"anthropic_version\":\"${parameters.anthropic_version}\" }",
31 | "action_type": "predict",
32 | "url": "https://bedrock-runtime.us-west-2.amazonaws.com/model/anthropic.claude-instant-v1/invoke"
33 | }
34 | ],
35 | "credential": {
36 | "access_key": "PUT_YOUR_ACCESS_KEY_HERE",
37 | "secret_key": "PUT_YOUR_SECRET_KEY_HERE"
38 | },
39 | "parameters": {
40 | "endpoint": "bedrock-runtime.us-west-2.amazonaws.com",
41 | "content_type": "application/json",
42 | "auth": "Sig_V4",
43 | "max_tokens_to_sample": "8000",
44 | "service_name": "bedrock",
45 | "temperature": "0.0001",
46 | "response_filter": "$.completion",
47 | "region": "us-west-2",
48 | "anthropic_version": "bedrock-2023-05-31"
49 | }
50 | }
51 | },
52 | {
53 | "id": "register_claude_model",
54 | "type": "register_remote_model",
55 | "previous_node_inputs": {
56 | "create_claude_connector": "connector_id"
57 | },
58 | "user_inputs": {
59 | "name": "claude-instant"
60 | }
61 | },
62 | {
63 | "id": "deploy_claude_model",
64 | "type": "deploy_model",
65 | "previous_node_inputs": {
66 | "register_claude_model": "model_id"
67 | }
68 | }
69 | ]
70 | }
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/sample-templates/deploy-bedrock-claude-model.yml:
--------------------------------------------------------------------------------
1 | # This template creates a connector to the BedRock service for Claude model
2 | # It then registers a model using that connector and deploys it.
3 | #
4 | # To use:
5 | # - update the "credential" fields under the create_claude_connector node.
6 | # - if needed, update region
7 | #
8 | # After provisioning:
9 | # - returns a workflow ID
10 | # - use the status API to get the deployed model ID
11 | ---
12 | name: Deploy Claude Model
13 | description: Deploy a model using a connector to Claude
14 | use_case: PROVISION
15 | version:
16 | template: 1.0.0
17 | compatibility:
18 | - 2.12.0
19 | - 3.0.0
20 | # This section defines the provision workflow. Nodes are connected in a graph.
21 | # Either previous_node_inputs or explicit edges can be used to enforce ordering.
22 | workflows:
23 | provision:
24 | # Each id field in a workflow must be unique.
25 | nodes:
26 | - id: create_claude_connector
27 | type: create_connector
28 | user_inputs:
29 | name: Claude Instant Runtime Connector
30 | version: '1'
31 | protocol: aws_sigv4
32 | description: The connector to BedRock service for Claude model
33 | actions:
34 | - headers:
35 | x-amz-content-sha256: required
36 | content-type: application/json
37 | method: POST
38 | request_body: '{
39 | "prompt":"${parameters.prompt}",
40 | "max_tokens_to_sample":${parameters.max_tokens_to_sample},
41 | "temperature":${parameters.temperature}, "anthropic_version":"${parameters.anthropic_version}"
42 | }'
43 | action_type: predict
44 | url: https://bedrock-runtime.us-west-2.amazonaws.com/model/anthropic.claude-instant-v1/invoke
45 | credential:
46 | access_key: 'PUT_YOUR_ACCESS_KEY_HERE'
47 | secret_key: 'PUT_YOUR_SECRET_KEY_HERE'
48 | parameters:
49 | endpoint: bedrock-runtime.us-west-2.amazonaws.com
50 | content_type: application/json
51 | auth: Sig_V4
52 | max_tokens_to_sample: '8000'
53 | service_name: bedrock
54 | temperature: '0.0001'
55 | response_filter: "$.completion"
56 | region: us-west-2
57 | anthropic_version: bedrock-2023-05-31
58 | - id: register_claude_model
59 | type: register_remote_model
60 | previous_node_inputs:
61 | create_claude_connector: connector_id
62 | user_inputs:
63 | name: claude-instant
64 | # Using deploy: true here would both register and deploy the model
65 | # and the deploy model step below could be deleted
66 | # deploy: true
67 | - id: deploy_claude_model
68 | type: deploy_model
69 | previous_node_inputs:
70 | register_claude_model: model_id
71 | # Because the above nodes use previous_node_inputs, these edges are automatically generated.
72 | # edges:
73 | # - source: create_claude_connector
74 | # dest: register_claude_model
75 | # - source: register_claude_model
76 | # dest: deploy_claude_model
77 |
--------------------------------------------------------------------------------
/sample-templates/deploy-openai-model.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Deploy OpenAI Model",
3 | "description": "Deploy a model using a connector to OpenAI",
4 | "use_case": "PROVISION",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_openai_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "OpenAI Chat Connector",
20 | "description": "The connector to public OpenAI model service for GPT 3.5",
21 | "version": "1",
22 | "protocol": "http",
23 | "parameters": {
24 | "endpoint": "api.openai.com",
25 | "model": "gpt-3.5-turbo",
26 | "response_filter": "$.choices[0].message.content"
27 | },
28 | "credential": {
29 | "openAI_key": "PUT_YOUR_API_KEY_HERE"
30 | },
31 | "actions": [
32 | {
33 | "action_type": "predict",
34 | "method": "POST",
35 | "url": "https://${parameters.endpoint}/v1/chat/completions"
36 | }
37 | ]
38 | }
39 | },
40 | {
41 | "id": "register_openai_model",
42 | "type": "register_remote_model",
43 | "previous_node_inputs": {
44 | "create_openai_connector": "connector_id"
45 | },
46 | "user_inputs": {
47 | "name": "openAI-gpt-3.5-turbo"
48 | }
49 | },
50 | {
51 | "id": "deploy_openai_model",
52 | "type": "deploy_model",
53 | "previous_node_inputs": {
54 | "register_openai_model": "model_id"
55 | }
56 | }
57 | ]
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/sample-templates/deploy-openai-model.yml:
--------------------------------------------------------------------------------
1 | # This template creates a connector to the public OpenAI model service for GPT 3.5
2 | # It then registers a model using that connector and deploys it.
3 | #
4 | # To use:
5 | # - update the "credential" field under the create_openai_connector node.
6 | #
7 | # After provisioning:
8 | # - returns a workflow ID
9 | # - use the status API to get the deployed model ID
10 | ---
11 | name: Deploy OpenAI Model
12 | description: Deploy a model using a connector to OpenAI
13 | use_case: PROVISION
14 | version:
15 | template: 1.0.0
16 | compatibility:
17 | - 2.12.0
18 | - 3.0.0
19 | # This section defines the provision workflow. Nodes are connected in a graph.
20 | # Either previous_node_inputs or explicit edges can be used to enforce ordering.
21 | workflows:
22 | provision:
23 | # Each id field in a workflow must be unique.
24 | nodes:
25 | - id: create_openai_connector
26 | type: create_connector
27 | user_inputs:
28 | name: OpenAI Chat Connector
29 | description: The connector to public OpenAI model service for GPT 3.5
30 | version: '1'
31 | protocol: http
32 | parameters:
33 | endpoint: api.openai.com
34 | model: gpt-3.5-turbo
35 | response_filter: '$.choices[0].message.content'
36 | credential:
37 | openAI_key: 'PUT_YOUR_API_KEY_HERE'
38 | actions:
39 | - action_type: predict
40 | method: POST
41 | url: https://${parameters.endpoint}/v1/chat/completions
42 | - id: register_openai_model
43 | type: register_remote_model
44 | previous_node_inputs:
45 | create_openai_connector: connector_id
46 | user_inputs:
47 | name: openAI-gpt-3.5-turbo
48 |       # Using deploy: true here would both register and deploy the model
49 | # and the deploy model step below could be deleted
50 | # deploy: true
51 | - id: deploy_openai_model
52 | type: deploy_model
53 | previous_node_inputs:
54 | register_openai_model: model_id
55 | # Because the above nodes use previous_node_inputs, these edges are automatically generated.
56 | # edges:
57 | # - source: create_openai_connector
58 | # dest: register_openai_model
59 | # - source: register_openai_model
60 | # dest: deploy_openai_model
61 |
--------------------------------------------------------------------------------
/sample-templates/deploy-sagemaker-mistral-model.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Deploy Mistral Model",
3 | "description": "Deploy a model using a connector to SageMaker Mistral model",
4 | "use_case": "PROVISION",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_mistral_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "sagemaker: mistral",
20 | "description": "Test connector for Sagemaker mistral model",
21 | "version": "1",
22 | "protocol": "aws_sigv4",
23 | "credential": {
24 | "access_key": "PUT_YOUR_ACCESS_KEY_HERE",
25 | "secret_key": "PUT_YOUR_SECRET_KEY_HERE"
26 | },
27 | "parameters": {
28 | "region": "us-east-1",
29 | "service_name": "sagemaker"
30 | },
31 | "actions": [
32 | {
33 | "action_type": "predict",
34 | "method": "POST",
35 | "headers": {
36 | "content-type": "application/json"
37 | },
38 | "url": "https://PUT_YOUR_CUSTOM_SAGEMAKER_ENDPOINT_HERE",
39 | "request_body": "{\"prompt\":\"${parameters.prompt}\"}"
40 | }
41 | ]
42 | }
43 | },
44 | {
45 | "id": "register_mistral_model",
46 | "type": "register_remote_model",
47 | "previous_node_inputs": {
48 | "create_mistral_connector": "connector_id"
49 | },
50 | "user_inputs": {
51 | "name": "mistral fine-tuned model"
52 | }
53 | },
54 | {
55 | "id": "deploy_mistral_model",
56 | "type": "deploy_model",
57 | "previous_node_inputs": {
58 | "register_mistral_model": "model_id"
59 | }
60 | }
61 | ]
62 | }
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/sample-templates/deploy-sagemaker-mistral-model.yml:
--------------------------------------------------------------------------------
1 | # This template creates a connector to the SageMaker service for a Mistral model
2 | # It then registers a model using that connector and deploys it.
3 | #
4 | # To use:
5 | # - update the "credential" fields under the create_mistral_connector node.
6 | # - update the sagemaker endpoint
7 | # - if needed, update region
8 | #
9 | # After provisioning:
10 | # - returns a workflow ID
11 | # - use the status API to get the deployed model ID
12 | ---
13 | name: Deploy Mistral Model
14 | description: Deploy a model using a connector to SageMaker Mistral model
15 | use_case: PROVISION
16 | version:
17 | template: 1.0.0
18 | compatibility:
19 | - 2.12.0
20 | - 3.0.0
21 | # This section defines the provision workflow. Nodes are connected in a graph.
22 | # Either previous_node_inputs or explicit edges can be used to enforce ordering.
23 | workflows:
24 | provision:
25 | # Each id field in a workflow must be unique.
26 | nodes:
27 | - id: create_mistral_connector
28 | type: create_connector
29 | user_inputs:
30 | name: 'sagemaker: mistral'
31 | description: Test connector for Sagemaker mistral model
32 | version: '1'
33 | protocol: aws_sigv4
34 | credential:
35 | access_key: 'PUT_YOUR_ACCESS_KEY_HERE'
36 | secret_key: 'PUT_YOUR_SECRET_KEY_HERE'
37 | parameters:
38 | region: us-east-1
39 | service_name: sagemaker
40 | actions:
41 | - action_type: predict
42 | method: POST
43 | headers:
44 | content-type: application/json
45 | url: 'https://PUT_YOUR_CUSTOM_SAGEMAKER_ENDPOINT_HERE'
46 | request_body: '{"prompt":"${parameters.prompt}"}'
47 | - id: register_mistral_model
48 | type: register_remote_model
49 | previous_node_inputs:
50 | create_mistral_connector: connector_id
51 | user_inputs:
52 | name: mistral fine-tuned model
53 |       # Using deploy: true here would both register and deploy the model
54 | # and the deploy model step below could be deleted
55 | # deploy: true
56 | - id: deploy_mistral_model
57 | type: deploy_model
58 | previous_node_inputs:
59 | register_mistral_model: model_id
60 | # Because the above nodes use previous_node_inputs, these edges are automatically generated.
61 | # edges:
62 | # - source: create_mistral_connector
63 | # dest: register_mistral_model
64 | # - source: register_mistral_model
65 | # dest: deploy_mistral_model
66 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'opensearch-flow-framework'
2 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/common/ThrowingSupplier.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.common;
10 |
/**
 * A supplier that can throw a checked exception.
 *
 * @param <T> the type of result supplied
 * @param <E> the type of checked exception that may be thrown
 */
@FunctionalInterface
public interface ThrowingSupplier<T, E extends Exception> {
    /**
     * Gets a result or throws an exception if unable to produce a result.
     *
     * @return the result
     * @throws E if unable to produce a result
     */
    T get() throws E;
}
27 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/common/ThrowingSupplierWrapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.common;
10 |
11 | import java.util.function.Supplier;
12 |
13 | /**
14 | * Wrapper for throwing checked exception inside places that does not allow to do so
15 | */
16 | public class ThrowingSupplierWrapper {
17 |
18 | private ThrowingSupplierWrapper() {}
19 |
20 | /**
21 | * Utility method to use a method throwing checked exception inside a place
22 | * that does not allow throwing the corresponding checked exception (e.g.,
23 | * enum initialization).
24 | * Convert the checked exception thrown by throwingConsumer to a RuntimeException
25 | * so that the compiler won't complain.
26 | * @param the method's return type
27 | * @param throwingSupplier the method reference that can throw checked exception
28 | * @return converted method reference
29 | */
30 | public static Supplier throwingSupplierWrapper(ThrowingSupplier throwingSupplier) {
31 |
32 | return () -> {
33 | try {
34 | return throwingSupplier.get();
35 | } catch (Exception ex) {
36 | throw new RuntimeException(ex);
37 | }
38 | };
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/exception/ApiSpecParseException.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.exception;
10 |
11 | import org.opensearch.OpenSearchException;
12 |
13 | import java.util.List;
14 |
15 | /**
16 | * Custom exception to be thrown when an error occurs during the parsing of an API specification.
17 | */
18 | public class ApiSpecParseException extends OpenSearchException {
19 |
20 | /**
21 | * Constructor with message.
22 | *
23 | * @param message The detail message.
24 | */
25 | public ApiSpecParseException(String message) {
26 | super(message);
27 | }
28 |
29 | /**
30 | * Constructor with message and cause.
31 | *
32 | * @param message The detail message.
33 | * @param cause The cause of the exception.
34 | */
35 | public ApiSpecParseException(String message, Throwable cause) {
36 | super(message, cause);
37 | }
38 |
39 | /**
40 | * Constructor with message and list of detailed errors.
41 | *
42 | * @param message The detail message.
43 | * @param details The list of errors encountered during the parsing process.
44 | */
45 | public ApiSpecParseException(String message, List details) {
46 | super(message + ": " + String.join(", ", details));
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/exception/WorkflowStepException.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.exception;
10 |
11 | import org.opensearch.OpenSearchException;
12 | import org.opensearch.OpenSearchParseException;
13 | import org.opensearch.OpenSearchStatusException;
14 | import org.opensearch.core.rest.RestStatus;
15 | import org.opensearch.core.xcontent.ToXContentObject;
16 | import org.opensearch.core.xcontent.XContentBuilder;
17 |
18 | import java.io.IOException;
19 |
/**
 * Representation of an exception that is caused by a workflow step failing outside of our plugin.
 * This is caught by an external client (e.g. ml-client) returning the failure
 */
public class WorkflowStepException extends FlowFrameworkException implements ToXContentObject {

    private static final long serialVersionUID = 1L;

    /**
     * Constructor with error message.
     *
     * @param message message of the exception
     * @param restStatus HTTP status code of the response
     */
    public WorkflowStepException(String message, RestStatus restStatus) {
        super(message, restStatus);
    }

    /**
     * Constructor with specified cause.
     * @param cause exception cause
     * @param restStatus HTTP status code of the response
     */
    public WorkflowStepException(Throwable cause, RestStatus restStatus) {
        super(cause, restStatus);
    }

    /**
     * Constructor with specified error message and cause.
     * @param message error message
     * @param cause exception cause
     * @param restStatus HTTP status code of the response
     */
    public WorkflowStepException(String message, Throwable cause, RestStatus restStatus) {
        super(message, cause, restStatus);
    }

    /**
     * Getter for restStatus.
     *
     * @return the HTTP status code associated with the exception
     */
    public RestStatus getRestStatus() {
        return restStatus;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // Serialize only the message under an "error" key for the response body
        return builder.startObject().field("error", this.getMessage()).endObject();
    }

    /**
     * Getter for safe exceptions.
     * Only argument/status/parse exception types (or an OpenSearchException caused
     * by a parse failure) are returned; any other exception type yields null.
     *
     * @param ex exception
     * @return exception if safe
     */
    public static Exception getSafeException(Exception ex) {
        if (ex instanceof IllegalArgumentException
            || ex instanceof OpenSearchStatusException
            || ex instanceof OpenSearchParseException
            || (ex instanceof OpenSearchException && ex.getCause() instanceof OpenSearchParseException)) {
            return ex;
        }
        return null;
    }
}
86 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/indices/FlowFrameworkIndex.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.indices;
10 |
11 | import org.opensearch.flowframework.common.ThrowingSupplierWrapper;
12 |
13 | import java.util.function.Supplier;
14 |
15 | import static org.opensearch.flowframework.common.CommonValue.CONFIG_INDEX;
16 | import static org.opensearch.flowframework.common.CommonValue.CONFIG_INDEX_VERSION;
17 | import static org.opensearch.flowframework.common.CommonValue.GLOBAL_CONTEXT_INDEX;
18 | import static org.opensearch.flowframework.common.CommonValue.GLOBAL_CONTEXT_INDEX_VERSION;
19 | import static org.opensearch.flowframework.common.CommonValue.WORKFLOW_STATE_INDEX;
20 | import static org.opensearch.flowframework.common.CommonValue.WORKFLOW_STATE_INDEX_VERSION;
21 |
22 | /**
23 | * An enumeration of Flow Framework indices
24 | */
25 | public enum FlowFrameworkIndex {
26 | /**
27 | * Global Context Index
28 | */
29 | GLOBAL_CONTEXT(
30 | GLOBAL_CONTEXT_INDEX,
31 | ThrowingSupplierWrapper.throwingSupplierWrapper(FlowFrameworkIndicesHandler::getGlobalContextMappings),
32 | GLOBAL_CONTEXT_INDEX_VERSION
33 | ),
34 | /**
35 | * Workflow State Index
36 | */
37 | WORKFLOW_STATE(
38 | WORKFLOW_STATE_INDEX,
39 | ThrowingSupplierWrapper.throwingSupplierWrapper(FlowFrameworkIndicesHandler::getWorkflowStateMappings),
40 | WORKFLOW_STATE_INDEX_VERSION
41 | ),
42 | /**
43 | * Config Index
44 | */
45 | CONFIG(
46 | CONFIG_INDEX,
47 | ThrowingSupplierWrapper.throwingSupplierWrapper(FlowFrameworkIndicesHandler::getConfigIndexMappings),
48 | CONFIG_INDEX_VERSION
49 | );
50 |
51 | private final String indexName;
52 | private final String mapping;
53 | private final Integer version;
54 |
55 | FlowFrameworkIndex(String name, Supplier mappingSupplier, Integer version) {
56 | this.indexName = name;
57 | this.mapping = mappingSupplier.get();
58 | this.version = version;
59 | }
60 |
61 | /**
62 | * Retrieves the index name
63 | * @return the index name
64 | */
65 | public String getIndexName() {
66 | return indexName;
67 | }
68 |
69 | /**
70 | * Retrieves the index mapping
71 | * @return the index mapping
72 | */
73 | public String getMapping() {
74 | return mapping;
75 | }
76 |
77 | /**
78 | * Retrieves the index version
79 | * @return the index version
80 | */
81 | public Integer getVersion() {
82 | return version;
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/model/ProvisioningProgress.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.model;
10 |
/**
 * Enum relating to the provisioning progress
 */
// TODO: transfer this to more detailed array for each step
public enum ProvisioningProgress {
    /** Not Started State: provisioning has not yet begun */
    NOT_STARTED,
    /** In Progress State: provisioning is currently running */
    IN_PROGRESS,
    /** Done State: provisioning finished successfully */
    DONE,
    /** Failed State: provisioning terminated with an error */
    FAILED
}
25 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/model/State.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.model;
10 |
/**
 * Enum relating to the state of a workflow
 */
public enum State {
    /** Not Started state: the workflow has not been provisioned */
    NOT_STARTED,
    /** Provisioning state: the workflow is currently being provisioned */
    PROVISIONING,
    /** Failed state: provisioning terminated with an error */
    FAILED,
    /** Completed state: provisioning finished successfully */
    COMPLETED
}
24 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/model/WorkflowValidator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.model;
10 |
11 | import org.opensearch.common.xcontent.json.JsonXContent;
12 | import org.opensearch.core.xcontent.ToXContentObject;
13 | import org.opensearch.core.xcontent.XContentBuilder;
14 |
15 | import java.io.IOException;
16 | import java.util.Map;
17 |
18 | /**
19 | * This represents the workflow steps json which maps each step to expected inputs and outputs
20 | */
21 | public class WorkflowValidator implements ToXContentObject {
22 |
23 | private Map workflowStepValidators;
24 |
25 | /**
26 | * Intantiate the object representing a Workflow validator
27 | * @param workflowStepValidators a map of {@link WorkflowStepValidator}
28 | */
29 | public WorkflowValidator(Map workflowStepValidators) {
30 | this.workflowStepValidators = workflowStepValidators;
31 | }
32 |
33 | /**
34 | * Output this object in a compact JSON string.
35 | *
36 | * @return a JSON representation of the template.
37 | */
38 | public String toJson() {
39 | try {
40 | XContentBuilder builder = JsonXContent.contentBuilder();
41 | return this.toXContent(builder, EMPTY_PARAMS).toString();
42 | } catch (IOException e) {
43 | return "{\"error\": \"couldn't create JSON from XContent\"}";
44 | }
45 | }
46 |
47 | /**
48 | * Get the map of WorkflowStepValidators
49 | * @return the map of WorkflowStepValidators
50 | */
51 | public Map getWorkflowStepValidators() {
52 | return Map.copyOf(this.workflowStepValidators);
53 | }
54 |
55 | @Override
56 | public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
57 | return xContentBuilder.map(workflowStepValidators);
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/rest/RestSearchWorkflowAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.rest;
10 |
11 | import org.opensearch.flowframework.common.FlowFrameworkSettings;
12 | import org.opensearch.flowframework.model.Template;
13 | import org.opensearch.flowframework.transport.SearchWorkflowAction;
14 |
15 | import java.util.List;
16 |
17 | import static org.opensearch.flowframework.common.CommonValue.GLOBAL_CONTEXT_INDEX;
18 | import static org.opensearch.flowframework.common.CommonValue.WORKFLOW_URI;
19 |
20 | /**
21 | * Rest Action to facilitate requests to search workflows
22 | */
23 | public class RestSearchWorkflowAction extends AbstractSearchWorkflowAction {
24 |
25 | private static final String SEARCH_WORKFLOW_ACTION = "search_workflow_action";
26 | private static final String SEARCH_WORKFLOW_PATH = WORKFLOW_URI + "/_search";
27 |
28 | /**
29 | * Instantiates a new RestSearchWorkflowAction
30 | *
31 | * @param flowFrameworkFeatureEnabledSetting Whether this API is enabled
32 | */
33 | public RestSearchWorkflowAction(FlowFrameworkSettings flowFrameworkFeatureEnabledSetting) {
34 | super(
35 | List.of(SEARCH_WORKFLOW_PATH),
36 | GLOBAL_CONTEXT_INDEX,
37 | Template.class,
38 | SearchWorkflowAction.INSTANCE,
39 | flowFrameworkFeatureEnabledSetting
40 | );
41 | }
42 |
43 | @Override
44 | public String getName() {
45 | return SEARCH_WORKFLOW_ACTION;
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/rest/RestSearchWorkflowStateAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.rest;
10 |
11 | import org.opensearch.flowframework.common.FlowFrameworkSettings;
12 | import org.opensearch.flowframework.model.WorkflowState;
13 | import org.opensearch.flowframework.transport.SearchWorkflowStateAction;
14 |
15 | import java.util.List;
16 |
17 | import static org.opensearch.flowframework.common.CommonValue.WORKFLOW_STATE_INDEX;
18 | import static org.opensearch.flowframework.common.CommonValue.WORKFLOW_URI;
19 |
20 | /**
21 | * Rest Action to facilitate requests to search workflow states
22 | */
23 | public class RestSearchWorkflowStateAction extends AbstractSearchWorkflowAction {
24 |
25 | private static final String SEARCH_WORKFLOW_STATE_ACTION = "search_workflow_state_action";
26 | private static final String SEARCH_WORKFLOW_STATE_PATH = WORKFLOW_URI + "/state/_search";
27 |
28 | /**
29 | * Instantiates a new RestSearchWorkflowStateAction
30 | *
31 | * @param flowFrameworkFeatureEnabledSetting Whether this API is enabled
32 | */
33 | public RestSearchWorkflowStateAction(FlowFrameworkSettings flowFrameworkFeatureEnabledSetting) {
34 | super(
35 | List.of(SEARCH_WORKFLOW_STATE_PATH),
36 | WORKFLOW_STATE_INDEX,
37 | WorkflowState.class,
38 | SearchWorkflowStateAction.INSTANCE,
39 | flowFrameworkFeatureEnabledSetting
40 | );
41 | }
42 |
43 | @Override
44 | public String getName() {
45 | return SEARCH_WORKFLOW_STATE_ACTION;
46 | }
47 |
48 | }
49 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/CreateWorkflowAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 |
13 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
14 |
15 | /**
16 | * External Action for public facing RestCreateWorkflowAction
17 | */
18 | public class CreateWorkflowAction extends ActionType {
19 |
20 | /** The name of this action */
21 | public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow/create";
22 | /** An instance of this action */
23 | public static final CreateWorkflowAction INSTANCE = new CreateWorkflowAction();
24 |
25 | private CreateWorkflowAction() {
26 | super(NAME, WorkflowResponse::new);
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/DeleteWorkflowAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 | import org.opensearch.action.delete.DeleteResponse;
13 |
14 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
15 |
16 | /**
17 | * External Action for public facing RestGetWorkflowAction
18 | */
19 | public class DeleteWorkflowAction extends ActionType {
20 | /** The name of this action */
21 | public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow/delete";
22 | /** An instance of this action */
23 | public static final DeleteWorkflowAction INSTANCE = new DeleteWorkflowAction();
24 |
25 | private DeleteWorkflowAction() {
26 | super(NAME, DeleteResponse::new);
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/DeprovisionWorkflowAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 |
13 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
14 |
15 | /**
16 | * External Action for public facing RestDeprovisionWorkflowAction
17 | */
18 | public class DeprovisionWorkflowAction extends ActionType {
19 | /** The name of this action */
20 | public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow/deprovision";
21 | /** An instance of this action */
22 | public static final DeprovisionWorkflowAction INSTANCE = new DeprovisionWorkflowAction();
23 |
24 | private DeprovisionWorkflowAction() {
25 | super(NAME, WorkflowResponse::new);
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/GetWorkflowAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 |
13 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
14 |
15 | /**
16 | * External Action for public facing RestGetWorkflowAction
17 | */
18 | public class GetWorkflowAction extends ActionType {
19 | /** The name of this action */
20 | public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow/get";
21 | /** An instance of this action */
22 | public static final GetWorkflowAction INSTANCE = new GetWorkflowAction();
23 |
24 | private GetWorkflowAction() {
25 | super(NAME, GetWorkflowResponse::new);
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/GetWorkflowResponse.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.core.action.ActionResponse;
12 | import org.opensearch.core.common.io.stream.StreamInput;
13 | import org.opensearch.core.common.io.stream.StreamOutput;
14 | import org.opensearch.core.xcontent.ToXContentObject;
15 | import org.opensearch.core.xcontent.XContentBuilder;
16 | import org.opensearch.flowframework.model.Template;
17 |
18 | import java.io.IOException;
19 |
20 | /**
21 | * Transport Response from getting a template
22 | */
23 | public class GetWorkflowResponse extends ActionResponse implements ToXContentObject {
24 |
25 | /** The template */
26 | private Template template;
27 |
28 | /**
29 | * Instantiates a new GetWorkflowResponse from an input stream
30 | * @param in the input stream to read from
31 | * @throws IOException if the template json cannot be read from the input stream
32 | */
33 | public GetWorkflowResponse(StreamInput in) throws IOException {
34 | super(in);
35 | this.template = Template.parse(in.readString());
36 | }
37 |
38 | /**
39 | * Instantiates a new GetWorkflowResponse
40 | * @param template the template
41 | */
42 | public GetWorkflowResponse(Template template) {
43 | this.template = template;
44 | }
45 |
46 | @Override
47 | public void writeTo(StreamOutput out) throws IOException {
48 | out.writeString(template.toJson());
49 | }
50 |
51 | @Override
52 | public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
53 | return this.template.toXContent(xContentBuilder, params);
54 | }
55 |
56 | /**
57 | * Gets the template
58 | * @return the template
59 | */
60 | public Template getTemplate() {
61 | return this.template;
62 | }
63 |
64 | }
65 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/GetWorkflowStateAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 |
13 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
14 |
15 | /**
16 | * External Action for public facing RestGetWorkflowStateAction
17 | */
18 | public class GetWorkflowStateAction extends ActionType {
19 | // TODO : If the template body is returned as part of the GetWorkflowStateAction,
20 | // it is necessary to ensure the user has permissions for workflow/get
21 | /** The name of this action */
22 | public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow_state/get";
23 | /** An instance of this action */
24 | public static final GetWorkflowStateAction INSTANCE = new GetWorkflowStateAction();
25 |
26 | private GetWorkflowStateAction() {
27 | super(NAME, GetWorkflowStateResponse::new);
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/GetWorkflowStepAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 |
13 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
14 |
15 | /**
16 | * External Action for public facing RestGetWorkflowStepAction
17 | */
public class GetWorkflowStepAction extends ActionType {

    /** The name of this action */
    public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow_step/get";
    /** An instance of this action */
    public static final GetWorkflowStepAction INSTANCE = new GetWorkflowStepAction();

    /**
     * Instantiates this class.
     * NOTE(review): sibling actions (e.g. GetWorkflowStateAction, ProvisionWorkflowAction) keep this
     * constructor private and expose only {@link #INSTANCE}; consider making this constructor private
     * for consistency if no external callers construct it directly — confirm before narrowing.
     */
    public GetWorkflowStepAction() {
        super(NAME, GetWorkflowStepResponse::new);
    }
}
32 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/GetWorkflowStepResponse.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.core.action.ActionResponse;
12 | import org.opensearch.core.common.io.stream.StreamInput;
13 | import org.opensearch.core.common.io.stream.StreamOutput;
14 | import org.opensearch.core.xcontent.ToXContentObject;
15 | import org.opensearch.core.xcontent.XContentBuilder;
16 | import org.opensearch.flowframework.model.WorkflowValidator;
17 | import org.opensearch.flowframework.workflow.WorkflowStepFactory;
18 |
19 | import java.io.IOException;
20 |
21 | /**
22 | * Transport Response from getting workflow step
23 | */
24 | public class GetWorkflowStepResponse extends ActionResponse implements ToXContentObject {
25 |
26 | private WorkflowValidator workflowValidator;
27 | private WorkflowStepFactory workflowStepFactory;
28 |
29 | /**
30 | * Instantiates a new GetWorkflowStepResponse from an input stream
31 | * @param in the input stream to read from
32 | * @throws IOException if the workflow json cannot be read from the input stream
33 | */
34 | public GetWorkflowStepResponse(StreamInput in) throws IOException {
35 | super(in);
36 | this.workflowValidator = this.workflowStepFactory.getWorkflowValidator();
37 | }
38 |
39 | /**
40 | * Instantiates a new GetWorkflowStepResponse
41 | * @param workflowValidator the workflow validator
42 | */
43 | public GetWorkflowStepResponse(WorkflowValidator workflowValidator) {
44 | this.workflowValidator = workflowValidator;
45 | }
46 |
47 | @Override
48 | public void writeTo(StreamOutput out) throws IOException {
49 | out.writeString(workflowValidator.toJson());
50 | }
51 |
52 | @Override
53 | public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
54 | return this.workflowValidator.toXContent(xContentBuilder, params);
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/ProvisionWorkflowAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 |
13 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
14 |
15 | /**
16 | * External Action for public facing RestProvisionWorkflowAction
17 | */
18 | public class ProvisionWorkflowAction extends ActionType {
19 | /** The name of this action */
20 | public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow/provision";
21 | /** An instance of this action */
22 | public static final ProvisionWorkflowAction INSTANCE = new ProvisionWorkflowAction();
23 |
24 | private ProvisionWorkflowAction() {
25 | super(NAME, WorkflowResponse::new);
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/ReprovisionWorkflowAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 |
13 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
14 |
/**
 * External Action for public facing RestReprovisionWorkflowAction
 */
public class ReprovisionWorkflowAction extends ActionType {

    /** The name of this action */
    public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow/reprovision";
    /** An instance of this action */
    public static final ReprovisionWorkflowAction INSTANCE = new ReprovisionWorkflowAction();

    private ReprovisionWorkflowAction() {
        super(NAME, WorkflowResponse::new);
    }
}
29 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/SearchWorkflowAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 | import org.opensearch.action.search.SearchResponse;
13 |
14 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
15 |
16 | /**
17 | * External Action for public facing RestSearchWorkflowAction
18 | */
19 | public class SearchWorkflowAction extends ActionType {
20 |
21 | /** The name of this action */
22 | public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow/search";
23 | /** An instance of this action */
24 | public static final SearchWorkflowAction INSTANCE = new SearchWorkflowAction();
25 |
26 | private SearchWorkflowAction() {
27 | super(NAME, SearchResponse::new);
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/SearchWorkflowStateAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.ActionType;
12 | import org.opensearch.action.search.SearchResponse;
13 |
14 | import static org.opensearch.flowframework.common.CommonValue.TRANSPORT_ACTION_NAME_PREFIX;
15 |
16 | /**
17 | * External Action for public facing RestSearchWorkflowStateAction
18 | */
19 | public class SearchWorkflowStateAction extends ActionType {
20 |
21 | /** The name of this action */
22 | public static final String NAME = TRANSPORT_ACTION_NAME_PREFIX + "workflow_state/search";
23 | /** An instance of this action */
24 | public static final SearchWorkflowStateAction INSTANCE = new SearchWorkflowStateAction();
25 |
26 | private SearchWorkflowStateAction() {
27 | super(NAME, SearchResponse::new);
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/SearchWorkflowStateTransportAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.apache.logging.log4j.LogManager;
12 | import org.apache.logging.log4j.Logger;
13 | import org.opensearch.ExceptionsHelper;
14 | import org.opensearch.action.search.SearchRequest;
15 | import org.opensearch.action.search.SearchResponse;
16 | import org.opensearch.action.support.ActionFilters;
17 | import org.opensearch.action.support.HandledTransportAction;
18 | import org.opensearch.common.inject.Inject;
19 | import org.opensearch.core.action.ActionListener;
20 | import org.opensearch.flowframework.exception.FlowFrameworkException;
21 | import org.opensearch.flowframework.transport.handler.SearchHandler;
22 | import org.opensearch.tasks.Task;
23 | import org.opensearch.transport.TransportService;
24 |
25 | /**
26 | * Transport Action to search workflow states
27 | */
28 | public class SearchWorkflowStateTransportAction extends HandledTransportAction {
29 |
30 | private final Logger logger = LogManager.getLogger(SearchWorkflowStateTransportAction.class);
31 |
32 | private SearchHandler searchHandler;
33 |
34 | /**
35 | * Instantiates a new SearchWorkflowStateTransportAction
36 | * @param transportService the TransportService
37 | * @param actionFilters action filters
38 | * @param searchHandler The SearchHandler
39 | */
40 | @Inject
41 | public SearchWorkflowStateTransportAction(TransportService transportService, ActionFilters actionFilters, SearchHandler searchHandler) {
42 | super(SearchWorkflowStateAction.NAME, transportService, actionFilters, SearchRequest::new);
43 | this.searchHandler = searchHandler;
44 | }
45 |
46 | @Override
47 | protected void doExecute(Task task, SearchRequest request, ActionListener actionListener) {
48 | try {
49 | // We used the SearchRequest preference field to convey a tenant id if any
50 | String tenantId = null;
51 | if (request.preference() != null) {
52 | tenantId = request.preference();
53 | request.preference(null);
54 | }
55 | searchHandler.search(request, tenantId, actionListener);
56 | } catch (Exception e) {
57 | String errorMessage = "Failed to search workflow states in global context";
58 | logger.error(errorMessage, e);
59 | actionListener.onFailure(new FlowFrameworkException(errorMessage, ExceptionsHelper.status(e)));
60 | }
61 |
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/transport/SearchWorkflowTransportAction.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.apache.logging.log4j.LogManager;
12 | import org.apache.logging.log4j.Logger;
13 | import org.opensearch.ExceptionsHelper;
14 | import org.opensearch.action.search.SearchRequest;
15 | import org.opensearch.action.search.SearchResponse;
16 | import org.opensearch.action.support.ActionFilters;
17 | import org.opensearch.action.support.HandledTransportAction;
18 | import org.opensearch.common.inject.Inject;
19 | import org.opensearch.core.action.ActionListener;
20 | import org.opensearch.flowframework.exception.FlowFrameworkException;
21 | import org.opensearch.flowframework.transport.handler.SearchHandler;
22 | import org.opensearch.tasks.Task;
23 | import org.opensearch.transport.TransportService;
24 |
25 | /**
26 | * Transport Action to search workflows created
27 | */
28 | public class SearchWorkflowTransportAction extends HandledTransportAction {
29 |
30 | private final Logger logger = LogManager.getLogger(SearchWorkflowTransportAction.class);
31 |
32 | private SearchHandler searchHandler;
33 |
34 | /**
35 | * Instantiates a new CreateWorkflowTransportAction
36 | * @param transportService the TransportService
37 | * @param actionFilters action filters
38 | * @param searchHandler the Search Handler
39 | */
40 | @Inject
41 | public SearchWorkflowTransportAction(TransportService transportService, ActionFilters actionFilters, SearchHandler searchHandler) {
42 | super(SearchWorkflowAction.NAME, transportService, actionFilters, SearchRequest::new);
43 | this.searchHandler = searchHandler;
44 | }
45 |
46 | @Override
47 | protected void doExecute(Task task, SearchRequest request, ActionListener actionListener) {
48 | try {
49 | // We used the SearchRequest preference field to convey a tenant id if any
50 | String tenantId = null;
51 | if (request.preference() != null) {
52 | tenantId = request.preference();
53 | request.preference(null);
54 | }
55 | searchHandler.search(request, tenantId, actionListener);
56 | } catch (Exception e) {
57 | String errorMessage = "Failed to search workflows in global context";
58 | logger.error(errorMessage, e);
59 | actionListener.onFailure(new FlowFrameworkException(errorMessage, ExceptionsHelper.status(e)));
60 | }
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/workflow/CreateIngestPipelineStep.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.opensearch.flowframework.indices.FlowFrameworkIndicesHandler;
12 | import org.opensearch.transport.client.Client;
13 |
14 | /**
15 | * Step to create an ingest pipeline
16 | */
17 | public class CreateIngestPipelineStep extends AbstractCreatePipelineStep {
18 | /** The name of this step, used as a key in the template and the {@link WorkflowStepFactory} */
19 | public static final String NAME = "create_ingest_pipeline";
20 |
21 | /**
22 | * Instantiates a new CreateIngestPipelineStep
23 | * @param client The client to create a pipeline and store workflow data into the global context index
24 | * @param flowFrameworkIndicesHandler FlowFrameworkIndicesHandler class to update system indices
25 | */
26 | public CreateIngestPipelineStep(Client client, FlowFrameworkIndicesHandler flowFrameworkIndicesHandler) {
27 | super(client, flowFrameworkIndicesHandler);
28 | }
29 |
30 | @Override
31 | public String getName() {
32 | return NAME;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/workflow/CreateSearchPipelineStep.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.opensearch.flowframework.indices.FlowFrameworkIndicesHandler;
12 | import org.opensearch.transport.client.Client;
13 |
14 | /**
15 | * Step to create a search pipeline
16 | */
17 | public class CreateSearchPipelineStep extends AbstractCreatePipelineStep {
18 | /** The name of this step, used as a key in the template and the {@link WorkflowStepFactory} */
19 | public static final String NAME = "create_search_pipeline";
20 |
21 | /**
22 | * Instantiates a new CreateSearchPipelineStep
23 | * @param client The client to create a pipeline and store workflow data into the global context index
24 | * @param flowFrameworkIndicesHandler FlowFrameworkIndicesHandler class to update system indices
25 | */
26 | public CreateSearchPipelineStep(Client client, FlowFrameworkIndicesHandler flowFrameworkIndicesHandler) {
27 | super(client, flowFrameworkIndicesHandler);
28 | }
29 |
30 | @Override
31 | public String getName() {
32 | return NAME;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/workflow/NoOpStep.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.opensearch.action.support.PlainActionFuture;
12 | import org.opensearch.common.unit.TimeValue;
13 | import org.opensearch.common.util.concurrent.FutureUtils;
14 | import org.opensearch.core.rest.RestStatus;
15 | import org.opensearch.flowframework.exception.WorkflowStepException;
16 | import org.opensearch.flowframework.util.ParseUtils;
17 |
18 | import java.util.Collections;
19 | import java.util.Map;
20 | import java.util.Set;
21 |
22 | import static org.opensearch.flowframework.common.CommonValue.DELAY_FIELD;
23 |
24 | /**
25 | * A workflow step that does nothing. May be used for synchronizing other actions.
26 | */
27 | public class NoOpStep implements WorkflowStep {
28 |
29 | /** Instantiate this class */
30 | public NoOpStep() {}
31 |
32 | /** The name of this step, used as a key in the template and the {@link WorkflowStepFactory} */
33 | public static final String NAME = "noop";
34 |
35 | @Override
36 | public PlainActionFuture execute(
37 | String currentNodeId,
38 | WorkflowData currentNodeInputs,
39 | Map outputs,
40 | Map previousNodeInputs,
41 | Map params,
42 | String tenantId
43 | ) {
44 | PlainActionFuture future = PlainActionFuture.newFuture();
45 |
46 | Set requiredKeys = Collections.emptySet();
47 | Set optionalKeys = Set.of(DELAY_FIELD);
48 |
49 | try {
50 | Map inputs = ParseUtils.getInputsFromPreviousSteps(
51 | requiredKeys,
52 | optionalKeys,
53 | currentNodeInputs,
54 | outputs,
55 | previousNodeInputs,
56 | params
57 | );
58 | if (inputs.containsKey(DELAY_FIELD)) {
59 | long delay = TimeValue.parseTimeValue(inputs.get(DELAY_FIELD).toString(), DELAY_FIELD).millis();
60 | Thread.sleep(delay);
61 | }
62 | } catch (IllegalArgumentException iae) {
63 | throw new WorkflowStepException(iae.getMessage(), RestStatus.BAD_REQUEST);
64 | } catch (InterruptedException e) {
65 | FutureUtils.cancel(future);
66 | Thread.currentThread().interrupt();
67 | }
68 |
69 | future.onResponse(WorkflowData.EMPTY);
70 | return future;
71 | }
72 |
73 | @Override
74 | public String getName() {
75 | return NAME;
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/workflow/RegisterLocalPretrainedModelStep.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.opensearch.flowframework.common.FlowFrameworkSettings;
12 | import org.opensearch.flowframework.indices.FlowFrameworkIndicesHandler;
13 | import org.opensearch.ml.client.MachineLearningNodeClient;
14 | import org.opensearch.ml.common.MLTask;
15 | import org.opensearch.threadpool.ThreadPool;
16 |
17 | import java.util.Set;
18 |
19 | import static org.opensearch.flowframework.common.CommonValue.DEPLOY_FIELD;
20 | import static org.opensearch.flowframework.common.CommonValue.DESCRIPTION_FIELD;
21 | import static org.opensearch.flowframework.common.CommonValue.MODEL_FORMAT;
22 | import static org.opensearch.flowframework.common.CommonValue.NAME_FIELD;
23 | import static org.opensearch.flowframework.common.CommonValue.VERSION_FIELD;
24 | import static org.opensearch.flowframework.common.WorkflowResources.MODEL_GROUP_ID;
25 |
26 | /**
27 | * Step to register an OpenSearch provided pretrained local model
28 | */
29 | public class RegisterLocalPretrainedModelStep extends AbstractRegisterLocalModelStep {
30 |
31 | /** The name of this step, used as a key in the template and the {@link WorkflowStepFactory} */
32 | public static final String NAME = "register_local_pretrained_model";
33 |
34 | /**
35 | * Instantiate this class
36 | * @param threadPool The OpenSearch thread pool
37 | * @param mlClient client to instantiate MLClient
38 | * @param flowFrameworkIndicesHandler FlowFrameworkIndicesHandler class to update system indices
39 | * @param flowFrameworkSettings settings of flow framework
40 | */
41 | public RegisterLocalPretrainedModelStep(
42 | ThreadPool threadPool,
43 | MachineLearningNodeClient mlClient,
44 | FlowFrameworkIndicesHandler flowFrameworkIndicesHandler,
45 | FlowFrameworkSettings flowFrameworkSettings
46 | ) {
47 | super(threadPool, mlClient, flowFrameworkIndicesHandler, flowFrameworkSettings);
48 | }
49 |
50 | @Override
51 | protected Set getRequiredKeys() {
52 | return Set.of(NAME_FIELD, VERSION_FIELD, MODEL_FORMAT);
53 | }
54 |
55 | @Override
56 | protected Set getOptionalKeys() {
57 | return Set.of(DESCRIPTION_FIELD, MODEL_GROUP_ID, DEPLOY_FIELD);
58 | }
59 |
60 | @Override
61 | protected String getResourceId(MLTask response) {
62 | return response.getModelId();
63 | }
64 |
65 | @Override
66 | public String getName() {
67 | return NAME;
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/workflow/RegisterLocalSparseEncodingModelStep.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.opensearch.flowframework.common.FlowFrameworkSettings;
12 | import org.opensearch.flowframework.indices.FlowFrameworkIndicesHandler;
13 | import org.opensearch.ml.client.MachineLearningNodeClient;
14 | import org.opensearch.ml.common.MLTask;
15 | import org.opensearch.threadpool.ThreadPool;
16 |
17 | import java.util.Set;
18 |
19 | import static org.opensearch.flowframework.common.CommonValue.DEPLOY_FIELD;
20 | import static org.opensearch.flowframework.common.CommonValue.DESCRIPTION_FIELD;
21 | import static org.opensearch.flowframework.common.CommonValue.FUNCTION_NAME;
22 | import static org.opensearch.flowframework.common.CommonValue.MODEL_CONTENT_HASH_VALUE;
23 | import static org.opensearch.flowframework.common.CommonValue.MODEL_FORMAT;
24 | import static org.opensearch.flowframework.common.CommonValue.NAME_FIELD;
25 | import static org.opensearch.flowframework.common.CommonValue.URL;
26 | import static org.opensearch.flowframework.common.CommonValue.VERSION_FIELD;
27 | import static org.opensearch.flowframework.common.WorkflowResources.MODEL_GROUP_ID;
28 |
29 | /**
30 | * Step to register a local sparse encoding model
31 | */
32 | public class RegisterLocalSparseEncodingModelStep extends AbstractRegisterLocalModelStep {
33 |
34 | /** The name of this step, used as a key in the template and the {@link WorkflowStepFactory} */
35 | public static final String NAME = "register_local_sparse_encoding_model";
36 |
37 | /**
38 | * Instantiate this class
39 | * @param threadPool The OpenSearch thread pool
40 | * @param mlClient client to instantiate MLClient
41 | * @param flowFrameworkIndicesHandler FlowFrameworkIndicesHandler class to update system indices
42 | * @param flowFrameworkSettings settings of flow framework
43 | */
44 | public RegisterLocalSparseEncodingModelStep(
45 | ThreadPool threadPool,
46 | MachineLearningNodeClient mlClient,
47 | FlowFrameworkIndicesHandler flowFrameworkIndicesHandler,
48 | FlowFrameworkSettings flowFrameworkSettings
49 | ) {
50 | super(threadPool, mlClient, flowFrameworkIndicesHandler, flowFrameworkSettings);
51 | }
52 |
53 | @Override
54 | protected Set getRequiredKeys() {
55 | return Set.of(NAME_FIELD, VERSION_FIELD, MODEL_FORMAT);
56 | }
57 |
58 | @Override
59 | protected Set getOptionalKeys() {
60 | return Set.of(DESCRIPTION_FIELD, MODEL_GROUP_ID, DEPLOY_FIELD, MODEL_CONTENT_HASH_VALUE, URL, FUNCTION_NAME);
61 | }
62 |
63 | @Override
64 | protected String getResourceId(MLTask response) {
65 | return response.getModelId();
66 | }
67 |
68 | @Override
69 | public String getName() {
70 | return NAME;
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/workflow/UpdateIngestPipelineStep.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.apache.logging.log4j.LogManager;
12 | import org.apache.logging.log4j.Logger;
13 | import org.opensearch.action.ingest.PutPipelineRequest;
14 | import org.opensearch.action.support.clustermanager.AcknowledgedResponse;
15 | import org.opensearch.common.xcontent.XContentType;
16 | import org.opensearch.core.action.ActionListener;
17 | import org.opensearch.core.common.bytes.BytesReference;
18 | import org.opensearch.transport.client.Client;
19 | import org.opensearch.transport.client.ClusterAdminClient;
20 |
21 | /**
22 | * Step to update an ingest pipeline
23 | */
24 | public class UpdateIngestPipelineStep extends AbstractUpdatePipelineStep {
25 | private static final Logger logger = LogManager.getLogger(UpdateIngestPipelineStep.class);
26 |
27 | /** The name of this step, used as a key in the {@link WorkflowStepFactory} */
28 | public static final String NAME = "update_ingest_pipeline";
29 |
30 | /**
31 | * Instantiates a new UpdateIngestPipelineStep
32 | * @param client The client to create a pipeline and store workflow data into the global context index
33 | */
34 | public UpdateIngestPipelineStep(Client client) {
35 | super(client);
36 | }
37 |
38 | @Override
39 | public void executePutPipelineRequest(
40 | String pipelineId,
41 | BytesReference configuration,
42 | ClusterAdminClient clusterAdminClient,
43 | ActionListener listener
44 | ) {
45 | PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineId, configuration, XContentType.JSON);
46 | clusterAdminClient.putPipeline(putPipelineRequest, listener);
47 | }
48 |
49 | @Override
50 | public String getName() {
51 | return NAME;
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/workflow/UpdateSearchPipelineStep.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.apache.logging.log4j.LogManager;
12 | import org.apache.logging.log4j.Logger;
13 | import org.opensearch.action.search.PutSearchPipelineRequest;
14 | import org.opensearch.action.support.clustermanager.AcknowledgedResponse;
15 | import org.opensearch.common.xcontent.XContentType;
16 | import org.opensearch.core.action.ActionListener;
17 | import org.opensearch.core.common.bytes.BytesReference;
18 | import org.opensearch.transport.client.Client;
19 | import org.opensearch.transport.client.ClusterAdminClient;
20 |
21 | /**
22 | * Step to update a search pipeline
23 | */
24 | public class UpdateSearchPipelineStep extends AbstractUpdatePipelineStep {
25 | private static final Logger logger = LogManager.getLogger(UpdateSearchPipelineStep.class);
26 |
27 | /** The name of this step, used as a key in the {@link WorkflowStepFactory} */
28 | public static final String NAME = "update_search_pipeline";
29 |
30 | /**
31 | * Instantiates a new UpdateSearchPipelineStep
32 | * @param client The client to create a pipeline and store workflow data into the global context index
33 | */
34 | public UpdateSearchPipelineStep(Client client) {
35 | super(client);
36 | }
37 |
38 | @Override
39 | public void executePutPipelineRequest(
40 | String pipelineId,
41 | BytesReference configuration,
42 | ClusterAdminClient clusterAdminClient,
43 | ActionListener listener
44 | ) {
45 | PutSearchPipelineRequest putSearchPipelineRequest = new PutSearchPipelineRequest(pipelineId, configuration, XContentType.JSON);
46 | clusterAdminClient.putSearchPipeline(putSearchPipelineRequest, listener);
47 | }
48 |
49 | @Override
50 | public String getName() {
51 | return NAME;
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/workflow/WorkflowDataStep.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.apache.logging.log4j.LogManager;
12 | import org.apache.logging.log4j.Logger;
13 | import org.opensearch.action.support.PlainActionFuture;
14 | import org.opensearch.flowframework.model.ResourceCreated;
15 |
16 | import java.util.Map;
17 |
18 | /**
19 | * Internal step to pass created resources to dependent nodes. Only used in reprovisioning
20 | */
21 | public class WorkflowDataStep implements WorkflowStep {
22 |
23 | private static final Logger logger = LogManager.getLogger(WorkflowDataStep.class);
24 | private final ResourceCreated resourceCreated;
25 |
26 | /** The name of this step */
27 | public static final String NAME = "workflow_data_step";
28 |
29 | /**
30 | * Instantiate this class
31 | * @param resourceCreated the created resource
32 | */
33 | public WorkflowDataStep(ResourceCreated resourceCreated) {
34 | this.resourceCreated = resourceCreated;
35 | }
36 |
37 | @Override
38 | public PlainActionFuture execute(
39 | String currentNodeId,
40 | WorkflowData currentNodeInputs,
41 | Map outputs,
42 | Map previousNodeInputs,
43 | Map params,
44 | String tenantId
45 | ) {
46 | PlainActionFuture workflowDataFuture = PlainActionFuture.newFuture();
47 | workflowDataFuture.onResponse(
48 | new WorkflowData(
49 | Map.of(resourceCreated.resourceType(), resourceCreated.resourceId()),
50 | currentNodeInputs.getWorkflowId(),
51 | currentNodeId
52 | )
53 | );
54 | return workflowDataFuture;
55 | }
56 |
57 | @Override
58 | public String getName() {
59 | return NAME;
60 | }
61 |
62 | }
63 |
--------------------------------------------------------------------------------
/src/main/java/org/opensearch/flowframework/workflow/WorkflowStep.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.opensearch.action.support.PlainActionFuture;
12 |
13 | import java.util.Map;
14 |
15 | /**
16 | * Interface for the workflow setup of different building blocks.
17 | */
18 | public interface WorkflowStep {
19 |
20 | /**
21 | * Triggers the actual processing of the building block.
22 | * @param currentNodeId The id of the node executing this step
23 | * @param currentNodeInputs Input params and content for this node, from workflow parsing
24 | * @param outputs WorkflowData content of previous steps.
25 | * @param previousNodeInputs Input params for this node that come from previous steps
26 | * @param params Params passed on the REST path
27 | * @param tenantId The tenantId
28 | * @return A CompletableFuture of the building block. This block should return immediately, but not be completed until the step executes, containing either the step's output data or {@link WorkflowData#EMPTY} which may be passed to follow-on steps.
29 | */
30 | PlainActionFuture execute(
31 | String currentNodeId,
32 | WorkflowData currentNodeInputs,
33 | Map outputs,
34 | Map previousNodeInputs,
35 | Map params,
36 | String tenantId
37 | );
38 |
39 | /**
40 | * Gets the name of the workflow step.
41 | * @return the name of this workflow step.
42 | */
43 | String getName();
44 |
45 | /**
46 | * For steps which delete data, override this method to require the resource ID to be specified on the rest path to deprovision it
47 | * @return true if the resource ID must be specified for deprovisioning
48 | */
49 | default boolean allowDeleteRequired() {
50 | return false;
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/src/main/plugin-metadata/plugin-security.policy:
--------------------------------------------------------------------------------
1 | grant {
2 |     // Permissions required by the ml-commons client (reflection and classloader access)
3 | permission java.lang.RuntimePermission "getClassLoader";
4 | permission java.lang.RuntimePermission "accessDeclaredMembers";
5 | permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
6 | permission java.lang.RuntimePermission "setContextClassLoader";
7 | };
--------------------------------------------------------------------------------
/src/main/resources/defaults/bedrock-titan-embedding-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "bedrock_titan_embedding_model_deploy",
3 |   "template.description": "Deploying Amazon Bedrock Titan embedding model",
4 | "create_connector.name": "Amazon Bedrock Connector: embedding",
5 | "create_connector.description": "The connector to bedrock Titan embedding model",
6 | "create_connector.region": "us-east-1",
7 | "create_connector.credential.access_key": "123",
8 | "create_connector.credential.secret_key": "123",
9 | "create_connector.credential.session_token": "123",
10 | "create_connector.actions.url": "https://bedrock-runtime.us-east-1.amazonaws.com/model/amazon.titan-embed-text-v1/invoke",
11 | "create_connector.actions.request_body": "{ \"inputText\": \"${parameters.inputText}\" }",
12 | "create_connector.actions.pre_process_function": "\n StringBuilder builder = new StringBuilder();\n builder.append(\"\\\"\");\n String first = params.text_docs[0];\n builder.append(first);\n builder.append(\"\\\"\");\n def parameters = \"{\" +\"\\\"inputText\\\":\" + builder + \"}\";\n return \"{\" +\"\\\"parameters\\\":\" + parameters + \"}\";",
13 | "create_connector.actions.post_process_function": "\n def name = \"sentence_embedding\";\n def dataType = \"FLOAT32\";\n if (params.embedding == null || params.embedding.length == 0) {\n return params.message;\n }\n def shape = [params.embedding.length];\n def json = \"{\" +\n \"\\\"name\\\":\\\"\" + name + \"\\\",\" +\n \"\\\"data_type\\\":\\\"\" + dataType + \"\\\",\" +\n \"\\\"shape\\\":\" + shape + \",\" +\n \"\\\"data\\\":\" + params.embedding +\n \"}\";\n return json;\n ",
14 | "register_remote_model.name": "Bedrock embedding model",
15 | "register_remote_model.description": "bedrock-embedding-model"
16 | }
17 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/bedrock-titan-multimodal-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "bedrock_titan_multimodal_model_deploy",
3 |   "template.description": "Deploying Amazon Bedrock Titan multimodal embedding model",
4 | "create_connector.name": "Amazon Bedrock Connector: multi-modal embedding",
5 | "create_connector.description": "The connector to bedrock Titan multi-modal embedding model",
6 | "create_connector.region": "us-east-1",
7 | "create_connector.input_docs_processed_step_size": "2",
8 | "create_connector.credential.access_key": "123",
9 | "create_connector.credential.secret_key": "123",
10 | "create_connector.credential.session_token": "123",
11 | "create_connector.actions.url": "https://bedrock-runtime.us-east-1.amazonaws.com/model/amazon.titan-embed-image-v1/invoke",
12 | "create_connector.actions.request_body": "{ \"inputText\": \"${parameters.inputText:-null}\", \"inputImage\": \"${parameters.inputImage:-null}\" }",
13 | "create_connector.actions.pre_process_function": "\n StringBuilder parametersBuilder = new StringBuilder(\"{\");\n if (params.text_docs.length > 0 && params.text_docs[0] != null) {\n parametersBuilder.append(\"\\\"inputText\\\":\");\n parametersBuilder.append(\"\\\"\");\n parametersBuilder.append(params.text_docs[0]);\n parametersBuilder.append(\"\\\"\");\n \n if (params.text_docs.length > 1 && params.text_docs[1] != null) {\n parametersBuilder.append(\",\");\n }\n }\n \n \n if (params.text_docs.length > 1 && params.text_docs[1] != null) {\n parametersBuilder.append(\"\\\"inputImage\\\":\");\n parametersBuilder.append(\"\\\"\");\n parametersBuilder.append(params.text_docs[1]);\n parametersBuilder.append(\"\\\"\");\n }\n parametersBuilder.append(\"}\");\n \n return \"{\" +\"\\\"parameters\\\":\" + parametersBuilder + \"}\";",
14 | "create_connector.actions.post_process_function": "\n def name = \"sentence_embedding\";\n def dataType = \"FLOAT32\";\n if (params.embedding == null || params.embedding.length == 0) {\n return null;\n }\n def shape = [params.embedding.length];\n def json = \"{\" +\n \"\\\"name\\\":\\\"\" + name + \"\\\",\" +\n \"\\\"data_type\\\":\\\"\" + dataType + \"\\\",\" +\n \"\\\"shape\\\":\" + shape + \",\" +\n \"\\\"data\\\":\" + params.embedding +\n \"}\";\n return json;\n ",
15 | "register_remote_model.name": "Bedrock multi-modal embedding model",
16 | "register_remote_model.description": "bedrock-multi-modal-embedding-model"
17 | }
18 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/cohere-chat-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "cohere_chat_model_deploy",
3 | "template.description": "Deploying a Cohere chat model",
4 | "create_connector.name": "Cohere Chat Model",
5 | "create_connector.description": "The connector to Cohere's public chat API",
6 | "create_connector.protocol": "http",
7 | "create_connector.model": "command",
8 | "create_connector.endpoint": "api.cohere.ai",
9 | "create_connector.credential.key": "123",
10 | "create_connector.actions.url": "https://api.cohere.ai/v1/chat",
11 | "create_connector.actions.request_body": "{ \"message\": \"${parameters.message}\", \"model\": \"${parameters.model}\" }",
12 | "register_remote_model.name": "Cohere chat model",
13 | "register_remote_model.description": "cohere-chat-model"
14 | }
15 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/cohere-embedding-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "cohere_embedding_model_deploy",
3 | "template.description": "Deploying a Cohere embedding model",
4 | "create_connector.name": "cohere-embedding-connector",
5 | "create_connector.description": "The connector to Cohere's public embed API",
6 | "create_connector.protocol": "http",
7 | "create_connector.model": "embed-english-v3.0",
8 | "create_connector.input_type": "search_document",
9 | "create_connector.truncate": "end",
10 | "create_connector.credential.key": "123",
11 | "create_connector.actions.url": "https://api.cohere.ai/v1/embed",
12 | "create_connector.actions.request_body": "{ \"texts\": ${parameters.texts}, \"truncate\": \"${parameters.truncate}\", \"model\": \"${parameters.model}\", \"input_type\": \"${parameters.input_type}\" }",
13 | "create_connector.actions.pre_process_function": "connector.pre_process.cohere.embedding",
14 | "create_connector.actions.post_process_function": "connector.post_process.cohere.embedding",
15 | "register_remote_model.name": "Cohere english embed model",
16 | "register_remote_model.description": "cohere-embedding-model"
17 | }
18 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/cohere-embedding-semantic-search-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "semantic search with cohere embedding",
3 | "template.description": "Setting up semantic search, with a Cohere embedding model",
4 | "create_connector.name": "cohere-embedding-connector",
5 | "create_connector.description": "The connector to Cohere's public embed API",
6 | "create_connector.protocol": "http",
7 | "create_connector.model": "embed-english-v3.0",
8 | "create_connector.input_type": "search_document",
9 | "create_connector.truncate": "end",
10 | "create_connector.credential.key": "123",
11 | "create_connector.actions.url": "https://api.cohere.ai/v1/embed",
12 | "create_connector.actions.request_body": "{ \"texts\": ${parameters.texts}, \"truncate\": \"${parameters.truncate}\", \"model\": \"${parameters.model}\", \"input_type\": \"${parameters.input_type}\" }",
13 | "create_connector.actions.pre_process_function": "connector.pre_process.cohere.embedding",
14 | "create_connector.actions.post_process_function": "connector.post_process.cohere.embedding",
15 | "register_remote_model.name": "Cohere english embed model",
16 | "register_remote_model.description": "cohere-embedding-model",
17 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline",
18 | "create_ingest_pipeline.description": "A text embedding pipeline",
19 | "text_embedding.field_map.input": "passage_text",
20 | "text_embedding.field_map.output": "passage_embedding",
21 | "create_index.name": "my-nlp-index",
22 | "create_index.settings.number_of_shards": "2",
23 | "create_index.mappings.method.engine": "lucene",
24 | "create_index.mappings.method.space_type": "l2",
25 | "create_index.mappings.method.name": "hnsw",
26 | "text_embedding.field_map.output.dimension": "1024",
27 | "create_search_pipeline.pipeline_id": "default_model_pipeline"
28 | }
29 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/cohere-embedding-semantic-search-with-query-enricher-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "semantic search with cohere embedding",
3 | "template.description": "Setting up semantic search, with a Cohere embedding model",
4 | "create_connector.name": "cohere-embedding-connector",
5 | "create_connector.description": "The connector to Cohere's public embed API",
6 | "create_connector.protocol": "http",
7 | "create_connector.model": "embed-english-v3.0",
8 | "create_connector.input_type": "search_document",
9 | "create_connector.truncate": "end",
10 | "create_connector.credential.key": "123",
11 | "create_connector.actions.url": "https://api.cohere.ai/v1/embed",
12 | "create_connector.actions.request_body": "{ \"texts\": ${parameters.texts}, \"truncate\": \"${parameters.truncate}\", \"model\": \"${parameters.model}\", \"input_type\": \"${parameters.input_type}\" }",
13 | "create_connector.actions.pre_process_function": "connector.pre_process.cohere.embedding",
14 | "create_connector.actions.post_process_function": "connector.post_process.cohere.embedding",
15 | "register_remote_model.name": "Cohere english embed model",
16 | "register_remote_model.description": "cohere-embedding-model",
17 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline",
18 | "create_ingest_pipeline.description": "A text embedding pipeline",
19 | "text_embedding.field_map.input": "passage_text",
20 | "text_embedding.field_map.output": "passage_embedding",
21 | "create_index.name": "my-nlp-index",
22 | "create_index.settings.number_of_shards": "2",
23 | "create_index.mappings.method.engine": "lucene",
24 | "create_index.mappings.method.space_type": "l2",
25 | "create_index.mappings.method.name": "hnsw",
26 | "text_embedding.field_map.output.dimension": "1024",
27 | "create_search_pipeline.pipeline_id": "default_model_pipeline"
28 | }
29 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/conversational-search-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "deploy-cohere-chat-model",
3 | "template.description": "A template to deploy a Cohere chat model",
4 | "create_connector.name": "Cohere Chat Model",
5 | "create_connector.description": "The connector to Cohere's public chat API",
6 | "create_connector.protocol": "http",
7 | "create_connector.model": "command",
8 | "create_connector.endpoint": "api.cohere.ai",
9 | "create_connector.credential.key": "123",
10 | "create_connector.actions.url": "https://api.cohere.ai/v1/chat",
11 | "create_connector.actions.request_body": "{ \"message\": \"${parameters.message}\", \"model\": \"${parameters.model}\" }",
12 | "register_remote_model.name": "Cohere chat model",
13 | "register_remote_model.description": "cohere-chat-model",
14 | "create_search_pipeline.pipeline_id": "rag-pipeline",
15 | "create_search_pipeline.retrieval_augmented_generation.tag": "openai_pipeline_demo",
16 | "create_search_pipeline.retrieval_augmented_generation.description": "Demo pipeline using a Cohere chat model",
17 | "create_search_pipeline.retrieval_augmented_generation.context_field_list": "[\"text\"]",
18 | "create_search_pipeline.retrieval_augmented_generation.system_prompt": "You are a helpful assistant",
19 | "create_search_pipeline.retrieval_augmented_generation.user_instructions": "Generate a concise and informative answer in less than 100 words for the given question"
20 | }
21 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/conversational-search-rag-tool-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "deploy-bedrock-chat-model",
3 | "template.description": "A template to deploy a Bedrock chat model",
4 | "create_bedrock_connector.name": "Amazon Bedrock Connector: Claude Instant V1",
5 | "create_bedrock_connector.description": "The connector to bedrock Claude model",
6 | "create_bedrock_connector.protocol": "aws_sigv4",
7 | "create_bedrock_connector.actions.url": "https://bedrock-runtime.us-west-2.amazonaws.com/model/anthropic.claude-instant-v1/invoke",
8 | "create_bedrock_connector.actions.request_body": "{\"prompt\":\"${parameters.prompt}\", \"max_tokens_to_sample\":${parameters.max_tokens_to_sample}, \"temperature\":${parameters.temperature}, \"anthropic_version\":\"${parameters.anthropic_version}\" }",
9 | "create_bedrock_connector.credential.access_key": "",
10 | "create_bedrock_connector.credential.secret_key": "",
11 | "create_bedrock_connector.credential.session_token": "",
12 | "create_bedrock_connector.region": "us-west-2",
13 | "create_embedding_connector.name": "Amazon Bedrock Connector: embedding",
14 | "create_embedding_connector.description": "The connector to bedrock Titan embedding model",
15 | "create_embedding_connector.protocol": "aws_sigv4",
16 | "create_embedding_connector.actions.url": "https://bedrock-runtime.us-west-2.amazonaws.com/model/amazon.titan-embed-text-v2:0/invoke",
17 | "create_embedding_connector.actions.request_body": "{ \"inputText\": \"${parameters.inputText}\" }",
18 | "register_bedrock_model.name": "anthropic.claude-v2",
19 | "register_bedrock_model.description": "bedrock-chat-model",
20 | "register_bedrock_embedding_model.name": "Bedrock embedding model v2",
21 | "register_bedrock_embedding_model.description": "Bedrock embedding model v2",
22 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline",
23 | "create_ingest_pipeline.description": "A neural ingest pipeline",
24 | "text_embedding.field_map.input": "passage_text",
25 | "text_embedding.field_map.output": "passage_embedding",
26 | "create_index.name": "my-nlp-index",
27 | "create_index.mappings.method.engine": "lucene",
28 | "create_index.mappings.method.space_type": "l2",
29 | "create_index.mappings.method.name": "hnsw",
30 | "text_embedding.field_map.output.dimension": "1024",
31 |   "rag_tool.parameters.prompt": "\n\nHuman:You are a professional data analyst. You will always answer question based on the given context first. If the answer is not directly shown in the context, you will analyze the data and find the answer. If you don't know the answer, just say don't know. \n\n Context:\n${parameters.output_field:-}\n\n\nHuman:${parameters.question}\n\nAssistant:",
32 | "root_agent.parameters.parameters": "Answer the question as best you can.",
33 | "root_agent.name": "Root agent",
34 | "root_agent.description": "this is the root agent"
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/hybrid-search-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "hybrid-search",
3 | "template.description": "Setting up hybrid search, ingest pipeline and index",
4 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline",
5 | "create_ingest_pipeline.description": "A text embedding pipeline",
6 | "create_ingest_pipeline.model_id": "123",
7 | "text_embedding.field_map.input": "passage_text",
8 | "text_embedding.field_map.output": "passage_embedding",
9 | "create_index.name": "my-nlp-index",
10 | "create_index.settings.number_of_shards": "2",
11 | "create_index.mappings.method.engine": "lucene",
12 | "create_index.mappings.method.space_type": "l2",
13 | "create_index.mappings.method.name": "hnsw",
14 | "text_embedding.field_map.output.dimension": "1024",
15 | "create_search_pipeline.pipeline_id": "nlp-search-pipeline",
16 | "normalization-processor.normalization.technique": "min_max",
17 | "normalization-processor.combination.technique": "arithmetic_mean"
18 | }
19 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/hybrid-search-with-local-model-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "hybrid-search",
3 | "template.description": "Setting up hybrid search, ingest pipeline and index",
4 | "register_local_pretrained_model.name": "huggingface/sentence-transformers/paraphrase-MiniLM-L3-v2",
5 | "register_local_pretrained_model.description": "This is a sentence transformer model",
6 | "register_local_pretrained_model.model_format": "TORCH_SCRIPT",
7 | "register_local_pretrained_model.deploy": "true",
8 | "register_local_pretrained_model.version": "1.0.2",
9 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline",
10 | "create_ingest_pipeline.description": "A text embedding pipeline",
11 | "create_ingest_pipeline.model_id": "123",
12 | "text_embedding.field_map.input": "passage_text",
13 | "text_embedding.field_map.output": "passage_embedding",
14 | "create_index.name": "my-nlp-index",
15 | "create_index.settings.number_of_shards": "2",
16 | "create_index.mappings.method.engine": "lucene",
17 | "create_index.mappings.method.space_type": "l2",
18 | "create_index.mappings.method.name": "hnsw",
19 | "text_embedding.field_map.output.dimension": "768",
20 | "create_search_pipeline.pipeline_id": "nlp-search-pipeline",
21 | "normalization-processor.normalization.technique": "min_max",
22 | "normalization-processor.combination.technique": "arithmetic_mean"
23 | }
24 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/local-sparse-search-biencoder-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "local_neural_sparse_search_bi_encoder",
3 | "template.description": "Setting up neural sparse search with pretrained local model",
4 | "register_local_sparse_encoding_model.name": "amazon/neural-sparse/opensearch-neural-sparse-encoding-v1",
5 | "register_local_sparse_encoding_model.description": "This is a neural sparse encoding model",
6 | "register_local_sparse_encoding_model.model_format": "TORCH_SCRIPT",
7 | "register_local_sparse_encoding_model.deploy": "true",
8 | "register_local_sparse_encoding_model.version": "1.0.1",
9 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline-sparse",
10 | "create_ingest_pipeline.description": "A sparse encoding ingest pipeline",
11 | "create_ingest_pipeline.text_embedding.field_map.input": "passage_text",
12 | "create_ingest_pipeline.text_embedding.field_map.output": "passage_embedding",
13 | "create_index.name": "my-nlp-index"
14 | }
15 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/multi-modal-search-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "multimodal_search",
3 | "template.description": "Setting up multimodal search, ingest pipeline and index",
4 | "create_ingest_pipeline.pipeline_id": "nlp-multimodal-ingest-pipeline",
5 | "create_ingest_pipeline.description": "A text/image embedding pipeline",
6 | "create_ingest_pipeline.model_id": "123",
7 | "text_image_embedding.embedding": "vector_embedding",
8 | "text_image_embedding.field_map.text": "image_description",
9 | "text_image_embedding.field_map.image": "image_binary",
10 | "create_index.name": "my-multimodal-nlp-index",
11 | "create_index.settings.number_of_shards": "2",
12 | "text_image_embedding.field_map.output.dimension": "1024",
13 | "create_index.mappings.method.engine": "lucene",
14 | "create_index.mappings.method.name": "hnsw",
15 | "text_image_embedding.field_map.image.type": "text",
16 | "text_image_embedding.field_map.text.type": "text"
17 | }
18 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/openai-chat-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "openai_chat_model_deploy",
3 | "template.description": "Deploying an OpenAI chat model",
4 | "create_connector.name": "OpenAI Chat Connector",
5 | "create_connector.description": "Connector to public OpenAI model",
6 | "create_connector.protocol": "http",
7 | "create_connector.model": "gpt-3.5-turbo",
8 | "create_connector.endpoint": "api.openai.com",
9 | "create_connector.credential.key": "123",
10 | "create_connector.actions.url": "https://${parameters.endpoint}/v1/chat/completions",
11 | "create_connector.actions.request_body": "{ \"model\": \"${parameters.model}\", \"messages\": ${parameters.messages} }",
12 | "register_remote_model_1.name": "OpenAI chat model",
13 | "register_remote_model_1.description": "openai-chat-model"
14 | }
15 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/openai-embedding-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "open_ai_embedding_model_deploy",
3 | "template.description": "Deploying an OpenAI embedding model",
4 | "create_connector.name": "OpenAI-embedding-connector",
5 | "create_connector.description": "Connector to public OpenAI model",
6 | "create_connector.protocol": "http",
7 | "create_connector.model": "text-embedding-ada-002",
8 | "create_connector.endpoint": "api.openai.com",
9 | "create_connector.credential.key": "123",
10 | "create_connector.actions.url": "https://api.openai.com/v1/embeddings",
11 | "create_connector.actions.request_body": "{ \"input\": ${parameters.input}, \"model\": \"${parameters.model}\" }",
12 | "create_connector.actions.pre_process_function": "connector.pre_process.openai.embedding",
13 | "create_connector.actions.post_process_function": "connector.post_process.openai.embedding",
14 | "register_remote_model_1.name": "OpenAI embedding model",
15 | "register_remote_model_1.description": "openai-embedding-model"
16 | }
17 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/semantic-search-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "semantic_search",
3 | "template.description": "Setting up semantic search, ingest pipeline and index",
4 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline",
5 | "create_ingest_pipeline.description": "A text embedding pipeline",
6 | "create_ingest_pipeline.model_id": "123",
7 | "text_embedding.field_map.input": "passage_text",
8 | "text_embedding.field_map.output": "passage_embedding",
9 | "create_index.name": "my-nlp-index",
10 | "create_index.settings.number_of_shards": "2",
11 | "create_index.mappings.method.engine": "lucene",
12 | "create_index.mappings.method.space_type": "l2",
13 | "create_index.mappings.method.name": "hnsw",
14 | "text_embedding.field_map.output.dimension": "1024"
15 | }
16 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/semantic-search-query-enricher-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "semantic_search",
3 | "template.description": "Setting up semantic search, ingest pipeline and index",
4 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline",
5 | "create_ingest_pipeline.description": "A text embedding pipeline",
6 | "create_ingest_pipeline.model_id": "123",
7 | "text_embedding.field_map.input": "passage_text",
8 | "text_embedding.field_map.output": "passage_embedding",
9 | "create_index.name": "my-nlp-index",
10 | "create_index.settings.number_of_shards": "2",
11 | "create_index.mappings.method.engine": "lucene",
12 | "create_index.mappings.method.space_type": "l2",
13 | "create_index.mappings.method.name": "hnsw",
14 | "text_embedding.field_map.output.dimension": "1024",
15 | "create_search_pipeline.pipeline_id": "default_model_pipeline"
16 | }
17 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/semantic-search-with-local-model-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "semantic search with local pretrained model",
3 | "template.description": "Setting up semantic search, with a local pretrained embedding model",
4 | "register_local_pretrained_model.name": "huggingface/sentence-transformers/paraphrase-MiniLM-L3-v2",
5 | "register_local_pretrained_model.description": "This is a sentence transformer model",
6 | "register_local_pretrained_model.model_format": "TORCH_SCRIPT",
7 | "register_local_pretrained_model.deploy": "true",
8 | "register_local_pretrained_model.version": "1.0.2",
9 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline",
10 | "create_ingest_pipeline.description": "A text embedding pipeline",
11 | "text_embedding.field_map.input": "passage_text",
12 | "text_embedding.field_map.output": "passage_embedding",
13 | "create_index.name": "my-nlp-index",
14 | "create_index.settings.number_of_shards": "2",
15 | "create_index.mappings.method.engine": "lucene",
16 | "create_index.mappings.method.space_type": "l2",
17 | "create_index.mappings.method.name": "hnsw",
18 | "text_embedding.field_map.output.dimension": "768",
19 | "create_search_pipeline.pipeline_id": "default_model_pipeline"
20 | }
21 |
--------------------------------------------------------------------------------
/src/main/resources/defaults/semantic-search-with-reindex-defaults.json:
--------------------------------------------------------------------------------
1 | {
2 | "template.name": "semantic search with cohere embedding",
3 | "template.description": "Setting up semantic search, with a Cohere embedding model",
4 | "create_connector.name": "cohere-embedding-connector",
5 | "create_connector.description": "The connector to Cohere's public embed API",
6 | "create_connector.protocol": "http",
7 | "create_connector.model": "embed-english-v3.0",
8 | "create_connector.input_type": "search_document",
9 | "create_connector.truncate": "end",
10 | "create_connector.credential.key": "123",
11 | "create_connector.actions.url": "https://api.cohere.ai/v1/embed",
12 | "create_connector.actions.request_body": "{ \"texts\": ${parameters.texts}, \"truncate\": \"${parameters.truncate}\", \"model\": \"${parameters.model}\", \"input_type\": \"${parameters.input_type}\" }",
13 | "create_connector.actions.pre_process_function": "connector.pre_process.cohere.embedding",
14 | "create_connector.actions.post_process_function": "connector.post_process.cohere.embedding",
15 | "register_remote_model.name": "Cohere english embed model",
16 | "register_remote_model.description": "cohere-embedding-model",
17 | "create_ingest_pipeline.pipeline_id": "nlp-ingest-pipeline",
18 | "create_ingest_pipeline.description": "A text embedding pipeline",
19 | "text_embedding.field_map.input": "passage_text",
20 | "text_embedding.field_map.output": "passage_embedding",
21 | "create_index.name": "my-nlp-index",
22 | "create_index.settings.number_of_shards": "2",
23 | "create_index.mappings.method.engine": "lucene",
24 | "create_index.mappings.method.space_type": "l2",
25 | "create_index.mappings.method.name": "hnsw",
26 | "text_embedding.field_map.output.dimension": "1024",
27 | "create_search_pipeline.pipeline_id": "default_model_pipeline",
28 | "reindex.source_index": "",
29 | "reindex.requests_per_second": "-1",
30 | "reindex.slices": "1"
31 | }
32 |
--------------------------------------------------------------------------------
/src/main/resources/log4j2.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/src/main/resources/mappings/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "dynamic": false,
3 | "_meta": {
4 | "schema_version": 1
5 | },
6 | "properties": {
7 | "master_key": {
8 | "type": "keyword"
9 | },
10 | "create_time": {
11 | "type": "date",
12 | "format": "strict_date_time||epoch_millis"
13 | }
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/src/main/resources/mappings/global-context.json:
--------------------------------------------------------------------------------
1 | {
2 | "dynamic": false,
3 | "_meta": {
4 | "schema_version": 4
5 | },
6 | "properties": {
7 | "workflow_id": {
8 | "type": "keyword"
9 | },
10 | "name": {
11 | "type": "text",
12 | "fields": {
13 | "keyword": {
14 | "type": "keyword",
15 | "ignore_above": 256
16 | }
17 | }
18 | },
19 | "description": {
20 | "type": "text"
21 | },
22 | "use_case": {
23 | "type": "keyword"
24 | },
25 | "version": {
26 | "type": "nested",
27 | "properties": {
28 | "template": {
29 | "type": "text"
30 | },
31 | "compatibility": {
32 | "type": "text"
33 | }
34 | }
35 | },
36 | "workflows": {
37 | "type": "object"
38 | },
39 | "user": {
40 | "type": "nested",
41 | "properties": {
42 | "name": {
43 | "type": "text",
44 | "fields": {
45 | "keyword": {
46 | "type": "keyword",
47 | "ignore_above": 256
48 | }
49 | }
50 | },
51 | "backend_roles": {
52 | "type" : "text",
53 | "fields" : {
54 | "keyword" : {
55 | "type" : "keyword"
56 | }
57 | }
58 | },
59 | "roles": {
60 | "type" : "text",
61 | "fields" : {
62 | "keyword" : {
63 | "type" : "keyword"
64 | }
65 | }
66 | },
67 | "custom_attribute_names": {
68 | "type" : "text",
69 | "fields" : {
70 | "keyword" : {
71 | "type" : "keyword"
72 | }
73 | }
74 | }
75 | }
76 | },
77 | "created_time": {
78 | "type": "date",
79 | "format": "strict_date_time||epoch_millis"
80 | },
81 | "last_updated_time": {
82 | "type": "date",
83 | "format": "strict_date_time||epoch_millis"
84 | },
85 | "last_provisioned_time": {
86 | "type": "date",
87 | "format": "strict_date_time||epoch_millis"
88 | },
89 | "tenant_id": {
90 | "type": "keyword"
91 | },
92 | "ui_metadata": {
93 | "type": "object",
94 | "enabled": false
95 | }
96 | }
97 | }
98 |
--------------------------------------------------------------------------------
/src/main/resources/mappings/workflow-state.json:
--------------------------------------------------------------------------------
1 | {
2 | "dynamic": false,
3 | "_meta": {
4 | "schema_version": 4
5 | },
6 | "properties": {
7 | "schema_version": {
8 | "type": "integer"
9 | },
10 | "workflow_id": {
11 | "type": "keyword"
12 | },
13 | "error": {
14 | "type": "text"
15 | },
16 | "state": {
17 | "type": "keyword"
18 | },
19 | "provisioning_progress": {
20 | "type": "keyword"
21 | },
22 | "provision_start_time": {
23 | "type": "date",
24 | "format": "strict_date_time||epoch_millis"
25 | },
26 | "provision_end_time": {
27 | "type": "date",
28 | "format": "strict_date_time||epoch_millis"
29 | },
30 | "user_outputs": {
31 | "type": "object"
32 | },
33 | "user": {
34 | "type": "nested",
35 | "properties": {
36 | "name": {
37 | "type": "text",
38 | "fields": {
39 | "keyword": {
40 | "type": "keyword",
41 | "ignore_above": 256
42 | }
43 | }
44 | },
45 | "backend_roles": {
46 | "type" : "text",
47 | "fields" : {
48 | "keyword" : {
49 | "type" : "keyword"
50 | }
51 | }
52 | },
53 | "roles": {
54 | "type" : "text",
55 | "fields" : {
56 | "keyword" : {
57 | "type" : "keyword"
58 | }
59 | }
60 | },
61 | "custom_attribute_names": {
62 | "type" : "text",
63 | "fields" : {
64 | "keyword" : {
65 | "type" : "keyword"
66 | }
67 | }
68 | }
69 | }
70 | },
71 | "resources_created": {
72 | "type": "nested",
73 | "properties": {
74 | "workflow_step_name": {
75 | "type": "keyword"
76 | },
77 | "workflow_step_id": {
78 | "type": "keyword"
79 | },
80 | "resource_type": {
81 | "type": "keyword"
82 | },
83 | "resource_id": {
84 | "type": "keyword"
85 | }
86 | }
87 | },
88 | "tenant_id": {
89 | "type": "keyword"
90 | }
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/src/main/resources/substitutionTemplates/conversational-search-with-cohere-model-template.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "${{template.name}}",
3 | "description": "${{template.description}}",
4 | "use_case": "CONVERSATION_SEARCH",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
          "name": "${{create_connector.name}}",
20 | "description": "${{create_connector.description}}",
21 | "version": "1",
22 | "protocol": "${{create_connector.protocol}}",
23 | "parameters": {
24 | "endpoint": "${{create_connector.endpoint}}",
25 | "model": "${{create_connector.model}}"
26 | },
27 | "credential": {
28 | "key": "${{create_connector.credential.key}}"
29 | },
30 | "actions": [
31 | {
32 | "action_type": "predict",
33 | "method": "POST",
34 | "url": "${{create_connector.actions.url}}",
35 | "headers": {
36 | "Authorization": "Bearer ${credential.key}"
37 | },
38 | "request_body": "${{create_connector.actions.request_body}}"
39 | }
40 | ]
41 | }
42 | },
43 | {
44 | "id": "register_model",
45 | "type": "register_remote_model",
46 | "previous_node_inputs": {
47 | "create_connector": "parameters"
48 | },
49 | "user_inputs": {
50 | "name": "${{register_remote_model.name}}",
51 | "function_name": "remote",
52 | "description": "${{register_remote_model.description}}",
53 | "deploy": true
54 | }
55 | },
56 | {
57 | "id": "create_search_pipeline",
58 | "type": "create_search_pipeline",
59 | "previous_node_inputs": {
60 | "register_model": "model_id"
61 | },
62 | "user_inputs": {
63 | "pipeline_id": "${{create_search_pipeline.pipeline_id}}",
64 | "configurations": {
65 | "response_processors": [
66 | {
67 | "retrieval_augmented_generation": {
68 | "tag": "${{create_search_pipeline.retrieval_augmented_generation.tag}}",
69 | "description": "${{create_search_pipeline.retrieval_augmented_generation.description}}",
70 | "model_id": "${{register_model.model_id}}",
71 | "context_field_list": "${{create_search_pipeline.retrieval_augmented_generation.context_field_list}}",
72 | "system_prompt": "${{create_search_pipeline.retrieval_augmented_generation.system_prompt}}",
73 | "user_instructions": "${{create_search_pipeline.retrieval_augmented_generation.user_instructions}}"
74 | }
75 | }
76 | ]
77 | }
78 | }
79 | }
80 | ]
81 | }
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/src/main/resources/substitutionTemplates/deploy-remote-bedrock-model-template.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "${{template.name}}",
3 | "description": "${{template.description}}",
4 | "use_case": "",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "${{create_connector.name}}",
20 | "description": "${{create_connector.description}}",
21 | "version": "1",
22 | "protocol": "aws_sigv4",
23 | "parameters": {
24 | "region": "${{create_connector.region}}",
25 | "service_name": "bedrock",
26 | "input_docs_processed_step_size": "${{create_connector.input_docs_processed_step_size}}"
27 | },
28 | "credential": {
29 | "access_key": "${{create_connector.credential.access_key}}",
30 | "secret_key": "${{create_connector.credential.secret_key}}",
31 | "session_token": "${{create_connector.credential.session_token}}"
32 | },
33 | "actions": [
34 | {
35 | "action_type": "predict",
36 | "method": "POST",
37 | "url": "${{create_connector.actions.url}}",
38 | "headers": {
39 | "content-type": "application/json",
40 | "x-amz-content-sha256": "required"
41 | },
42 | "request_body": "${{create_connector.actions.request_body}}",
43 | "pre_process_function": "${{create_connector.actions.pre_process_function}}",
44 | "post_process_function": "${{create_connector.actions.post_process_function}}"
45 | }
46 | ]
47 | }
48 | },
49 | {
50 | "id": "register_model",
51 | "type": "register_remote_model",
52 | "previous_node_inputs": {
53 | "create_connector": "parameters"
54 | },
55 | "user_inputs": {
56 | "name": "${{register_remote_model.name}}",
57 | "function_name": "remote",
58 | "description": "${{register_remote_model.description}}"
59 | }
60 | },
61 | {
62 | "id": "deploy_model",
63 | "type": "deploy_model",
64 | "previous_node_inputs": {
65 | "register_model": "model_id"
66 | }
67 | }
68 | ],
69 | "edges": [
70 | {
71 | "source": "create_connector",
72 | "dest": "register_model"
73 | },
74 | {
75 | "source": "register_model",
76 | "dest": "deploy_model"
77 | }
78 | ]
79 | }
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/src/main/resources/substitutionTemplates/deploy-remote-model-chat-template.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "${{template.name}}",
3 | "description": "${{template.description}}",
4 | "use_case": "",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
          "name": "${{create_connector.name}}",
20 | "description": "${{create_connector.description}}",
21 | "version": "1",
22 | "protocol": "${{create_connector.protocol}}",
23 | "parameters": {
24 | "endpoint": "${{create_connector.endpoint}}",
25 | "model": "${{create_connector.model}}"
26 | },
27 | "credential": {
28 | "key": "${{create_connector.credential.key}}"
29 | },
30 | "actions": [
31 | {
32 | "action_type": "predict",
33 | "method": "POST",
34 | "url": "${{create_connector.actions.url}}",
35 | "headers": {
36 | "Authorization": "Bearer ${credential.key}"
37 | },
38 | "request_body": "${{create_connector.actions.request_body}}"
39 | }
40 | ]
41 | }
42 | },
43 | {
44 | "id": "register_model",
45 | "type": "register_remote_model",
46 | "previous_node_inputs": {
47 | "create_connector": "parameters"
48 | },
49 | "user_inputs": {
50 | "name": "${{register_remote_model.name}}",
51 | "function_name": "remote",
52 | "description": "${{register_remote_model.description}}"
53 | }
54 | },
55 | {
56 | "id": "deploy_model",
57 | "type": "deploy_model",
58 | "previous_node_inputs": {
59 | "register_model": "model_id"
60 | }
61 | }
62 | ],
63 | "edges": [
64 | {
65 | "source": "create_connector",
66 | "dest": "register_model"
67 | },
68 | {
69 | "source": "register_model",
70 | "dest": "deploy_model"
71 | }
72 | ]
73 | }
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/src/main/resources/substitutionTemplates/deploy-remote-model-extra-params-template.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "${{template.name}}",
3 | "description": "${{template.description}}",
4 | "use_case": "",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "${{create_connector.name}}",
20 | "description": "${{create_connector.description}}",
21 | "version": "1",
22 | "protocol": "${{create_connector.protocol}}",
23 | "parameters": {
24 | "endpoint": "${{create_connector.endpoint}}",
25 | "model": "${{create_connector.model}}",
26 | "input_type": "search_document",
27 | "truncate": "END"
28 | },
29 | "credential": {
30 | "key": "${{create_connector.credential.key}}"
31 | },
32 | "actions": [
33 | {
34 | "action_type": "predict",
35 | "method": "POST",
36 | "url": "${{create_connector.actions.url}}",
37 | "headers": {
38 | "Authorization": "Bearer ${credential.key}",
39 | "Request-Source": "unspecified:opensearch"
40 | },
41 | "request_body": "${{create_connector.actions.request_body}}",
42 | "pre_process_function": "${{create_connector.actions.pre_process_function}}",
43 | "post_process_function": "${{create_connector.actions.post_process_function}}"
44 | }
45 | ]
46 | }
47 | },
48 | {
49 | "id": "register_model",
50 | "type": "register_remote_model",
51 | "previous_node_inputs": {
52 | "create_connector": "parameters"
53 | },
54 | "user_inputs": {
55 | "name": "${{register_remote_model.name}}",
56 | "function_name": "remote",
57 | "description": "${{register_remote_model.description}}"
58 | }
59 | },
60 | {
61 | "id": "deploy_model",
62 | "type": "deploy_model",
63 | "previous_node_inputs": {
64 | "register_model": "model_id"
65 | }
66 | }
67 | ],
68 | "edges": [
69 | {
70 | "source": "create_connector",
71 | "dest": "register_model"
72 | },
73 | {
74 | "source": "register_model",
75 | "dest": "deploy_model"
76 | }
77 | ]
78 | }
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/src/main/resources/substitutionTemplates/deploy-remote-model-template.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "${{template.name}}",
3 | "description": "${{template.description}}",
4 | "use_case": "",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
          "name": "${{create_connector.name}}",
20 | "description": "${{create_connector.description}}",
21 | "version": "1",
22 | "protocol": "${{create_connector.protocol}}",
23 | "parameters": {
24 | "endpoint": "${{create_connector.endpoint}}",
25 | "model": "${{create_connector.model}}"
26 | },
27 | "credential": {
28 | "key": "${{create_connector.credential.key}}"
29 | },
30 | "actions": [
31 | {
32 | "action_type": "predict",
33 | "method": "POST",
34 | "url": "${{create_connector.actions.url}}",
35 | "headers": {
36 | "Authorization": "Bearer ${credential.key}"
37 | },
38 | "request_body": "${{create_connector.actions.request_body}}",
39 | "pre_process_function": "${{create_connector.actions.pre_process_function}}",
40 | "post_process_function": "${{create_connector.actions.post_process_function}}"
41 | }
42 | ]
43 | }
44 | },
45 | {
46 | "id": "register_model",
47 | "type": "register_remote_model",
48 | "previous_node_inputs": {
49 | "create_connector": "parameters"
50 | },
51 | "user_inputs": {
52 | "name": "${{register_remote_model.name}}",
53 | "function_name": "remote",
54 | "description": "${{register_remote_model.description}}"
55 | }
56 | },
57 | {
58 | "id": "deploy_model",
59 | "type": "deploy_model",
60 | "previous_node_inputs": {
61 | "register_model": "model_id"
62 | }
63 | }
64 | ],
65 | "edges": [
66 | {
67 | "source": "create_connector",
68 | "dest": "register_model"
69 | },
70 | {
71 | "source": "register_model",
72 | "dest": "deploy_model"
73 | }
74 | ]
75 | }
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/src/main/resources/substitutionTemplates/multi-modal-search-template.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "${{template.name}}",
3 | "description": "${{template.description}}",
4 | "use_case": "MULTIMODAL_SEARCH",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_ingest_pipeline",
17 | "type": "create_ingest_pipeline",
18 | "user_inputs": {
19 | "pipeline_id": "${{create_ingest_pipeline.pipeline_id}}",
20 | "configurations": {
21 | "description": "${{create_ingest_pipeline.description}}",
22 | "processors": [
23 | {
24 | "text_image_embedding": {
25 | "model_id": "${{create_ingest_pipeline.model_id}}",
26 | "embedding": "${{text_image_embedding.embedding}}",
27 | "field_map": {
28 | "text": "${{text_image_embedding.field_map.text}}",
29 | "image": "${{text_image_embedding.field_map.image}}"
30 | }
31 | }
32 | }
33 | ]
34 | }
35 | }
36 | },
37 | {
38 | "id": "create_index",
39 | "type": "create_index",
40 | "previous_node_inputs": {
41 | "create_ingest_pipeline": "pipeline_id"
42 | },
43 | "user_inputs": {
44 | "index_name": "${{create_index.name}}",
45 | "configurations": {
46 | "settings": {
47 | "index.knn": true,
48 | "default_pipeline": "${{create_ingest_pipeline.pipeline_id}}",
49 | "number_of_shards": "${{create_index.settings.number_of_shards}}"
50 | },
51 | "mappings": {
52 | "_doc": {
53 | "properties": {
54 | "${{text_image_embedding.embedding}}": {
55 | "type": "knn_vector",
56 | "dimension": "${{text_image_embedding.field_map.output.dimension}}",
57 | "method": {
58 | "engine": "${{create_index.mappings.method.engine}}",
59 | "name": "${{create_index.mappings.method.name}}",
60 | "parameters": {}
61 | }
62 | },
63 | "${{text_image_embedding.field_map.text}}": {
64 | "type": "${{text_image_embedding.field_map.text.type}}"
65 | },
66 | "${{text_image_embedding.field_map.image}}": {
67 | "type": "${{text_image_embedding.field_map.image.type}}"
68 | }
69 | }
70 | }
71 | }
72 | }
73 | }
74 | }
75 | ]
76 | }
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/src/main/resources/substitutionTemplates/neural-sparse-local-biencoder-template.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "${{template.name}}",
3 | "description": "${{template.description}}",
4 | "use_case": "",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "register_local_sparse_encoding_model",
17 | "type": "register_local_sparse_encoding_model",
18 | "user_inputs": {
19 | "name": "${{register_local_sparse_encoding_model.name}}",
20 | "version": "${{register_local_sparse_encoding_model.version}}",
21 | "description": "${{register_local_sparse_encoding_model.description}}",
22 | "model_format": "${{register_local_sparse_encoding_model.model_format}}",
23 | "deploy": true
24 | }
25 | },
26 | {
27 | "id": "create_ingest_pipeline",
28 | "type": "create_ingest_pipeline",
29 | "previous_node_inputs": {
30 | "register_local_sparse_encoding_model": "model_id"
31 | },
32 | "user_inputs": {
33 | "pipeline_id": "${{create_ingest_pipeline.pipeline_id}}",
34 | "configurations": {
35 | "description": "${{create_ingest_pipeline.description}}",
36 | "processors": [
37 | {
38 | "sparse_encoding": {
39 | "model_id": "${{register_local_sparse_encoding_model.model_id}}",
40 | "field_map": {
41 | "${{create_ingest_pipeline.text_embedding.field_map.input}}": "${{create_ingest_pipeline.text_embedding.field_map.output}}"
42 | }
43 | }
44 | }
45 | ]
46 | }
47 | }
48 | },
49 | {
50 | "id": "create_index",
51 | "type": "create_index",
52 | "previous_node_inputs": {
53 | "create_ingest_pipeline": "pipeline_id"
54 | },
55 | "user_inputs": {
56 | "index_name": "${{create_index.name}}",
57 | "configurations": {
58 | "settings": {
59 | "default_pipeline": "${{create_ingest_pipeline.pipeline_id}}"
60 | },
61 | "mappings": {
62 | "_doc": {
63 | "properties": {
64 | "${{create_ingest_pipeline.text_embedding.field_map.output}}": {
65 | "type": "rank_features"
66 | },
67 | "${{create_ingest_pipeline.text_embedding.field_map.input}}": {
68 | "type": "text"
69 | }
70 | }
71 | }
72 | }
73 | }
74 | }
75 | }
76 | ]
77 | }
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/src/main/resources/substitutionTemplates/semantic-search-template.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "${{template.name}}",
3 | "description": "${{template.description}}",
4 | "use_case": "SEMANTIC_SEARCH",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_ingest_pipeline",
17 | "type": "create_ingest_pipeline",
18 | "user_inputs": {
19 | "pipeline_id": "${{create_ingest_pipeline.pipeline_id}}",
20 | "configurations": {
21 | "description": "${{create_ingest_pipeline.description}}",
22 | "processors": [
23 | {
24 | "text_embedding": {
25 | "model_id": "${{create_ingest_pipeline.model_id}}",
26 | "field_map": {
27 | "${{text_embedding.field_map.input}}": "${{text_embedding.field_map.output}}"
28 | }
29 | }
30 | }
31 | ]
32 | }
33 | }
34 | },
35 | {
36 | "id": "create_index",
37 | "type": "create_index",
38 | "previous_node_inputs": {
39 | "create_ingest_pipeline": "pipeline_id"
40 | },
41 | "user_inputs": {
42 | "index_name": "${{create_index.name}}",
43 | "configurations": {
44 | "settings": {
45 | "index.knn": true,
46 | "default_pipeline": "${{create_ingest_pipeline.pipeline_id}}",
47 | "number_of_shards": "${{create_index.settings.number_of_shards}}"
48 | },
49 | "mappings": {
50 | "_doc": {
51 | "properties": {
52 | "${{text_embedding.field_map.output}}": {
53 | "type": "knn_vector",
54 | "dimension": "${{text_embedding.field_map.output.dimension}}",
55 | "method": {
56 | "engine": "${{create_index.mappings.method.engine}}",
57 | "space_type": "${{create_index.mappings.method.space_type}}",
58 | "name": "${{create_index.mappings.method.name}}",
59 | "parameters": {}
60 | }
61 | },
62 | "${{text_embedding.field_map.input}}": {
63 | "type": "text"
64 | }
65 | }
66 | }
67 | }
68 | }
69 | }
70 | }
71 | ]
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/FlowFrameworkPluginIT.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework;
10 |
11 | import org.apache.hc.core5.http.ParseException;
12 | import org.apache.hc.core5.http.io.entity.EntityUtils;
13 | import org.opensearch.client.Request;
14 | import org.opensearch.client.Response;
15 | import org.opensearch.test.rest.OpenSearchRestTestCase;
16 |
17 | import java.io.IOException;
18 | import java.nio.charset.StandardCharsets;
19 |
20 | public class FlowFrameworkPluginIT extends OpenSearchRestTestCase {
21 |
22 | public void testPluginInstalled() throws IOException, ParseException {
23 | Response response = client().performRequest(new Request("GET", "/_cat/plugins"));
24 | String body = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8);
25 |
26 | logger.info("response body: {}", body);
27 | assertNotNull(body);
28 | assertTrue(body.contains("opensearch-flow-framework"));
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/common/DefaultUseCasesTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.common;
10 |
11 | import org.opensearch.flowframework.exception.FlowFrameworkException;
12 | import org.opensearch.test.OpenSearchTestCase;
13 |
14 | public class DefaultUseCasesTests extends OpenSearchTestCase {
15 |
16 | @Override
17 | public void setUp() throws Exception {
18 | super.setUp();
19 | }
20 |
21 | public void testGetDefaultsFileByValidUseCaseName() throws FlowFrameworkException {
22 | String defaultsFile = DefaultUseCases.getDefaultsFileByUseCaseName("open_ai_embedding_model_deploy");
23 | assertEquals("defaults/openai-embedding-defaults.json", defaultsFile);
24 | }
25 |
26 | public void testGetDefaultsFileByInvalidUseCaseName() throws FlowFrameworkException {
27 | FlowFrameworkException e = assertThrows(
28 | FlowFrameworkException.class,
29 | () -> DefaultUseCases.getDefaultsFileByUseCaseName("invalid_use_case")
30 | );
31 | }
32 |
33 | public void testGetSubstitutionTemplateByValidUseCaseName() throws FlowFrameworkException {
34 | String templateFile = DefaultUseCases.getSubstitutionReadyFileByUseCaseName("open_ai_embedding_model_deploy");
35 | assertEquals("substitutionTemplates/deploy-remote-model-template.json", templateFile);
36 | }
37 |
38 | public void testGetSubstitutionTemplateByInvalidUseCaseName() throws FlowFrameworkException {
39 | FlowFrameworkException e = assertThrows(
40 | FlowFrameworkException.class,
41 | () -> DefaultUseCases.getSubstitutionReadyFileByUseCaseName("invalid_use_case")
42 | );
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/exception/ApiSpecParseExceptionTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.exception;
10 |
11 | import org.opensearch.OpenSearchException;
12 | import org.opensearch.test.OpenSearchTestCase;
13 |
14 | import java.util.Arrays;
15 | import java.util.List;
16 |
17 | public class ApiSpecParseExceptionTests extends OpenSearchTestCase {
18 |
19 | public void testApiSpecParseException() {
20 | ApiSpecParseException exception = new ApiSpecParseException("API spec parsing failed");
21 | assertTrue(exception instanceof OpenSearchException);
22 | assertEquals("API spec parsing failed", exception.getMessage());
23 | }
24 |
25 | public void testApiSpecParseExceptionWithCause() {
26 | Throwable cause = new RuntimeException("Underlying issue");
27 | ApiSpecParseException exception = new ApiSpecParseException("API spec parsing failed", cause);
28 | assertTrue(exception instanceof OpenSearchException);
29 | assertEquals("API spec parsing failed", exception.getMessage());
30 | assertEquals(cause, exception.getCause());
31 | }
32 |
33 | public void testApiSpecParseExceptionWithDetailedErrors() {
34 | String message = "API spec parsing failed";
35 | List details = Arrays.asList("Missing required field", "Invalid type");
36 | ApiSpecParseException exception = new ApiSpecParseException(message, details);
37 | assertTrue(exception instanceof OpenSearchException);
38 | String expectedMessage = "API spec parsing failed: Missing required field, Invalid type";
39 | assertEquals(expectedMessage, exception.getMessage());
40 | }
41 |
42 | }
43 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/exception/FlowFrameworkExceptionTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.exception;
10 |
11 | import org.opensearch.ExceptionsHelper;
12 | import org.opensearch.OpenSearchException;
13 | import org.opensearch.common.io.stream.BytesStreamOutput;
14 | import org.opensearch.common.xcontent.json.JsonXContent;
15 | import org.opensearch.core.common.bytes.BytesReference;
16 | import org.opensearch.core.common.io.stream.BytesStreamInput;
17 | import org.opensearch.core.rest.RestStatus;
18 | import org.opensearch.core.xcontent.ToXContent;
19 | import org.opensearch.core.xcontent.XContentBuilder;
20 | import org.opensearch.test.OpenSearchTestCase;
21 |
22 | import java.io.IOException;
23 |
public class FlowFrameworkExceptionTests extends OpenSearchTestCase {

    // Verifies the exception hierarchy and that the REST status supplied at
    // construction is what ExceptionsHelper reports, and that the exception
    // renders as a complete (non-fragment) XContent object.
    public void testExceptions() {
        WorkflowStepException wse = new WorkflowStepException("WSE", RestStatus.OK);
        assertTrue(wse instanceof FlowFrameworkException);
        assertTrue(wse instanceof OpenSearchException);
        assertEquals(RestStatus.OK, ExceptionsHelper.status(wse));
        assertFalse(wse.isFragment());
    }

    // Round-trips a FlowFrameworkException through the stream-serialization
    // layer and checks the status survives; then checks its XContent form.
    public void testSerialize() throws IOException {
        FlowFrameworkException ffe = new FlowFrameworkException("FFE", RestStatus.OK);
        assertTrue(ffe instanceof OpenSearchException);
        assertEquals(RestStatus.OK, ExceptionsHelper.status(ffe));

        // Write to a byte stream, then reconstruct from those bytes via the
        // StreamInput constructor; both streams are closed by try-with-resources.
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            ffe.writeTo(out);
            try (BytesStreamInput in = new BytesStreamInput(BytesReference.toBytes(out.bytes()))) {
                ffe = new FlowFrameworkException(in);
                assertTrue(ffe instanceof OpenSearchException);
                assertEquals(RestStatus.OK, ExceptionsHelper.status(ffe));
            }
        }

        // The XContent rendering exposes the message under a top-level "error" key.
        XContentBuilder builder = JsonXContent.contentBuilder();
        assertEquals("{\"error\":\"FFE\"}", ffe.toXContent(builder, ToXContent.EMPTY_PARAMS).toString());
    }
}
52 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/model/PipelineProcessorTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.model;
10 |
11 | import org.opensearch.core.rest.RestStatus;
12 | import org.opensearch.flowframework.exception.FlowFrameworkException;
13 | import org.opensearch.test.OpenSearchTestCase;
14 |
15 | import java.io.IOException;
16 | import java.util.Map;
17 |
18 | public class PipelineProcessorTests extends OpenSearchTestCase {
19 |
20 | public void testProcessor() throws IOException {
21 | PipelineProcessor processor = new PipelineProcessor("foo", Map.of("bar", "baz"));
22 |
23 | assertEquals("foo", processor.type());
24 | assertEquals(Map.of("bar", "baz"), processor.params());
25 |
26 | String expectedJson = "{\"type\":\"foo\",\"params\":{\"bar\":\"baz\"}}";
27 | String json = TemplateTestJsonUtil.parseToJson(processor);
28 | assertEquals(expectedJson, json);
29 |
30 | PipelineProcessor processorX = PipelineProcessor.parse(TemplateTestJsonUtil.jsonToParser(json));
31 | assertEquals("foo", processorX.type());
32 | assertEquals(Map.of("bar", "baz"), processorX.params());
33 | }
34 |
35 | public void testExceptions() throws IOException {
36 | String badJson = "{\"badField\":\"foo\",\"params\":{\"bar\":\"baz\"}}";
37 | FlowFrameworkException e = assertThrows(
38 | FlowFrameworkException.class,
39 | () -> PipelineProcessor.parse(TemplateTestJsonUtil.jsonToParser(badJson))
40 | );
41 | assertEquals("Unable to parse field [badField] in a pipeline processor object.", e.getMessage());
42 | assertEquals(RestStatus.BAD_REQUEST, e.getRestStatus());
43 |
44 | String noTypeJson = "{\"params\":{\"bar\":\"baz\"}}";
45 | e = assertThrows(FlowFrameworkException.class, () -> PipelineProcessor.parse(TemplateTestJsonUtil.jsonToParser(noTypeJson)));
46 | assertEquals("A processor object requires a type field.", e.getMessage());
47 | assertEquals(RestStatus.BAD_REQUEST, e.getRestStatus());
48 | }
49 |
50 | }
51 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/model/ResourceCreatedTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.model;
10 |
11 | import org.opensearch.test.OpenSearchTestCase;
12 |
13 | import java.io.IOException;
14 |
15 | import static org.opensearch.flowframework.common.WorkflowResources.CREATE_CONNECTOR;
16 | import static org.opensearch.flowframework.common.WorkflowResources.getResourceByWorkflowStep;
17 |
18 | public class ResourceCreatedTests extends OpenSearchTestCase {
19 |
20 | @Override
21 | public void setUp() throws Exception {
22 | super.setUp();
23 | }
24 |
25 | public void testParseFeature() throws IOException {
26 | String workflowStepName = CREATE_CONNECTOR.getCreateStep();
27 | String resourceType = getResourceByWorkflowStep(workflowStepName);
28 | ResourceCreated resourceCreated = new ResourceCreated(workflowStepName, "workflow_step_1", resourceType, "L85p1IsBbfF");
29 | assertEquals(workflowStepName, resourceCreated.workflowStepName());
30 | assertEquals("workflow_step_1", resourceCreated.workflowStepId());
31 | assertEquals("connector_id", resourceCreated.resourceType());
32 | assertEquals("L85p1IsBbfF", resourceCreated.resourceId());
33 |
34 | String json = TemplateTestJsonUtil.parseToJson(resourceCreated);
35 | assertTrue(json.contains("\"workflow_step_name\":\"create_connector\""));
36 | assertTrue(json.contains("\"workflow_step_id\":\"workflow_step_1\""));
37 | assertTrue(json.contains("\"resource_type\":\"connector_id\""));
38 | assertTrue(json.contains("\"resource_id\":\"L85p1IsBbfF\""));
39 |
40 | ResourceCreated resourceCreatedTwo = ResourceCreated.parse(TemplateTestJsonUtil.jsonToParser(json));
41 | assertEquals(workflowStepName, resourceCreatedTwo.workflowStepName());
42 | assertEquals("workflow_step_1", resourceCreatedTwo.workflowStepId());
43 | assertEquals("L85p1IsBbfF", resourceCreatedTwo.resourceId());
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/model/WorkflowEdgeTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.model;
10 |
11 | import org.opensearch.core.rest.RestStatus;
12 | import org.opensearch.flowframework.exception.FlowFrameworkException;
13 | import org.opensearch.test.OpenSearchTestCase;
14 |
15 | import java.io.IOException;
16 |
17 | public class WorkflowEdgeTests extends OpenSearchTestCase {
18 |
19 | @Override
20 | public void setUp() throws Exception {
21 | super.setUp();
22 | }
23 |
24 | public void testEdge() throws IOException {
25 | WorkflowEdge edgeAB = new WorkflowEdge("A", "B");
26 | assertEquals("A", edgeAB.source());
27 | assertEquals("B", edgeAB.destination());
28 | assertEquals("A->B", edgeAB.toString());
29 |
30 | WorkflowEdge edgeAB2 = new WorkflowEdge("A", "B");
31 | assertEquals(edgeAB, edgeAB2);
32 |
33 | WorkflowEdge edgeAC = new WorkflowEdge("A", "C");
34 | assertNotEquals(edgeAB, edgeAC);
35 |
36 | String expectedJson = "{\"source\":\"A\",\"dest\":\"B\"}";
37 | String json = TemplateTestJsonUtil.parseToJson(edgeAB);
38 | assertEquals(expectedJson, json);
39 |
40 | WorkflowEdge edgeX = WorkflowEdge.parse(TemplateTestJsonUtil.jsonToParser(json));
41 | assertEquals("A", edgeX.source());
42 | assertEquals("B", edgeX.destination());
43 | assertEquals("A->B", edgeX.toString());
44 | }
45 |
46 | public void testExceptions() throws IOException {
47 | String badJson = "{\"badField\":\"A\",\"dest\":\"B\"}";
48 | FlowFrameworkException e = assertThrows(
49 | FlowFrameworkException.class,
50 | () -> WorkflowEdge.parse(TemplateTestJsonUtil.jsonToParser(badJson))
51 | );
52 | assertEquals("Unable to parse field [badField] in an edge object.", e.getMessage());
53 | assertEquals(RestStatus.BAD_REQUEST, e.getRestStatus());
54 |
55 | String missingJson = "{\"dest\":\"B\"}";
56 | e = assertThrows(FlowFrameworkException.class, () -> WorkflowEdge.parse(TemplateTestJsonUtil.jsonToParser(missingJson)));
57 | assertEquals("An edge object requires both a source and dest field.", e.getMessage());
58 | assertEquals(RestStatus.BAD_REQUEST, e.getRestStatus());
59 | }
60 |
61 | }
62 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/model/WorkflowStepValidatorTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.model;
10 |
11 | import org.opensearch.flowframework.workflow.WorkflowStepFactory;
12 | import org.opensearch.test.OpenSearchTestCase;
13 |
14 | import java.io.IOException;
15 | import java.util.HashMap;
16 | import java.util.Map;
17 |
18 | public class WorkflowStepValidatorTests extends OpenSearchTestCase {
19 |
20 | @Override
21 | public void setUp() throws Exception {
22 | super.setUp();
23 | }
24 |
25 | public void testParseWorkflowStepValidator() throws IOException {
26 |
27 | Map workflowStepValidators = new HashMap<>();
28 | workflowStepValidators.put(
29 | WorkflowStepFactory.WorkflowSteps.CREATE_CONNECTOR.getWorkflowStepName(),
30 | WorkflowStepFactory.WorkflowSteps.CREATE_CONNECTOR.getWorkflowStepValidator()
31 | );
32 |
33 | assertEquals(7, WorkflowStepFactory.WorkflowSteps.CREATE_CONNECTOR.inputs().size());
34 | assertEquals(1, WorkflowStepFactory.WorkflowSteps.CREATE_CONNECTOR.outputs().size());
35 |
36 | assertEquals("name", WorkflowStepFactory.WorkflowSteps.CREATE_CONNECTOR.inputs().get(0));
37 | assertEquals("connector_id", WorkflowStepFactory.WorkflowSteps.CREATE_CONNECTOR.outputs().get(0));
38 | }
39 |
40 | }
41 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/model/WorkflowTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.model;
10 |
11 | import org.opensearch.core.xcontent.XContentParser;
12 | import org.opensearch.test.OpenSearchTestCase;
13 |
14 | import java.io.IOException;
15 | import java.util.Collections;
16 | import java.util.List;
17 | import java.util.Map;
18 |
19 | public class WorkflowTests extends OpenSearchTestCase {
20 |
21 | @Override
22 | public void setUp() throws Exception {
23 | super.setUp();
24 | }
25 |
26 | public void testWorkflow() throws IOException {
27 | WorkflowNode nodeA = new WorkflowNode("A", "a-type", Collections.emptyMap(), Map.of("foo", "bar"));
28 | WorkflowNode nodeB = new WorkflowNode("B", "b-type", Map.of("A", "foo"), Map.of("baz", "qux"));
29 | WorkflowEdge edgeAB = new WorkflowEdge("A", "B");
30 | List nodes = List.of(nodeA, nodeB);
31 | List edges = List.of(edgeAB);
32 |
33 | Workflow workflow = new Workflow(Map.of("key", "value"), nodes, edges);
34 | assertEquals(Map.of("key", "value"), workflow.userParams());
35 | assertEquals(List.of(nodeA, nodeB), workflow.nodes());
36 | assertEquals(List.of(edgeAB), workflow.edges());
37 |
38 | String expectedJson = "{\"user_params\":{\"key\":\"value\"},"
39 | + "\"nodes\":[{\"id\":\"A\",\"type\":\"a-type\",\"previous_node_inputs\":{},\"user_inputs\":{\"foo\":\"bar\"}},"
40 | + "{\"id\":\"B\",\"type\":\"b-type\",\"previous_node_inputs\":{\"A\":\"foo\"},\"user_inputs\":{\"baz\":\"qux\"}}],"
41 | + "\"edges\":[{\"source\":\"A\",\"dest\":\"B\"}]}";
42 | String json = TemplateTestJsonUtil.parseToJson(workflow);
43 | assertEquals(expectedJson, json);
44 |
45 | XContentParser parser = TemplateTestJsonUtil.jsonToParser(json);
46 | Workflow workflowX = Workflow.parse(parser);
47 | assertEquals(Map.of("key", "value"), workflowX.userParams());
48 | assertEquals(List.of(nodeA, nodeB), workflowX.nodes());
49 | assertEquals(List.of(edgeAB), workflowX.edges());
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/transport/GetWorkflowStepTransportActionTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.transport;
10 |
11 | import org.opensearch.action.support.ActionFilters;
12 | import org.opensearch.core.action.ActionListener;
13 | import org.opensearch.flowframework.workflow.WorkflowStepFactory;
14 | import org.opensearch.tasks.Task;
15 | import org.opensearch.test.OpenSearchTestCase;
16 | import org.opensearch.transport.TransportService;
17 |
18 | import java.io.IOException;
19 | import java.util.HashMap;
20 | import java.util.Map;
21 |
22 | import org.mockito.ArgumentCaptor;
23 |
24 | import static org.opensearch.flowframework.common.CommonValue.WORKFLOW_STEP;
25 | import static org.mockito.Mockito.mock;
26 | import static org.mockito.Mockito.times;
27 | import static org.mockito.Mockito.verify;
28 |
29 | @SuppressWarnings("unchecked")
30 | public class GetWorkflowStepTransportActionTests extends OpenSearchTestCase {
31 |
32 | private GetWorkflowStepTransportAction getWorkflowStepTransportAction;
33 |
34 | @Override
35 | public void setUp() throws Exception {
36 | super.setUp();
37 |
38 | this.getWorkflowStepTransportAction = new GetWorkflowStepTransportAction(
39 | mock(TransportService.class),
40 | mock(ActionFilters.class),
41 | mock(WorkflowStepFactory.class)
42 | );
43 | }
44 |
45 | public void testGetWorkflowStepAction() throws IOException {
46 | WorkflowRequest workflowRequest = new WorkflowRequest(null, null);
47 | ActionListener listener = mock(ActionListener.class);
48 | getWorkflowStepTransportAction.doExecute(mock(Task.class), workflowRequest, listener);
49 |
50 | ArgumentCaptor stepCaptor = ArgumentCaptor.forClass(GetWorkflowStepResponse.class);
51 | verify(listener, times(1)).onResponse(stepCaptor.capture());
52 | }
53 |
54 | public void testGetWorkflowStepValidator() throws IOException {
55 | Map params = new HashMap<>();
56 | params.put(WORKFLOW_STEP, "create_connector, delete_model");
57 |
58 | WorkflowRequest workflowRequest = new WorkflowRequest(null, null, params);
59 | ActionListener listener = mock(ActionListener.class);
60 | getWorkflowStepTransportAction.doExecute(mock(Task.class), workflowRequest, listener);
61 | ArgumentCaptor stepCaptor = ArgumentCaptor.forClass(GetWorkflowStepResponse.class);
62 | verify(listener, times(1)).onResponse(stepCaptor.capture());
63 | assertEquals(GetWorkflowStepResponse.class, stepCaptor.getValue().getClass());
64 |
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/util/RestHandlerUtilsTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.util;
10 |
11 | import org.opensearch.commons.authuser.User;
12 | import org.opensearch.search.builder.SearchSourceBuilder;
13 | import org.opensearch.search.fetch.subphase.FetchSourceContext;
14 | import org.opensearch.test.OpenSearchTestCase;
15 |
16 | import java.util.ArrayList;
17 | import java.util.Collections;
18 | import java.util.List;
19 |
20 | public class RestHandlerUtilsTests extends OpenSearchTestCase {
21 |
22 | public void testGetSourceContextFromClientWithDashboardExcludes() {
23 | SearchSourceBuilder testSearchSourceBuilder = new SearchSourceBuilder();
24 | testSearchSourceBuilder.fetchSource(new String[] { "a" }, new String[] { "b" });
25 | User user = new User("user", Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
26 | FetchSourceContext sourceContext = RestHandlerUtils.getSourceContext(user, testSearchSourceBuilder);
27 | assertEquals(sourceContext.excludes().length, 4);
28 | }
29 |
30 | public void testGetSourceContextFromClientWithExcludes() {
31 | SearchSourceBuilder testSearchSourceBuilder = new SearchSourceBuilder();
32 | User user = new User("user", Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
33 | FetchSourceContext sourceContext = RestHandlerUtils.getSourceContext(user, testSearchSourceBuilder);
34 | assertEquals(sourceContext.excludes().length, 2);
35 | }
36 |
37 | public void testGetSourceContextAdminUser() {
38 | SearchSourceBuilder testSearchSourceBuilder = new SearchSourceBuilder();
39 | List roles = new ArrayList<>();
40 | roles.add("all_access");
41 |
42 | User user = new User("admin", roles, roles, Collections.emptyList());
43 | FetchSourceContext sourceContext = RestHandlerUtils.getSourceContext(user, testSearchSourceBuilder);
44 | assertEquals(sourceContext.excludes().length, 1);
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/workflow/WorkflowDataStepTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.opensearch.action.support.PlainActionFuture;
12 | import org.opensearch.flowframework.model.ResourceCreated;
13 | import org.opensearch.test.OpenSearchTestCase;
14 |
15 | import java.util.Collections;
16 | import java.util.Map;
17 | import java.util.concurrent.ExecutionException;
18 |
19 | import static org.junit.Assert.assertTrue;
20 |
21 | public class WorkflowDataStepTests extends OpenSearchTestCase {
22 |
23 | private WorkflowDataStep workflowDataStep;
24 | private WorkflowData inputData;
25 | private WorkflowData outputData;
26 |
27 | private String workflowId = "test-id";
28 | private String workflowStepId = "test-node-id";
29 | private String resourceId = "resourceId";
30 | private String resourceType = "resourceType";
31 |
32 | @Override
33 | public void setUp() throws Exception {
34 | super.setUp();
35 |
36 | ResourceCreated resourceCreated = new ResourceCreated("step_name", workflowStepId, resourceType, resourceId);
37 | this.workflowDataStep = new WorkflowDataStep(resourceCreated);
38 | this.inputData = new WorkflowData(Map.of(), workflowId, workflowStepId);
39 | this.outputData = new WorkflowData(Map.ofEntries(Map.entry(resourceType, resourceId)), workflowId, workflowStepId);
40 | }
41 |
42 | public void testExecuteWorkflowDataStep() throws ExecutionException, InterruptedException {
43 |
44 | @SuppressWarnings("unchecked")
45 | PlainActionFuture future = workflowDataStep.execute(
46 | inputData.getNodeId(),
47 | inputData,
48 | Collections.emptyMap(),
49 | Collections.emptyMap(),
50 | Collections.emptyMap(),
51 | null
52 | );
53 |
54 | assertTrue(future.isDone());
55 | assertEquals(outputData.getContent().get(resourceType), future.get().getContent().get(resourceType));
56 |
57 | }
58 |
59 | }
60 |
--------------------------------------------------------------------------------
/src/test/java/org/opensearch/flowframework/workflow/WorkflowDataTests.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework.workflow;
10 |
11 | import org.opensearch.test.OpenSearchTestCase;
12 |
13 | import java.util.Map;
14 |
15 | public class WorkflowDataTests extends OpenSearchTestCase {
16 |
17 | @Override
18 | public void setUp() throws Exception {
19 | super.setUp();
20 | }
21 |
22 | public void testWorkflowData() {
23 |
24 | WorkflowData empty = WorkflowData.EMPTY;
25 | assertTrue(empty.getParams().isEmpty());
26 | assertTrue(empty.getContent().isEmpty());
27 |
28 | Map expectedContent = Map.of("baz", new String[] { "qux", "quxx" });
29 | WorkflowData contentOnly = new WorkflowData(expectedContent, null, null);
30 | assertTrue(contentOnly.getParams().isEmpty());
31 | assertEquals(expectedContent, contentOnly.getContent());
32 | assertNull(contentOnly.getWorkflowId());
33 | assertNull(contentOnly.getNodeId());
34 |
35 | Map expectedParams = Map.of("foo", "bar");
36 | WorkflowData contentAndParams = new WorkflowData(expectedContent, expectedParams, "test-id-123", "test-node-id");
37 | assertEquals(expectedParams, contentAndParams.getParams());
38 | assertEquals(expectedContent, contentAndParams.getContent());
39 | assertEquals("test-id-123", contentAndParams.getWorkflowId());
40 | assertEquals("test-node-id", contentAndParams.getNodeId());
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/src/test/resources/template/createconnector-createconnectortool-createflowagent.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "createconnector-createconnectortool-createflowagent",
3 | "description": "test case",
4 | "use_case": "TEST_CASE",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.15.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "OpenAI Chat Connector",
20 | "description": "The connector to public OpenAI model service for GPT 3.5",
21 | "version": "1",
22 | "protocol": "http",
23 | "parameters": {
24 | "endpoint": "api.openai.com",
25 | "model": "gpt-3.5-turbo"
26 | },
27 | "credential": {
28 | "openAI_key": "12345"
29 | },
30 | "actions": [
31 | {
32 | "action_type": "predict",
33 | "method": "POST",
34 | "url": "https://${parameters.endpoint}/v1/chat/completions"
35 | }
36 | ]
37 | }
38 | },
39 | {
40 | "id": "create_tool",
41 | "type": "create_tool",
42 | "previous_node_inputs": {
43 | "create_connector": "connector_id"
44 | },
45 | "user_inputs": {
46 | "parameters": {},
47 | "name": "ConnectorTool",
48 | "type": "ConnectorTool"
49 | }
50 | },
51 | {
52 | "id": "create_flow_agent",
53 | "type": "register_agent",
54 | "previous_node_inputs": {
55 | "create_tool": "tools"
56 | },
57 | "user_inputs": {
58 | "parameters": {},
59 | "type": "flow",
60 | "name": "OpenAI Chat Agent"
61 | }
62 | }
63 | ],
64 | "edges": [
65 | {
66 | "source": "create_connector",
67 | "dest": "create_tool"
68 | },
69 | {
70 | "source": "create_tool",
71 | "dest": "create_flow_agent"
72 | }
73 | ]
74 | }
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/src/test/resources/template/createconnector-registerremotemodel-deploymodel.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "createconnector-registerremotemodel-deploymodel",
3 | "description": "test case",
4 | "use_case": "TEST_CASE",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "workflow_step_1",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "OpenAI Chat Connector",
20 | "description": "The connector to public OpenAI model service for GPT 3.5",
21 | "version": "1",
22 | "protocol": "http",
23 | "parameters": {
24 | "endpoint": "api.openai.com",
25 | "model": "gpt-3.5-turbo"
26 | },
27 | "credential": {
28 | "openAI_key": "12345"
29 | },
30 | "actions": [
31 | {
32 | "action_type": "predict",
33 | "method": "POST",
34 | "url": "https://${parameters.endpoint}/v1/chat/completions"
35 | }
36 | ]
37 | }
38 | },
39 | {
40 | "id": "workflow_step_2",
41 | "type": "register_remote_model",
42 | "previous_node_inputs": {
43 | "workflow_step_1": "connector_id"
44 | },
45 | "user_inputs": {
46 | "name": "openAI-gpt-3.5-turbo",
47 | "function_name": "remote",
48 | "description": "test model"
49 | }
50 | },
51 | {
52 | "id": "workflow_step_3",
53 | "type": "deploy_model",
54 | "previous_node_inputs": {
55 | "workflow_step_2": "model_id"
56 | }
57 | },
58 | {
59 | "id": "workflow_step_4",
60 | "type": "register_agent",
61 | "previous_node_inputs": {
62 | "workflow_step_3": "model_id"
63 | },
64 | "user_inputs": {
65 | "name": "Test Agent",
66 | "type": "conversational",
67 | "parameters": {
68 | "greeting": "Hello! How can I assist you?"
69 | },
70 | "llm.parameters": {
71 | "max_iteration": "5",
72 | "stop_when_no_tool_found": "true"
73 | },
74 | "memory": {
75 | "type": "conversation_index"
76 | },
77 | "app_type": "chatbot"
78 | }
79 | }
80 | ],
81 | "edges": [
82 | {
83 | "source": "workflow_step_1",
84 | "dest": "workflow_step_2"
85 | },
86 | {
87 | "source": "workflow_step_2",
88 | "dest": "workflow_step_3"
89 | },
90 | {
91 | "source": "workflow_step_3",
92 | "dest": "workflow_step_4"
93 | }
94 | ]
95 | }
96 | }
97 | }
98 |
--------------------------------------------------------------------------------
/src/test/resources/template/finaltemplate.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "semantic-search",
3 | "description": "My semantic search use case",
4 | "use_case": "SEMANTIC_SEARCH",
5 | "operations": [
6 | "PROVISION",
7 | "INGEST",
8 | "QUERY"
9 | ],
10 | "version": {
11 | "template": "1.0.0",
12 | "compatibility": [
13 | "2.9.0",
14 | "3.0.0"
15 | ]
16 | },
17 | "user_inputs": {
18 | "index_name": "my-knn-index",
19 | "index_settings": {}
20 | },
21 | "workflows": {
22 | "provision": {
23 | "nodes": [{
24 | "id": "create_index",
25 | "type": "create_index",
26 | "inputs": {
27 | "name": "user_inputs.index_name",
28 | "settings": "user_inputs.index_settings",
29 | "node_timeout": "10s"
30 | }
31 | },
32 | {
33 | "id": "create_ingest_pipeline",
34 | "type": "create_ingest_pipeline",
35 | "inputs": {
36 | "name": "my-ingest-pipeline",
37 | "description": "some description",
38 | "processors": [{
39 | "type": "text_embedding",
40 | "params": {
41 | "model_id": "my-existing-model-id",
42 | "input_field": "text_passage",
43 | "output_field": "text_embedding"
44 | }
45 | }],
46 | "node_timeout": "10s"
47 | }
48 | }
49 | ],
50 | "edges": [{
51 | "source": "create_index",
52 | "dest": "create_ingest_pipeline"
53 | }]
54 | },
55 | "ingest": {
56 | "user_params": {
57 | "document": "doc"
58 | },
59 | "nodes": [{
60 | "id": "ingest_index",
61 | "type": "ingest_index",
62 | "inputs": {
63 | "index": "user_inputs.index_name",
64 | "ingest_pipeline": "my-ingest-pipeline",
65 | "document": "user_params.document",
66 | "node_timeout": "10s"
67 | }
68 | }]
69 | },
70 | "query": {
71 | "user_params": {
72 | "plaintext": "string"
73 | },
74 | "nodes": [{
75 | "id": "transform_query",
76 | "type": "transform_query",
77 | "inputs": {
78 | "template": "neural-search-template-1",
79 | "plaintext": "user_params.plaintext",
80 | "node_timeout": "10s"
81 | }
82 | },
83 | {
84 | "id": "query_index",
85 | "type": "query_index",
86 | "inputs": {
87 | "index": "user_inputs.index_name",
88 | "query": "{{output-from-prev-step}}.query",
89 | "search_request_processors": [],
90 | "search_response_processors": [],
91 | "node_timeout": "10s"
92 | }
93 | }
94 | ],
95 | "edges": [{
96 | "source": "transform_query",
97 | "dest": "query_index"
98 | }]
99 | }
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/src/test/resources/template/noop.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "noop",
3 | "workflows": {
4 | "provision": {
5 | "nodes": [
6 | {
7 | "id": "no-op",
8 | "type": "noop"
9 | }
10 | ]
11 | }
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/src/test/resources/template/register-deploylocalsparseencodingmodel.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "register-deploy-local-sparse-encoding-model",
3 | "description": "test case",
4 | "use_case": "TEST_CASE",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "workflow_step_1",
17 | "type": "register_local_sparse_encoding_model",
18 | "user_inputs": {
19 | "node_timeout": "60s",
20 | "name": "neural-sparse/opensearch-neural-sparse-tokenizer-v1",
21 | "version": "1.0.0",
22 | "description": "This is a neural sparse tokenizer model: It tokenize input sentence into tokens and assign pre-defined weight from IDF to each. It serves only in query.",
23 | "model_format": "TORCH_SCRIPT",
24 | "function_name": "SPARSE_TOKENIZE",
25 | "model_content_hash_value": "b3487da9c58ac90541b720f3b367084f271d280c7f3bdc3e6d9c9a269fb31950",
26 | "url": "https://artifacts.opensearch.org/models/ml-models/amazon/neural-sparse/opensearch-neural-sparse-tokenizer-v1/1.0.0/torch_script/opensearch-neural-sparse-tokenizer-v1-1.0.0.zip",
27 | "deploy": true
28 | }
29 | }
30 | ]
31 | }
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/src/test/resources/template/registerremotemodel-createindex.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "semantic search with local pretrained model",
3 | "description": "Setting up semantic search, with a local pretrained embedding model",
4 | "use_case": "SEMANTIC_SEARCH",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_openai_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "OpenAI Chat Connector",
20 | "description": "The connector to public OpenAI model service for text embedding model",
21 | "version": "1",
22 | "protocol": "http",
23 | "parameters": {
24 | "endpoint": "api.openai.com",
25 | "model": "gpt-3.5-turbo",
26 | "response_filter": "$.choices[0].message.content"
27 | },
28 | "credential": {
29 | "openAI_key": "12345"
30 | },
31 | "actions": [
32 | {
33 | "action_type": "predict",
34 | "method": "POST",
35 | "url": "https://${parameters.endpoint}/v1/chat/completions"
36 | }
37 | ]
38 | }
39 | },
40 | {
41 | "id": "register_openai_model",
42 | "type": "register_remote_model",
43 | "previous_node_inputs": {
44 | "create_openai_connector": "connector_id"
45 | },
46 | "user_inputs": {
47 | "name": "openAI-gpt-3.5-turbo",
48 | "deploy": true
49 | }
50 | },
51 | {
52 | "id": "create_index",
53 | "type": "create_index",
54 | "user_inputs": {
55 | "index_name": "my-nlp-index",
56 | "configurations": {
57 | "settings": {
58 | "index.knn": true,
59 | "index.number_of_shards": "2"
60 | },
61 | "mappings": {
62 | "properties": {
63 | "passage_embedding": {
64 | "type": "knn_vector",
65 | "dimension": "768",
66 | "method": {
67 | "engine": "lucene",
68 | "space_type": "l2",
69 | "name": "hnsw",
70 | "parameters": {}
71 | }
72 | },
73 | "passage_text": {
74 | "type": "text"
75 | }
76 | }
77 | }
78 | }
79 | }
80 | }
81 | ]
82 | }
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/src/test/resources/template/registerremotemodel-ingestpipeline.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "semantic search with local pretrained model",
3 | "description": "Setting up semantic search, with a local pretrained embedding model",
4 | "use_case": "SEMANTIC_SEARCH",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_openai_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "OpenAI Chat Connector",
20 | "description": "The connector to public OpenAI model service for text embedding model",
21 | "version": "1",
22 | "protocol": "http",
23 | "parameters": {
24 | "endpoint": "api.openai.com",
25 | "model": "gpt-3.5-turbo",
26 | "response_filter": "$.choices[0].message.content"
27 | },
28 | "credential": {
29 | "openAI_key": "12345"
30 | },
31 | "actions": [
32 | {
33 | "action_type": "predict",
34 | "method": "POST",
35 | "url": "https://${parameters.endpoint}/v1/chat/completions"
36 | }
37 | ]
38 | }
39 | },
40 | {
41 | "id": "register_openai_model",
42 | "type": "register_remote_model",
43 | "previous_node_inputs": {
44 | "create_openai_connector": "connector_id"
45 | },
46 | "user_inputs": {
47 | "name": "openAI-gpt-3.5-turbo",
48 | "deploy": true
49 | }
50 | },
51 | {
52 | "id": "create_ingest_pipeline",
53 | "type": "create_ingest_pipeline",
54 | "previous_node_inputs": {
55 | "register_openai_model": "model_id"
56 | },
57 | "user_inputs": {
58 | "pipeline_id": "nlp-ingest-pipeline",
59 | "configurations": {
60 | "description": "A text embedding pipeline",
61 | "processors": [
62 | {
63 | "text_embedding": {
64 | "model_id": "${{register_openai_model.model_id}}",
65 | "field_map": {
66 | "passage_text": "passage_embedding"
67 | }
68 | }
69 | }
70 | ]
71 | }
72 | }
73 | }
74 | ]
75 | }
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/src/test/resources/template/registerremotemodel.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "semantic search with remote OpenAI model",
3 | "description": "Setting up semantic search, with a remote OpenAI embedding model",
4 | "use_case": "SEMANTIC_SEARCH",
5 | "version": {
6 | "template": "1.0.0",
7 | "compatibility": [
8 | "2.12.0",
9 | "3.0.0"
10 | ]
11 | },
12 | "workflows": {
13 | "provision": {
14 | "nodes": [
15 | {
16 | "id": "create_openai_connector",
17 | "type": "create_connector",
18 | "user_inputs": {
19 | "name": "OpenAI Chat Connector",
20 | "description": "The connector to public OpenAI model service for text embedding model",
21 | "version": "1",
22 | "protocol": "http",
23 | "parameters": {
24 | "endpoint": "api.openai.com",
25 | "model": "gpt-3.5-turbo",
26 | "response_filter": "$.choices[0].message.content"
27 | },
28 | "credential": {
29 | "openAI_key": "12345"
30 | },
31 | "actions": [
32 | {
33 | "action_type": "predict",
34 | "method": "POST",
35 | "url": "https://${parameters.endpoint}/v1/chat/completions"
36 | }
37 | ]
38 | }
39 | },
40 | {
41 | "id": "register_openai_model",
42 | "type": "register_remote_model",
43 | "previous_node_inputs": {
44 | "create_openai_connector": "connector_id"
45 | },
46 | "user_inputs": {
47 | "name": "openAI-gpt-3.5-turbo",
48 | "deploy": true
49 | }
50 | }
51 | ]
52 | }
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/src/yamlRestTest/java/org/opensearch/flowframework/FlowFrameworkPluginYamlTestSuiteIT.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright OpenSearch Contributors
3 | * SPDX-License-Identifier: Apache-2.0
4 | *
5 | * The OpenSearch Contributors require contributions made to
6 | * this file be licensed under the Apache-2.0 license or a
7 | * compatible open source license.
8 | */
9 | package org.opensearch.flowframework;
10 |
11 | import com.carrotsearch.randomizedtesting.annotations.Name;
12 | import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
13 |
14 | import org.opensearch.test.rest.yaml.ClientYamlTestCandidate;
15 | import org.opensearch.test.rest.yaml.OpenSearchClientYamlSuiteTestCase;
16 |
17 | public class FlowFrameworkPluginYamlTestSuiteIT extends OpenSearchClientYamlSuiteTestCase {
18 |
19 | public FlowFrameworkPluginYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
20 | super(testCandidate);
21 | }
22 |
23 | @ParametersFactory
24 | public static Iterable