├── .appinspect.manualcheck.yaml ├── .github └── workflows │ └── build-test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── additional_packaging.py ├── docker-compose.yml ├── globalConfig.json ├── package ├── LICENSE.txt ├── README.txt ├── app.manifest ├── bin │ ├── example_api_key_verification.py │ ├── example_delete_checkpoint_rh.py │ ├── example_helper.py │ └── example_utils.py ├── default │ ├── collections.conf │ ├── eventtypes.conf │ ├── props.conf │ ├── tags.conf │ └── transforms.conf ├── lib │ └── requirements.txt └── static │ ├── appIcon.png │ ├── appIconAlt.png │ ├── appIconAlt_2x.png │ └── appIcon_2x.png ├── requirements-dev.txt ├── scripts ├── build_ui.sh ├── local_testing_setup.sh ├── run_locally.sh └── run_splunk.sh ├── server ├── Dockerfile ├── app.py └── requirements.txt ├── tests ├── __init__.py ├── knowledge │ ├── pytest-splunk-addon-data.conf │ ├── samples │ │ └── example_sourcetype.xml │ └── test_addon.py └── ucc_modinput_functional │ ├── README.md │ ├── __init__.py │ ├── defaults.py │ ├── splunk │ ├── __init__.py │ ├── client │ │ ├── __init__.py │ │ ├── _managed_client.py │ │ ├── client.py │ │ └── configuration.py │ ├── forges.py │ └── probes.py │ ├── test_configuration.py │ ├── test_inputs.py │ ├── test_settings.py │ └── vendor │ ├── __init__.py │ ├── client │ ├── __init__.py │ ├── client.py │ └── configuration.py │ └── forges.py └── ui ├── .gitignore ├── .nvmrc ├── README.md ├── babel.config.cjs ├── eslint.config.js ├── jest.config.ts ├── package-lock.json ├── package.json ├── src ├── ucc-ui-extensions │ ├── AdvancedInputsTab │ │ ├── AdvancedInputsTab.spec.tsx │ │ ├── AdvancedInputsTab.tsx │ │ └── index.tsx │ ├── DateInput │ │ ├── DateInput.spec.tsx │ │ ├── DateInput.tsx │ │ └── index.tsx │ └── README.md └── utils │ ├── ResponseError.ts │ ├── api.ts │ └── apiHooks.ts ├── tests ├── jest.setup.ts └── mocks │ ├── README.md │ └── server.ts ├── tsconfig.app.json ├── tsconfig.json ├── tsconfig.node.json └── webpack.config.js /.appinspect.manualcheck.yaml: -------------------------------------------------------------------------------- 1 | check_for_binary_files_without_source_code: 2 | comment: 'done' 3 | check_for_remote_code_execution_in_javascript: 4 | comment: 'done' 5 | check_for_builtin_functions: 6 | comment: 'done' 7 | check_for_data_compression_and_archiving: 8 | comment: 'done' 9 | check_for_file_and_directory_access: 10 | comment: 'done' 11 | check_for_generic_operating_system_services: 12 | comment: 'done' 13 | check_for_importing_modules: 14 | comment: 'done' 15 | check_for_plain_text_credentials_in_python: 16 | comment: 'done' 17 | check_for_environment_variable_use_in_python: 18 | comment: 'done' 19 | check_for_secret_disclosure: 20 | comment: 'done' 21 | check_for_executable_flag: 22 | comment: 'done' 23 | check_for_known_vulnerabilities_in_third_party_libraries: 24 | comment: 'done' 25 | check_embedded_links: 26 | comment: 'done' 27 | check_for_auto_update_features: 28 | comment: 'done' 29 | check_link_includes_contact_info: 30 | comment: 'done' 31 | check_documented_included_open_source: 32 | comment: 'done' 33 | check_editing_and_proofreading: 34 | comment: 'done' 35 | check_authorization_credentials: 36 | comment: 'done' 37 | check_for_stacktrace_returned_to_user: 38 | comment: 'done' 39 | check_fs_writes: 40 | comment: 'done' 41 | check_user_privileges: 42 | comment: 'done' 43 | check_for_reverse_shells: 44 | comment: 'done' 45 | check_requires_access_to_files_outside_apps_dir: 46 | comment: 'done' 47 | 
check_for_offensive_material: 48 | comment: 'done' 49 | check_dependencies: 50 | comment: 'done' 51 | check_hard_coded_paths: 52 | comment: 'done' 53 | check_for_insecure_http_calls_in_python: 54 | comment: 'done' 55 | check_for_supported_tls: 56 | comment: 'done' 57 | -------------------------------------------------------------------------------- /.github/workflows/build-test.yml: -------------------------------------------------------------------------------- 1 | name: Gold TA CI 2 | on: 3 | push: 4 | branches: 5 | - "main" 6 | - "develop" 7 | pull_request: 8 | branches: 9 | - "**" 10 | workflow_dispatch: 11 | 12 | jobs: 13 | pre-commit: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | - uses: actions/setup-python@v5 18 | with: 19 | python-version: "3.12" 20 | - uses: pre-commit/action@v3.0.1 21 | 22 | build: 23 | needs: 24 | - pre-commit 25 | runs-on: ubuntu-22.04 26 | steps: 27 | - uses: actions/checkout@v4 28 | - uses: actions/setup-node@v4 29 | with: 30 | cache: 'npm' 31 | cache-dependency-path: 'ui/package-lock.json' 32 | node-version-file: 'ui/package.json' 33 | - uses: actions/setup-python@v5 34 | with: 35 | python-version: "3.7" 36 | - run: | 37 | python3 -m venv .venv 38 | source .venv/bin/activate 39 | - run: pip install -r requirements-dev.txt 40 | - run: ucc-gen build 41 | - uses: actions/upload-artifact@v4 42 | with: 43 | name: Splunk_TA_Example-raw-output 44 | path: output/* 45 | # Taken from https://github.com/splunk/addonfactory-ucc-generator-action/blob/main/action.yml. 46 | # Should resolve `check_for_expansive_permissions` check from AppInspect CLI. 47 | - run: chmod -R +r output 48 | shell: bash 49 | - run: chmod -R go-w output 50 | shell: bash 51 | - uses: actions/upload-artifact@v4 52 | with: 53 | name: output 54 | path: output/ 55 | - run: ucc-gen package --path output/Splunk_TA_Example 56 | - uses: actions/upload-artifact@v4 57 | with: 58 | name: Splunk_TA_Example 59 | path: Splunk_TA_Example*.tar.gz 60 | 61 | ui-checks: 62 | runs-on: ubuntu-22.04 63 | defaults: 64 | run: 65 | shell: bash 66 | working-directory: ui 67 | steps: 68 | - uses: actions/checkout@v4 69 | - uses: actions/setup-node@v4 70 | with: 71 | cache: 'npm' 72 | cache-dependency-path: 'ui/package-lock.json' 73 | node-version-file: 'ui/package.json' 74 | - name: Install Dependencies 75 | run: npm ci 76 | - name: Code lint 77 | run: npm run lint 78 | - name: Unit tests 79 | run: npm run test 80 | 81 | splunk-appinspect-cli: 82 | name: splunk-appinspect-cli ${{ matrix.tags }} 83 | needs: 84 | - build 85 | runs-on: ubuntu-latest 86 | continue-on-error: true 87 | strategy: 88 | matrix: 89 | tags: 90 | - "cloud" 91 | - "appapproval" 92 | - "deprecated_feature" 93 | - "developer_guidance" 94 | - "future" 95 | - "self-service" 96 | - "splunk_appinspect" 97 | steps: 98 | - uses: actions/checkout@v4 99 | - uses: actions/download-artifact@v4 100 | with: 101 | name: Splunk_TA_Example 102 | path: build/package 103 | - uses: splunk/appinspect-cli-action@v2.8 104 | with: 105 | app_path: build/package 106 | included_tags: ${{ matrix.tags }} 107 | 108 | psa-test: 109 | runs-on: ubuntu-22.04 110 | needs: 111 | - build 112 | permissions: 113 | actions: read 114 | deployments: read 115 | contents: read 116 | packages: read 117 | statuses: read 118 | checks: write 119 | steps: 120 | - uses: actions/checkout@v4 121 | - uses: actions/setup-python@v5 122 | with: 123 | python-version: "3.7" 124 | - uses: actions/download-artifact@v4 125 | with: 126 | name: Splunk_TA_Example-raw-output 127 | 
path: psa-output/ 128 | - run: | 129 | ./scripts/run_splunk.sh 130 | until curl -Lsk "https://localhost:8088/services/collector/health" &>/dev/null ; do echo -n "Waiting for HEC-" && sleep 5 ; done 131 | timeout-minutes: 5 132 | - name: Set up virtual environment 133 | run: | 134 | python3 -m venv .venv 135 | source .venv/bin/activate 136 | - run: pip install --no-cache-dir -r requirements-dev.txt 137 | - name: Run PSA knowledge tests 138 | run : | 139 | sudo chown -R runner:runner psa-output/Splunk_TA_Example/ 140 | chmod u+rwx psa-output/Splunk_TA_Example/ 141 | pytest tests/knowledge --splunk-type=external --splunk-app=package/ --splunk-data-generator=tests/knowledge --splunk-host=localhost --splunk-port=8089 --splunk-user=admin --splunk-password=Chang3d! --splunk-hec-token=4a8a737d-5452-426c-a6f7-106dca4e813f 142 | - name: Set output if steps failed 143 | run: | 144 | echo "failed=${{ env.failed }}" >> $GITHUB_ENV 145 | outputs: 146 | failed: ${{ env.failed }} 147 | 148 | modinput-test: 149 | runs-on: ubuntu-22.04 150 | needs: 151 | - build 152 | permissions: 153 | actions: read 154 | deployments: read 155 | contents: read 156 | packages: read 157 | statuses: read 158 | checks: write 159 | steps: 160 | - uses: actions/checkout@v4 161 | - uses: actions/setup-python@v5 162 | with: 163 | python-version: "3.7" 164 | - uses: actions/download-artifact@v4 165 | with: 166 | name: Splunk_TA_Example 167 | - run: ./scripts/run_locally.sh 168 | - name: Set up virtual environment 169 | run: | 170 | python3 -m venv .venv 171 | source .venv/bin/activate 172 | - run: pip install --no-cache-dir -r requirements-dev.txt 173 | - name: Run modinput functional tests 174 | run : | 175 | export MODINPUT_TEST_SPLUNK_HOST=localhost 176 | export MODINPUT_TEST_SPLUNK_PORT=8089 177 | export MODINPUT_TEST_SPLUNK_USERNAME=admin 178 | export MODINPUT_TEST_SPLUNK_PASSWORD_BASE64=$(ucc-test-modinput base64encode -s 'Chang3d!') 179 | export MODINPUT_TEST_EXAMPLE_API_KEY_BASE64=$(ucc-test-modinput base64encode -s 'super-secret-api-token') 180 | ucc-test-modinput gen 181 | pytest tests/ucc_modinput_functional 182 | - name: Set output if steps failed 183 | run: | 184 | echo "failed=${{ env.failed }}" >> $GITHUB_ENV 185 | outputs: 186 | failed: ${{ env.failed }} 187 | 188 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | .venv* 3 | output 4 | .DS_Store 5 | __pycache__ 6 | *.log 7 | # PSA files on local execution 8 | events.pickle 9 | generator.lock 10 | .tokenized_events 11 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.4.0 4 | hooks: 5 | - id: check-merge-conflict 6 | - id: debug-statements 7 | - repo: https://github.com/asottile/pyupgrade 8 | rev: v3.3.2 9 | hooks: 10 | - id: pyupgrade 11 | args: 12 | - --py37-plus 13 | - repo: https://github.com/psf/black 14 | rev: 23.3.0 15 | hooks: 16 | - id: black 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2024 Splunk Inc. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Splunk_TA_Example 2 | 3 | This is an example TA for Splunk that demonstrates how to use the modular input framework to collect data from an API and send it to Splunk. 4 | 5 | ## Frameworks and tools used 6 | 7 | * UCC - https://github.com/splunk/addonfactory-ucc-generator 8 | * PSA - https://github.com/splunk/pytest-splunk-addon 9 | 10 | ## API 11 | 12 | The API is a simple Flask app that returns a list of events. 13 | 14 | ## Quick start locally with Docker 15 | 16 | ```bash 17 | ./scripts/run_locally.sh 18 | ``` 19 | 20 | 21 | ## Build and package TA 22 | 23 | ```bash 24 | python3 -m venv .venv 25 | source .venv/bin/activate 26 | pip install -r requirements-dev.txt 27 | ucc-gen build 28 | ucc-gen package --path output/Splunk_TA_Example 29 | ``` 30 | 31 | ## Notable PRs 32 | 33 | * Custom REST handlers - https://github.com/splunk/splunk-example-ta/pull/4 34 | * Add KVStore checkpoint for modular input - https://github.com/splunk/splunk-example-ta/pull/5 35 | * Delete KVStore checkpoint when input is deleted - https://github.com/splunk/splunk-example-ta/pull/6 36 | -------------------------------------------------------------------------------- /additional_packaging.py: -------------------------------------------------------------------------------- 1 | import os 2 | from os import path 3 | 4 | 5 | def additional_packaging(addon_name: str) -> None: 6 | # It fixes https://github.com/splunk/splunk-example-ta/actions/runs/11767701819/job/32776693866?pr=2. 
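    # Note: `ucc-gen build` calls additional_packaging(<add-on name>) after the output/
    # directory has been generated, so post-build steps (removing the stray .pyc file below,
    # building the UI via scripts/build_ui.sh) belong in this hook.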
7 | file_to_remove = f"output/{addon_name}/lib/__pycache__/socks.cpython-37.pyc" 8 | if path.exists(file_to_remove): 9 | os.remove(file_to_remove) 10 | 11 | build_ui_script = os.path.join( 12 | os.path.dirname(os.path.realpath(__file__)), "scripts", "build_ui.sh" 13 | ) 14 | if path.exists(build_ui_script): 15 | os.system(f"chmod +x {build_ui_script}") 16 | return_code = os.system(build_ui_script) 17 | if return_code != 0: 18 | os._exit(os.WEXITSTATUS(return_code)) 19 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | name: splunk-example-ta 2 | services: 3 | server: 4 | container_name: server-example-ta 5 | build: 6 | context: server 7 | dockerfile: Dockerfile 8 | 9 | splunk: 10 | image: splunk/splunk:latest 11 | container_name: splunk-example-ta 12 | ports: 13 | - "8000:8000" 14 | - "8089:8089" 15 | environment: 16 | - SPLUNK_PASSWORD=${SPLUNK_PASSWORD:-Chang3d!} 17 | - SPLUNK_HEC_TOKEN=${SPLUNK_HEC_TOKEN:-4a8a737d-5452-426c-a6f7-106dca4e813f} 18 | - SPLUNK_START_ARGS=${SPLUNK_START_ARGS:---accept-license} 19 | volumes: 20 | - ./output/Splunk_TA_Example:/opt/splunk/etc/apps/Splunk_TA_Example 21 | -------------------------------------------------------------------------------- /globalConfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "pages": { 3 | "configuration": { 4 | "tabs": [ 5 | { 6 | "name": "account", 7 | "restHandlerModule": "example_api_key_verification", 8 | "restHandlerClass": "APIKeyValidator", 9 | "table": { 10 | "actions": [ 11 | "edit", 12 | "delete", 13 | "clone" 14 | ], 15 | "header": [ 16 | { 17 | "label": "Name", 18 | "field": "name" 19 | } 20 | ] 21 | }, 22 | "entity": [ 23 | { 24 | "type": "text", 25 | "label": "Name", 26 | "validators": [ 27 | { 28 | "type": "regex", 29 | "errorMsg": "Account Name must begin with a letter and consist exclusively of alphanumeric characters and underscores.", 30 | "pattern": "^[a-zA-Z]\\w*$" 31 | }, 32 | { 33 | "type": "string", 34 | "errorMsg": "Length of input name should be between 1 and 100", 35 | "minLength": 1, 36 | "maxLength": 100 37 | } 38 | ], 39 | "field": "name", 40 | "help": "A unique name for the account.", 41 | "required": true 42 | }, 43 | { 44 | "type": "text", 45 | "label": "API key", 46 | "field": "api_key", 47 | "help": "API key that is validated by the server (it is 'super-secret-api-token', but don't tell anyone).", 48 | "required": true, 49 | "encrypted": true 50 | } 51 | ], 52 | "title": "Accounts" 53 | }, 54 | { 55 | "type": "proxyTab", 56 | "proxy_type": true, 57 | "username": true, 58 | "password": true, 59 | "dns_resolution": true 60 | }, 61 | { 62 | "type": "loggingTab" 63 | }, 64 | { 65 | "name": "advanced_inputs", 66 | "title": "Advanced Inputs", 67 | "customTab": { 68 | "src": "AdvancedInputsTab", 69 | "type": "external" 70 | }, 71 | "entity": [] 72 | } 73 | ], 74 | "title": "Configuration", 75 | "description": "Set up your add-on" 76 | }, 77 | "inputs": { 78 | "services": [ 79 | { 80 | "name": "example", 81 | "restHandlerModule": "example_delete_checkpoint_rh", 82 | "restHandlerClass": "DeleteCheckpointRestHandler", 83 | "entity": [ 84 | { 85 | "type": "text", 86 | "label": "Name", 87 | "validators": [ 88 | { 89 | "type": "regex", 90 | "errorMsg": "Input Name must begin with a letter and consist exclusively of alphanumeric characters and underscores.", 91 | "pattern": "^[a-zA-Z]\\w*$" 92 | }, 93 | { 94 | "type": 
"string", 95 | "errorMsg": "Length of input name should be between 1 and 100", 96 | "minLength": 1, 97 | "maxLength": 100 98 | } 99 | ], 100 | "field": "name", 101 | "help": "A unique name for the data input.", 102 | "required": true 103 | }, 104 | { 105 | "type": "interval", 106 | "label": "Interval", 107 | "defaultValue": "300", 108 | "field": "interval", 109 | "options": { 110 | "range": [ 111 | 10, 112 | 301 113 | ] 114 | }, 115 | "help": "Time interval of the data input, in seconds.", 116 | "required": true 117 | }, 118 | { 119 | "type": "index", 120 | "field": "index", 121 | "label": "Index" 122 | }, 123 | { 124 | "type": "singleSelect", 125 | "label": "Account to use", 126 | "options": { 127 | "referenceName": "account" 128 | }, 129 | "help": "Account to use for this input.", 130 | "field": "account", 131 | "required": true 132 | }, 133 | { 134 | "type": "text", 135 | "label": "Fetch from Page", 136 | "defaultValue": "0", 137 | "field": "fetch_from", 138 | "validators": [ 139 | { 140 | "type": "number", 141 | "range": [ 142 | 0, 143 | 10000 144 | ], 145 | "isInteger": true 146 | } 147 | ], 148 | "help": "Page to fetch the data from. Default: 0.", 149 | "required": false 150 | }, 151 | { 152 | "label": "Start From", 153 | "field": "start_from", 154 | "type": "custom", 155 | "options": { 156 | "src": "DateInput", 157 | "type": "external" 158 | } 159 | } 160 | ], 161 | "inputHelperModule": "example_helper", 162 | "title": "example" 163 | } 164 | ], 165 | "title": "Inputs", 166 | "description": "Manage your data inputs", 167 | "table": { 168 | "actions": [ 169 | "edit", 170 | "delete", 171 | "clone" 172 | ], 173 | "header": [ 174 | { 175 | "label": "Name", 176 | "field": "name" 177 | }, 178 | { 179 | "label": "Interval", 180 | "field": "interval" 181 | }, 182 | { 183 | "label": "Index", 184 | "field": "index" 185 | }, 186 | { 187 | "label": "Status", 188 | "field": "disabled" 189 | } 190 | ], 191 | "moreInfo": [ 192 | { 193 | "label": "Name", 194 | "field": "name" 195 | }, 196 | { 197 | "label": "Interval", 198 | "field": "interval" 199 | }, 200 | { 201 | "label": "Index", 202 | "field": "index" 203 | }, 204 | { 205 | "label": "Status", 206 | "field": "disabled", 207 | "mapping": { 208 | "true": "Inactive", 209 | "false": "Active" 210 | } 211 | } 212 | ] 213 | } 214 | }, 215 | "dashboard": { 216 | "panels": [ 217 | { 218 | "name": "default" 219 | } 220 | ] 221 | } 222 | }, 223 | "meta": { 224 | "name": "Splunk_TA_Example", 225 | "restRoot": "Splunk_TA_Example", 226 | "version": "0.0.1+cbf8f61", 227 | "displayName": "Splunk Add-on for Example", 228 | "schemaVersion": "0.0.9", 229 | "supportedThemes": [ 230 | "light", 231 | "dark" 232 | ] 233 | } 234 | } 235 | -------------------------------------------------------------------------------- /package/LICENSE.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/splunk/splunk-example-ta/1378a9a04905e8edff6bfd0a6dc7354791053c16/package/LICENSE.txt -------------------------------------------------------------------------------- /package/README.txt: -------------------------------------------------------------------------------- 1 | # splunk-example-ta 2 | 3 | # Binary File Declaration 4 | 5 | lib/charset_normalizer/md__mypyc.cpython-37m-x86_64-linux-gnu.so -- This binary file contains the compiled code related to the Charset Normalizer package that has been generated by the MyPyC tool for Python 6 | 7 | lib/charset_normalizer/md.cpython-37m-x86_64-linux-gnu.so -- This binary 
file contains the compiled code of the Charset Normalizer package, which includes performance optimizations and low-level functionality written in C or C++ 8 | -------------------------------------------------------------------------------- /package/app.manifest: -------------------------------------------------------------------------------- 1 | { 2 | "schemaVersion": "2.0.0", 3 | "info": { 4 | "title": "Splunk Add-on for Example", 5 | "id": { 6 | "group": null, 7 | "name": "Splunk_TA_Example", 8 | "version": "0.0.1" 9 | }, 10 | "author": [ 11 | { 12 | "name": "Splunk Inc.", 13 | "email": null, 14 | "company": null 15 | } 16 | ], 17 | "releaseDate": null, 18 | "description": "Splunk Add-on for Example", 19 | "classification": { 20 | "intendedAudience": "IT Professionals", 21 | "categories": [ 22 | "Security, Fraud & Compliance" 23 | ], 24 | "developmentStatus": "Production/Stable" 25 | }, 26 | "commonInformationModels": null, 27 | "license": { 28 | "name": null, 29 | "text": "LICENSE.txt", 30 | "uri": null 31 | }, 32 | "privacyPolicy": { 33 | "name": null, 34 | "text": null, 35 | "uri": null 36 | }, 37 | "releaseNotes": { 38 | "name": "README", 39 | "text": "README.txt", 40 | "uri": "" 41 | } 42 | }, 43 | "dependencies": null, 44 | "tasks": null, 45 | "inputGroups": null, 46 | "incompatibleApps": null, 47 | "platformRequirements": null, 48 | "supportedDeployments": [ 49 | "_standalone", 50 | "_distributed", 51 | "_search_head_clustering" 52 | ], 53 | "targetWorkloads": [ 54 | "_search_heads", 55 | "_indexers" 56 | ] 57 | } -------------------------------------------------------------------------------- /package/bin/example_api_key_verification.py: -------------------------------------------------------------------------------- 1 | import import_declare_test 2 | 3 | from splunktaucclib.rest_handler.admin_external import AdminExternalHandler 4 | from splunktaucclib.rest_handler.error import RestError 5 | 6 | 7 | def _validate_api_key(api_key: str): 8 | # Some code to validate the API key. 9 | # Should return nothing if the configuration is valid. 10 | # Should raise an exception splunktaucclib.rest_handler.error.RestError if the configuration is not valid. 
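    # Hypothetical sketch (not part of this add-on): a production handler would typically
    # verify the key against the vendor API instead of comparing it to a hard-coded value,
    # for example (the endpoint below is an assumption):
    #
    #   response = requests.get(
    #       "https://vendor.example.com/verify",  # hypothetical endpoint
    #       headers={"API-Key": api_key},
    #       timeout=20,
    #   )
    #   if response.status_code != 200:
    #       raise RestError(400, "API Key provided is not correct!")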
11 | if api_key != "super-secret-api-token": 12 | raise RestError(400, "API Key provided is not correct!") 13 | 14 | 15 | class APIKeyValidator(AdminExternalHandler): 16 | def __init__(self, *args, **kwargs): 17 | AdminExternalHandler.__init__(self, *args, **kwargs) 18 | 19 | def handleList(self, confInfo): 20 | AdminExternalHandler.handleList(self, confInfo) 21 | 22 | def handleEdit(self, confInfo): 23 | _validate_api_key( 24 | self.payload.get("api_key"), 25 | ) 26 | AdminExternalHandler.handleEdit(self, confInfo) 27 | 28 | def handleCreate(self, confInfo): 29 | _validate_api_key( 30 | self.payload.get("api_key"), 31 | ) 32 | AdminExternalHandler.handleCreate(self, confInfo) 33 | 34 | def handleRemove(self, confInfo): 35 | AdminExternalHandler.handleRemove(self, confInfo) 36 | -------------------------------------------------------------------------------- /package/bin/example_delete_checkpoint_rh.py: -------------------------------------------------------------------------------- 1 | import traceback 2 | 3 | import import_declare_test 4 | 5 | from solnlib import log 6 | from solnlib.modular_input import checkpointer 7 | from splunktaucclib.rest_handler.admin_external import AdminExternalHandler 8 | 9 | import example_utils 10 | 11 | 12 | class DeleteCheckpointRestHandler(AdminExternalHandler): 13 | def __init__(self, *args, **kwargs): 14 | AdminExternalHandler.__init__(self, *args, **kwargs) 15 | 16 | def handleList(self, confInfo): 17 | AdminExternalHandler.handleList(self, confInfo) 18 | 19 | def handleEdit(self, confInfo): 20 | AdminExternalHandler.handleEdit(self, confInfo) 21 | 22 | def handleCreate(self, confInfo): 23 | AdminExternalHandler.handleCreate(self, confInfo) 24 | 25 | def handleRemove(self, confInfo): 26 | log_filename = "example_delete_checkpoint" 27 | logger = log.Logs().get_logger(log_filename) 28 | session_key = self.getSessionKey() 29 | input_name = str(self.callerArgs.id) 30 | checkpointer_key_name = example_utils.get_example_collection_key_name( 31 | input_name 32 | ) 33 | logger.info(f"Deleting the checkpoint for input '{input_name}'") 34 | try: 35 | kvstore_checkpointer = checkpointer.KVStoreCheckpointer( 36 | "example_checkpointer", 37 | session_key, 38 | example_utils.ADDON_NAME, 39 | ) 40 | kvstore_checkpointer.delete(checkpointer_key_name) 41 | except Exception as e: 42 | log.log_exception( 43 | logger, 44 | e, 45 | "Checkpoint Error", 46 | msg_before=f"Error while deleting checkpoint for {input_name} input. 
{traceback.format_exc()}", 47 | ) 48 | AdminExternalHandler.handleRemove(self, confInfo) 49 | -------------------------------------------------------------------------------- /package/bin/example_helper.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from datetime import timezone 3 | import json 4 | import logging 5 | from typing import Optional 6 | 7 | import import_declare_test 8 | 9 | import example_utils 10 | 11 | import requests 12 | from solnlib import conf_manager, log 13 | from solnlib.modular_input import checkpointer 14 | from splunklib import modularinput as smi 15 | 16 | PAGE_SIZE = 100 17 | 18 | 19 | def logger_for_input(input_name: str) -> logging.Logger: 20 | return log.Logs().get_logger(f"{example_utils.ADDON_NAME.lower()}_{input_name}") 21 | 22 | 23 | def get_account_api_key(session_key: str, account_name: str): 24 | cfm = conf_manager.ConfManager( 25 | session_key, 26 | example_utils.ADDON_NAME, 27 | realm=f"__REST_CREDENTIAL__#{example_utils.ADDON_NAME}#configs/conf-splunk_ta_example_account", 28 | ) 29 | account_conf_file = cfm.get_conf("splunk_ta_example_account") 30 | return account_conf_file.get(account_name).get("api_key") 31 | 32 | 33 | def get_data_from_api( 34 | logger: logging.Logger, api_key: str, page_number: Optional[int] = 0 35 | ): 36 | logger.info("Getting data from an external API == ", page_number) 37 | 38 | def _call_api(page_number: int): 39 | parameters = {"page": page_number, "per_page": PAGE_SIZE} 40 | response = requests.get( 41 | "http://server-example-ta:5000/events", 42 | headers={ 43 | "API-Key": api_key, 44 | }, 45 | timeout=20, 46 | params=parameters, 47 | ) 48 | response.raise_for_status() 49 | return response.json() 50 | 51 | for _ in range(3): 52 | try: 53 | return _call_api(page_number) 54 | except requests.exceptions.HTTPError: 55 | logger.warning("Failed to get data from the API, retrying...") 56 | raise Exception("Failed to get data from the API") 57 | 58 | 59 | def validate_input(definition: smi.ValidationDefinition): 60 | return 61 | 62 | 63 | def stream_events(inputs: smi.InputDefinition, event_writer: smi.EventWriter): 64 | # inputs.inputs is a Python dictionary object like: 65 | # { 66 | # "example://": { 67 | # "account": "", 68 | # "disabled": "0", 69 | # "host": "$decideOnStartup", 70 | # "index": "", 71 | # "interval": "", 72 | # "python.version": "python3", 73 | # }, 74 | # } 75 | for input_name, input_item in inputs.inputs.items(): 76 | normalized_input_name = input_name.split("/")[-1] 77 | logger = logger_for_input(normalized_input_name) 78 | try: 79 | session_key = inputs.metadata["session_key"] 80 | kvstore_checkpointer = checkpointer.KVStoreCheckpointer( 81 | "example_checkpointer", 82 | session_key, 83 | example_utils.ADDON_NAME, 84 | ) 85 | log_level = conf_manager.get_log_level( 86 | logger=logger, 87 | session_key=session_key, 88 | app_name=example_utils.ADDON_NAME, 89 | conf_name="splunk_ta_example_settings", 90 | ) 91 | logger.setLevel(log_level) 92 | log.modular_input_start(logger, normalized_input_name) 93 | api_key = get_account_api_key(session_key, input_item.get("account")) 94 | checkpointer_key_name = example_utils.get_example_collection_key_name( 95 | input_name 96 | ) 97 | # if we don't have any checkpoint, we default it to 0 98 | current_checkpoint = ( 99 | kvstore_checkpointer.get(checkpointer_key_name) 100 | or input_item.get("fetch_from") 101 | or 0 102 | ) 103 | data = get_data_from_api(logger, api_key, current_checkpoint) 104 | sourcetype = 
"example:events" 105 | for line in data["events"]: 106 | event_writer.write_event( 107 | smi.Event( 108 | data=json.dumps(line, ensure_ascii=False, default=str), 109 | index=input_item.get("index"), 110 | sourcetype=sourcetype, 111 | ) 112 | ) 113 | new_checkpoint = int(current_checkpoint) + 1 114 | kvstore_checkpointer.update(checkpointer_key_name, new_checkpoint) 115 | log.events_ingested( 116 | logger, 117 | input_name, 118 | sourcetype, 119 | len(data), 120 | input_item.get("index"), 121 | account=input_item.get("account"), 122 | ) 123 | log.modular_input_end(logger, normalized_input_name) 124 | except Exception as e: 125 | log.log_exception( 126 | logger, 127 | e, 128 | "IngestionError", 129 | msg_before="Exception raised while ingesting data for demo_input: ", 130 | ) 131 | -------------------------------------------------------------------------------- /package/bin/example_utils.py: -------------------------------------------------------------------------------- 1 | ADDON_NAME = "Splunk_TA_Example" 2 | 3 | 4 | def get_example_collection_key_name(input_name: str) -> str: 5 | # `input_name` is a string like "example://". 6 | return input_name.split("/")[-1] 7 | -------------------------------------------------------------------------------- /package/default/collections.conf: -------------------------------------------------------------------------------- 1 | [example_checkpointer] 2 | field.state = string 3 | -------------------------------------------------------------------------------- /package/default/eventtypes.conf: -------------------------------------------------------------------------------- 1 | [example_events] 2 | search = (sourcetype=example:events) 3 | #tags alert -------------------------------------------------------------------------------- /package/default/props.conf: -------------------------------------------------------------------------------- 1 | [example:events] 2 | DATETIME_CONFIG = CURRENT 3 | LINE_BREAKER = ([\r\n]+) 4 | EVAL-app = "sample_app" 5 | EVAL-body = "This event is for PSA test" 6 | EVAL-description = "sample_description" 7 | EVAL-dest = "sample_dest" 8 | EVAL-dest_type = "sample_dest_type" 9 | EVAL-id = "sample_id" 10 | EVAL-mitre_technique_id = "sample_mitre_technique_id" 11 | EVAL-severity = "low" 12 | EVAL-signature = "sample_signature" 13 | EVAL-signature_id = "sample_signature_id" 14 | EVAL-src = "sample_src" 15 | EVAL-src_type = "sample_src_type" 16 | EVAL-type = "alert" 17 | EVAL-user = "sample_user" 18 | EVAL-user_name = "sample_user_name" 19 | EVAL-vendor_account = "sample_vendor_account" 20 | EVAL-vendor_region = "sample_vendor_region" 21 | -------------------------------------------------------------------------------- /package/default/tags.conf: -------------------------------------------------------------------------------- 1 | [eventtype=example_events] 2 | alert = enabled -------------------------------------------------------------------------------- /package/default/transforms.conf: -------------------------------------------------------------------------------- 1 | [example_checkpointer_lookup] 2 | collection = example_checkpointer 3 | external_type = kvstore 4 | fields_list = _key, state 5 | -------------------------------------------------------------------------------- /package/lib/requirements.txt: -------------------------------------------------------------------------------- 1 | splunktaucclib 2 | splunk-sdk 3 | solnlib 4 | -------------------------------------------------------------------------------- 
/package/static/appIcon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/splunk/splunk-example-ta/1378a9a04905e8edff6bfd0a6dc7354791053c16/package/static/appIcon.png -------------------------------------------------------------------------------- /package/static/appIconAlt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/splunk/splunk-example-ta/1378a9a04905e8edff6bfd0a6dc7354791053c16/package/static/appIconAlt.png -------------------------------------------------------------------------------- /package/static/appIconAlt_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/splunk/splunk-example-ta/1378a9a04905e8edff6bfd0a6dc7354791053c16/package/static/appIconAlt_2x.png -------------------------------------------------------------------------------- /package/static/appIcon_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/splunk/splunk-example-ta/1378a9a04905e8edff6bfd0a6dc7354791053c16/package/static/appIcon_2x.png -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | pytest-splunk-addon==5.4.1 2 | splunk-add-on-ucc-framework==5.58.0 3 | splunk-add-on-ucc-modinput-test==1.0.0 4 | -------------------------------------------------------------------------------- /scripts/build_ui.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Determine the directory of the script 4 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 5 | 6 | # Check if npm is installed 7 | if ! command -v node &> /dev/null 8 | then 9 | echo "Node.JS is not installed. Please install Node.JS to continue." 10 | exit 1 11 | fi 12 | 13 | if [ "$CI" = "true" ]; then 14 | npm --prefix "$SCRIPT_DIR/../ui" ci 15 | else 16 | npm --prefix "$SCRIPT_DIR/../ui" install 17 | fi 18 | 19 | npm --prefix "$SCRIPT_DIR/../ui" run build -------------------------------------------------------------------------------- /scripts/local_testing_setup.sh: -------------------------------------------------------------------------------- 1 | curl -k -X POST -u admin:Chang3d! --header "Content-Type: application/json" https://localhost:8089/servicesNS/-/Splunk_TA_Example/Splunk_TA_Example_account -d name=api_key -d api_key=super-secret-api-token 2 | 3 | for i in $(seq 1 4); 4 | do 5 | curl -k -X POST -u admin:Chang3d! --header "Content-Type: application/json" https://localhost:8089/servicesNS/-/Splunk_TA_Example/Splunk_TA_Example_example -d name=test_input_$i -d interval=20 -d index=main -d account=api_key 6 | done 7 | -------------------------------------------------------------------------------- /scripts/run_locally.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | # Determine the directory of the script 4 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 5 | 6 | cd "$SCRIPT_DIR"/.. 
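# Every command below runs from the repository root.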
7 | echo "🛑 Stopping any running Docker containers" 8 | docker compose down 9 | echo "🐍 Setting up a Python environment and deps" 10 | python3 -m venv .venv 11 | source .venv/bin/activate 12 | pip install -r requirements-dev.txt 13 | 14 | echo "🏗 Building the project with ucc-gen" 15 | ucc-gen build 16 | 17 | echo "📦 Installing npm dependencies and building UI assets" 18 | chmod +x "$SCRIPT_DIR"/build_ui.sh 19 | "$SCRIPT_DIR"/build_ui.sh 20 | 21 | # running on ARM macOS 22 | export DOCKER_DEFAULT_PLATFORM=linux/amd64 23 | echo "🐳 Starting Docker containers" 24 | docker compose up -d --build --wait 25 | 26 | echo "🚀 Done! It is now running at http://localhost:8000" -------------------------------------------------------------------------------- /scripts/run_splunk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2024 Splunk Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | SCRIPT_DIR=$(cd $(dirname ${BASH_SOURCE[0]}) && pwd) 18 | REPO_ROOT_DIR=$(cd "$SCRIPT_DIR/.." && pwd) 19 | 20 | 21 | # running on ARM Mac 22 | if [[ $(uname -m) == 'arm64' ]]; then 23 | export DOCKER_DEFAULT_PLATFORM=linux/amd64 24 | fi 25 | 26 | docker run \ 27 | -v "$REPO_ROOT_DIR/psa-output/Splunk_TA_Example:/opt/splunk/etc/apps/Splunk_TA_Example" \ 28 | -p 8000:8000 \ 29 | -p 8088:8088 \ 30 | -p 8089:8089 \ 31 | -p 9997:9997 \ 32 | -e "SPLUNK_START_ARGS=--accept-license" \ 33 | -e "SPLUNK_PASSWORD=Chang3d!" \ 34 | -e "SPLUNK_HEC_TOKEN=4a8a737d-5452-426c-a6f7-106dca4e813f" \ 35 | -e "SPLUNK_DISABLE_POPUPS=true" \ 36 | -d \ 37 | --pull=always \ 38 | --name splunk splunk/splunk:${1:-"latest"} 39 | -------------------------------------------------------------------------------- /server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12-alpine 2 | 3 | WORKDIR /app 4 | 5 | COPY requirements.txt . 6 | 7 | RUN pip install -r requirements.txt 8 | 9 | COPY app.py . 
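# app.py starts Flask on 0.0.0.0:5000; inside the compose network the add-on reaches it
# at http://server-example-ta:5000 (see docker-compose.yml and package/bin/example_helper.py).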
10 | 11 | CMD [ "python", "./app.py" ] 12 | -------------------------------------------------------------------------------- /server/app.py: -------------------------------------------------------------------------------- 1 | import random 2 | 3 | from flask import Flask, request, jsonify 4 | 5 | app = Flask(__name__) 6 | 7 | API_KEY = "super-secret-api-token" 8 | 9 | # Mock data for demonstration replace it with actual data retrieval logic 10 | all_events = [{"id": i, "event": f"Event {i}"} for i in range(1, 101)] 11 | 12 | 13 | # Mock data for demonstration replace it with actual data retrieval logic 14 | def get_mocked_events(page_num: int, per_page: int): 15 | return [ 16 | {"id": i, "event": f"Event {i}"} 17 | for i in range(page_num * per_page, (page_num + 1) * per_page) 18 | ] 19 | 20 | 21 | def _should_fail_with_server_error() -> bool: 22 | if random.random() < 0.1: 23 | return True 24 | return False 25 | 26 | 27 | @app.route("/events") 28 | def events(): 29 | # API key validation 30 | api_key = request.headers.get("API-Key") 31 | 32 | if api_key != API_KEY: 33 | return "Unauthorized", 401 34 | 35 | if _should_fail_with_server_error(): 36 | return "Internal Server Error", 500 37 | 38 | # Get pagination parameters from the query string 39 | page = request.args.get("page", 0, type=int) 40 | per_page = request.args.get("per_page", 10, type=int) 41 | 42 | # Calculate start and end indices for the items on the current page 43 | paginated_events = get_mocked_events(page, per_page) 44 | 45 | total_events = len(paginated_events) 46 | total_pages = (total_events + per_page - 1) // per_page 47 | 48 | return ( 49 | jsonify( 50 | { 51 | "events": paginated_events, 52 | "page": page, 53 | "per_page": per_page, 54 | "total_events": total_events, 55 | "total_pages": total_pages, 56 | } 57 | ), 58 | 200, 59 | ) 60 | 61 | 62 | if __name__ == "__main__": 63 | app.run(host="0.0.0.0", port=5000) 64 | -------------------------------------------------------------------------------- /server/requirements.txt: -------------------------------------------------------------------------------- 1 | flask==3.0.0 -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/splunk/splunk-example-ta/1378a9a04905e8edff6bfd0a6dc7354791053c16/tests/__init__.py -------------------------------------------------------------------------------- /tests/knowledge/pytest-splunk-addon-data.conf: -------------------------------------------------------------------------------- 1 | [example_sourcetype.xml] 2 | requirement_test_sample = 1 3 | source = Example_TA 4 | sourcetype = example:events 5 | sourcetype_to_search = example:events 6 | input_type = modinput 7 | host_type = plugin 8 | timestamp_type = plugin 9 | expected_event_count = 1 10 | index = main 11 | -------------------------------------------------------------------------------- /tests/knowledge/samples/example_sourcetype.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | Splunk 4 | Splunk Example TA 5 | 6 | 7 | 0.0.1 8 | 9 | 10 | 11 | lab, These are sample events used to generate PSA tests 12 | 13 | 14 | 15 | 16 | Alerts 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /tests/knowledge/test_addon.py: 
-------------------------------------------------------------------------------- 1 | from pytest_splunk_addon.standard_lib.addon_basic import Basic 2 | 3 | 4 | class Test_App(Basic): 5 | def empty_method(self): 6 | pass 7 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/README.md: -------------------------------------------------------------------------------- 1 | 1. Make sure there is no Splunk running on your workstation that exposes standard ports 2 | 3 | 2. Run following script: 4 | ```console 5 | ./scripts/run_locally.sh 6 | ``` 7 | Splunk and vendor product will be setup as docker containers: splunk-example-ta and server-example-ta respectively. Splunk container exposes standard ports and have `admin` user defined with `Chang3d!` as password 8 | 9 | 3. Make sure ucc-test-modinput is installed 10 | ```console 11 | ucc-test-modinput --version 12 | ``` 13 | 14 | 4. Export environment variables for: 15 | 16 | 1. Splunk 17 | 18 | ```console 19 | export MODINPUT_TEST_SPLUNK_HOST=localhost 20 | export MODINPUT_TEST_SPLUNK_PORT=8089 21 | export MODINPUT_TEST_SPLUNK_USERNAME=admin 22 | export MODINPUT_TEST_SPLUNK_PASSWORD_BASE64=$(ucc-test-modinput base64encode -s 'Chang3d!') 23 | ``` 24 | 25 | 2. vendor product 26 | 27 | ```console 28 | export MODINPUT_TEST_EXAMPLE_API_KEY_BASE64=$(ucc-test-modinput base64encode -s 'super-secret-api-token') 29 | ``` 30 | 31 | 5. Run ucc-test-modinput to generate add-on SDK 32 | ```console 33 | ucc-test-modinput gen 34 | ``` 35 | 36 | 6. Run tests 37 | ```console 38 | pytest tests/ucc_modinput_functional/ 39 | ``` -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/__init__.py: -------------------------------------------------------------------------------- 1 | import tests.ucc_modinput_functional.vendor.client # noqa 2 | import tests.ucc_modinput_functional.splunk.client # noqa 3 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/defaults.py: -------------------------------------------------------------------------------- 1 | PROBE_PROXY_CHECK_INTERVAL = 2 # in seconds 2 | PROBE_PROXY_CHECK_TIMEOUT = 30 # in seconds 3 | PROBE_LOGLEVEL_CHECK_INTERVAL = 2 # in seconds 4 | PROBE_LOGLEVEL_CHECK_TIMEOUT = 30 # in seconds 5 | TA_LOG_LEVEL_FOR_TESTS = "DEBUG" 6 | ENCRYPTED_VALUE = "******" 7 | INPUT_INTERVAL = 60 8 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/splunk/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/splunk/splunk-example-ta/1378a9a04905e8edff6bfd0a6dc7354791053c16/tests/ucc_modinput_functional/splunk/__init__.py -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/splunk/client/__init__.py: -------------------------------------------------------------------------------- 1 | from .client import SplunkClient, SplunkApiError # noqa 2 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/splunk/client/_managed_client.py: -------------------------------------------------------------------------------- 1 | # fmt: off 2 | 3 | """ 4 | Note: 5 | This file is under the control of the ucc-test-modinput CLI tool and must 6 | not be modified manually by developers. 
If an extension is needed or a 7 | method needs to be redefined, it should be done in a separate file. 8 | """ 9 | 10 | import json 11 | from typing import Any, Dict, List, Optional 12 | from splunk_add_on_ucc_modinput_test.functional.splunk.client import ( 13 | SplunkClientBase, 14 | ) 15 | from swagger_client.rest import ApiException 16 | import logging 17 | 18 | logger = logging.getLogger("ucc-modinput-test") 19 | 20 | 21 | class SplunkApiError(Exception): 22 | def __init__(self, error: ApiException): 23 | self.api_exception = error 24 | 25 | @property 26 | def status(self) -> int: 27 | return int(self.api_exception.status) 28 | 29 | @property 30 | def reason(self) -> str: 31 | return self.api_exception.reason 32 | 33 | @property 34 | def body(self) -> bytes: 35 | return self.api_exception.body 36 | 37 | @property 38 | def json(self) -> Optional[Dict[str, Any]]: 39 | try: 40 | return json.loads(self.api_exception.body) 41 | except json.JSONDecodeError: 42 | return None 43 | 44 | @property 45 | def error_message(self) -> Optional[str]: 46 | json_body = self.json 47 | if json_body: 48 | return json_body.get("messages", [{}])[0].get("text") 49 | return None 50 | 51 | 52 | class ManagedSplunkClient(SplunkClientBase): 53 | _OUTPUT_MODE = "json" 54 | 55 | def get_account_list(self) -> List[Any]: 56 | try: 57 | kwargs = dict(output_mode=self._OUTPUT_MODE) 58 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 59 | response = ( 60 | self.ta_api.splunk_ta_example_account_get( 61 | **kwargs 62 | ) 63 | ) 64 | logger.debug( 65 | f"TA API splunk_ta_example_account_get response: {response}" 66 | ) 67 | 68 | return response.to_dict().get("entry", []) 69 | 70 | except ApiException as e: 71 | logger.error( 72 | "Exception when calling TA API splunk_ta_example_account_get:" 73 | f" {e}" 74 | ) 75 | raise SplunkApiError(e) from e 76 | 77 | def delete_account(self, name: str) -> None: 78 | try: 79 | kwargs = dict(name=name, output_mode=self._OUTPUT_MODE) 80 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 81 | response = ( 82 | self.ta_api.splunk_ta_example_account_name_delete( 83 | **kwargs 84 | ) 85 | ) 86 | logger.debug( 87 | f"TA API splunk_ta_example_account_name_delete response: {response}" # noqa: E501 88 | ) 89 | 90 | return None 91 | 92 | except ApiException as e: 93 | logger.error( 94 | "Exception when calling TA API splunk_ta_example_account_name_delete:" # noqa: E501 95 | f" {e}" 96 | ) 97 | raise SplunkApiError(e) from e 98 | 99 | def get_account(self, name: str) -> Dict[str, Any]: 100 | try: 101 | kwargs = dict(name=name, output_mode=self._OUTPUT_MODE) 102 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 103 | response = ( 104 | self.ta_api.splunk_ta_example_account_name_get( 105 | **kwargs 106 | ) 107 | ) 108 | logger.debug( 109 | f"TA API splunk_ta_example_account_name_get response: {response}" # noqa: E501 110 | ) 111 | 112 | return response.to_dict().get("entry", [{}])[0].get("content") 113 | 114 | except ApiException as e: 115 | logger.error( 116 | "Exception when calling TA API splunk_ta_example_account_name_get:" # noqa: E501 117 | f" {e}" 118 | ) 119 | raise SplunkApiError(e) from e 120 | 121 | def update_account(self, name: str, api_key: Optional[str] = None) -> Dict[str, Any]: # noqa: E501 122 | try: 123 | kwargs = dict(output_mode=self._OUTPUT_MODE, name=name, api_key=api_key) # noqa: E501 124 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 125 | response = ( 126 | self.ta_api.splunk_ta_example_account_name_post( 127 | **kwargs 128 
| ) 129 | ) 130 | logger.debug( 131 | f"TA API splunk_ta_example_account_name_post response: {response}" # noqa: E501 132 | ) 133 | 134 | return response.to_dict().get("entry", [{}])[0].get("content") 135 | 136 | except ApiException as e: 137 | logger.error( 138 | "Exception when calling TA API splunk_ta_example_account_name_post:" # noqa: E501 139 | f" {e}" 140 | ) 141 | raise SplunkApiError(e) from e 142 | 143 | def create_account(self, name: Optional[str] = None, api_key: Optional[str] = None) -> Dict[str, Any]: # noqa: E501 144 | try: 145 | kwargs = dict(output_mode=self._OUTPUT_MODE, name=name, api_key=api_key) # noqa: E501 146 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 147 | response = ( 148 | self.ta_api.splunk_ta_example_account_post( 149 | **kwargs 150 | ) 151 | ) 152 | logger.debug( 153 | f"TA API splunk_ta_example_account_post response: {response}" 154 | ) 155 | 156 | return response.to_dict().get("entry", [{}])[0].get("content") 157 | 158 | except ApiException as e: 159 | logger.error( 160 | "Exception when calling TA API splunk_ta_example_account_post:" 161 | f" {e}" 162 | ) 163 | raise SplunkApiError(e) from e 164 | 165 | def get_example_list(self) -> List[Any]: 166 | try: 167 | kwargs = dict(output_mode=self._OUTPUT_MODE) 168 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 169 | response = ( 170 | self.ta_api.splunk_ta_example_example_get( 171 | **kwargs 172 | ) 173 | ) 174 | logger.debug( 175 | f"TA API splunk_ta_example_example_get response: {response}" 176 | ) 177 | 178 | return response.to_dict().get("entry", []) 179 | 180 | except ApiException as e: 181 | logger.error( 182 | "Exception when calling TA API splunk_ta_example_example_get:" 183 | f" {e}" 184 | ) 185 | raise SplunkApiError(e) from e 186 | 187 | def delete_example(self, name: str) -> None: 188 | try: 189 | kwargs = dict(name=name, output_mode=self._OUTPUT_MODE) 190 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 191 | response = ( 192 | self.ta_api.splunk_ta_example_example_name_delete( 193 | **kwargs 194 | ) 195 | ) 196 | logger.debug( 197 | f"TA API splunk_ta_example_example_name_delete response: {response}" # noqa: E501 198 | ) 199 | 200 | return None 201 | 202 | except ApiException as e: 203 | logger.error( 204 | "Exception when calling TA API splunk_ta_example_example_name_delete:" # noqa: E501 205 | f" {e}" 206 | ) 207 | raise SplunkApiError(e) from e 208 | 209 | def get_example(self, name: str) -> Dict[str, Any]: 210 | try: 211 | kwargs = dict(name=name, output_mode=self._OUTPUT_MODE) 212 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 213 | response = ( 214 | self.ta_api.splunk_ta_example_example_name_get( 215 | **kwargs 216 | ) 217 | ) 218 | logger.debug( 219 | f"TA API splunk_ta_example_example_name_get response: {response}" # noqa: E501 220 | ) 221 | 222 | return response.to_dict().get("entry", [{}])[0].get("content") 223 | 224 | except ApiException as e: 225 | logger.error( 226 | "Exception when calling TA API splunk_ta_example_example_name_get:" # noqa: E501 227 | f" {e}" 228 | ) 229 | raise SplunkApiError(e) from e 230 | 231 | def update_example( 232 | self, 233 | name: str, 234 | interval: Optional[str] = None, 235 | index: Optional[str] = None, 236 | account: Optional[str] = None, 237 | fetch_from: Optional[str] = None, 238 | start_from: Optional[str] = None, 239 | disabled: Optional[str] = None, 240 | ) -> Dict[str, Any]: 241 | try: 242 | kwargs = dict( 243 | output_mode=self._OUTPUT_MODE, 244 | name=name, 245 | interval=interval, 
246 | index=index, 247 | account=account, 248 | fetch_from=fetch_from, 249 | start_from=start_from, 250 | disabled=disabled, 251 | ) 252 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 253 | response = ( 254 | self.ta_api.splunk_ta_example_example_name_post( 255 | **kwargs 256 | ) 257 | ) 258 | logger.debug( 259 | f"TA API splunk_ta_example_example_name_post response: {response}" # noqa: E501 260 | ) 261 | 262 | return response.to_dict().get("entry", [{}])[0].get("content") 263 | 264 | except ApiException as e: 265 | logger.error( 266 | "Exception when calling TA API splunk_ta_example_example_name_post:" # noqa: E501 267 | f" {e}" 268 | ) 269 | raise SplunkApiError(e) from e 270 | 271 | def create_example( 272 | self, 273 | name: Optional[str] = None, 274 | interval: Optional[str] = None, 275 | index: Optional[str] = None, 276 | account: Optional[str] = None, 277 | fetch_from: Optional[str] = None, 278 | start_from: Optional[str] = None, 279 | ) -> Dict[str, Any]: 280 | try: 281 | kwargs = dict( 282 | output_mode=self._OUTPUT_MODE, 283 | name=name, 284 | interval=interval, 285 | index=index, 286 | account=account, 287 | fetch_from=fetch_from, 288 | start_from=start_from, 289 | ) 290 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 291 | response = ( 292 | self.ta_api.splunk_ta_example_example_post( 293 | **kwargs 294 | ) 295 | ) 296 | logger.debug( 297 | f"TA API splunk_ta_example_example_post response: {response}" 298 | ) 299 | 300 | return response.to_dict().get("entry", [{}])[0].get("content") 301 | 302 | except ApiException as e: 303 | logger.error( 304 | "Exception when calling TA API splunk_ta_example_example_post:" 305 | f" {e}" 306 | ) 307 | raise SplunkApiError(e) from e 308 | 309 | def get_settings_advanced_inputs(self) -> Dict[str, Any]: 310 | try: 311 | kwargs = dict(output_mode=self._OUTPUT_MODE) 312 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 313 | response = ( 314 | self.ta_api.splunk_ta_example_settings_advanced_inputs_get( 315 | **kwargs 316 | ) 317 | ) 318 | logger.debug( 319 | f"TA API splunk_ta_example_settings_advanced_inputs_get response: {response}" # noqa: E501 320 | ) 321 | 322 | return response.to_dict().get("entry", [{}])[0].get("content") 323 | 324 | except ApiException as e: 325 | logger.error( 326 | "Exception when calling TA API splunk_ta_example_settings_advanced_inputs_get:" # noqa: E501 327 | f" {e}" 328 | ) 329 | raise SplunkApiError(e) from e 330 | 331 | def update_settings_advanced_inputs(self) -> Dict[str, Any]: 332 | try: 333 | kwargs = dict(output_mode=self._OUTPUT_MODE) 334 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 335 | response = ( 336 | self.ta_api.splunk_ta_example_settings_advanced_inputs_post( 337 | **kwargs 338 | ) 339 | ) 340 | logger.debug( 341 | f"TA API splunk_ta_example_settings_advanced_inputs_post response: {response}" # noqa: E501 342 | ) 343 | 344 | return response.to_dict().get("entry", [{}])[0].get("content") 345 | 346 | except ApiException as e: 347 | logger.error( 348 | "Exception when calling TA API splunk_ta_example_settings_advanced_inputs_post:" # noqa: E501 349 | f" {e}" 350 | ) 351 | raise SplunkApiError(e) from e 352 | 353 | def get_settings_logging(self) -> Dict[str, Any]: 354 | try: 355 | kwargs = dict(output_mode=self._OUTPUT_MODE) 356 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 357 | response = ( 358 | self.ta_api.splunk_ta_example_settings_logging_get( 359 | **kwargs 360 | ) 361 | ) 362 | logger.debug( 363 | f"TA API 
splunk_ta_example_settings_logging_get response: {response}" # noqa: E501 364 | ) 365 | 366 | return response.to_dict().get("entry", [{}])[0].get("content") 367 | 368 | except ApiException as e: 369 | logger.error( 370 | "Exception when calling TA API splunk_ta_example_settings_logging_get:" # noqa: E501 371 | f" {e}" 372 | ) 373 | raise SplunkApiError(e) from e 374 | 375 | def update_settings_logging(self, loglevel: Optional[str] = None) -> Dict[str, Any]: # noqa: E501 376 | try: 377 | kwargs = dict(output_mode=self._OUTPUT_MODE, loglevel=loglevel) 378 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 379 | response = ( 380 | self.ta_api.splunk_ta_example_settings_logging_post( 381 | **kwargs 382 | ) 383 | ) 384 | logger.debug( 385 | f"TA API splunk_ta_example_settings_logging_post response: {response}" # noqa: E501 386 | ) 387 | 388 | return response.to_dict().get("entry", [{}])[0].get("content") 389 | 390 | except ApiException as e: 391 | logger.error( 392 | "Exception when calling TA API splunk_ta_example_settings_logging_post:" # noqa: E501 393 | f" {e}" 394 | ) 395 | raise SplunkApiError(e) from e 396 | 397 | def get_settings_proxy(self) -> Dict[str, Any]: 398 | try: 399 | kwargs = dict(output_mode=self._OUTPUT_MODE) 400 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 401 | response = ( 402 | self.ta_api.splunk_ta_example_settings_proxy_get( 403 | **kwargs 404 | ) 405 | ) 406 | logger.debug( 407 | f"TA API splunk_ta_example_settings_proxy_get response: {response}" # noqa: E501 408 | ) 409 | 410 | return response.to_dict().get("entry", [{}])[0].get("content") 411 | 412 | except ApiException as e: 413 | logger.error( 414 | "Exception when calling TA API splunk_ta_example_settings_proxy_get:" # noqa: E501 415 | f" {e}" 416 | ) 417 | raise SplunkApiError(e) from e 418 | 419 | def update_settings_proxy( 420 | self, 421 | proxy_enabled: Optional[str] = None, 422 | proxy_type: Optional[str] = None, 423 | proxy_url: Optional[str] = None, 424 | proxy_port: Optional[str] = None, 425 | proxy_username: Optional[str] = None, 426 | proxy_password: Optional[str] = None, 427 | proxy_rdns: Optional[str] = None, 428 | ) -> Dict[str, Any]: 429 | try: 430 | kwargs = dict( 431 | output_mode=self._OUTPUT_MODE, 432 | proxy_enabled=proxy_enabled, 433 | proxy_type=proxy_type, 434 | proxy_url=proxy_url, 435 | proxy_port=proxy_port, 436 | proxy_username=proxy_username, 437 | proxy_password=proxy_password, 438 | proxy_rdns=proxy_rdns, 439 | ) 440 | kwargs = {k: v for k, v in kwargs.items() if v is not None} 441 | response = ( 442 | self.ta_api.splunk_ta_example_settings_proxy_post( 443 | **kwargs 444 | ) 445 | ) 446 | logger.debug( 447 | f"TA API splunk_ta_example_settings_proxy_post response: {response}" # noqa: E501 448 | ) 449 | 450 | return response.to_dict().get("entry", [{}])[0].get("content") 451 | 452 | except ApiException as e: 453 | logger.error( 454 | "Exception when calling TA API splunk_ta_example_settings_proxy_post:" # noqa: E501 455 | f" {e}" 456 | ) 457 | raise SplunkApiError(e) from e 458 | # fmt: on 459 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/splunk/client/client.py: -------------------------------------------------------------------------------- 1 | from splunk_add_on_ucc_modinput_test.functional.decorators import ( 2 | register_splunk_class, 3 | ) 4 | from tests.ucc_modinput_functional.splunk.client.configuration import ( 5 | Configuration, 6 | ) 7 | from 
tests.ucc_modinput_functional.splunk.client._managed_client import ( 8 | ManagedSplunkClient, 9 | SplunkApiError, 10 | ) 11 | import swagger_client 12 | 13 | 14 | @register_splunk_class(swagger_client, Configuration) 15 | class SplunkClient(ManagedSplunkClient): 16 | # here you can add your custom Splunk client extension code 17 | pass 18 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/splunk/client/configuration.py: -------------------------------------------------------------------------------- 1 | from splunk_add_on_ucc_modinput_test.common import utils # noqa 2 | from splunk_add_on_ucc_modinput_test.functional.splunk import ( 3 | SplunkConfigurationBase, 4 | ) 5 | 6 | 7 | class Configuration(SplunkConfigurationBase): 8 | def customize_configuration(self) -> None: 9 | # to be implemented 10 | # self.encoded_prop = utils.get_from_environment_variable( 11 | # "ENV_PROP_NAME1", string_function=utils.Base64.decode 12 | # ) 13 | # self.not_encoded_prop = utils.get_from_environment_variable( 14 | # "ENV_PROP_NAME2" 15 | # ) 16 | pass 17 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/splunk/forges.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Generator, List 2 | from tests.ucc_modinput_functional import defaults 3 | from tests.ucc_modinput_functional.splunk.client import ( 4 | SplunkClient, 5 | SplunkApiError, 6 | ) 7 | from tests.ucc_modinput_functional.vendor.client import VendorClient 8 | from splunk_add_on_ucc_modinput_test.common import utils 9 | 10 | 11 | def try_to_set_loglevel( 12 | splunk_client: SplunkClient, loglevel: str 13 | ) -> Generator[Dict[str, object], None, None]: 14 | """ 15 | Forge method that tries to configure TA log level using an incorrect 16 | unsupported log level value. The goal is to collect the error message 17 | provided by the TA and pass it to the corresponding test method for 18 | validation. Forges pass values to tests, probes, and forges by storing 19 | key-value pairs in the test artifactory, achieved by yielding key-value 20 | pairs in the form of a dictionary. In addition to the error message, 21 | the forge yields the expected log level value used by a probe to ensure 22 | the desired log level value was indeed applied to the TA's configuration. 23 | 24 | Args: 25 | splunk_client (SplunkClient): The Splunk client instance. 26 | loglevel (str): The log level to set. 27 | 28 | Yields: 29 | Dict[str, object]: A dictionary containing the expected log level 30 | and any error message encountered. 31 | """ 32 | error = None 33 | old_loglevel = splunk_client.get_settings_logging().get("loglevel") 34 | assert ( 35 | old_loglevel != loglevel 36 | ), f"Invalid initial conditions: loglevel is already set to {loglevel}" 37 | 38 | try: 39 | splunk_client.update_settings_logging(loglevel) 40 | except ValueError as e: 41 | error = str(e) 42 | 43 | yield dict(expected_loglevel=loglevel, error=error) 44 | 45 | # teardown 46 | if error is None: 47 | splunk_client.update_settings_logging(old_loglevel) 48 | 49 | 50 | def set_loglevel( 51 | splunk_client: SplunkClient, loglevel: str 52 | ) -> Generator[Dict[str, object], None, None]: 53 | """ 54 | Forge method that configures TA log level and passes it to the 55 | artifactory to be used by a probe to ensure the desired loglevel 56 | value is applied to TA's configuration. 
57 | 58 | This method does not catch and pass any error messages. Instead, 59 | API exceptions cause a forge crash handled by the framework and 60 | turned into a corresponding test fast fail. 61 | 62 | Args: 63 | splunk_client (SplunkClient): The Splunk client instance. 64 | loglevel (str): The desired log level to set. 65 | 66 | Yields: 67 | Dict[str, object]: A dictionary containing the expected log level. 68 | 69 | Teardown: 70 | Resets the log level to its original value after the test. 71 | """ 72 | old_loglevel = splunk_client.get_settings_logging().get("loglevel") 73 | 74 | splunk_client.update_settings_logging(loglevel) 75 | 76 | yield dict( 77 | expected_loglevel=loglevel, 78 | ) 79 | 80 | # teardown 81 | splunk_client.update_settings_logging(old_loglevel) 82 | 83 | 84 | def try_to_configure_proxy( 85 | splunk_client: SplunkClient, 86 | exclude: List[str] = [], 87 | overwrite: Dict[str, object] = {}, 88 | ) -> Generator[Dict[str, object], None, None]: 89 | """ 90 | Forge method that tries to configure TA proxy using incorrect/unsupported 91 | proxy configuration. 92 | 93 | Default proxy_config contains values that should pass API endpoint 94 | validation. However, the exclude and overwrite arguments allow deleting 95 | and/or modifying configuration values to fail validation and provoke the 96 | expected error message. 97 | 98 | The goal is to collect the error message and status_code provided by the 99 | TA API endpoint and pass it to the corresponding test method for 100 | validation. Forges pass values to tests, probes, and forges by storing 101 | key-value pairs in the test artifactory, achieved by yielding key-value 102 | pairs in the form of a dictionary. 103 | 104 | In addition to the error message and status_code, the forge yields the 105 | expected proxy configuration used by a probe to ensure that the desired 106 | proxy settings are indeed applied to TA's configuration. 107 | 108 | At the teardown section, it only disables the configured proxy. This is 109 | done instead of restoring the previous configuration to avoid restoring 110 | a non-configured proxy state that always fails as some proxy fields are 111 | mandatory. 112 | 113 | Args: 114 | splunk_client (SplunkClient): The Splunk client instance. 115 | exclude (List[str], optional): List of proxy configuration fields to 116 | exclude. Defaults to []. 117 | overwrite (Dict[str, object], optional): Dictionary of proxy 118 | configuration fields to overwrite. Defaults to {}. 119 | 120 | Yields: 121 | Dict[str, object]: A dictionary containing the expected proxy 122 | configuration, error message, and status code. 
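        Example (illustrative sketch only, not part of the shipped tests; it
        assumes forge() forwards keyword arguments to this forge the same way
        it does for set_loglevel in test_settings.py):

            @attach(forge(try_to_configure_proxy,
                          overwrite={"proxy_port": "not-a-number"}))
            def test_invalid_proxy_port(error: str) -> None:
                assert "Invalid format for integer value" in error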
123 | """ 124 | 125 | error, status_code = None, None 126 | proxy_configs: Dict[str, object] = { 127 | "proxy_enabled": "0", 128 | "proxy_port": "3128", 129 | "proxy_rdns": "1", 130 | "proxy_type": "http", 131 | "proxy_url": "localhost", 132 | "proxy_username": "some_user_name", 133 | "proxy_password": "some_password", 134 | } 135 | 136 | for field_name in exclude: 137 | proxy_configs.pop(field_name, None) 138 | 139 | proxy_configs.update(overwrite) 140 | 141 | try: 142 | splunk_client.update_settings_proxy(**proxy_configs) 143 | except SplunkApiError as e: 144 | error = e.error_message 145 | status_code = e.status 146 | utils.logger.error(f"proxy error: {status_code}, {e.error_message}") 147 | 148 | yield dict( 149 | expected_proxy=proxy_configs, 150 | error=error, 151 | status_code=status_code, 152 | ) 153 | 154 | # teardown 155 | if error is None: 156 | proxy_configs["proxy_enabled"] = "0" 157 | splunk_client.update_settings_proxy(**proxy_configs) 158 | 159 | 160 | def configure_proxy( 161 | splunk_client: SplunkClient, 162 | proxy_configs: Dict[str, str], 163 | ) -> Generator[Dict[str, object], None, None]: 164 | """ 165 | Configures the TA proxy settings for a Splunk client and ensures the 166 | desired proxy settings are applied to the TA's configuration. 167 | 168 | This function updates the proxy settings using the provided configurations 169 | and yields the expected proxy settings. It does not handle any exceptions 170 | internally; any API exceptions will cause the forge to crash, which is 171 | handled by the framework and results in a corresponding test fast fail. 172 | 173 | During the teardown phase, the function disables the configured proxy 174 | instead of restoring the previous configuration. This approach avoids 175 | restoring a non-configured proxy state, which always fails as some proxy 176 | fields are mandatory. 177 | 178 | Args: 179 | splunk_client (SplunkClient): The Splunk client to configure. 180 | proxy_configs (Dict[str, str]): The proxy configuration settings. 181 | 182 | Yields: 183 | Dict[str, object]: A dictionary containing the expected proxy settings. 184 | """ 185 | splunk_client.update_settings_proxy(**proxy_configs) 186 | 187 | yield dict( 188 | expected_proxy=proxy_configs, 189 | ) 190 | 191 | # teardown 192 | proxy_configs["proxy_enabled"] = "0" 193 | splunk_client.update_settings_proxy(**proxy_configs) 194 | 195 | 196 | def configure_socks5_proxy( 197 | splunk_client: SplunkClient, valid: bool = True 198 | ) -> Generator[Dict[str, object], None, None]: 199 | """ 200 | Forge method that prepares socks5 proxy configuration while using 201 | configure_proxy forge for actual proxy update. 202 | 203 | Based on test requirements, this forge can create a proxy with valid and 204 | invalid credentials. By default, it configures the proxy with valid 205 | credentials. A proxy with invalid credentials can be useful for tests 206 | checking how the TA handles incorrectly configured proxies and what error 207 | logs it creates to let users quickly detect the issue. 208 | 209 | Args: 210 | splunk_client (SplunkClient): The Splunk client instance. 211 | valid (bool, optional): Flag to determine if the proxy should be 212 | configured with valid credentials. Defaults to True. 213 | 214 | Yields: 215 | Generator[Dict[str, object], None, None]: A generator yielding the 216 | proxy configuration dictionary. 
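        Example (illustrative sketch only; it mirrors how test_settings.py
        wires forges and probes, and assumes the wait_for_proxy probe result
        is injected into the test under the probe's name):

            @attach(forge(configure_socks5_proxy, valid=True,
                          probe=wait_for_proxy))
            def test_socks5_proxy_applied(
                splunk_client: SplunkClient, wait_for_proxy: bool
            ) -> None:
                assert wait_for_proxy is True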
217 | """ 218 | proxy_configs = { 219 | "proxy_enabled": "1", 220 | "proxy_port": "1080", 221 | "proxy_rdns": "1", 222 | "proxy_type": "socks5", 223 | "proxy_url": splunk_client.config.proxy_http_url, 224 | } 225 | 226 | if valid: 227 | proxy_configs.update( 228 | { 229 | "proxy_password": splunk_client.config.proxy_socks5_password, 230 | "proxy_username": splunk_client.config.proxy_socks5_username, 231 | } 232 | ) 233 | else: 234 | proxy_configs.update( 235 | { 236 | "proxy_password": "invalid-proxy-password", 237 | "proxy_username": "invalid-proxy-username", 238 | } 239 | ) 240 | 241 | yield from configure_proxy(splunk_client, proxy_configs) 242 | 243 | 244 | def configure_http_proxy( 245 | splunk_client: SplunkClient, valid: bool = True 246 | ) -> Generator[Dict[str, object], None, None]: 247 | """ 248 | Forge method that prepares http proxy configuration while using 249 | configure_proxy forge for actual proxy update. 250 | 251 | Based on test requirements, this forge can create a proxy with valid 252 | and invalid credentials. By default, it configures the proxy with valid 253 | credentials. 254 | 255 | A proxy with invalid credentials can be useful for tests checking how 256 | the TA handles incorrectly configured proxies and what error logs it 257 | creates to let users quickly detect the issue. 258 | 259 | Args: 260 | splunk_client (SplunkClient): The Splunk client instance. 261 | valid (bool): Flag indicating whether to configure the proxy with 262 | valid credentials. Defaults to True. 263 | 264 | Yields: 265 | Generator[Dict[str, object], None, None]: A generator yielding the 266 | proxy configuration dictionary. 267 | """ 268 | proxy_configs = { 269 | "proxy_enabled": "1", 270 | "proxy_port": "3128", 271 | "proxy_rdns": "1", 272 | "proxy_type": "http", 273 | "proxy_url": splunk_client.config.proxy_http_url, 274 | } 275 | 276 | if valid: 277 | proxy_configs.update( 278 | { 279 | "proxy_password": splunk_client.config.proxy_http_password, 280 | "proxy_username": splunk_client.config.proxy_http_username, 281 | } 282 | ) 283 | else: 284 | proxy_configs.update( 285 | { 286 | "proxy_password": "invalid-proxy-password", 287 | "proxy_username": "invalid-proxy-username", 288 | } 289 | ) 290 | 291 | yield from configure_proxy(splunk_client, proxy_configs) 292 | 293 | 294 | def _account_config(name: str, vendor_client: VendorClient) -> Dict[str, str]: 295 | return { 296 | "name": name, 297 | "api_key": vendor_client.config.api_key, 298 | } 299 | 300 | 301 | def account( 302 | splunk_client: SplunkClient, 303 | vendor_client: VendorClient, 304 | ) -> Generator[Dict[str, str], None, None]: 305 | account_config = _account_config("ExampleAccount", vendor_client) 306 | splunk_client.create_account(**account_config) 307 | yield dict( 308 | account_config_name=account_config["name"] 309 | ) # yielded from forges dict key will be available as global variable 310 | # you can use in your tests to refer to yielded dict value 311 | 312 | 313 | def another_account( 314 | splunk_client: SplunkClient, 315 | vendor_client: VendorClient, 316 | ) -> Generator[Dict[str, str], None, None]: 317 | account_config = _account_config("AnotherExampleAccount", vendor_client) 318 | splunk_client.create_account(**account_config) 319 | yield dict(another_account_config_name=account_config["name"]) 320 | 321 | 322 | def another_account_index( 323 | splunk_client: SplunkClient, 324 | ) -> Generator[Dict[str, str], None, None]: 325 | index_name = f"idx_mit_another_account_{utils.Common().sufix}" 326 | splk_conf = 
splunk_client.splunk_configuration 327 | splk_conf.create_index( 328 | index_name, 329 | splk_conf.service, 330 | is_cloud=splk_conf.is_cloud, 331 | acs_stack=splk_conf._acs_stack, 332 | acs_server=splk_conf.acs_server, 333 | splunk_token=splk_conf.token, 334 | ) 335 | yield {"another_account_index_name": index_name} 336 | 337 | 338 | def _account_input( 339 | splunk_client: SplunkClient, 340 | test_id: str, 341 | *, 342 | name: str, 343 | index: str, 344 | account: str, 345 | input_spl_name: str, 346 | ) -> Generator[Dict[str, str], None, None]: 347 | start_time = utils.get_epoch_timestamp() 348 | name += f"_{test_id}" 349 | splunk_client.create_example(name, defaults.INPUT_INTERVAL, index, account) 350 | input_spl = ( 351 | f'search index={index} source="example://{name}" ' f"| where _time>{start_time}" 352 | ) 353 | # Take raw event into account when constructing the SPL; as an example: 354 | # extractions should be tested with pytest-splunk-addon 355 | yield {input_spl_name: input_spl} 356 | splunk_client.update_example(name, None, None, None, None, None, True) 357 | 358 | 359 | def account_input( 360 | splunk_client: SplunkClient, 361 | # vendor_client: VendorClient, 362 | test_id: str, 363 | account_config_name: str, 364 | ) -> Generator[Dict[str, str], None, None]: 365 | yield from _account_input( 366 | splunk_client=splunk_client, 367 | test_id=test_id, 368 | name="ExampleInput", 369 | index=splunk_client.splunk_configuration.dedicated_index.name, 370 | account=account_config_name, 371 | input_spl_name="example_input_spl", 372 | ) 373 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/splunk/probes.py: -------------------------------------------------------------------------------- 1 | import time 2 | from typing import Generator, Dict, Optional 3 | from tests.ucc_modinput_functional.splunk.client import SplunkClient 4 | from tests.ucc_modinput_functional.defaults import ( 5 | PROBE_PROXY_CHECK_INTERVAL, 6 | PROBE_PROXY_CHECK_TIMEOUT, 7 | PROBE_LOGLEVEL_CHECK_INTERVAL, 8 | PROBE_LOGLEVEL_CHECK_TIMEOUT, 9 | ) 10 | from splunk_add_on_ucc_modinput_test.common import utils 11 | import logging 12 | 13 | logger = logging.getLogger("ucc-modinput-test") 14 | 15 | 16 | def same_proxy_configs( 17 | proxy1: Dict[str, object], 18 | proxy2: Dict[str, object], 19 | ignore_password: bool = True, 20 | ) -> bool: 21 | """ 22 | Compare two proxy configurations, ignoring properties that are not 23 | configured (None). 24 | 25 | By default, the comparison ignores the 'proxy_password' property as it 26 | is returned sanitized by the API. 27 | 28 | Args: 29 | proxy1 (Dict[str, object]): The first proxy configuration to compare. 30 | proxy2 (Dict[str, object]): The second proxy configuration to compare. 31 | ignore_password (bool, optional): Whether to ignore 'proxy_password' 32 | property in the comparison. Defaults to True. 33 | 34 | Returns: 35 | bool: True if the proxy configurations are the same, otherwise False. 
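    Example (illustrative; None-valued properties and, by default, the
    password are dropped before comparison, so these two configurations are
    considered equal):

        >>> same_proxy_configs(
        ...     {"proxy_url": "localhost", "proxy_port": "3128", "proxy_password": "x"},
        ...     {"proxy_url": "localhost", "proxy_port": "3128", "proxy_password": None},
        ... )
        True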
36 | """ 37 | 38 | proxy1 = {k: v for k, v in proxy1.items() if v is not None} 39 | proxy2 = {k: v for k, v in proxy2.items() if v is not None} 40 | if ignore_password: 41 | proxy1.pop("proxy_password", None) 42 | proxy2.pop("proxy_password", None) 43 | 44 | res = proxy1 == proxy2 45 | logger.debug(f"same_proxy_configs: {res}\n\tproxy1: {proxy1}\n\tproxy2: {proxy2}") 46 | 47 | return res 48 | 49 | 50 | def wait_for_proxy( 51 | splunk_client: SplunkClient, 52 | expected_proxy: Dict[str, object], 53 | error: Optional[str] = None, 54 | ) -> Generator[int, None, bool]: 55 | """ 56 | Waits for the Splunk proxy configuration to match the expected value. 57 | 58 | This function repeatedly checks the Splunk proxy configuration until it 59 | matches the expected value provided in the `expected_proxy` argument or 60 | until the timeout expires. If the `error` argument is not None, the 61 | function returns immediately with a status of False. 62 | 63 | Args: 64 | splunk_client (SplunkClient): The Splunk client to use for checking 65 | the proxy settings. 66 | expected_proxy (Dict[str, object]): The expected proxy configuration 67 | to match against. 68 | error (Optional[str]): An error message indicating that the proxy 69 | setting API call failed. If not None, the function returns False. 70 | 71 | Yields: 72 | int: The interval in seconds after which the probe should be invoked 73 | again if the verification was not successful. 74 | 75 | Returns: 76 | bool: True if the proxy configuration matches the expected value 77 | within the timeout period, False otherwise. 78 | """ 79 | 80 | if error is not None: 81 | return False 82 | 83 | start = time.time() 84 | expire = time.time() + PROBE_PROXY_CHECK_TIMEOUT 85 | while time.time() < expire: 86 | proxy = splunk_client.get_settings_proxy() 87 | if same_proxy_configs(expected_proxy, proxy): 88 | logger.debug( 89 | f"probe wait_for_proxy successful after {time.time() - start} " 90 | "seconds" 91 | ) 92 | return True 93 | logger.debug(f"probe wait_for_proxy failed after {time.time() - start} seconds") 94 | yield PROBE_PROXY_CHECK_INTERVAL 95 | 96 | logger.debug( 97 | "probe wait_for_proxy expired with failed status after " 98 | f"{time.time() - start} seconds" 99 | ) 100 | 101 | return False 102 | 103 | 104 | def wait_for_loglevel( 105 | splunk_client: SplunkClient, 106 | expected_loglevel: str, 107 | error: Optional[str] = None, 108 | ) -> Generator[int, None, bool]: 109 | """ 110 | Probe generator method that repeatedly checks TA log level configuration 111 | until it matches the expected_loglevel or the probe time expires. 112 | 113 | Args: 114 | splunk_client (SplunkClient): The Splunk client instance. 115 | expected_loglevel (str): The expected log level to wait for. 116 | error (Optional[str], optional): Error message if the API call setting 117 | log level failed. Defaults to None. 118 | 119 | Yields: 120 | int: Interval after which the probe should be invoked again if the 121 | verification was not successful. 122 | 123 | Returns: 124 | bool: True if the log level matches expected value, False otherwise. 
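    Example (this is how test_settings.py attaches this probe to the
    set_loglevel forge; the probe's boolean result is injected into the test
    under the probe's name):

        @attach(forge(set_loglevel, probe=wait_for_loglevel, loglevel="CRITICAL"))
        def test_valid_loglevel(
            splunk_client: SplunkClient, wait_for_loglevel: bool
        ) -> None:
            assert wait_for_loglevel is True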
125 | """ 126 | if error is not None: 127 | return False 128 | 129 | start = time.time() 130 | expire = start + PROBE_LOGLEVEL_CHECK_TIMEOUT 131 | while time.time() < expire: 132 | loglevel = splunk_client.get_settings_logging().get("loglevel") 133 | if loglevel == expected_loglevel: 134 | logger.debug( 135 | "probe wait_for_loglevel successful after " 136 | f"{time.time() - start} seconds" 137 | ) 138 | return True 139 | logger.debug( 140 | f"probe wait_for_loglevel failed after {time.time() - start} " "seconds" 141 | ) 142 | yield PROBE_LOGLEVEL_CHECK_INTERVAL 143 | 144 | logger.debug( 145 | "probe wait_for_loglevel expired with failed status after " 146 | f"{time.time() - start} seconds" 147 | ) 148 | return False 149 | 150 | 151 | def events_ingested( 152 | splunk_client: SplunkClient, probe_spl: str, probes_wait_time: int = 10 153 | ) -> Generator[int, None, None]: 154 | start_time = utils.get_epoch_timestamp() 155 | utils.logger.debug(f"started at {start_time}") 156 | utils.logger.debug(probe_spl) 157 | while True: 158 | search = splunk_client.search(searchquery=probe_spl) 159 | if search.result_count != 0: 160 | break 161 | utils.logger.debug( 162 | f"failed, let's wait another {probes_wait_time} \ 163 | seconds and try again" 164 | ) 165 | yield probes_wait_time 166 | 167 | utils.logger.debug( 168 | "successfully finished after " 169 | f"{utils.get_epoch_timestamp()-start_time} seconds" 170 | ) 171 | 172 | 173 | def account_input_events_ingested( 174 | splunk_client: SplunkClient, example_input_spl: str 175 | ) -> Generator[int, None, None]: 176 | yield from events_ingested(splunk_client, example_input_spl) 177 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/test_configuration.py: -------------------------------------------------------------------------------- 1 | import pytest # noqa F401 2 | 3 | from splunk_add_on_ucc_modinput_test.functional.decorators import ( 4 | bootstrap, 5 | forge, 6 | forges, 7 | ) 8 | from tests.ucc_modinput_functional.splunk.client import SplunkClient 9 | from tests.ucc_modinput_functional.splunk.forges import ( 10 | set_loglevel, 11 | account, 12 | another_account, 13 | another_account_index, 14 | ) 15 | from tests.ucc_modinput_functional.splunk.probes import ( 16 | wait_for_loglevel, 17 | ) 18 | from tests.ucc_modinput_functional import defaults 19 | 20 | 21 | @bootstrap( 22 | # each forge will be executed just once, 23 | # no matter how many times appears in tests 24 | forge( 25 | set_loglevel, 26 | loglevel=defaults.TA_LOG_LEVEL_FOR_TESTS, 27 | probe=wait_for_loglevel, 28 | ) 29 | ) 30 | def test_ta_logging(splunk_client: SplunkClient) -> None: 31 | assert ( 32 | splunk_client.get_settings_logging()["loglevel"] 33 | == defaults.TA_LOG_LEVEL_FOR_TESTS 34 | ) 35 | 36 | 37 | @bootstrap( 38 | forge( 39 | set_loglevel, 40 | loglevel=defaults.TA_LOG_LEVEL_FOR_TESTS, 41 | probe=wait_for_loglevel, 42 | ), # sequence matters - ta_logging will be executed before accounts 43 | forges( # there is parallel execution within forges 44 | forge(account), 45 | forge(another_account), 46 | ), 47 | ) 48 | def test_accounts( 49 | splunk_client: SplunkClient, 50 | # vendor_client, # you may want to refer to it in most real-life cases 51 | account_config_name: str, 52 | another_account_config_name: str, 53 | ) -> None: 54 | actual_account = splunk_client.get_account(account_config_name) 55 | assert actual_account is not None 56 | assert actual_account["api_key"] == defaults.ENCRYPTED_VALUE 57 | 58 | 
actual_another_account = splunk_client.get_account(another_account_config_name) 59 | assert actual_another_account is not None 60 | assert actual_another_account["api_key"] == defaults.ENCRYPTED_VALUE 61 | 62 | 63 | @bootstrap( 64 | forge( 65 | set_loglevel, 66 | loglevel=defaults.TA_LOG_LEVEL_FOR_TESTS, 67 | probe=wait_for_loglevel, 68 | ), # sequence matters - ta_logging will be executed before accounts 69 | forges( 70 | forge(another_account_index), 71 | # the test framework creates session specific index that can be used 72 | # if more indexes are needed, they can be created as well 73 | ), 74 | ) 75 | def test_indexes(splunk_client: SplunkClient, another_account_index_name: str) -> None: 76 | splk_config = splunk_client.splunk_configuration 77 | actual_index = splk_config.get_index( 78 | another_account_index_name, client_service=splk_config.service 79 | ) 80 | assert actual_index is not None 81 | assert actual_index.name == another_account_index_name 82 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/test_inputs.py: -------------------------------------------------------------------------------- 1 | import pytest # noqa F401 2 | from splunk_add_on_ucc_modinput_test.common import utils 3 | from splunk_add_on_ucc_modinput_test.functional.decorators import ( 4 | bootstrap, 5 | forge, 6 | forges, 7 | ) 8 | from tests.ucc_modinput_functional.splunk.forges import ( 9 | set_loglevel, 10 | account, 11 | another_account, 12 | account_input, 13 | ) 14 | from tests.ucc_modinput_functional.splunk.probes import ( 15 | wait_for_loglevel, 16 | account_input_events_ingested, 17 | ) 18 | from tests.ucc_modinput_functional.splunk.client import SplunkClient 19 | from tests.ucc_modinput_functional import defaults 20 | import logging 21 | 22 | logger = logging.getLogger("ucc-modinput-test") 23 | 24 | 25 | @bootstrap( 26 | forge( 27 | set_loglevel, 28 | loglevel=defaults.TA_LOG_LEVEL_FOR_TESTS, 29 | probe=wait_for_loglevel, 30 | ), # sequence matters - ta_logging will be executed before accounts 31 | forges( 32 | forge(account), 33 | forge(another_account), 34 | ), 35 | forges( 36 | forge( 37 | account_input, 38 | probe=account_input_events_ingested, 39 | ), 40 | # forge( 41 | # another_account_base_input, 42 | # # probe=probes.event_log_input_oauth_hourly_events_ingested, 43 | # ), 44 | ), 45 | ) 46 | def test_inputs(splunk_client: SplunkClient, example_input_spl: str) -> None: 47 | search_result_details = splunk_client.search(searchquery=example_input_spl) 48 | assert ( 49 | search_result_details.result_count != 0 50 | ), f"Following query returned 0 events: {example_input_spl}" 51 | 52 | logger.info( 53 | "test_inputs_loginhistory_clone done at " 54 | + utils.convert_to_utc(utils.get_epoch_timestamp()) 55 | ) 56 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/test_settings.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from typing import Dict, Tuple 3 | from splunk_add_on_ucc_modinput_test.functional.decorators import ( 4 | attach, 5 | forge, 6 | ) 7 | from tests.ucc_modinput_functional.splunk.client import SplunkClient 8 | from tests.ucc_modinput_functional.splunk.forges import ( 9 | set_loglevel, 10 | try_to_configure_proxy, 11 | ) 12 | from tests.ucc_modinput_functional.splunk.probes import ( 13 | wait_for_loglevel, 14 | ) 15 | 16 | 17 | @attach(forge(set_loglevel, probe=wait_for_loglevel, loglevel="CRITICAL")) 
18 | def test_valid_loglevel(splunk_client: SplunkClient, wait_for_loglevel: bool) -> None: 19 | assert wait_for_loglevel is True 20 | 21 | 22 | @pytest.mark.parametrize( 23 | "overwrite,expected_error", 24 | [ 25 | ({"proxy_url": "@#$%!*123"}, "Not matching the pattern: "), 26 | ( 27 | {"proxy_port": "not-a-number"}, 28 | "Bad Request -- Invalid format for integer value", 29 | ), 30 | ], 31 | ) 32 | @attach(forge(try_to_configure_proxy)) 33 | def test_proxy_validators__invalid_params( 34 | error: str, 35 | overwrite: Tuple[Dict[str, str], str], 36 | expected_error: str, 37 | ) -> None: 38 | assert expected_error in error 39 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/vendor/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/splunk/splunk-example-ta/1378a9a04905e8edff6bfd0a6dc7354791053c16/tests/ucc_modinput_functional/vendor/__init__.py -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/vendor/client/__init__.py: -------------------------------------------------------------------------------- 1 | from .client import VendorClient # noqa 2 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/vendor/client/client.py: -------------------------------------------------------------------------------- 1 | from splunk_add_on_ucc_modinput_test.functional.decorators import ( 2 | register_vendor_class, 3 | ) 4 | from splunk_add_on_ucc_modinput_test.functional.vendor import VendorClientBase 5 | from tests.ucc_modinput_functional.vendor.client.configuration import ( 6 | Configuration, 7 | ) 8 | 9 | 10 | @register_vendor_class(Configuration) 11 | class VendorClient(VendorClientBase): 12 | pass 13 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/vendor/client/configuration.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from splunk_add_on_ucc_modinput_test.common import utils 3 | from splunk_add_on_ucc_modinput_test.functional.vendor import ( 4 | VendorConfigurationBase, 5 | ) 6 | 7 | 8 | class Configuration(VendorConfigurationBase): 9 | def customize_configuration(self) -> None: 10 | self._api_key = utils.get_from_environment_variable( 11 | "MODINPUT_TEST_EXAMPLE_API_KEY_BASE64", 12 | string_function=utils.Base64.decode, 13 | ) 14 | 15 | @property 16 | def api_key(self) -> Optional[str]: 17 | return self._api_key 18 | -------------------------------------------------------------------------------- /tests/ucc_modinput_functional/vendor/forges.py: -------------------------------------------------------------------------------- 1 | from tests.ucc_modinput_functional.vendor.client import ( # noqa F401 2 | VendorClient, 3 | ) 4 | -------------------------------------------------------------------------------- /ui/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | /coverage/ 3 | -------------------------------------------------------------------------------- /ui/.nvmrc: -------------------------------------------------------------------------------- 1 | lts/* -------------------------------------------------------------------------------- /ui/README.md: -------------------------------------------------------------------------------- 1 | # UI of 
Splunk Example TA 2 | 3 | ## How to run 4 | 5 | After you have built the TA and it is accessible on localhost:8000, run the following commands to start a development server for the UI: 6 | 7 | ```bash 8 | npm install 9 | npm start 10 | ``` 11 | 12 | Then open http://localhost:8080 in your browser. It will automatically refresh when you make changes to the UI code. -------------------------------------------------------------------------------- /ui/babel.config.cjs: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [ 3 | [ 4 | "@splunk/babel-preset", 5 | { 6 | runtime: "automatic", 7 | }, 8 | ], 9 | "@babel/preset-env", 10 | ], 11 | }; 12 | -------------------------------------------------------------------------------- /ui/eslint.config.js: -------------------------------------------------------------------------------- 1 | import js from "@eslint/js"; 2 | import globals from "globals"; 3 | import reactHooks from "eslint-plugin-react-hooks"; 4 | import reactRefresh from "eslint-plugin-react-refresh"; 5 | import tseslint from "typescript-eslint"; 6 | 7 | export default tseslint.config( 8 | { ignores: ["dist", "coverage"] }, 9 | { 10 | extends: [js.configs.recommended, ...tseslint.configs.recommended], 11 | files: ["**/*.{ts,tsx}"], 12 | languageOptions: { 13 | ecmaVersion: 2020, 14 | globals: globals.browser, 15 | }, 16 | plugins: { 17 | "react-hooks": reactHooks, 18 | "react-refresh": reactRefresh, 19 | }, 20 | rules: { 21 | ...reactHooks.configs.recommended.rules, 22 | "react-refresh/only-export-components": [ 23 | "warn", 24 | { allowConstantExport: true }, 25 | ], 26 | }, 27 | }, 28 | ); 29 | -------------------------------------------------------------------------------- /ui/jest.config.ts: -------------------------------------------------------------------------------- 1 | import type { Config } from "jest"; 2 | 3 | export default { 4 | // Mock 5 | clearMocks: true, 6 | restoreMocks: true, 7 | // env settings 8 | testEnvironment: "jest-fixed-jsdom", 9 | setupFilesAfterEnv: ["<rootDir>/tests/jest.setup.ts"], 10 | // Coverage 11 | collectCoverage: true, 12 | collectCoverageFrom: ["src/**/*.{js,jsx,ts,tsx}"], 13 | coveragePathIgnorePatterns: [ 14 | "/node_modules/", 15 | /* 16 | TYPES 17 | */ 18 | // *.d.ts files 19 | "\\.d\\.ts$", 20 | "/types/", 21 | "\\.types\\.ts$", 22 | ], 23 | coverageDirectory: "coverage", 24 | testEnvironmentOptions: { 25 | /** 26 | * @note Opt-out from JSDOM using browser-style resolution 27 | * for dependencies. This is simply incorrect, as JSDOM is 28 | * not a browser, and loading browser-oriented bundles in 29 | * Node.js will break things. 30 | * 31 | * Consider migrating to a more modern test runner if you 32 | * don't want to deal with this. 33 | */ 34 | customExportConditions: [""], 35 | }, 36 | errorOnDeprecated: true, 37 | // moduleNameMapper: { 38 | // // Force module uuid to resolve with the CJS entry point, because Jest does not support package.json.exports.
See https://github.com/uuidjs/uuid/issues/451 39 | // uuid: require.resolve("uuid"), 40 | // "\\.(css)$": "/src/mocks/styleMock.js", 41 | // }, 42 | } satisfies Config; 43 | -------------------------------------------------------------------------------- /ui/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "splunk-example-ta", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "start": "webpack serve", 8 | "build": "cross-env NODE_ENV=production webpack --bail", 9 | "build:watch": "webpack --watch", 10 | "test": "jest", 11 | "test:watch": "jest --watch", 12 | "lint": "eslint && npm run format:verify", 13 | "lint:fix": "eslint --fix && npm run format", 14 | "format": "prettier \"src/**/*.(js|jsx|ts|tsx|css)\" --write", 15 | "format:verify": "prettier \"src/**/*.(js|jsx|ts|tsx|css)\" --list-different" 16 | }, 17 | "dependencies": { 18 | "@splunk/add-on-ucc-framework": "^5.58.0", 19 | "@splunk/react-ui": "^4.42.0", 20 | "@splunk/splunk-utils": "^3.1.0", 21 | "@splunk/themes": "^0.23.0", 22 | "es-toolkit": "^1.32.0", 23 | "react": "16.14.0", 24 | "react-dom": "16.14.0", 25 | "styled-components": "^5.3.11" 26 | }, 27 | "devDependencies": { 28 | "@babel/core": "^7.26.9", 29 | "@babel/eslint-parser": "^7.26.8", 30 | "@babel/plugin-transform-runtime": "^7.26.9", 31 | "@babel/preset-env": "^7.26.9", 32 | "@babel/preset-react": "^7.26.3", 33 | "@babel/preset-typescript": "^7.26.0", 34 | "@eslint/js": "^9.20.0", 35 | "@jest/globals": "^29.7.0", 36 | "@splunk/babel-preset": "^4.0.0", 37 | "@splunk/webpack-configs": "^7.0.2", 38 | "@testing-library/dom": "^8.20.1", 39 | "@testing-library/jest-dom": "^6.6.3", 40 | "@testing-library/react": "^12.1.5", 41 | "@testing-library/user-event": "^14.6.1", 42 | "@types/jest": "^29.5.14", 43 | "@types/node": "^22.13.1", 44 | "@types/react": "16.14.62", 45 | "@types/react-dom": "16.9.25", 46 | "@types/styled-components": "^5.1.34", 47 | "babel-jest": "^29.7.0", 48 | "cross-env": "^7.0.3", 49 | "eslint": "^9.19.0", 50 | "eslint-plugin-react-hooks": "^5.1.0", 51 | "eslint-plugin-react-refresh": "^0.4.18", 52 | "fork-ts-checker-webpack-plugin": "^9.0.2", 53 | "globals": "^15.14.0", 54 | "jest": "^29.7.0", 55 | "jest-environment-jsdom": "^29.7.0", 56 | "jest-fixed-jsdom": "^0.0.9", 57 | "msw": "^2.7.0", 58 | "prettier": "^3.4.2", 59 | "ts-node": "^10.9.2", 60 | "typescript": "~5.7.3", 61 | "typescript-eslint": "^8.24.1", 62 | "webpack-cli": "^5.1.4", 63 | "webpack-dev-server": "^5.2.0" 64 | }, 65 | "overrides": { 66 | "react": "16.14.0", 67 | "react-dom": "16.14.0", 68 | "@types/react": "16.14.62", 69 | "@types/react-dom": "16.9.25" 70 | }, 71 | "engines": { 72 | "node": ">=22", 73 | "npm": ">=10" 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /ui/src/ucc-ui-extensions/AdvancedInputsTab/AdvancedInputsTab.spec.tsx: -------------------------------------------------------------------------------- 1 | import { screen, render } from "@testing-library/react"; 2 | import React from "react"; 3 | import { AdvancedInputsTab } from "./AdvancedInputsTab.tsx"; 4 | import { server } from "../../../tests/mocks/server.ts"; 5 | import { http, HttpResponse } from "msw"; 6 | 7 | jest.mock("@splunk/splunk-utils/config", () => ({ 8 | ...jest.requireActual("@splunk/splunk-utils/config"), 9 | app: "test_app", 10 | })); 11 | 12 | function setup() { 13 | return render(); 14 | } 15 | 16 | function mockResponse(errorResponse: Response) { 17 | 
server.use( 18 | http.get(`/servicesNS/-/test_app/test_app_example`, () => { 19 | return errorResponse; 20 | }), 21 | ); 22 | } 23 | 24 | describe("AdvancedInputsTab", () => { 25 | it("should render table", async () => { 26 | const response = HttpResponse.json({ 27 | entry: [ 28 | { 29 | name: "test_name", 30 | content: { 31 | account: "test", 32 | disabled: false, 33 | fetch_from: "test", 34 | index: "test", 35 | interval: 123, 36 | }, 37 | }, 38 | ], 39 | }); 40 | 41 | mockResponse(response); 42 | const { findByRole } = setup(); 43 | 44 | expect(await findByRole("table")).toBeInTheDocument(); 45 | expect(await findByRole("cell", { name: "test_name" })).toBeInTheDocument(); 46 | }); 47 | 48 | it("should render error message", async () => { 49 | const serverErrorMessage = "Internal error"; 50 | const errorResponse = HttpResponse.json( 51 | { message: serverErrorMessage }, 52 | { status: 500 }, 53 | ); 54 | mockResponse(errorResponse); 55 | setup(); 56 | 57 | expect(await screen.findByText("Something went wrong")).toBeInTheDocument(); 58 | expect(screen.queryByText(serverErrorMessage)).not.toBeInTheDocument(); 59 | }); 60 | }); 61 | -------------------------------------------------------------------------------- /ui/src/ucc-ui-extensions/AdvancedInputsTab/AdvancedInputsTab.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import Table from "@splunk/react-ui/Table"; 3 | import { app } from "@splunk/splunk-utils/config"; 4 | import { SplunkThemeProvider, variables } from "@splunk/themes"; 5 | import styled from "styled-components"; 6 | 7 | import { useGetRequest } from "../../utils/apiHooks.ts"; 8 | import Message from "@splunk/react-ui/Message"; 9 | 10 | const HostStyles = styled.div` 11 | padding: ${variables.spacingLarge} 0; 12 | `; 13 | 14 | interface InputResponse { 15 | entry: { 16 | name: string; 17 | content: { 18 | account: string; 19 | disabled: boolean; 20 | fetch_from: string; 21 | index: string; 22 | interval: string; 23 | }; 24 | }[]; 25 | } 26 | 27 | export function AdvancedInputsTab() { 28 | const inputType = "example"; 29 | const { data, error } = useGetRequest({ 30 | endpointUrl: `${app}_${inputType}`, 31 | }); 32 | const inputs = data?.entry; 33 | 34 | return ( 35 | 36 | 37 | {error && {error.message}} 38 | {data && ( 39 | 40 | 41 | Name 42 | Interval 43 | Account 44 | 45 | 46 | {inputs?.map((row) => ( 47 | 48 | {row.name} 49 | {row.content.interval} 50 | {row.content.account} 51 | 52 | ))} 53 | 54 |
55 | )} 56 |
57 |
58 | ); 59 | } 60 | -------------------------------------------------------------------------------- /ui/src/ucc-ui-extensions/AdvancedInputsTab/index.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import ReactDOM from "react-dom"; 3 | import { CustomTabBase } from "@splunk/add-on-ucc-framework"; 4 | import { AdvancedInputsTab } from "./AdvancedInputsTab.tsx"; 5 | 6 | export default class AdvancedInputsTabClass extends CustomTabBase { 7 | render(): void { 8 | ReactDOM.render(, this.el); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /ui/src/ucc-ui-extensions/DateInput/DateInput.spec.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { fireEvent, render } from "@testing-library/react"; 3 | import { userEvent } from "@testing-library/user-event"; 4 | import DateInput from "./DateInput"; 5 | 6 | describe("DateInput", () => { 7 | it("renders with default value and handles date change", async () => { 8 | const handleChange = jest.fn(); 9 | const user = userEvent.setup(); 10 | 11 | const { getByRole } = render(); 12 | 13 | const input = getByRole("combobox"); 14 | expect(input).toBeVisible(); 15 | 16 | const newDate = "12/1/2000"; 17 | 18 | await user.clear(input); 19 | await user.type(input, newDate, { skipClick: true }); 20 | // a weird implementation of Date component, it does not work with user.keyboard('[Enter]') 21 | fireEvent.keyDown(input, { key: "enter", keyCode: 13 }); 22 | 23 | expect(input).toHaveValue(newDate); 24 | expect(handleChange).toHaveBeenCalled(); 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /ui/src/ucc-ui-extensions/DateInput/DateInput.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import DateSuiInput, { DateChangeHandler } from "@splunk/react-ui/Date"; 3 | 4 | function DateInput(props: { value?: string; onChange: DateChangeHandler }) { 5 | const today = props.value ?? new Date().toISOString().split("T")[0]; 6 | 7 | return ; 8 | } 9 | 10 | export default DateInput; 11 | -------------------------------------------------------------------------------- /ui/src/ucc-ui-extensions/DateInput/index.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import ReactDOM from "react-dom"; 3 | import { CustomControlBase } from "@splunk/add-on-ucc-framework"; 4 | 5 | import DateInput from "./DateInput"; 6 | import { DateChangeHandler } from "@splunk/react-ui/Date"; 7 | 8 | export default class DateInputClass extends CustomControlBase { 9 | onDateChange: DateChangeHandler = (_event, data) => { 10 | this.setValue(data.value); 11 | }; 12 | 13 | render() { 14 | const dateValue = this.data.value; 15 | const date = 16 | typeof dateValue === "string" && dateValue.length !== 0 17 | ? dateValue 18 | : undefined; 19 | 20 | ReactDOM.render( 21 | , 22 | this.el, 23 | ); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /ui/src/ucc-ui-extensions/README.md: -------------------------------------------------------------------------------- 1 | # How to bundle a new UI extension 2 | 3 | 1. Create a new directory in `ui/src/ucc-ui-extensions` with the name of your extension. 4 | 2. Add your extension's files to the directory. 5 | 3. 
Add your default export to the `ui/src/ucc-ui-extensions/index.ts` file. This is the only file that will be included in the final bundle with all its dependencies. 6 | 4. Add the following to your service in globalConfig.json: 7 | 8 | ```json 9 | { 10 | "type": "custom", 11 | "options": { 12 | "src": "FolderName", 13 | "type": "external" 14 | } 15 | } 16 | ``` -------------------------------------------------------------------------------- /ui/src/utils/ResponseError.ts: -------------------------------------------------------------------------------- 1 | export class ResponseError extends Error { 2 | response: Response; 3 | 4 | constructor(params: { response: Response; message: string }) { 5 | super(params.message); 6 | this.response = params.response; 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /ui/src/utils/api.ts: -------------------------------------------------------------------------------- 1 | import { app } from "@splunk/splunk-utils/config"; 2 | import { getDefaultFetchInit } from "@splunk/splunk-utils/fetch"; 3 | import { createRESTURL } from "@splunk/splunk-utils/url"; 4 | import { ResponseError } from "./ResponseError"; 5 | 6 | type ParamsRecord = Record; 7 | 8 | export interface RequestParams { 9 | endpointUrl: string; 10 | params?: ParamsRecord; 11 | signal?: AbortSignal; 12 | body?: BodyInit; 13 | } 14 | 15 | const DEFAULT_PARAMS = { output_mode: "json" }; 16 | 17 | function createUrl(endpointUrl: string, params: ParamsRecord): URL { 18 | const url = new URL( 19 | createRESTURL(endpointUrl, { app }), 20 | window.location.origin, 21 | ); 22 | Object.entries({ ...DEFAULT_PARAMS, ...params }) 23 | .filter(([, value]) => value !== undefined && value !== null) 24 | .forEach(([key, value]) => url.searchParams.append(key, value.toString())); 25 | return url; 26 | } 27 | 28 | function handleErrorResponse(response: Response) { 29 | if (!response.ok) { 30 | throw new ResponseError({ response, message: "Something went wrong" }); 31 | } 32 | } 33 | 34 | async function fetchWithErrorHandling( 35 | url: URL, 36 | options: RequestInit, 37 | ): Promise { 38 | const defaultInit = getDefaultFetchInit(); 39 | 40 | const response = await fetch(url.toString(), { 41 | ...defaultInit, 42 | ...options, 43 | }); 44 | 45 | handleErrorResponse(response); 46 | 47 | return await response.json(); 48 | } 49 | 50 | export async function getRequest({ 51 | endpointUrl, 52 | params = {}, 53 | signal, 54 | }: RequestParams) { 55 | const url = createUrl(endpointUrl, params); 56 | const options = { 57 | method: "GET", 58 | signal, 59 | } satisfies RequestInit; 60 | 61 | return fetchWithErrorHandling(url, options); 62 | } 63 | 64 | export async function postRequest({ 65 | endpointUrl, 66 | params = {}, 67 | body, 68 | signal, 69 | }: RequestParams) { 70 | const url = createUrl(endpointUrl, params); 71 | const defaultInit = getDefaultFetchInit(); 72 | const headers = { 73 | ...defaultInit.headers, 74 | "Content-Type": "application/x-www-form-urlencoded", 75 | } satisfies HeadersInit; 76 | 77 | const options = { 78 | method: "POST", 79 | headers, 80 | signal, 81 | body, 82 | } satisfies RequestInit; 83 | 84 | return fetchWithErrorHandling(url, options); 85 | } 86 | 87 | export async function deleteRequest({ 88 | endpointUrl, 89 | params = {}, 90 | signal, 91 | }: RequestParams) { 92 | const url = createUrl(endpointUrl, params); 93 | 94 | const options = { 95 | method: "DELETE", 96 | signal, 97 | } satisfies RequestInit; 98 | 99 | return fetchWithErrorHandling(url, 
options); 100 | } 101 | -------------------------------------------------------------------------------- /ui/src/utils/apiHooks.ts: -------------------------------------------------------------------------------- 1 | import { getRequest, RequestParams } from "./api.ts"; 2 | import { useEffect, useState } from "react"; 3 | import { isPlainObject } from "es-toolkit"; 4 | 5 | /** 6 | * Hashes the request parameters to use as a key in the cache. 7 | * @param params 8 | */ 9 | function hashParams(params: RequestParams): string { 10 | const url = params.endpointUrl; 11 | const requestParams = params.params; 12 | 13 | const paramsHash = JSON.stringify(requestParams, (_, val) => 14 | isPlainObject(val) 15 | ? Object.keys(val) 16 | .sort() 17 | .reduce( 18 | (result, key) => { 19 | result[key] = val[key]; 20 | return result; 21 | }, 22 | {} as Record, 23 | ) 24 | : val, 25 | ); 26 | 27 | return `${url}::${paramsHash ?? ""}`; 28 | } 29 | 30 | const cache = new Map(); 31 | 32 | export function useGetRequest(params: RequestParams) { 33 | const [data, setData] = useState(null); 34 | const [isLoading, setIsLoading] = useState(true); 35 | const [error, setError] = useState(null); 36 | 37 | // params is a new object on every render 38 | // hashing is used to preserve a reference to the params object 39 | const hash = hashParams(params); 40 | if (!cache.has(hash)) { 41 | cache.set(hash, params); 42 | } 43 | const cachedParams = cache.get(hash) ?? params; 44 | 45 | useEffect(() => { 46 | let isMounted = true; 47 | const abortController = new AbortController(); 48 | 49 | getRequest({ signal: abortController.signal, ...cachedParams }) 50 | .then((data) => { 51 | if (isMounted) { 52 | setData(data); 53 | setIsLoading(false); 54 | } 55 | }) 56 | .catch((error) => { 57 | if (isMounted) { 58 | setError(error); 59 | setIsLoading(false); 60 | } 61 | }); 62 | 63 | return () => { 64 | isMounted = false; 65 | abortController.abort(); 66 | }; 67 | }, [cachedParams]); 68 | 69 | return { data, isLoading, error }; 70 | } 71 | -------------------------------------------------------------------------------- /ui/tests/jest.setup.ts: -------------------------------------------------------------------------------- 1 | import "@testing-library/jest-dom"; 2 | import "@testing-library/jest-dom/jest-globals"; 3 | import "./mocks/server"; 4 | 5 | import { configure } from "@testing-library/react"; 6 | 7 | /** 8 | * Configure test attributes 9 | */ 10 | configure({ testIdAttribute: "data-test" }); 11 | 12 | /** 13 | * Failing tests if there is some console error during tests 14 | */ 15 | export let consoleError: jest.SpyInstance< 16 | void, 17 | Parameters<(typeof console)["error"]> 18 | >; 19 | 20 | beforeEach(() => { 21 | const originalConsoleError = console.error; 22 | consoleError = jest.spyOn(console, "error"); 23 | consoleError.mockImplementation( 24 | (...args: Parameters) => { 25 | originalConsoleError(...args); 26 | throw new Error( 27 | "Console error was called. Call consoleError.mockImplementation(() => {}) if this is expected.", 28 | ); 29 | }, 30 | ); 31 | }); 32 | -------------------------------------------------------------------------------- /ui/tests/mocks/README.md: -------------------------------------------------------------------------------- 1 | # Mocks 2 | 3 | Use this to mock any third party HTTP resources that you don't have running 4 | locally and want to have mocked for local development as well as tests. 
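For example, a spec can register a one-off handler on the shared `server` exported from `server.ts` (a minimal sketch; the endpoint path below is made up, while the real specs mock the add-on's REST endpoints in the same way):

```ts
import { http, HttpResponse } from "msw";
import { server } from "./server";

// Handlers registered with use() are reset after each test by resetHandlers().
server.use(
  http.get("/servicesNS/-/example_app/some_endpoint", () =>
    HttpResponse.json({ entry: [] }),
  ),
);
```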
5 | 6 | Learn more about how to use this at [mswjs.io](https://mswjs.io/) 7 | -------------------------------------------------------------------------------- /ui/tests/mocks/server.ts: -------------------------------------------------------------------------------- 1 | import { setupServer } from "msw/node"; 2 | 3 | export const server = setupServer(); 4 | 5 | server.listen({ 6 | onUnhandledRequest: "warn", 7 | }); 8 | afterEach(() => server.resetHandlers()); 9 | afterAll(() => server.close()); 10 | 11 | process.once("SIGINT", () => server.close()); 12 | process.once("SIGTERM", () => server.close()); 13 | -------------------------------------------------------------------------------- /ui/tsconfig.app.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", 4 | "target": "ES2020", 5 | "useDefineForClassFields": true, 6 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 7 | "module": "ESNext", 8 | "skipLibCheck": true, 9 | "types": ["@testing-library/react", "jest", "@testing-library/jest-dom"], 10 | 11 | /* Bundler mode */ 12 | "moduleResolution": "Bundler", 13 | "allowImportingTsExtensions": true, 14 | "isolatedModules": true, 15 | "moduleDetection": "force", 16 | "noEmit": true, 17 | "jsx": "react", 18 | 19 | /* Linting */ 20 | "strict": true, 21 | "noUnusedLocals": true, 22 | "noUnusedParameters": true, 23 | "noFallthroughCasesInSwitch": true, 24 | "noUncheckedSideEffectImports": true 25 | }, 26 | "include": ["src"] 27 | } 28 | -------------------------------------------------------------------------------- /ui/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [], 3 | "references": [ 4 | { "path": "./tsconfig.app.json" }, 5 | { "path": "./tsconfig.node.json" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /ui/tsconfig.node.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", 4 | "target": "ES2022", 5 | "lib": ["ES2023"], 6 | "module": "ESNext", 7 | "skipLibCheck": true, 8 | 9 | /* Bundler mode */ 10 | "moduleResolution": "Bundler", 11 | "allowImportingTsExtensions": true, 12 | "isolatedModules": true, 13 | "moduleDetection": "force", 14 | "noEmit": true, 15 | 16 | /* Linting */ 17 | "strict": true, 18 | "noUnusedLocals": true, 19 | "noUnusedParameters": true, 20 | "noFallthroughCasesInSwitch": true, 21 | "noUncheckedSideEffectImports": true 22 | }, 23 | "include": ["tests/jest.setup.ts"] 24 | } 25 | -------------------------------------------------------------------------------- /ui/webpack.config.js: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | 3 | import ForkTsCheckerWebpackPlugin from "fork-ts-checker-webpack-plugin"; 4 | import { fileURLToPath } from "node:url"; 5 | import { readdirSync, statSync } from "node:fs"; 6 | import { dirname, join, relative } from "node:path"; 7 | import { invariant } from "es-toolkit"; 8 | 9 | const __filename = fileURLToPath(import.meta.url); 10 | const __dirname = path.dirname(__filename); 11 | const DEBUG = process.env.NODE_ENV !== "production"; 12 | 13 | const proxyTargetUrl = "http://localhost:8000"; 14 | 15 | const jsAssetsRegex = /.+\/app\/.+\/js\/build\/custom(\/.+(js(.map)?))/; 16 | 17 | function 
isItStaticAsset(url) { 18 | return jsAssetsRegex.test(url); 19 | } 20 | 21 | const entryDir = join(__dirname, "src/ucc-ui-extensions"); 22 | 23 | /** 24 | * It looks for all index files in the given directory. 25 | * @param {string} dir 26 | * @return {string[]} 27 | */ 28 | function getAllIndexFiles(dir) { 29 | /** 30 | * @type {string[]} 31 | */ 32 | let results = []; 33 | const list = readdirSync(dir); 34 | list.forEach((file) => { 35 | const filePath = join(dir, file); 36 | const stat = statSync(filePath); 37 | if (stat && stat.isDirectory()) { 38 | results = results.concat(getAllIndexFiles(filePath)); 39 | } else if (file === "index.ts" || file === "index.tsx") { 40 | results.push(filePath); 41 | } 42 | }); 43 | return results; 44 | } 45 | 46 | const entryFiles = getAllIndexFiles(entryDir); 47 | 48 | invariant( 49 | entryFiles.length > 0, 50 | "No entry files found. Make sure the entryDir is correct and there are index files in some directory.", 51 | ); 52 | 53 | /** 54 | * @param {Record} acc - The accumulator object. 55 | * @param {string} file - The file path. 56 | * @returns {Record} The updated accumulator object. 57 | */ 58 | const entry = entryFiles.reduce((acc, file) => { 59 | const entryName = relative(entryDir, dirname(file)); 60 | acc[entryName] = file; 61 | return acc; 62 | }, {}); 63 | 64 | const TA_NAME = "Splunk_TA_Example"; 65 | const outputPath = path.resolve( 66 | __dirname, 67 | "../output", 68 | TA_NAME, 69 | "appserver/static/js/build/custom", 70 | ); 71 | 72 | export default { 73 | mode: DEBUG ? "development" : "production", 74 | entry: entry, 75 | output: { 76 | path: outputPath, 77 | filename: (pathData) => 78 | pathData.chunk.name in entry 79 | ? "[name].js" 80 | : DEBUG 81 | ? "[name].js?[hash]" 82 | : "[name].[contenthash].js", 83 | chunkFilename: DEBUG 84 | ? "[name].[id].js?[chunkhash]" 85 | : "[name].[id].[chunkhash].js", 86 | library: { 87 | type: "module", 88 | }, 89 | }, 90 | module: { 91 | rules: [ 92 | { 93 | test: /.tsx?$/, 94 | use: [ 95 | { 96 | loader: "babel-loader", 97 | }, 98 | ], 99 | exclude: /node_modules/, 100 | }, 101 | { 102 | test: /\.css$/, 103 | use: ["style-loader", "css-loader"], 104 | }, 105 | ], 106 | }, 107 | resolve: { 108 | extensions: [".tsx", ".ts", ".js"], 109 | }, 110 | plugins: [new ForkTsCheckerWebpackPlugin()], 111 | devtool: "source-map", 112 | devServer: { 113 | hot: false, 114 | proxy: [ 115 | { 116 | target: proxyTargetUrl, 117 | context(pathname) { 118 | return !isItStaticAsset(pathname); 119 | }, 120 | }, 121 | ], 122 | setupMiddlewares: (middlewares, devServer) => { 123 | devServer.app.use((req, res, next) => { 124 | if (isItStaticAsset(req.url)) { 125 | req.url = req.url.replace(jsAssetsRegex, "$1"); 126 | } 127 | next(); 128 | }); 129 | 130 | return middlewares; 131 | }, 132 | }, 133 | experiments: { 134 | outputModule: true, 135 | }, 136 | }; 137 | --------------------------------------------------------------------------------
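Taken together, the files above describe how a custom UI extension is built and served: webpack discovers every `index.ts`/`index.tsx` under `ui/src/ucc-ui-extensions`, emits one bundle per folder into `appserver/static/js/build/custom`, and `globalConfig.json` points a `"custom"` component at that folder name. The sketch below shows how such an extension might consume the shared request utilities from `ui/src/utils`; the component name `StatusPanel`, the import path, and the `example_api/status` endpoint are hypothetical, and the exact interface UCC expects from the default export is defined by the extension examples in `ui/src/ucc-ui-extensions`, not by this snippet.

```tsx
import * as React from "react";

// Hypothetical extension component; the relative path assumes a folder
// directly under ui/src/ucc-ui-extensions, next to AdvancedInputsTab.
import { useGetRequest } from "../../utils/apiHooks";

function StatusPanel() {
  // useGetRequest hashes the params object, so an identical literal on every
  // render maps to the same cached request and the fetch effect runs only once.
  const { data, isLoading, error } = useGetRequest({
    endpointUrl: "example_api/status", // hypothetical REST endpoint of the add-on
  });

  if (isLoading) {
    return <div data-test="status-loading">Loading...</div>;
  }
  if (error) {
    return <div data-test="status-error">{String(error)}</div>;
  }
  return <pre data-test="status-data">{JSON.stringify(data, null, 2)}</pre>;
}

export default StatusPanel;
```

The webpack configuration above would emit this bundle to `appserver/static/js/build/custom/<FolderName>.js`, which matches the `"src": "FolderName"` reference used for `"custom"`/`"external"` components in globalConfig.json.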