├── .addonmatrix ├── .fossa.yml ├── .github ├── CODEOWNERS └── workflows │ ├── aggreements.yaml │ └── build-test-release.yml ├── .gitignore ├── .licenserc.yaml ├── .pre-commit-config.yaml ├── .releaserc ├── .semgrepignore ├── LICENSE ├── README.md ├── poetry.lock ├── pyproject.toml ├── renovate.json ├── scripts └── build-demo-addon.sh ├── splunktaucclib ├── __init__.py ├── alert_actions_base.py ├── cim_actions.py ├── common │ ├── __init__.py │ ├── log.py │ └── xml_dom_parser.py ├── config.py ├── global_config │ ├── __init__.py │ ├── configuration.py │ └── schema.py ├── legacy │ ├── __init__.py │ ├── credentials.py │ ├── rest.py │ └── util.py ├── modinput_wrapper │ ├── __init__.py │ └── base_modinput.py ├── rest_handler │ ├── __init__.py │ ├── admin_external.py │ ├── base_hook_mixin.py │ ├── credentials.py │ ├── eai.py │ ├── endpoint │ │ ├── __init__.py │ │ ├── converter.py │ │ ├── field.py │ │ └── validator.py │ ├── entity.py │ ├── error.py │ ├── error_ctl.py │ ├── handler.py │ ├── normaliser.py │ ├── schema.py │ └── util.py └── splunk_aoblib │ ├── __init__.py │ ├── rest_helper.py │ ├── rest_migration.py │ ├── setup_util.py │ └── utility.py └── tests ├── integration ├── demo │ ├── README.md │ ├── additional_packaging.py │ ├── globalConfig.json │ └── package │ │ ├── LICENSES │ │ └── Apache-2.0.txt │ │ ├── README.txt │ │ ├── README │ │ └── inputs.conf.spec │ │ ├── app.manifest │ │ ├── bin │ │ ├── demo.py │ │ ├── demo_rh_demo.py │ │ ├── demo_rh_settings.py │ │ └── demo_rh_test_reload_override.py │ │ └── default │ │ ├── app.conf │ │ └── inputs.conf └── test_rest_handler_handler.py └── unit ├── conftest.py ├── fake_module.py ├── test_admin_external.py └── test_rest_handler_error.py /.addonmatrix: -------------------------------------------------------------------------------- 1 | --splunkfeatures METRICS_MULTI,PYTHON3 -------------------------------------------------------------------------------- /.fossa.yml: -------------------------------------------------------------------------------- 1 | version: 3 2 | server: https://app.fossa.com 3 | 4 | project: 5 | id: "addonfactory-ucc-library" 6 | team: "TA-Automation" 7 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @splunk/ucc-be-developers 2 | -------------------------------------------------------------------------------- /.github/workflows/aggreements.yaml: -------------------------------------------------------------------------------- 1 | name: "CLA Assistant" 2 | on: 3 | issue_comment: 4 | types: [created] 5 | pull_request_target: 6 | types: [opened, closed, synchronize] 7 | 8 | jobs: 9 | call-workflow-agreements: 10 | uses: splunk/addonfactory-github-workflows/.github/workflows/reusable-agreements.yaml@v1.6 11 | permissions: 12 | actions: read 13 | contents: read 14 | pull-requests: write 15 | statuses: read 16 | secrets: 17 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 18 | PERSONAL_ACCESS_TOKEN: ${{ secrets.PAT_CLATOOL }} 19 | -------------------------------------------------------------------------------- /.github/workflows/build-test-release.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - "main" 7 | - "develop" 8 | tags: 9 | - "v[0-9]+.[0-9]+.[0-9]+" 10 | pull_request: 11 | branches: [main, develop] 12 | jobs: 13 | meta: 14 | runs-on: ubuntu-latest 15 | outputs: 16 | matrix_supportedSplunk: ${{ 
steps.matrix.outputs.supportedSplunk }} 17 | steps: 18 | - uses: actions/checkout@v4 19 | - id: matrix 20 | uses: splunk/addonfactory-test-matrix-action@v2.1 21 | 22 | fossa-scan: 23 | continue-on-error: true 24 | runs-on: ubuntu-latest 25 | steps: 26 | - uses: actions/checkout@v4 27 | - name: run fossa anlyze and create report 28 | run: | 29 | curl -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/master/install-latest.sh | bash 30 | fossa analyze --debug 31 | fossa report attribution --format text > /tmp/THIRDPARTY 32 | env: 33 | FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }} 34 | - name: upload THIRDPARTY file 35 | uses: actions/upload-artifact@v4 36 | with: 37 | name: THIRDPARTY 38 | path: /tmp/THIRDPARTY 39 | - name: run fossa test 40 | run: | 41 | fossa test --debug 42 | env: 43 | FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }} 44 | 45 | compliance-copyrights: 46 | runs-on: ubuntu-latest 47 | steps: 48 | - uses: actions/checkout@v4 49 | - uses: apache/skywalking-eyes@v0.6.0 50 | 51 | pre-commit: 52 | runs-on: ubuntu-latest 53 | steps: 54 | - uses: actions/checkout@v4 55 | - uses: actions/setup-python@v5 56 | with: 57 | python-version: "3.12" 58 | - uses: pre-commit/action@v3.0.1 59 | 60 | semgrep: 61 | uses: splunk/sast-scanning/.github/workflows/sast-scan.yml@main 62 | secrets: 63 | SEMGREP_KEY: ${{ secrets.SEMGREP_PUBLISH_TOKEN }} 64 | 65 | build: 66 | runs-on: ubuntu-22.04 67 | needs: 68 | - fossa-scan 69 | - compliance-copyrights 70 | steps: 71 | - uses: actions/checkout@v4 72 | with: 73 | persist-credentials: false 74 | - uses: actions/setup-python@v5 75 | with: 76 | python-version: "3.7" 77 | - run: curl -sSL https://install.python-poetry.org | python3 - --version 1.5.1 78 | - name: Install Poetry 79 | run: | 80 | poetry build 81 | - uses: actions/upload-artifact@v4 82 | if: always() 83 | with: 84 | name: Package 85 | path: dist/ 86 | 87 | test-unit: 88 | name: test-unit ${{ matrix.python-version }} 89 | runs-on: ubuntu-22.04 90 | continue-on-error: true 91 | strategy: 92 | matrix: 93 | python-version: 94 | - "3.7" 95 | - "3.8" 96 | - "3.9" 97 | - "3.10" 98 | - "3.11" 99 | - "3.12" 100 | - "3.13" 101 | steps: 102 | - uses: actions/checkout@v4 103 | - uses: actions/setup-python@v5 104 | with: 105 | python-version: ${{ matrix.python-version }} 106 | - run: curl -sSL https://install.python-poetry.org | python3 - --version 1.5.1 107 | - name: run tests 108 | run: | 109 | poetry install 110 | poetry run pytest tests/unit 111 | 112 | integration-tests: 113 | runs-on: ubuntu-22.04 114 | needs: 115 | - meta 116 | - test-unit 117 | strategy: 118 | fail-fast: false 119 | matrix: 120 | splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }} 121 | env: 122 | SPLUNK_USER: user 123 | SPLUNK_USER_PWD: Chang3d'!' 124 | SPLUNK_ADMIN: admin 125 | SPLUNK_ADMIN_PWD: Chang3d'!' 
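# The steps below provision a throwaway Splunk instance and run the integration
# suite against it. To reproduce locally (a rough sketch under assumptions: Splunk
# Enterprise is already installed at /opt/splunk and the demo package is produced
# in the repository root as demo-0.0.1.tar.gz), export the SPLUNK_* variables
# above, then build and install the demo add-on and run the tests:
#   ./scripts/build-demo-addon.sh
#   sudo /opt/splunk/bin/splunk install app demo-0.0.1.tar.gz -auth "$SPLUNK_ADMIN:$SPLUNK_ADMIN_PWD"
#   poetry run pytest tests/integration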
126 | steps: 127 | - uses: actions/checkout@v4 128 | - uses: actions/setup-python@v5 129 | with: 130 | python-version: 3.7 131 | - run: curl -sSL https://install.python-poetry.org | python3 - --version 1.5.1 132 | - run: ./scripts/build-demo-addon.sh 133 | - name: install Splunk 134 | run: | 135 | export SPLUNK_PRODUCT=splunk 136 | export SPLUNK_VERSION=${{ matrix.splunk.version }} 137 | export SPLUNK_BUILD=${{ matrix.splunk.build }} 138 | export SPLUNK_SLUG=$SPLUNK_VERSION-$SPLUNK_BUILD 139 | export SPLUNK_ARCH=amd64 140 | export SPLUNK_LINUX_FILENAME=splunk-${SPLUNK_VERSION}-${SPLUNK_BUILD}-linux-${SPLUNK_ARCH}.tgz 141 | 142 | # Before 9.4, the filename was splunk---Linux-x86_64.tgz 143 | if [[ $(echo $SPLUNK_VERSION | cut -d. -f1) -le 8 ]] || \ 144 | [[ $SPLUNK_VERSION == 9.0.* ]] || \ 145 | [[ $SPLUNK_VERSION == 9.1.* ]] || \ 146 | [[ $SPLUNK_VERSION == 9.2.* ]] || \ 147 | [[ $SPLUNK_VERSION == 9.3.* ]] 148 | then 149 | export SPLUNK_ARCH=x86_64 150 | export SPLUNK_LINUX_FILENAME=splunk-${SPLUNK_VERSION}-${SPLUNK_BUILD}-Linux-${SPLUNK_ARCH}.tgz 151 | fi 152 | 153 | export SPLUNK_BUILD_URL=https://download.splunk.com/products/${SPLUNK_PRODUCT}/releases/${SPLUNK_VERSION}/linux/${SPLUNK_LINUX_FILENAME} 154 | echo "$SPLUNK_BUILD_URL" 155 | export SPLUNK_HOME=/opt/splunk 156 | wget -qO /tmp/splunk.tgz "${SPLUNK_BUILD_URL}" 157 | sudo tar -C /opt -zxf /tmp/splunk.tgz 158 | sudo chown -R "$USER":"$USER" /opt/splunk 159 | echo -e "[user_info]\nUSERNAME=${SPLUNK_ADMIN}\nPASSWORD=${SPLUNK_ADMIN_PWD}" | sudo tee -a /opt/splunk/etc/system/local/user-seed.conf 160 | echo 'OPTIMISTIC_ABOUT_FILE_LOCKING=1' | sudo tee -a /opt/splunk/etc/splunk-launch.conf 161 | sudo /opt/splunk/bin/splunk start --accept-license 162 | sudo /opt/splunk/bin/splunk add user ${SPLUNK_USER} -password ${SPLUNK_USER_PWD} -role user -force-change-pass false -auth ${SPLUNK_ADMIN}:${SPLUNK_ADMIN_PWD} 163 | sudo /opt/splunk/bin/splunk install app demo-0.0.1.tar.gz -auth ${SPLUNK_ADMIN}:${SPLUNK_ADMIN_PWD} 164 | sudo /opt/splunk/bin/splunk restart 165 | - name: run tests 166 | run: | 167 | poetry run pytest tests/integration 168 | 169 | publish: 170 | needs: 171 | - pre-commit 172 | - build 173 | - test-unit 174 | - integration-tests 175 | runs-on: ubuntu-22.04 176 | steps: 177 | - uses: actions/checkout@v4 178 | with: 179 | # Very important: semantic-release won't trigger a tagged 180 | # build if this is not set false 181 | persist-credentials: false 182 | - uses: actions/setup-python@v5 183 | with: 184 | python-version: "3.7" 185 | - run: curl -sSL https://install.python-poetry.org | python3 - --version 1.5.1 186 | - name: Install and build 187 | run: | 188 | poetry install 189 | poetry build 190 | - id: semantic 191 | uses: splunk/semantic-release-action@v1.3 192 | with: 193 | git_committer_name: ${{ secrets.SA_GH_USER_NAME }} 194 | git_committer_email: ${{ secrets.SA_GH_USER_EMAIL }} 195 | gpg_private_key: ${{ secrets.SA_GPG_PRIVATE_KEY }} 196 | passphrase: ${{ secrets.SA_GPG_PASSPHRASE }} 197 | extra_plugins: | 198 | semantic-release-replace-plugin 199 | env: 200 | GITHUB_TOKEN: ${{ secrets.GH_TOKEN_ADMIN }} 201 | - if: ${{ steps.semantic.outputs.new_release_published == 'true' }} 202 | run: | 203 | poetry build 204 | poetry publish -n -u ${{ secrets.PYPI_USERNAME }} -p ${{ secrets.PYPI_TOKEN }} 205 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | 
__pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # Installer logs 31 | pip-log.txt 32 | pip-delete-this-directory.txt 33 | 34 | # Unit test / coverage reports 35 | htmlcov/ 36 | .tox/ 37 | .nox/ 38 | .coverage 39 | .coverage.* 40 | .cache 41 | nosetests.xml 42 | coverage.xml 43 | *.cover 44 | *.py,cover 45 | .hypothesis/ 46 | .pytest_cache/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | db.sqlite3 56 | db.sqlite3-journal 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # Jupyter Notebook 72 | .ipynb_checkpoints 73 | 74 | # IPython 75 | profile_default/ 76 | ipython_config.py 77 | 78 | # pyenv 79 | .python-version 80 | 81 | # pipenv 82 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 83 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 84 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 85 | # install all needed dependencies. 86 | #Pipfile.lock 87 | 88 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 89 | __pypackages__/ 90 | 91 | # Celery stuff 92 | celerybeat-schedule 93 | celerybeat.pid 94 | 95 | # SageMath parsed files 96 | *.sage.py 97 | 98 | # Environments 99 | .env 100 | .venv 101 | env/ 102 | venv/ 103 | ENV/ 104 | env.bak/ 105 | venv.bak/ 106 | 107 | # Spyder project settings 108 | .spyderproject 109 | .spyproject 110 | 111 | # Rope project settings 112 | .ropeproject 113 | 114 | # mkdocs documentation 115 | /site 116 | 117 | # mypy 118 | .mypy_cache/ 119 | .dmypy.json 120 | dmypy.json 121 | 122 | # Pyre type checker 123 | .pyre/ 124 | 125 | # PyCharm 126 | .idea/ 127 | output 128 | demo-0.0.1.tar.gz -------------------------------------------------------------------------------- /.licenserc.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | header: 17 | license: 18 | spdx-id: Apache-2.0 19 | copyright-owner: Splunk Inc. 
20 | 21 | paths-ignore: 22 | - ".github/" 23 | - ".vscode/" 24 | - ".reuse/" 25 | - "dist" 26 | - "**/*.md" 27 | - "LICENSE" 28 | - "NOTICE" 29 | - "*.lock" 30 | - "tests/**" 31 | - ".*" 32 | - "renovate.json" 33 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/asottile/pyupgrade 3 | rev: v2.34.0 4 | hooks: 5 | - id: pyupgrade 6 | args: [--py37-plus] 7 | - repo: https://github.com/psf/black 8 | rev: 22.6.0 9 | hooks: 10 | - id: black 11 | -------------------------------------------------------------------------------- /.releaserc: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | { 17 | branches: 18 | [ 19 | "+([0-9])?(.{+([0-9]),x}).x", 20 | "main", 21 | { name: "develop", prerelease: "beta", channel: "beta" }, 22 | ], 23 | plugins: 24 | [ 25 | "@semantic-release/commit-analyzer", 26 | [ 27 | "semantic-release-replace-plugin", 28 | { 29 | "replacements": [ 30 | { 31 | "files": ["splunktaucclib/__init__.py"], 32 | "from": "__version__ ?=.*", 33 | "to": "__version__ = \"${nextRelease.version}\"", 34 | "results": [ 35 | { 36 | "file": "splunktaucclib/__init__.py", 37 | "hasChanged": true, 38 | "numMatches": 1, 39 | "numReplacements": 1 40 | } 41 | ], 42 | "countMatches": true 43 | }, 44 | { 45 | "files": ["pyproject.toml"], 46 | "from": "version ?=.*", 47 | "to": "version = \"${nextRelease.version}\"", 48 | "results": [ 49 | { 50 | "file": "pyproject.toml", 51 | "hasChanged": true, 52 | "numMatches": 1, 53 | "numReplacements": 1 54 | } 55 | ], 56 | "countMatches": true 57 | } 58 | ] 59 | } 60 | ], 61 | "@semantic-release/release-notes-generator", 62 | [ 63 | "@semantic-release/exec", 64 | { 65 | "verifyReleaseCmd": "echo \"version=${nextRelease.version}\" >> $GITHUB_OUTPUT", 66 | "successCmd": "echo \"new_release_published=${'true'}\" >> $GITHUB_OUTPUT" 67 | }, 68 | ], 69 | [ 70 | "@semantic-release/git", 71 | { 72 | "assets": ["NOTICE", "pyproject.toml", "splunktaucclib/__init__.py"], 73 | "message": "chore(release): ${nextRelease.version}\n\n${nextRelease.notes}", 74 | }, 75 | ], 76 | ["@semantic-release/github", { "assets": ["NOTICE", "pyproject.toml"] }], 77 | ], 78 | } 79 | -------------------------------------------------------------------------------- /.semgrepignore: -------------------------------------------------------------------------------- 1 | ## Default semgrep ignore 2 | # Ignore git items 3 | .gitignore 4 | .git/ 5 | :include .gitignore 6 | 7 | # Common large directories 8 | node_modules/ 9 | build/ 10 | dist/ 11 | vendor/ 12 | env/ 13 | .env/ 14 | venv/ 15 | .venv/ 16 | *.min.js 17 | 18 | # Common test directories 19 | test/ 20 | tests/ 21 | 22 | # Semgrep rules folder 23 | .semgrep 24 | 25 | ## Additional files to be ignored 26 
| .circleci/ 27 | .github/ 28 | .reuse/ 29 | .vscode/ 30 | .idea/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2021 Splunk Inc. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # README 2 | 3 | Splunk TA UCC Library Python (splunktaucclib) is an open source helper library used by Splunk Add-ons. 4 | This library is used by Splunk Add-on Builder and Splunk UCC-based add-ons, and is intended for use by partner 5 | developers. This library extends the Splunk SDK for Python. 6 | 7 | ## Communication channels 8 | 9 | If you are a Splunker use: https://splunk.slack.com/archives/C03T8QCHBTJ 10 | 11 | If you are a part of the community use: https://splunk-usergroups.slack.com/archives/C03SG3ZL4S1 12 | 13 | ## Support 14 | 15 | Splunk TA UCC Library is an open source product developed by Splunkers. This library is not "Supported Software" by Splunk, Inc.; issues and defects can be reported via the public issue tracker. 16 | 17 | ## License 18 | 19 | * Configuration and documentation licensed subject to [APACHE-2.0](LICENSE) 20 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | 17 | [tool.poetry] 18 | name = "splunktaucclib" 19 | version = "8.0.0" 20 | description = "" 21 | authors = ["Splunk "] 22 | license = "APACHE-2.0" 23 | 24 | [tool.poetry.dependencies] 25 | python = ">=3.7,<3.14" 26 | defusedxml = ">=0.7" 27 | urllib3 = "<2" 28 | PySocks = "^1.7.1" 29 | splunk-sdk = ">=2.0.2" 30 | solnlib = ">=5" 31 | 32 | [tool.poetry.group.dev.dependencies] 33 | pytest = ">=7" 34 | splunk-add-on-ucc-framework = ">=5.53.0" 35 | requests = "^2.31.0" 36 | 37 | [build-system] 38 | requires = ["poetry>=1.0.0"] 39 | build-backend = "poetry.masonry.api" 40 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base", 4 | "group:all", 5 | ":semanticCommitTypeAll(chore)", 6 | ":disableDependencyDashboard" 7 | ], 8 | "lockFileMaintenance": { 9 | "enabled": true, 10 | "extends": [ 11 | "schedule:weekends" 12 | ] 13 | }, 14 | "schedule": [ 15 | "every 2 weeks on Sunday" 16 | ], 17 | "packageRules": [ 18 | { 19 | "matchPackageNames": ["urllib3"], 20 | "allowedVersions": "<2.0.0" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /scripts/build-demo-addon.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | poetry install 16 | poetry build 17 | poetry run ucc-gen build \ 18 | --source=tests/integration/demo/package \ 19 | --config=tests/integration/demo/globalConfig.json \ 20 | --ta-version=0.0.1 21 | poetry run pip install dist/*.whl --target output/demo/lib 22 | poetry run ucc-gen package --path output/demo 23 | -------------------------------------------------------------------------------- /splunktaucclib/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | __version__ = "8.0.0" 18 | -------------------------------------------------------------------------------- /splunktaucclib/alert_actions_base.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 
3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | import csv 18 | import gzip 19 | import logging 20 | import sys 21 | 22 | from solnlib import log 23 | 24 | from splunktaucclib.cim_actions import ModularAction 25 | from splunktaucclib.rest_handler import util 26 | from splunktaucclib.splunk_aoblib.rest_helper import TARestHelper 27 | from splunktaucclib.splunk_aoblib.setup_util import Setup_Util 28 | 29 | try: 30 | from splunk.clilib.bundle_paths import make_splunkhome_path 31 | except ImportError: 32 | from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path 33 | 34 | sys.path.insert(0, make_splunkhome_path(["etc", "apps", "Splunk_SA_CIM", "lib"])) 35 | 36 | 37 | class ModularAlertBase(ModularAction): 38 | def __init__(self, ta_name, alert_name): 39 | self._alert_name = alert_name 40 | self._logger_name = alert_name + "_modalert" 41 | self._logger = log.Logs().get_logger(self._logger_name) 42 | super().__init__(sys.stdin.read(), self._logger, alert_name) 43 | self.setup_util_module = None 44 | self.setup_util = None 45 | self.result_handle = None 46 | self.ta_name = ta_name 47 | self.splunk_uri = self.settings.get("server_uri") 48 | self.setup_util = Setup_Util(self.splunk_uri, self.session_key, self._logger) 49 | 50 | self.rest_helper = TARestHelper(self._logger) 51 | 52 | def log_error(self, msg): 53 | self.message(msg, "failure", level=logging.ERROR) 54 | 55 | def log_info(self, msg): 56 | self.message(msg, "success", level=logging.INFO) 57 | 58 | def log_debug(self, msg): 59 | self.message(msg, None, level=logging.DEBUG) 60 | 61 | def log_warn(self, msg): 62 | self.message(msg, None, level=logging.WARN) 63 | 64 | def set_log_level(self, level): 65 | self._logger.setLevel(level) 66 | 67 | def get_param(self, param_name): 68 | return self.configuration.get(param_name) 69 | 70 | def get_global_setting(self, var_name): 71 | return self.setup_util.get_customized_setting(var_name) 72 | 73 | def get_user_credential(self, username): 74 | """ 75 | if the username exists, return 76 | { 77 | "username": username, 78 | "password": credential 79 | } 80 | """ 81 | return self.setup_util.get_credential_by_username(username) 82 | 83 | def get_user_credential_by_account_id(self, account_id): 84 | """ 85 | if the account_id exists, return 86 | { 87 | "username": username, 88 | "password": credential 89 | } 90 | """ 91 | return self.setup_util.get_credential_by_id(account_id) 92 | 93 | @property 94 | def log_level(self): 95 | return self.get_log_level() 96 | 97 | @property 98 | def proxy(self): 99 | return self.get_proxy() 100 | 101 | def get_log_level(self): 102 | return self.setup_util.get_log_level() 103 | 104 | def get_proxy(self): 105 | """if the proxy setting is set. return a dict like 106 | { 107 | proxy_url: ... , 108 | proxy_port: ... , 109 | proxy_username: ... , 110 | proxy_password: ... , 111 | proxy_type: ... , 112 | proxy_rdns: ... 
113 | } 114 | """ 115 | return self.setup_util.get_proxy_settings() 116 | 117 | def _get_proxy_uri(self): 118 | proxy = self.get_proxy() 119 | return util.get_proxy_uri(proxy) 120 | 121 | def send_http_request( 122 | self, 123 | url, 124 | method, 125 | parameters=None, 126 | payload=None, 127 | headers=None, 128 | cookies=None, 129 | verify=True, 130 | cert=None, 131 | timeout=None, 132 | use_proxy=True, 133 | ): 134 | return self.rest_helper.send_http_request( 135 | url=url, 136 | method=method, 137 | parameters=parameters, 138 | payload=payload, 139 | headers=headers, 140 | cookies=cookies, 141 | verify=verify, 142 | cert=cert, 143 | timeout=timeout, 144 | proxy_uri=self._get_proxy_uri() if use_proxy else None, 145 | ) 146 | 147 | def build_http_connection(self, config, timeout=120, disable_ssl_validation=False): 148 | raise NotImplementedError( 149 | "Replace the usage of this function to send_http_request function of same class " 150 | "or use requests.request method" 151 | ) 152 | 153 | def process_event(self, *args, **kwargs): 154 | raise NotImplemented() 155 | 156 | def pre_handle(self, num, result): 157 | result.setdefault("rid", str(num)) 158 | self.update(result) 159 | return result 160 | 161 | def get_events(self): 162 | try: 163 | self.result_handle = gzip.open(self.results_file, "rt") 164 | return ( 165 | self.pre_handle(num, result) 166 | for num, result in enumerate(csv.DictReader(self.result_handle)) 167 | ) 168 | except OSError: 169 | msg = "Error: {}." 170 | self.log_error(msg.format("No search result. Cannot send alert action.")) 171 | sys.exit(2) 172 | 173 | def prepare_meta_for_cam(self): 174 | with gzip.open(self.results_file, "rt") as rf: 175 | for num, result in enumerate(csv.DictReader(rf)): 176 | result.setdefault("rid", str(num)) 177 | self.update(result) 178 | self.invoke() 179 | break 180 | 181 | def run(self, argv): 182 | status = 0 183 | if len(argv) < 2 or argv[1] != "--execute": 184 | msg = f'Error: argv="{argv}", expected="--execute"' 185 | print(msg, file=sys.stderr) 186 | sys.exit(1) 187 | 188 | # prepare meta first for permission lack error handling: TAB-2455 189 | self.prepare_meta_for_cam() 190 | try: 191 | level = self.get_log_level() 192 | if level: 193 | self._logger.setLevel(level) 194 | except Exception as e: 195 | if str(e) and "403" in str(e): # Handled e.message with str(e) 196 | self.log_error("User does not have permissions") 197 | else: 198 | self.log_error("Unable to set log level") 199 | sys.exit(2) 200 | 201 | try: 202 | status = self.process_event() 203 | except OSError: 204 | msg = "Error: {}." 205 | self.log_error(msg.format("No search result. Cannot send alert action.")) 206 | sys.exit(2) 207 | except Exception as e: 208 | msg = "Unexpected error: {}." 209 | if str(e): # e.message handled 210 | self.log_error(msg.format(str(e))) # e.message handled 211 | else: 212 | import traceback 213 | 214 | self.log_error(msg.format(traceback.format_exc())) 215 | sys.exit(2) 216 | finally: 217 | if self.result_handle: 218 | self.result_handle.close() 219 | 220 | return status 221 | -------------------------------------------------------------------------------- /splunktaucclib/common/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | import hashlib 18 | import json 19 | 20 | 21 | def load_schema_file(schema_file): 22 | """ 23 | Load schema file. 24 | """ 25 | 26 | with open(schema_file) as f: 27 | ret = json.load(f) 28 | 29 | common = ret.get("_common_", dict()) 30 | if common: 31 | for k, v in list(ret.items()): 32 | if k == "_common_" or not isinstance(v, dict): 33 | continue 34 | # merge common into other values 35 | for _k, _v in list(common.items()): 36 | if _k not in v: 37 | v[_k] = _v 38 | ret[k] = v 39 | 40 | return ret 41 | 42 | 43 | def md5_of_dict(data): 44 | """ 45 | MD5 of dict data. 46 | """ 47 | 48 | md5 = hashlib.sha256() 49 | if isinstance(data, dict): 50 | for key in sorted(data.keys()): 51 | md5.update(repr(key)) 52 | md5.update(md5_of_dict(data[key])) 53 | elif isinstance(data, list): 54 | for item in sorted(data): 55 | md5.update(md5_of_dict(item)) 56 | else: 57 | md5.update(repr(data)) 58 | 59 | return md5.hexdigest() 60 | 61 | 62 | class UCCException(Exception): 63 | """ 64 | Dispatch engine exception. 65 | """ 66 | 67 | pass 68 | -------------------------------------------------------------------------------- /splunktaucclib/common/log.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | import logging 18 | 19 | from solnlib import log 20 | 21 | _level_by_name = { 22 | "DEBUG": logging.DEBUG, 23 | "INFO": logging.INFO, 24 | "WARNING": logging.WARNING, 25 | "ERROR": logging.ERROR, 26 | "FATAL": logging.FATAL, 27 | "CRITICAL": logging.CRITICAL, 28 | } 29 | 30 | 31 | def _get_log_level(log_level, default_level=logging.INFO): 32 | if not log_level: 33 | return default_level 34 | if isinstance(log_level, str): 35 | log_level = log_level.upper() 36 | for k, v in _level_by_name.items(): 37 | if k.startswith(log_level): 38 | return v 39 | if isinstance(log_level, int): 40 | if log_level in list(_level_by_name.values()): 41 | return log_level 42 | return default_level 43 | 44 | 45 | def set_log_level(log_level): 46 | """ 47 | Set log level. 48 | """ 49 | log.Logs().set_level(_get_log_level(log_level)) 50 | 51 | 52 | # Global logger 53 | logger = log.Logs().get_logger("ucc_lib") 54 | -------------------------------------------------------------------------------- /splunktaucclib/common/xml_dom_parser.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 
3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | import re 18 | 19 | from defusedxml import ElementTree as et 20 | 21 | 22 | def parse_conf_xml_dom(xml_content): 23 | """ 24 | @xml_content: XML DOM from splunkd 25 | """ 26 | m = re.search(r'xmlns="([^"]+)"', xml_content) 27 | ns = m.group(1) 28 | m = re.search(r'xmlns:s="([^"]+)"', xml_content) 29 | sub_ns = m.group(1) 30 | entry_path = "./{%s}entry" % ns 31 | stanza_path = "./{%s}title" % ns 32 | key_path = f"./{{{ns}}}content/{{{sub_ns}}}dict/{{{sub_ns}}}key" 33 | meta_path = f"./{{{sub_ns}}}dict/{{{sub_ns}}}key" 34 | list_path = f"./{{{sub_ns}}}list/{{{sub_ns}}}item" 35 | 36 | xml_conf = et.fromstring(xml_content) 37 | stanza_objs = [] 38 | for entry in xml_conf.iterfind(entry_path): 39 | for stanza in entry.iterfind(stanza_path): 40 | stanza_obj = {"name": stanza.text, "stanza": stanza.text} 41 | break 42 | else: 43 | continue 44 | 45 | for key in entry.iterfind(key_path): 46 | if key.get("name") == "eai:acl": 47 | meta = {} 48 | for k in key.iterfind(meta_path): 49 | meta[k.get("name")] = k.text 50 | stanza_obj[key.get("name")] = meta 51 | elif key.get("name") != "eai:attributes": 52 | name = key.get("name") 53 | if name.startswith("eai:"): 54 | name = name[4:] 55 | list_vals = [k.text for k in key.iterfind(list_path)] 56 | if list_vals: 57 | stanza_obj[name] = list_vals 58 | else: 59 | stanza_obj[name] = key.text 60 | if key.text == "None": 61 | stanza_obj[name] = None 62 | stanza_objs.append(stanza_obj) 63 | return stanza_objs 64 | -------------------------------------------------------------------------------- /splunktaucclib/global_config/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | """ 18 | Global Config Module 19 | """ 20 | 21 | 22 | import urllib.parse 23 | 24 | from solnlib.splunk_rest_client import SplunkRestClient 25 | 26 | from .configuration import Configs, Configuration, GlobalConfigError, Inputs, Settings 27 | from .schema import GlobalConfigSchema 28 | 29 | __all__ = [ 30 | "GlobalConfigError", 31 | "GlobalConfigSchema", 32 | "GlobalConfig", 33 | "Inputs", 34 | "Configs", 35 | "Settings", 36 | ] 37 | 38 | 39 | class GlobalConfig: 40 | def __init__(self, splunkd_uri, session_key, schema): 41 | """ 42 | Global Config. 
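Illustrative usage (a minimal sketch; it assumes ``splunkd_uri`` and
``session_key`` for a reachable splunkd instance, and a parsed
``globalConfig.json`` handed to ``GlobalConfigSchema``)::

    >>> import json
    >>> from splunktaucclib.global_config import GlobalConfig, GlobalConfigSchema
    >>> with open("globalConfig.json") as fp:
    ...     schema = GlobalConfigSchema(json.load(fp))
    >>> global_config = GlobalConfig(splunkd_uri, session_key, schema)
    >>> inputs = global_config.inputs.load()
    >>> settings = global_config.settings.load()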
43 | 44 | :param splunkd_uri: 45 | :param session_key: 46 | :param schema: 47 | :type schema: GlobalConfigSchema 48 | """ 49 | self._splunkd_uri = splunkd_uri 50 | self._session_key = session_key 51 | self._schema = schema 52 | 53 | splunkd_info = urllib.parse.urlparse(self._splunkd_uri) 54 | self._client = SplunkRestClient( 55 | self._session_key, 56 | self._schema.product, 57 | scheme=splunkd_info.scheme, 58 | host=splunkd_info.hostname, 59 | port=splunkd_info.port, 60 | ) 61 | self._configuration = Configuration(self._client, self._schema) 62 | self._inputs = Inputs(self._client, self._schema) 63 | self._configs = Configs(self._client, self._schema) 64 | self._settings = Settings(self._client, self._schema) 65 | 66 | @property 67 | def inputs(self): 68 | return self._inputs 69 | 70 | @property 71 | def configs(self): 72 | return self._configs 73 | 74 | @property 75 | def settings(self): 76 | return self._settings 77 | 78 | # add support for batch save of configuration payload 79 | def save(self, payload): 80 | return self._configuration.save(payload) 81 | -------------------------------------------------------------------------------- /splunktaucclib/global_config/configuration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | 18 | import copy 19 | import json 20 | from multiprocessing.pool import ThreadPool 21 | 22 | from splunklib.binding import HTTPError 23 | 24 | from ..rest_handler.handler import RestHandler 25 | from ..rest_handler.schema import RestSchema 26 | 27 | __all__ = [ 28 | "GlobalConfigError", 29 | "Configuration", 30 | "Inputs", 31 | "Configs", 32 | "Settings", 33 | ] 34 | 35 | 36 | class GlobalConfigError(Exception): 37 | pass 38 | 39 | 40 | class Configuration: 41 | """ 42 | Splunk Configuration Handler. 43 | """ 44 | 45 | FILTERS = ["eai:appName", "eai:acl", "eai:userName"] 46 | ENTITY_NAME = "name" 47 | SETTINGS = "settings" 48 | NOT_FOUND = "[404]: Not Found" 49 | 50 | def __init__(self, splunkd_client, schema): 51 | """ 52 | 53 | :param splunkd_client: SplunkRestClient 54 | :param schema: 55 | """ 56 | self._client = splunkd_client 57 | self._schema = schema 58 | 59 | def load(self, *args, **kwargs): 60 | """ 61 | Load all stored configuration for given schema. 62 | 63 | :param args: 64 | :param kwargs: 65 | :return: 66 | """ 67 | raise NotImplementedError() 68 | 69 | def save_stanza(self, item): 70 | """ 71 | Save configuration with type_name and configuration 72 | 73 | :param item: 74 | :return: error while save the configuration 75 | """ 76 | return self._save_configuration(item[0], item[1]) 77 | 78 | def save(self, payload): 79 | """ 80 | Save configuration. Return error while saving. 81 | It includes creating and updating. That is, it will try to 82 | update first, then create if NOT FOUND error occurs. 83 | 84 | :param payload: same format with return of ``load``. 
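Stanzas are saved concurrently on a thread pool of at most 8 workers; the
result is a list with one entry per expanded ``(type, stanza)`` pair, holding
``None`` for a stanza that was saved successfully and the raised exception
otherwise.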
85 | :return: 86 | 87 | Usage:: 88 | >>> from splunktaucclib.global_config import GlobalConfig 89 | >>> global_config = GlobalConfig() 90 | >>> payload = { 91 | >>> 'settings': [ 92 | >>> { 93 | >>> 'name': 'proxy', 94 | >>> 'proxy_host': '1.2.3.4', 95 | >>> 'proxy_port': '5678', 96 | >>> }, 97 | >>> { 98 | >>> 'name': 'logging', 99 | >>> 'level': 'DEBUG', 100 | >>> } 101 | >>> ] 102 | >>> } 103 | >>> global_config.settings.save(payload) 104 | """ 105 | # expand the payload to task_list 106 | task_list = [] 107 | for type_name, configurations in payload.items(): 108 | task_list.extend( 109 | [(type_name, configuration) for configuration in configurations] 110 | ) 111 | task_len = len(task_list) 112 | # return empty error list if task list is empty 113 | if not task_list: 114 | return [] 115 | task_len = min(8, task_len) 116 | pool = ThreadPool(processes=task_len) 117 | errors = pool.map(self.save_stanza, task_list) 118 | pool.close() 119 | pool.join() 120 | return errors 121 | 122 | @property 123 | def internal_schema(self): 124 | """ 125 | Get the schema for inputs, configs and settings 126 | 127 | :return: 128 | """ 129 | return self._schema.inputs + self._schema.configs + self._schema.settings 130 | 131 | def _save_configuration(self, type_name, configuration): 132 | schema = self._search_configuration_schema( 133 | type_name, 134 | configuration[self.ENTITY_NAME], 135 | ) 136 | configuration = copy.copy(configuration) 137 | self._dump_multiple_select(configuration, schema) 138 | 139 | # update 140 | try: 141 | self._update(type_name, copy.copy(configuration)) 142 | except HTTPError as exc: 143 | if self.NOT_FOUND in str(exc): 144 | # not exists, go to create 145 | pass 146 | else: 147 | return exc 148 | except Exception as exc: 149 | return exc 150 | else: 151 | return None 152 | 153 | # create 154 | try: 155 | self._create(type_name, configuration) 156 | except Exception as exc: 157 | return exc 158 | else: 159 | return None 160 | 161 | def _create(self, type_name, configuration): 162 | self._save_endpoint( 163 | type_name, 164 | configuration, 165 | ) 166 | 167 | def _update(self, type_name, configuration): 168 | name = configuration[self.ENTITY_NAME] 169 | del configuration[self.ENTITY_NAME] 170 | self._save_endpoint( 171 | type_name, 172 | configuration, 173 | name=name, 174 | ) 175 | 176 | @classmethod 177 | def _filter_fields(cls, entity): 178 | for (k, v) in list(entity.items()): 179 | if k in cls.FILTERS: 180 | del entity[k] 181 | 182 | def _load_endpoint(self, name, schema): 183 | query = { 184 | "output_mode": "json", 185 | "count": "0", 186 | "--cred--": "1", 187 | } 188 | response = self._client.get( 189 | RestHandler.path_segment(self._endpoint_path(name)), **query 190 | ) 191 | body = response.body.read() 192 | cont = json.loads(body) 193 | 194 | entities = [] 195 | for entry in cont["entry"]: 196 | entity = entry["content"] 197 | entity[self.ENTITY_NAME] = entry["name"] 198 | self._load_multiple_select(entity, schema) 199 | entities.append(entity) 200 | return entities 201 | 202 | def _save_endpoint(self, endpoint, content, name=None): 203 | endpoint = self._endpoint_path(endpoint) 204 | self._client.post(RestHandler.path_segment(endpoint, name=name), **content) 205 | 206 | @classmethod 207 | def _load_multiple_select(cls, entity, schema): 208 | for field in schema: 209 | field_type = field.get("type") 210 | value = entity.get(field["field"]) 211 | if field_type != "multipleSelect" or not value: 212 | continue 213 | delimiter = field["options"]["delimiter"] 214 | 
entity[field["field"]] = value.split(delimiter) 215 | 216 | @classmethod 217 | def _dump_multiple_select(cls, entity, schema): 218 | for field in schema: 219 | field_type = field.get("type") 220 | value = entity.get(field["field"]) 221 | if field_type != "multipleSelect" or not value: 222 | continue 223 | if not isinstance(value, list): 224 | continue 225 | delimiter = field["options"]["delimiter"] 226 | entity[field["field"]] = delimiter.join(value) 227 | 228 | def _endpoint_path(self, name): 229 | return "{admin_match}/{endpoint_name}".format( 230 | admin_match=self._schema.admin_match, 231 | endpoint_name=RestSchema.endpoint_name(name, self._schema.namespace), 232 | ) 233 | 234 | def _search_configuration_schema(self, type_name, configuration_name): 235 | for item in self.internal_schema: 236 | # add support for settings schema 237 | if item["name"] == type_name or ( 238 | type_name == self.SETTINGS and item["name"] == configuration_name 239 | ): 240 | return item["entity"] 241 | else: 242 | raise GlobalConfigError( 243 | "Schema Not Found for Configuration, " 244 | "configuration_type={configuration_type}, " 245 | "configuration_name={configuration_name}".format( 246 | configuration_type=type_name, 247 | configuration_name=configuration_name, 248 | ), 249 | ) 250 | 251 | 252 | class Inputs(Configuration): 253 | def __init__(self, splunkd_client, schema): 254 | super().__init__(splunkd_client, schema) 255 | self._splunkd_client = splunkd_client 256 | self._schema = schema 257 | self._references = None 258 | 259 | def load(self, input_type=None): 260 | """ 261 | 262 | :param input_type: 263 | :return: 264 | 265 | Usage:: 266 | >>> from splunktaucclib.global_config import GlobalConfig 267 | >>> global_config = GlobalConfig() 268 | >>> inputs = global_config.inputs.load() 269 | """ 270 | # move configs read operation out of init method 271 | if not self._references: 272 | self._references = Configs(self._splunkd_client, self._schema).load() 273 | inputs = {} 274 | for input_item in self.internal_schema: 275 | if input_type is None or input_item["name"] == input_type: 276 | input_entities = self._load_endpoint( 277 | input_item["name"], input_item["entity"] 278 | ) 279 | # filter unused fields in response 280 | for input_entity in input_entities: 281 | self._filter_fields(input_entity) 282 | # expand referenced entity 283 | self._reference( 284 | input_entities, 285 | input_item, 286 | self._references, 287 | ) 288 | inputs[input_item["name"]] = input_entities 289 | return inputs 290 | 291 | @property 292 | def internal_schema(self): 293 | return self._schema.inputs 294 | 295 | @classmethod 296 | def _reference(cls, input_entities, input_item, configs): 297 | for input_entity in input_entities: 298 | cls._input_reference( 299 | input_item["name"], input_entity, input_item["entity"], configs 300 | ) 301 | 302 | @classmethod 303 | def _input_reference(cls, input_type, input_entity, input_schema, configs): 304 | for field in input_schema: 305 | options = field.get("options", {}) 306 | config_type = options.get("referenceName") 307 | config_name = input_entity.get(field["field"]) 308 | if not config_type or not config_name: 309 | continue 310 | 311 | for config in configs.get(config_type, []): 312 | if config["name"] == config_name: 313 | input_entity[field["field"]] = config 314 | break 315 | else: 316 | raise GlobalConfigError( 317 | "Config Not Found for Input, " 318 | "input_type={input_type}, " 319 | "input_name={input_name}, " 320 | "config_type={config_type}, " 321 | 
"config_name={config_name}".format( 322 | input_type=input_type, 323 | input_name=input_entity["name"], 324 | config_type=config_type, 325 | config_name=config_name, 326 | ) 327 | ) 328 | 329 | 330 | class Configs(Configuration): 331 | def load(self, config_type=None): 332 | """ 333 | 334 | :param config_type: 335 | :return: 336 | 337 | Usage:: 338 | >>> from splunktaucclib.global_config import GlobalConfig 339 | >>> global_config = GlobalConfig() 340 | >>> configs = global_config.configs.load() 341 | """ 342 | configs = {} 343 | for config in self.internal_schema: 344 | if config_type is None or config["name"] == config_type: 345 | config_entities = self._load_endpoint(config["name"], config["entity"]) 346 | for config_entity in config_entities: 347 | self._filter_fields(config_entity) 348 | configs[config["name"]] = config_entities 349 | return configs 350 | 351 | @property 352 | def internal_schema(self): 353 | return self._schema.configs 354 | 355 | 356 | class Settings(Configuration): 357 | 358 | TYPE_NAME = "settings" 359 | 360 | def load(self): 361 | """ 362 | 363 | :return: 364 | 365 | Usage:: 366 | >>> from splunktaucclib.global_config import GlobalConfig 367 | >>> global_config = GlobalConfig() 368 | >>> settings = global_config.settings.load() 369 | """ 370 | settings = [] 371 | for setting in self.internal_schema: 372 | setting_entity = self._load_endpoint( 373 | "settings/%s" % setting["name"], setting["entity"] 374 | ) 375 | self._load_multiple_select(setting_entity[0], setting["entity"]) 376 | entity = setting_entity[0] 377 | self._filter_fields(entity) 378 | settings.append(entity) 379 | return {Settings.TYPE_NAME: settings} 380 | 381 | @property 382 | def internal_schema(self): 383 | return self._schema.settings 384 | 385 | def _search_configuration_schema(self, type_name, configuration_name): 386 | return super()._search_configuration_schema( 387 | configuration_name, 388 | configuration_name, 389 | ) 390 | -------------------------------------------------------------------------------- /splunktaucclib/global_config/schema.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | 17 | 18 | import traceback 19 | 20 | from ..rest_handler.schema import RestSchema, RestSchemaError 21 | 22 | 23 | class GlobalConfigSchema(RestSchema): 24 | def __init__(self, content, *args, **kwargs): 25 | """ 26 | 27 | :param content: Python object for Global Config Schema 28 | :param args: 29 | :param kwargs: 30 | """ 31 | super().__init__(*args, **kwargs) 32 | self._content = content 33 | self._inputs = [] 34 | self._configs = [] 35 | self._settings = [] 36 | 37 | try: 38 | self._parse() 39 | except Exception: 40 | raise RestSchemaError( 41 | "Invalid Global Config Schema: %s" % traceback.format_exc(), 42 | ) 43 | 44 | @property 45 | def product(self): 46 | return self._meta["name"] 47 | 48 | @property 49 | def namespace(self): 50 | return self._meta["restRoot"] 51 | 52 | @property 53 | def admin_match(self): 54 | return "" 55 | 56 | @property 57 | def inputs(self): 58 | return self._inputs 59 | 60 | @property 61 | def configs(self): 62 | return self._configs 63 | 64 | @property 65 | def settings(self): 66 | return self._settings 67 | 68 | def _parse(self): 69 | self._meta = self._content["meta"] 70 | pages = self._content["pages"] 71 | self._parse_configuration(pages.get("configuration")) 72 | self._parse_inputs(pages.get("inputs")) 73 | 74 | def _parse_configuration(self, configurations): 75 | if not configurations or "tabs" not in configurations: 76 | return 77 | for configuration in configurations["tabs"]: 78 | if "table" in configuration: 79 | self._configs.append(configuration) 80 | else: 81 | self._settings.append(configuration) 82 | 83 | def _parse_inputs(self, inputs): 84 | if not inputs or "services" not in inputs: 85 | return 86 | self._inputs = inputs["services"] 87 | -------------------------------------------------------------------------------- /splunktaucclib/legacy/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | -------------------------------------------------------------------------------- /splunktaucclib/legacy/credentials.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
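# A minimal, hand-written content dict (not taken from the demo add-on) to
# illustrate how GlobalConfigSchema above splits the "configuration" page: tabs
# that declare a "table" become configs, the remaining tabs become settings, and
# inputs come from pages.inputs.services. Assumes the RestSchema base class needs
# no extra constructor arguments; "Splunk_TA_demo" and "demo" are placeholders.
from splunktaucclib.global_config.schema import GlobalConfigSchema

content = {
    "meta": {"name": "Splunk_TA_demo", "restRoot": "demo"},
    "pages": {
        "configuration": {
            "tabs": [
                {"name": "account", "table": {}, "entity": []},
                {"name": "logging", "entity": []},
            ]
        },
        "inputs": {"services": [{"name": "demo_input", "entity": []}]},
    },
}

schema = GlobalConfigSchema(content)
assert [c["name"] for c in schema.configs] == ["account"]
assert [s["name"] for s in schema.settings] == ["logging"]
assert schema.namespace == "demo" and schema.product == "Splunk_TA_demo"
assert schema.inputs[0]["name"] == "demo_input"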
15 | #
16 |
17 | """
18 | Handles credentials related stuff
19 | """
20 |
21 | import re
22 | import warnings
23 |
24 | import defusedxml.minidom as xdm
25 |
26 | import splunktaucclib.legacy.util as util
27 | import splunktaucclib.common.xml_dom_parser as xdp
28 | import splunktaucclib.legacy.rest as rest
29 |
30 | # Splunk can only encrypt strings when length <= 255
31 | SPLUNK_CRED_LEN_LIMIT = 255
32 |
33 |
34 | class CredException(Exception):
35 |     pass
36 |
37 |
38 | class CredNotFound(CredException):
39 |     """
40 |     Credential information does not exist
41 |     """
42 |
43 |     pass
44 |
45 |
46 | def create_credential_manager(username, password, splunkd_uri, app, owner, realm):
47 |     warnings.warn(
48 |         "This function is deprecated. "
49 |         "Please see https://github.com/splunk/addonfactory-ta-library-python/issues/38",
50 |         DeprecationWarning,
51 |         stacklevel=2,
52 |     )
53 |     session_key = CredentialManager.get_session_key(username, password, splunkd_uri)
54 |     return CredentialManager(splunkd_uri, session_key, app, owner, realm)
55 |
56 |
57 | class CredentialManager:
58 |     """
59 |     Credential related interfaces
60 |     """
61 |
62 |     def __init__(self, splunkd_uri, session_key, app="-", owner="nobody", realm=None):
63 |         """
64 |         :app: when creating/updating/deleting, app is required
65 |         """
66 |         warnings.warn(
67 |             "This class is deprecated. "
68 |             "Please see https://github.com/splunk/addonfactory-ta-library-python/issues/38",
69 |             DeprecationWarning,
70 |             stacklevel=2,
71 |         )
72 |
73 |         self._app = app
74 |         self._splunkd_uri = splunkd_uri
75 |         self._owner = owner
76 |         self._sep = "``splunk_cred_sep``"
77 |
78 |         if realm:
79 |             self._realm = realm
80 |         else:
81 |             self._realm = app
82 |
83 |         self._session_key = session_key
84 |
85 |     def set_appname(self, app):
86 |         """
87 |         There are cases where we need to edit/remove/create confs in a different app
88 |         context. Call this interface to switch the app context before manipulating
89 |         the confs in a different app context.
90 |         """
91 |
92 |         self._app = app
93 |
94 |     @staticmethod
95 |     def get_session_key(username, password, splunkd_uri="https://localhost:8089"):
96 |         """
97 |         Get session key by using login username and password
98 |         :return: session_key if successful, None if failed
99 |         """
100 |
101 |         eid = "".join((splunkd_uri, "/services/auth/login"))
102 |         postargs = {
103 |             "username": username,
104 |             "password": password,
105 |         }
106 |
107 |         response = rest.splunkd_request(eid, None, method="POST", data=postargs)
108 |
109 |         if response is None:
110 |             raise CredException("Get session key failed.")
111 |
112 |         xml_obj = xdm.parseString(response.text)
113 |         session_nodes = xml_obj.getElementsByTagName("sessionKey")
114 |         if not session_nodes:
115 |             raise CredException("Invalid username or password.")
116 |         session_key = session_nodes[0].firstChild.nodeValue
117 |         if not session_key:
118 |             raise CredException("Get session key failed.")
119 |         return session_key
120 |
121 |     def update(self, stanza):
122 |         """
123 |         Update or Create credentials based on the stanza
124 |         :stanza: nested dict object.
The outlayer keys are stanza name, and 125 | inner dict is user/pass key/value pair to be encrypted 126 | { 127 | "stanza_name": {"tommy": "tommypasswod", "jerry": "jerrypassword"} 128 | } 129 | :return: raise on failure 130 | """ 131 | 132 | for name, encr_dict in list(stanza.items()): 133 | encrypts = [] 134 | for key, val in list(encr_dict.items()): 135 | encrypts.append(key) 136 | encrypts.append(val) 137 | self._update(name, self._sep.join(encrypts)) 138 | 139 | def _update(self, name, str_to_encrypt): 140 | """ 141 | Update the string for the name. 142 | :return: raise on failure 143 | """ 144 | 145 | if len(str_to_encrypt) <= SPLUNK_CRED_LEN_LIMIT: 146 | self._do_update(name, str_to_encrypt) 147 | return 148 | 149 | # split the str_to_encrypt when len > 255 150 | length = SPLUNK_CRED_LEN_LIMIT 151 | i = 0 152 | while length < len(str_to_encrypt) + SPLUNK_CRED_LEN_LIMIT: 153 | curr_str = str_to_encrypt[length - SPLUNK_CRED_LEN_LIMIT : length] 154 | length += SPLUNK_CRED_LEN_LIMIT 155 | 156 | stanza_name = self._sep.join((name, str(i))) 157 | self._do_update(stanza_name, curr_str) 158 | i += 1 159 | 160 | def _do_update(self, name, password): 161 | try: 162 | self._create(name, password) 163 | except CredException: 164 | payload = {"password": password} 165 | endpoint = self._get_endpoint(name) 166 | response = rest.splunkd_request( 167 | endpoint, self._session_key, method="POST", data=payload 168 | ) 169 | if not response or response.status_code not in (200, 201): 170 | raise CredException( 171 | "Unable to update password for username={}, status={}".format( 172 | name, response.status_code 173 | ) 174 | ) 175 | 176 | def _create(self, name, str_to_encrypt): 177 | """ 178 | Create a new stored credential. 179 | :return: raise on failure 180 | """ 181 | 182 | payload = { 183 | "name": name, 184 | "password": str_to_encrypt, 185 | "realm": self._realm, 186 | } 187 | 188 | endpoint = self._get_endpoint(name) 189 | resp = rest.splunkd_request( 190 | endpoint, self._session_key, method="POST", data=payload 191 | ) 192 | if not resp or resp.status_code not in (200, 201): 193 | raise CredException(f"Failed to encrypt username {name}") 194 | 195 | def delete(self, name, throw=False): 196 | """ 197 | Delete the encrypted entry 198 | """ 199 | 200 | try: 201 | self._delete(name, throw=True) 202 | except CredNotFound: 203 | # try to delete the split stanzas 204 | try: 205 | stanzas = self._get_all_passwords() 206 | except Exception: 207 | raise 208 | 209 | prefix = self._realm + ":" + name + self._sep 210 | for stanza in stanzas: 211 | stanza_name = stanza.get("name") 212 | match = True 213 | try: 214 | if stanza_name[: len(prefix)] != prefix: 215 | match = False 216 | num = stanza_name[len(prefix) : -1] 217 | int(num) 218 | except (IndexError, ValueError): 219 | match = False 220 | if match: 221 | try: 222 | delete_name = name + self._sep + num 223 | self._delete(delete_name, throw=True) 224 | except CredNotFound: 225 | pass 226 | except CredException: 227 | raise 228 | except CredException: 229 | raise 230 | 231 | def _delete(self, name, throw=False): 232 | """ 233 | Delete the encrypted entry 234 | """ 235 | 236 | endpoint = self._get_endpoint(name) 237 | response = rest.splunkd_request(endpoint, self._session_key, method="DELETE") 238 | 239 | if response is not None and response.status_code == 404: 240 | if throw: 241 | raise CredNotFound(f"Credential stanza not exits - {name}") 242 | elif not response or response.status_code not in (200, 201): 243 | if throw: 244 | raise 
CredException(f"Failed to delete credential stanza {name}") 245 | 246 | def get_all_passwords(self): 247 | results = {} 248 | all_stanzas = self._get_all_passwords() 249 | for stanza in all_stanzas: 250 | name = stanza.get("name") 251 | match = re.match(rf"(.+){self._sep}(\d+)", name) 252 | if match: 253 | actual_name = match.group(1) + ":" 254 | index = int(match.group(2)) 255 | if results.get(actual_name): 256 | exist_stanza = results.get(actual_name) 257 | else: 258 | exist_stanza = stanza 259 | exist_stanza["name"] = actual_name 260 | exist_stanza["username"] = exist_stanza["username"].split( 261 | self._sep 262 | )[0] 263 | exist_stanza["clears"] = {} 264 | exist_stanza["encrs"] = {} 265 | 266 | try: 267 | exist_stanza["clears"][index] = stanza.get("clear_password") 268 | exist_stanza["encrs"][index] = stanza.get("encr_password") 269 | except KeyError: 270 | exist_stanza["clears"] = {} 271 | exist_stanza["encrs"] = {} 272 | exist_stanza["clears"][index] = stanza.get("clear_password") 273 | exist_stanza["encrs"][index] = stanza.get("encr_password") 274 | 275 | results[actual_name] = exist_stanza 276 | 277 | else: 278 | results[name] = stanza 279 | 280 | # merge the stanzas by index 281 | for name, stanza in list(results.items()): 282 | field_clear = stanza.get("clears") 283 | field_encr = stanza.get("encrs") 284 | if isinstance(field_clear, dict): 285 | clear_password = "" 286 | encr_password = "" 287 | for index in sorted(field_clear.keys()): 288 | clear_password += field_clear.get(index) 289 | encr_password += field_encr.get(index) 290 | stanza["clear_password"] = clear_password 291 | stanza["encr_password"] = encr_password 292 | 293 | del stanza["clears"] 294 | del stanza["encrs"] 295 | return list(results.values()) 296 | 297 | def _get_all_passwords(self): 298 | """ 299 | :return: a list of dict when successful, None when failed. 
300 | the dict at least contains 301 | { 302 | "realm": xxx, 303 | "username": yyy, 304 | "clear_password": zzz, 305 | } 306 | """ 307 | 308 | endpoint = self._get_endpoint() 309 | response = rest.splunkd_request(endpoint, self._session_key, method="GET") 310 | if response and response.status_code in (200, 201) and response.text: 311 | return xdp.parse_conf_xml_dom(response.text) 312 | raise CredException("Failed to get credentials") 313 | 314 | def get_clear_password(self, name=None): 315 | """ 316 | :return: clear password(s) 317 | { 318 | stanza_name: {"user": pass} 319 | } 320 | """ 321 | 322 | return self._get_credentials("clear_password", name) 323 | 324 | def get_encrypted_password(self, name=None): 325 | """ 326 | :return: encyrpted password(s) 327 | """ 328 | 329 | return self._get_credentials("encr_password", name) 330 | 331 | def _get_credentials(self, prop, name=None): 332 | """ 333 | :return: clear or encrypted password for specified realm, user 334 | """ 335 | 336 | all_stanzas = self.get_all_passwords() 337 | results = {} 338 | 339 | for stanza in all_stanzas: 340 | if name and not stanza.get("name").endswith(":" + name + ":"): 341 | continue 342 | if stanza.get("realm") == self._realm: 343 | values = stanza[prop].split(self._sep) 344 | if len(values) % 2 == 1: 345 | continue 346 | result = {values[i]: values[i + 1] for i in range(0, len(values), 2)} 347 | results[stanza.get("username")] = result 348 | return results 349 | 350 | @staticmethod 351 | def _build_name(realm, name): 352 | return util.format_stanza_name( 353 | "".join( 354 | ( 355 | CredentialManager._escape_string(realm), 356 | ":", 357 | CredentialManager._escape_string(name), 358 | ":", 359 | ) 360 | ) 361 | ) 362 | 363 | @staticmethod 364 | def _escape_string(string_to_escape): 365 | r""" 366 | Splunk secure credential storage actually requires a custom style of 367 | escaped string where all the :'s are escaped by a single \. 368 | But don't escape the control : in the stanza name. 369 | """ 370 | 371 | return string_to_escape.replace(":", "\\:") 372 | 373 | def _get_endpoint(self, name=None, query=False): 374 | app = self._app 375 | owner = self._owner 376 | if query: 377 | app = "-" 378 | owner = "-" 379 | 380 | if name: 381 | realm_user = self._build_name(self._realm, name) 382 | rest_endpoint = "{}/servicesNS/{}/{}/storage/passwords/{}".format( 383 | self._splunkd_uri, owner, app, realm_user 384 | ) 385 | else: 386 | rest_endpoint = "{}/servicesNS/{}/{}/storage/passwords?count=-1" "".format( 387 | self._splunkd_uri, owner, app 388 | ) 389 | return rest_endpoint 390 | -------------------------------------------------------------------------------- /splunktaucclib/legacy/rest.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
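# Hedged usage sketch for the deprecated CredentialManager defined above; it needs
# a reachable splunkd, and the URI, session key, app and realm values below are
# placeholders. update() flattens each inner dict into
# "key``splunk_cred_sep``value" pairs before storing, splits strings longer than
# 255 characters across numbered "<name>``splunk_cred_sep``<i>" stanzas, and
# get_clear_password() reassembles them on read.
from splunktaucclib.legacy.credentials import CredentialManager

cm = CredentialManager(
    "https://localhost:8089",
    "<session-key>",  # placeholder; see CredentialManager.get_session_key()
    app="Splunk_TA_demo",
    realm="demo_realm",
)
cm.update({"proxy_account": {"api_user": "api_secret"}})
clear = cm.get_clear_password("proxy_account")
# roughly {"proxy_account": {"api_user": "api_secret"}}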
15 | # 16 | 17 | import json 18 | import urllib.parse 19 | from traceback import format_exc 20 | from typing import Optional 21 | 22 | import requests 23 | 24 | from solnlib import log 25 | 26 | 27 | logger = log.Logs().get_logger("util") 28 | 29 | 30 | def splunkd_request( 31 | splunkd_uri, 32 | session_key, 33 | method="GET", 34 | headers=None, 35 | data=None, 36 | timeout=300, 37 | retry=1, 38 | verify=False, 39 | ) -> Optional[requests.Response]: 40 | 41 | headers = headers if headers is not None else {} 42 | headers["Authorization"] = f"Splunk {session_key}" 43 | content_type = headers.get("Content-Type") 44 | if not content_type: 45 | content_type = headers.get("content-type") 46 | 47 | if not content_type: 48 | content_type = "application/x-www-form-urlencoded" 49 | headers["Content-Type"] = content_type 50 | 51 | if data is not None: 52 | if content_type == "application/json": 53 | data = json.dumps(data) 54 | else: 55 | data = urllib.parse.urlencode(data) 56 | 57 | msg_temp = "Failed to send rest request=%s, errcode=%s, reason=%s" 58 | resp = None 59 | for _ in range(retry): 60 | try: 61 | resp = requests.request( 62 | method=method, 63 | url=splunkd_uri, 64 | data=data, 65 | headers=headers, 66 | timeout=timeout, 67 | verify=verify, 68 | ) 69 | except Exception: 70 | logger.error(msg_temp, splunkd_uri, "unknown", format_exc()) 71 | else: 72 | if resp.status_code not in (200, 201): 73 | if not (method == "GET" and resp.status_code == 404): 74 | logger.debug( 75 | msg_temp, splunkd_uri, resp.status_code, code_to_msg(resp) 76 | ) 77 | else: 78 | return resp 79 | else: 80 | return resp 81 | 82 | 83 | def code_to_msg(response: requests.Response): 84 | code_msg_tbl = { 85 | 400: f"Request error. reason={response.text}", 86 | 401: "Authentication failure, invalid access credentials.", 87 | 402: "In-use license disables this feature.", 88 | 403: "Insufficient permission.", 89 | 404: "Requested endpoint does not exist.", 90 | 409: f"Invalid operation for this endpoint. reason={response.text}", 91 | 500: f"Unspecified internal server error. reason={response.text}", 92 | 503: ( 93 | "Feature is disabled in the configuration file. " 94 | "reason={}".format(response.text) 95 | ), 96 | } 97 | 98 | return code_msg_tbl.get(response.status_code, response.text) 99 | -------------------------------------------------------------------------------- /splunktaucclib/legacy/util.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | 18 | import datetime 19 | import gc 20 | import os 21 | import os.path as op 22 | import sys 23 | import urllib.error 24 | import urllib.parse 25 | import urllib.request 26 | import warnings 27 | 28 | 29 | def handle_tear_down_signals(callback): 30 | warnings.warn( 31 | "This function is deprecated. 
" 32 | "Please see https://github.com/splunk/addonfactory-ta-library-python/issues/38", 33 | DeprecationWarning, 34 | stacklevel=2, 35 | ) 36 | import signal 37 | 38 | signal.signal(signal.SIGTERM, callback) 39 | signal.signal(signal.SIGINT, callback) 40 | 41 | if os.name == "nt": 42 | signal.signal(signal.SIGBREAK, callback) 43 | 44 | 45 | def datetime_to_seconds(dt): 46 | warnings.warn( 47 | "This function is deprecated. " 48 | "Please see https://github.com/splunk/addonfactory-ta-library-python/issues/38", 49 | DeprecationWarning, 50 | stacklevel=2, 51 | ) 52 | epoch_time = datetime.datetime.utcfromtimestamp(0) 53 | return (dt - epoch_time).total_seconds() 54 | 55 | 56 | def is_true(val): 57 | warnings.warn( 58 | "This function is deprecated. " 59 | "Please see https://github.com/splunk/addonfactory-ta-library-python/issues/38", 60 | DeprecationWarning, 61 | stacklevel=2, 62 | ) 63 | value = str(val).strip().upper() 64 | if value in ("1", "TRUE", "T", "Y", "YES"): 65 | return True 66 | return False 67 | 68 | 69 | def is_false(val): 70 | warnings.warn( 71 | "This function is deprecated. " 72 | "Please see https://github.com/splunk/addonfactory-ta-library-python/issues/38", 73 | DeprecationWarning, 74 | stacklevel=2, 75 | ) 76 | value = str(val).strip().upper() 77 | if value in ("0", "FALSE", "F", "N", "NO", "NONE", ""): 78 | return True 79 | return False 80 | 81 | 82 | def remove_http_proxy_env_vars(): 83 | warnings.warn( 84 | "This function is deprecated. " 85 | "Please see https://github.com/splunk/addonfactory-ta-library-python/issues/38", 86 | DeprecationWarning, 87 | stacklevel=2, 88 | ) 89 | for k in ("http_proxy", "https_proxy"): 90 | if k in os.environ: 91 | del os.environ[k] 92 | elif k.upper() in os.environ: 93 | del os.environ[k.upper()] 94 | 95 | 96 | def get_appname_from_path(absolute_path): 97 | absolute_path = op.normpath(absolute_path) 98 | parts = absolute_path.split(os.path.sep) 99 | parts.reverse() 100 | for key in ("apps", "slave-apps", "master-apps"): 101 | try: 102 | idx = parts.index(key) 103 | except ValueError: 104 | continue 105 | else: 106 | try: 107 | if parts[idx + 1] == "etc": 108 | return parts[idx - 1] 109 | except IndexError: 110 | pass 111 | continue 112 | return "-" 113 | 114 | 115 | def escape_cdata(data): 116 | data = data.encode("utf-8", errors="xmlcharrefreplace").decode("utf-8") 117 | data = data.replace("]]>", "]]>") 118 | if data.endswith("]"): 119 | data = data[:-1] + "%5D" 120 | return data 121 | 122 | 123 | def extract_datainput_name(stanza_name): 124 | """ 125 | stansa_name: string like aws_s3://my_s3_data_input 126 | """ 127 | 128 | sep = "://" 129 | try: 130 | idx = stanza_name.index(sep) 131 | except ValueError: 132 | return stanza_name 133 | 134 | return stanza_name[idx + len(sep) :] 135 | 136 | 137 | def disable_stdout_buffer(): 138 | os.environ["PYTHONUNBUFFERED"] = "1" 139 | sys.stdout = os.fdopen(sys.stdout.fileno(), "wb", 0) 140 | gc.garbage.append(sys.stdout) 141 | 142 | 143 | def format_stanza_name(name): 144 | return urllib.parse.quote(name.encode("utf-8"), "") 145 | -------------------------------------------------------------------------------- /splunktaucclib/modinput_wrapper/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | """ 18 | Custom REST Handler in Splunk add-on. 19 | """ 20 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/admin_external.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | 18 | import os 19 | from functools import wraps 20 | 21 | from solnlib.splunkenv import get_splunkd_uri 22 | from solnlib.utils import is_true 23 | from splunk import admin 24 | 25 | from .eai import EAI_FIELDS 26 | from .endpoint import DataInputModel, MultipleModel, SingleModel 27 | from .handler import RestHandler 28 | 29 | try: 30 | from custom_hook_mixin import CustomHookMixin as HookMixin 31 | except ImportError: 32 | from .base_hook_mixin import BaseHookMixin as HookMixin 33 | 34 | 35 | __all__ = [ 36 | "make_conf_item", 37 | "build_conf_info", 38 | "AdminExternalHandler", 39 | ] 40 | 41 | 42 | def make_conf_item(conf_item, content, eai): 43 | for key, val in content.items(): 44 | conf_item[key] = val 45 | 46 | for eai_field in EAI_FIELDS: 47 | conf_item.setMetadata(eai_field, eai.content[eai_field]) 48 | 49 | return conf_item 50 | 51 | 52 | def build_conf_info(meth): 53 | """ 54 | Build conf info for admin external REST endpoint. 
55 | 56 | :param meth: 57 | :return: 58 | """ 59 | 60 | @wraps(meth) 61 | def wrapper(self, confInfo): 62 | result = meth(self, confInfo) 63 | for entity in result: 64 | make_conf_item( 65 | confInfo[entity.name], 66 | entity.content, 67 | entity.eai, 68 | ) 69 | 70 | return wrapper 71 | 72 | 73 | def get_splunkd_endpoint(): 74 | if os.environ.get("SPLUNKD_URI"): 75 | return os.environ["SPLUNKD_URI"] 76 | else: 77 | splunkd_uri = get_splunkd_uri() 78 | os.environ["SPLUNKD_URI"] = splunkd_uri 79 | return splunkd_uri 80 | 81 | 82 | class AdminExternalHandler(HookMixin, admin.MConfigHandler): 83 | 84 | # Leave it for setting REST model 85 | endpoint = None 86 | 87 | # action parameter for getting clear credentials 88 | ACTION_CRED = "--cred--" 89 | 90 | def __init__(self, *args, **kwargs): 91 | # use classic inheritance to be compatible for 92 | # old version of Splunk private SDK 93 | admin.MConfigHandler.__init__(self, *args, **kwargs) 94 | self.handler = RestHandler( 95 | get_splunkd_endpoint(), 96 | self.getSessionKey(), 97 | self.endpoint, 98 | ) 99 | self.payload = self._convert_payload() 100 | 101 | def setup(self): 102 | # add args for getting clear credentials 103 | if self.requestedAction == admin.ACTION_LIST: 104 | self.supportedArgs.addOptArg(self.ACTION_CRED) 105 | 106 | # add args in payload while creating/updating 107 | actions = (admin.ACTION_LIST, admin.ACTION_REMOVE) 108 | if self.requestedAction in actions: 109 | return 110 | model = self.endpoint.model(self.callerArgs.id) 111 | if self.requestedAction == admin.ACTION_CREATE: 112 | for field in model.fields: 113 | if field.required: 114 | self.supportedArgs.addReqArg(field.name) 115 | else: 116 | self.supportedArgs.addOptArg(field.name) 117 | 118 | if self.requestedAction == admin.ACTION_EDIT: 119 | for field in model.fields: 120 | self.supportedArgs.addOptArg(field.name) 121 | 122 | @build_conf_info 123 | def handleList(self, confInfo): 124 | decrypt = self.callerArgs.data.get( 125 | self.ACTION_CRED, 126 | [False], 127 | ) 128 | decrypt = is_true(decrypt[0]) 129 | if self.callerArgs.id: 130 | result = self.handler.get( 131 | self.callerArgs.id, 132 | decrypt=decrypt, 133 | ) 134 | else: 135 | result = self.handler.all( 136 | decrypt=decrypt, 137 | count=0, 138 | ) 139 | return result 140 | 141 | @build_conf_info 142 | def handleCreate(self, confInfo): 143 | self.create_hook( 144 | session_key=self.getSessionKey(), 145 | config_name=self._get_name(), 146 | stanza_id=self.callerArgs.id, 147 | payload=self.payload, 148 | ) 149 | return self.handler.create( 150 | self.callerArgs.id, 151 | self.payload, 152 | ) 153 | 154 | @build_conf_info 155 | def handleEdit(self, confInfo): 156 | disabled = self.payload.get("disabled") 157 | if disabled is None: 158 | self.edit_hook( 159 | session_key=self.getSessionKey(), 160 | config_name=self._get_name(), 161 | stanza_id=self.callerArgs.id, 162 | payload=self.payload, 163 | ) 164 | return self.handler.update( 165 | self.callerArgs.id, 166 | self.payload, 167 | ) 168 | elif is_true(disabled): 169 | return self.handler.disable(self.callerArgs.id) 170 | else: 171 | return self.handler.enable(self.callerArgs.id) 172 | 173 | @build_conf_info 174 | def handleRemove(self, confInfo): 175 | self.delete_hook( 176 | session_key=self.getSessionKey(), 177 | config_name=self._get_name(), 178 | stanza_id=self.callerArgs.id, 179 | ) 180 | return self.handler.delete(self.callerArgs.id) 181 | 182 | def _get_name(self): 183 | name = None 184 | if isinstance(self.handler.get_endpoint(), DataInputModel): 185 
| name = self.handler.get_endpoint().input_type 186 | elif isinstance(self.handler.get_endpoint(), SingleModel): 187 | name = self.handler.get_endpoint().config_name 188 | elif isinstance(self.handler.get_endpoint(), MultipleModel): 189 | # For multiple model, the configuraiton name is same with stanza id 190 | name = self.callerArgs.id 191 | return name 192 | 193 | def _convert_payload(self): 194 | check_actions = (admin.ACTION_CREATE, admin.ACTION_EDIT) 195 | if self.requestedAction not in check_actions: 196 | return None 197 | 198 | payload = {} 199 | for filed, value in self.callerArgs.data.items(): 200 | payload[filed] = value[0] if value and value[0] else "" 201 | return payload 202 | 203 | 204 | def handle( 205 | endpoint, 206 | handler=AdminExternalHandler, 207 | context_info=admin.CONTEXT_APP_ONLY, 208 | ): 209 | """ 210 | Handle request. 211 | 212 | :param endpoint: REST endpoint 213 | :param handler: REST handler 214 | :param context_info: 215 | :return: 216 | """ 217 | real_handler = type( 218 | handler.__name__, 219 | (handler,), 220 | {"endpoint": endpoint}, 221 | ) 222 | admin.init(real_handler, ctxInfo=context_info) 223 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/base_hook_mixin.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | 18 | class BaseHookMixin: 19 | """Base Hook Mixin class""" 20 | 21 | def create_hook(self, session_key, config_name, stanza_id, payload): 22 | """Create hook called before the actual create action 23 | 24 | Args: 25 | config_name: configuration name 26 | stanza_id: the id of the stanza to create 27 | payload: data dict 28 | """ 29 | pass 30 | 31 | def edit_hook(self, session_key, config_name, stanza_id, payload): 32 | """Edit hook called before the actual create action 33 | 34 | Args: 35 | config_name: configuration name 36 | stanza_id: the id of the stanza to edit 37 | payload: data dict 38 | """ 39 | pass 40 | 41 | def delete_hook(self, session_key, config_name, stanza_id): 42 | """Delete hook called before the actual create action 43 | 44 | Args: 45 | config_name: configuration name 46 | stanza_id: the id of the stanza to delete 47 | """ 48 | pass 49 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/eai.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
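# Hedged sketch of how a generated *_rh_*.py script typically wires the classes
# above together; it only runs inside Splunk (splunk.admin is required), and the
# conf name, config_name and field list are illustrative, not taken from the demo
# add-on under tests/integration.
from splunktaucclib.rest_handler import admin_external
from splunktaucclib.rest_handler.endpoint import RestModel, SingleModel
from splunktaucclib.rest_handler.endpoint.field import RestField
from splunktaucclib.rest_handler.endpoint.validator import String

fields = [
    RestField("username", required=True, validator=String(min_len=1, max_len=200)),
    RestField("password", required=True, encrypted=True),
]
endpoint = SingleModel("demo_account", RestModel(fields), config_name="account")

if __name__ == "__main__":
    # Registers AdminExternalHandler for this endpoint with splunkd; a custom
    # subclass of AdminExternalHandler could be passed via handler=.
    admin_external.handle(endpoint)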
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | 18 | # EAI fields 19 | 20 | EAI_ACL = "eai:acl" 21 | EAI_ATTRIBUTES = "eai:attributes" 22 | EAI_USER = "eai:userName" 23 | EAI_APP = "eai:appName" 24 | 25 | EAI_FIELD_PREFIX = "eai:" 26 | EAI_FIELDS = [EAI_ACL, EAI_ATTRIBUTES, EAI_USER, EAI_APP] 27 | 28 | # elements of eai:attributes 29 | EAI_ATTRIBUTES_OPTIONAL = "optionalFields" 30 | EAI_ATTRIBUTES_REQUIRED = "requiredFields" 31 | EAI_ATTRIBUTES_WILDCARD = "wildcardFields" 32 | 33 | 34 | class RestEAI: 35 | def __init__(self, model, user, app, acl=None): 36 | self.model = model 37 | default_acl = { 38 | "owner": user, 39 | "app": app, 40 | "global": 1, 41 | "can_write": 1, 42 | "modifiable": 1, 43 | "removable": 1, 44 | "sharing": "global", 45 | "perms": {"read": ["*"], "write": ["admin"]}, 46 | } 47 | self.acl = acl or default_acl 48 | self.user = user 49 | self.app = app 50 | self.attributes = self._build_attributes() 51 | 52 | @property 53 | def content(self): 54 | return { 55 | EAI_ACL: self.acl, 56 | EAI_USER: self.user, 57 | EAI_APP: self.app, 58 | EAI_ATTRIBUTES: self.attributes, 59 | } 60 | 61 | def _build_attributes(self): 62 | optional_fields = [] 63 | required_fields = [] 64 | for field in self.model.fields: 65 | if field.required: 66 | required_fields.append(field.name) 67 | else: 68 | optional_fields.append(field.name) 69 | return { 70 | EAI_ATTRIBUTES_OPTIONAL: optional_fields, 71 | EAI_ATTRIBUTES_REQUIRED: required_fields, 72 | EAI_ATTRIBUTES_WILDCARD: [], 73 | } 74 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/endpoint/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | from typing import List, Optional 18 | 19 | from .field import RestField 20 | from ..error import RestError 21 | from ..util import get_base_app_name 22 | 23 | __all__ = [ 24 | "RestModel", 25 | "RestEndpoint", 26 | "SingleModel", 27 | "MultipleModel", 28 | "DataInputModel", 29 | ] 30 | 31 | 32 | class RestModel: 33 | def __init__( 34 | self, fields, name=None, special_fields: Optional[List[RestField]] = None 35 | ): 36 | """ 37 | REST Model. 38 | :param name: 39 | :param fields: 40 | :param special_fields: 41 | """ 42 | self.name = name 43 | self.fields = fields 44 | self.special_fields = special_fields if special_fields else [] 45 | 46 | 47 | class RestEndpoint: 48 | """ 49 | REST Endpoint. 
50 | """ 51 | 52 | def __init__(self, user="nobody", app=None, need_reload=False, *args, **kwargs): 53 | """ 54 | :param user: 55 | :param app: if None, it will be base app name 56 | :param need_reload: if reload is needed while GET request 57 | :param args: 58 | :param kwargs: 59 | """ 60 | self.user = user 61 | self.app = app or get_base_app_name() 62 | self.args = args 63 | self.kwargs = kwargs 64 | 65 | self.need_reload = need_reload 66 | 67 | @property 68 | def internal_endpoint(self): 69 | """ 70 | Endpoint of Splunk internal service. 71 | 72 | :return: 73 | """ 74 | raise NotImplementedError() 75 | 76 | def model(self, name): 77 | """ 78 | Real model for given name. 79 | 80 | :param name: 81 | :return: 82 | """ 83 | raise NotImplementedError() 84 | 85 | def _loop_fields(self, meth, name, data, *args, **kwargs): 86 | model = self.model(name) 87 | return [getattr(f, meth)(data, *args, **kwargs) for f in model.fields] 88 | 89 | def validate(self, name, data, existing=None): 90 | self._loop_fields("validate", name, data, existing=existing) 91 | 92 | def _loop_field_special(self, meth, name, data, *args, **kwargs): 93 | model = self.model(name) 94 | return [getattr(f, meth)(data, *args, **kwargs) for f in model.special_fields] 95 | 96 | def validate_special(self, name, data): 97 | self._loop_field_special("validate", name, data, validate_name=name) 98 | 99 | def encode(self, name, data): 100 | self._loop_fields("encode", name, data) 101 | 102 | def decode(self, name, data): 103 | self._loop_fields("decode", name, data) 104 | 105 | 106 | class SingleModel(RestEndpoint): 107 | """ 108 | REST Model with Single Mode. It will store stanzas 109 | with same format into one conf file. 110 | """ 111 | 112 | def __init__( 113 | self, 114 | conf_name, 115 | model, 116 | user="nobody", 117 | app=None, 118 | need_reload=False, 119 | *args, 120 | **kwargs, 121 | ): 122 | """ 123 | :param conf_name: conf file name 124 | :param model: REST model 125 | :type model: RestModel 126 | :param need_reload: if reload is needed while GET request 127 | :param args: 128 | :param kwargs: 129 | """ 130 | super().__init__(user=user, app=app, need_reload=need_reload, *args, **kwargs) 131 | 132 | self._model = model 133 | self.conf_name = conf_name 134 | self.config_name = kwargs.get("config_name") 135 | 136 | @property 137 | def internal_endpoint(self): 138 | return f"configs/conf-{self.conf_name}" 139 | 140 | def model(self, name): 141 | return self._model 142 | 143 | 144 | class MultipleModel(RestEndpoint): 145 | """ 146 | REST Model with Multiple Modes. It will store 147 | stanzas with different formats into one conf file. 
148 | """ 149 | 150 | def __init__( 151 | self, 152 | conf_name, 153 | models, 154 | user="nobody", 155 | app=None, 156 | need_reload=False, 157 | *args, 158 | **kwargs, 159 | ): 160 | """ 161 | :param conf_name: 162 | :type conf_name: str 163 | :param models: list of RestModel 164 | :type models: list 165 | :param need_reload: if reload is needed while GET request 166 | :param args: 167 | :param kwargs: 168 | """ 169 | super().__init__(user=user, app=app, need_reload=need_reload, *args, **kwargs) 170 | 171 | self.conf_name = conf_name 172 | self.models = {model.name: model for model in models} 173 | 174 | @property 175 | def internal_endpoint(self): 176 | return f"configs/conf-{self.conf_name}" 177 | 178 | def model(self, name): 179 | try: 180 | return self.models[name] 181 | except KeyError: 182 | raise RestError(404, "name=%s" % name) 183 | 184 | 185 | class DataInputModel(RestEndpoint): 186 | """ 187 | REST Model for Data Input. 188 | """ 189 | 190 | def __init__( 191 | self, 192 | input_type, 193 | model, 194 | user="nobody", 195 | app=None, 196 | need_reload=False, 197 | *args, 198 | **kwargs, 199 | ): 200 | """ 201 | :param input_type: 202 | :param model: 203 | :param user: 204 | :param app: if None, it will be base app name 205 | :param need_reload: if reload is needed while GET request 206 | :param args: 207 | :param kwargs: 208 | """ 209 | super().__init__(user=user, app=app, need_reload=need_reload, *args, **kwargs) 210 | 211 | self.input_type = input_type 212 | self._model = model 213 | 214 | @property 215 | def internal_endpoint(self): 216 | return f"data/inputs/{self.input_type}" 217 | 218 | def model(self, name): 219 | return self._model 220 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/endpoint/converter.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | """ 18 | Converters for Splunk configuration. 19 | """ 20 | 21 | 22 | import base64 23 | import json 24 | 25 | __all__ = [ 26 | "Converter", 27 | "Normaliser", 28 | "ChainOf", 29 | "UserDefined", 30 | "Unifier", 31 | "Boolean", 32 | "Lower", 33 | "Upper", 34 | "Mapping", 35 | "Base64", 36 | "JSON", 37 | ] 38 | 39 | 40 | class Converter: 41 | """ 42 | Converting data: encode for in-coming request 43 | and decode for out-coming response. 44 | """ 45 | 46 | def encode(self, value, request): 47 | """ 48 | Encode data from client for request. 49 | 50 | :param value: value to encode for request 51 | :param request: whole request data 52 | :return: 53 | """ 54 | raise NotImplementedError() 55 | 56 | def decode(self, value, response): 57 | """ 58 | Decode data from storage for response. 
59 | 60 | :param value: value to decode for response 61 | :param response: whole response data 62 | :return: 63 | """ 64 | raise NotImplementedError() 65 | 66 | 67 | class Normaliser(Converter): 68 | """ 69 | Normalizing data: same converting logic for encode & decode. 70 | """ 71 | 72 | def normalize(self, value, data): 73 | """ 74 | Normalize a given value. 75 | 76 | :param value: value to normalize 77 | :param data: whole payload 78 | :returns: normalized value. 79 | """ 80 | raise NotImplementedError() 81 | 82 | def encode(self, value, request): 83 | return self.normalize(value, request) 84 | 85 | def decode(self, value, response): 86 | return self.normalize(value, response) 87 | 88 | 89 | class ChainOf(Converter): 90 | """ 91 | A composite of converters that will covert data with specified 92 | converters on by one, and returns result from the last converter. 93 | """ 94 | 95 | def __init__(self, *converters): 96 | """ 97 | 98 | :param converters: a list of converters 99 | """ 100 | super().__init__() 101 | self._converters = converters 102 | 103 | def encode(self, value, request): 104 | for converter in self._converters: 105 | value = converter.encode(value, request) 106 | return value 107 | 108 | def decode(self, value, response): 109 | import copy 110 | 111 | converters = copy.copy(self._converters) 112 | converters.reverse() 113 | for converter in converters: 114 | value = converter.decode(value, response) 115 | return value 116 | 117 | 118 | class UserDefined(Converter): 119 | """ 120 | User-defined normaliser. 121 | 122 | The user-defined normaliser function should be in form: 123 | ``def fun(value, *args, **kwargs): ...`` 124 | 125 | Usage:: 126 | >>> def my_encoder(value, request, args): 127 | >>> if request == args: 128 | >>> return value 129 | >>> else: 130 | >>> return value 131 | >>> my_converter = UserDefined(my_encoder, 'test_val') 132 | >>> my_converter.encode('value', {'key': 'value'}, 'value1') 133 | 134 | """ 135 | 136 | def __init__(self, encoder, decoder=None, *args, **kwargs): 137 | """ 138 | 139 | :param encoder: user-defined function for encoding 140 | :param decoder: user-defined function for decoding. 141 | If None, it is the same to encoder. 142 | :param args: 143 | :param kwargs: 144 | """ 145 | super().__init__() 146 | self._encoder = encoder 147 | self._decoder = decoder or self._encoder 148 | self._args = args 149 | self._kwargs = kwargs 150 | 151 | def encode(self, value, request): 152 | return self._encoder(value, request, *self._args, **self._kwargs) 153 | 154 | def decode(self, value, response): 155 | return self._decoder(value, response, *self._args, **self._kwargs) 156 | 157 | 158 | class Lower(Normaliser): 159 | """ 160 | Normalize a string to all lower cases. 161 | """ 162 | 163 | def normalize(self, value, data): 164 | return value.strip().lower() 165 | 166 | 167 | class Upper(Normaliser): 168 | """ 169 | Normalize a string to all upper cases. 170 | """ 171 | 172 | def normalize(self, value, data): 173 | return value.strip().upper() 174 | 175 | 176 | class Unifier(Normaliser): 177 | """ 178 | Many-to-one map for normalizing request & response. 
179 | """ 180 | 181 | def __init__( 182 | self, 183 | value_map, 184 | default=None, 185 | case_sensitive=False, 186 | ): 187 | """ 188 | 189 | :param value_map: 190 | {"": ""} 191 | :param default: default value for input not in specific list 192 | :param case_sensitive: if it is False, 193 | it will return lower case 194 | """ 195 | super().__init__() 196 | self._case_sensitive = case_sensitive 197 | self._default = default 198 | self._value_map = {} 199 | for val_new, val_old_list in value_map.items(): 200 | for val_old in val_old_list: 201 | val_old = val_old if case_sensitive else val_old.lower() 202 | assert val_old not in self._value_map, ( 203 | 'Normaliser "Unifier" only supports Many-to-one mapping: %s' 204 | % val_old 205 | ) 206 | self._value_map[val_old] = val_new 207 | 208 | def normalize(self, value, data): 209 | need_lower = not self._case_sensitive and isinstance(value, str) 210 | val_old = value.lower() if need_lower else value 211 | val_default = self._default or value 212 | return self._value_map.get(val_old, val_default) 213 | 214 | 215 | class Boolean(Unifier): 216 | """ 217 | Normalize a boolean field. 218 | 219 | Normalize given value to boolean: 0 or 1 220 | (for False and True respectively). 221 | If the given value is not-a-string or unrecognizable, 222 | it returns default value. 223 | """ 224 | 225 | VALUES_TRUE = {"true", "t", "1", "yes", "y"} 226 | VALUES_FALSE = {"false", "f", "0", "no", "n"} 227 | 228 | def __init__(self, default=True): 229 | """ 230 | 231 | :param default: default for unrecognizable input of boolean. 232 | """ 233 | super().__init__( 234 | value_map={ 235 | "1": Boolean.VALUES_TRUE, 236 | "0": Boolean.VALUES_FALSE, 237 | }, 238 | default="1" if default else "0", 239 | case_sensitive=False, 240 | ) 241 | 242 | 243 | class Mapping(Converter): 244 | """ 245 | One-to-one map between interface value and storage value. 246 | If value is not in specific mapping, 247 | it will return the original value. 248 | """ 249 | 250 | def __init__(self, value_map, case_sensitive=False): 251 | """ 252 | 253 | :param value_map: {"": ""} 254 | :param case_sensitive: if it is False, 255 | it will return lower case 256 | """ 257 | super().__init__() 258 | self._case_sensitive = case_sensitive 259 | self._map_interface, self._map_storage = {}, {} 260 | for interface, storage in value_map.items(): 261 | self._check_and_set(interface, storage) 262 | 263 | def _check_and_set(self, interface, storage): 264 | if not self._case_sensitive: 265 | interface = interface.lower() 266 | storage = storage.lower() 267 | assert interface not in self._map_interface, ( 268 | 'Converter "Mapping" only supports one-to-one mapping: "%s"' % interface 269 | ) 270 | assert storage not in self._map_storage, ( 271 | 'Converter "Mapping" only supports one-to-one mapping: "%s"' % storage 272 | ) 273 | self._map_interface[interface] = storage 274 | self._map_storage[storage] = interface 275 | 276 | def encode(self, value, request): 277 | if self._case_sensitive: 278 | interface = value 279 | else: 280 | interface = value.lower() 281 | return self._map_interface.get(interface, value) 282 | 283 | def decode(self, value, response): 284 | if self._case_sensitive: 285 | storage = value 286 | else: 287 | storage = value.lower() 288 | return self._map_storage.get(storage, value) 289 | 290 | 291 | class Base64(Converter): 292 | """ 293 | Covert input data to base64 string. 
294 | """ 295 | 296 | def encode(self, value, request): 297 | return base64.b64encode(value) 298 | 299 | def decode(self, value, response): 300 | return base64.b64decode(value) 301 | 302 | 303 | class JSON(Converter): 304 | """ 305 | Converter between object and JSON string. 306 | """ 307 | 308 | def encode(self, value, request): 309 | return json.dumps(value) 310 | 311 | def decode(self, value, response): 312 | return json.loads(value) 313 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/endpoint/field.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | 18 | from ..error import RestError 19 | 20 | __all__ = ["RestField"] 21 | 22 | 23 | class RestField: 24 | """ 25 | REST Field. 26 | """ 27 | 28 | def __init__( 29 | self, 30 | name, 31 | required=False, 32 | encrypted=False, 33 | default=None, 34 | validator=None, 35 | converter=None, 36 | ): 37 | self.name = name 38 | self.required = required 39 | self.encrypted = encrypted 40 | self.default = default 41 | self.validator = validator 42 | self.converter = converter 43 | 44 | def validate(self, data, existing=None, validate_name=None): 45 | # update case: check required field in data 46 | if existing and self.name in data and not data.get(self.name) and self.required: 47 | raise RestError(400, "Required field is missing: %s" % self.name) 48 | value = data.get(self.name) if not validate_name else validate_name 49 | if not value and existing is None: 50 | if self.required: 51 | raise RestError(400, "Required field is missing: %s" % self.name) 52 | return 53 | if self.validator is None or not value: 54 | return 55 | 56 | res = self.validator.validate(value, data) 57 | if not res: 58 | raise RestError(400, self.validator.msg) 59 | 60 | def encode(self, data): 61 | value = data.get(self.name) 62 | if not value or self.converter is None: 63 | return 64 | data[self.name] = self.converter.encode(value, data) 65 | 66 | def decode(self, data): 67 | value = data.get(self.name) 68 | if not value or self.converter is None: 69 | return 70 | data[self.name] = self.converter.decode(value, data) 71 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/endpoint/validator.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | """ 18 | Validators for Splunk configuration. 19 | """ 20 | 21 | 22 | import json 23 | import re 24 | import warnings 25 | from inspect import isfunction 26 | 27 | __all__ = [ 28 | "Validator", 29 | "ValidationError", 30 | "AnyOf", 31 | "AllOf", 32 | "RequiresIf", 33 | "UserDefined", 34 | "Enum", 35 | "Number", 36 | "String", 37 | "Pattern", 38 | "Host", 39 | "Port", 40 | "Datetime", 41 | "Email", 42 | "JsonString", 43 | ] 44 | 45 | 46 | class Validator: 47 | """ 48 | Base class of validators. 49 | """ 50 | 51 | def __init__(self): 52 | self._msg = "" 53 | 54 | def validate(self, value, data): 55 | """ 56 | Check if the given value is valid. It assumes that 57 | the given value is a string. 58 | 59 | :param value: value to validate. 60 | :param data: whole payload in request. 61 | :return If the value is invalid, return True. 62 | Or return False. 63 | """ 64 | raise NotImplementedError('Function "validate" needs to be implemented.') 65 | 66 | @property 67 | def msg(self): 68 | """ 69 | It will return the one with highest priority. 70 | 71 | :return: 72 | """ 73 | return self._msg if self._msg else "Invalid input value" 74 | 75 | def put_msg(self, msg, *args, **kwargs): 76 | """ 77 | Put message content into pool. 78 | 79 | :param msg: error message content 80 | :return: 81 | """ 82 | if args or "high_priority" in kwargs: 83 | warnings.warn( 84 | "`high_priority` arg is deprecated and at a time a single message string is kept in memory." 85 | " The last message passed to `put_msg` is returned by `msg` property.", 86 | FutureWarning, 87 | ) 88 | self._msg = msg 89 | 90 | 91 | class ValidationFailed(Exception): 92 | """ 93 | Validation error. 94 | """ 95 | 96 | pass 97 | 98 | 99 | class AnyOf(Validator): 100 | """ 101 | A composite of validators that accepts values accepted by 102 | any of its component validators. 103 | """ 104 | 105 | def __init__(self, *validators): 106 | """ 107 | 108 | :param validators: A list of validators. 109 | """ 110 | super().__init__() 111 | self._validators = validators 112 | 113 | def validate(self, value, data): 114 | msgs = [] 115 | for validator in self._validators: 116 | if not validator.validate(value, data): 117 | msgs.append(validator.msg) 118 | else: 119 | return True 120 | else: 121 | self.put_msg( 122 | "At least one of the following errors need to be fixed: %s" 123 | % json.dumps(msgs) 124 | ) 125 | return False 126 | 127 | 128 | class AllOf(Validator): 129 | """ 130 | A composite of validators that accepts values accepted by 131 | all of its component validators. 132 | """ 133 | 134 | def __init__(self, *validators): 135 | """ 136 | 137 | :param validators: A list of validators. 138 | """ 139 | super().__init__() 140 | self._validators = validators 141 | 142 | def validate(self, value, data): 143 | msgs = [] 144 | for validator in self._validators: 145 | if not validator.validate(value, data): 146 | msgs.append(validator.msg) 147 | if msgs: 148 | self.put_msg( 149 | "All of the following errors need to be fixed: %s" % json.dumps(msgs) 150 | ) 151 | return False 152 | return True 153 | 154 | 155 | class RequiresIf(Validator): 156 | """ 157 | If the given field makes the specified condition as True, 158 | it requires some other fields are not empty 159 | in the payload of request. 160 | """ 161 | 162 | def __init__(self, fields, condition=None): 163 | """ 164 | 165 | :param fields: conditionally required field name list. 
166 | :param condition: it can be: 167 | 1. None means any non-empty string for given field 168 | 2. A function takes value & data as parameters and 169 | returns a boolean value 170 | """ 171 | assert isinstance( 172 | fields, (list, set, tuple) 173 | ), 'Argument "fields" should be list, set or tuple' 174 | super().__init__() 175 | self.fields = fields 176 | self.condition = condition 177 | 178 | @classmethod 179 | def _is_empty(cls, value): 180 | return value is None or value == "" 181 | 182 | def validate(self, value, data): 183 | if self.condition is None and not self._is_empty(value): 184 | need_validate = True 185 | else: 186 | assert isfunction( 187 | self.condition 188 | ), "Condition should be a function for RequiresIf validator" 189 | need_validate = self.condition(value, data) 190 | if not need_validate: 191 | return True 192 | 193 | fields = [] 194 | for field in self.fields: 195 | val = data.get(field) 196 | if val is None or val == "": 197 | fields.append(field) 198 | if fields: 199 | self.put_msg("For given input, fields are required: %s" % ", ".join(fields)) 200 | return False 201 | return True 202 | 203 | 204 | class UserDefined(Validator): 205 | """ 206 | A validator that defined by user. 207 | 208 | The user-defined validator function should be in form: 209 | ``def func(value, data, *args, **kwargs): ...`` 210 | ValidationFailed will be raised if validation failed. 211 | 212 | Usage:: 213 | >>> def my_validate(value, data, args): 214 | >>> if value != args or not data: 215 | >>> raise ValidationFailed('Invalid input') 216 | >>> 217 | >>> my_validator = UserDefined(my_validate, 'test_val') 218 | >>> my_validator.validate('value', {'key': 'value'}, 'value1') 219 | 220 | """ 221 | 222 | def __init__(self, validator, *args, **kwargs): 223 | """ 224 | 225 | :param validator: user-defined validating function 226 | """ 227 | super().__init__() 228 | self._validator = validator 229 | self._args = args 230 | self._kwargs = kwargs 231 | 232 | def validate(self, value, data): 233 | try: 234 | self._validator(value, data, *self._args, **self._kwargs) 235 | except ValidationFailed as exc: 236 | self.put_msg(str(exc)) 237 | return False 238 | else: 239 | return True 240 | 241 | 242 | class Enum(Validator): 243 | """ 244 | A validator that accepts only a finite set of values. 245 | """ 246 | 247 | def __init__(self, values=()): 248 | """ 249 | 250 | :param values: The collection of valid values 251 | """ 252 | super().__init__() 253 | try: 254 | self._values = set(values) 255 | except TypeError: 256 | self._values = list(values) 257 | 258 | self.put_msg("Value should be in %s" % json.dumps(list(self._values))) 259 | 260 | def validate(self, value, data): 261 | return value in self._values 262 | 263 | 264 | class Number(Validator): 265 | """ 266 | A validator that accepts values within a certain range. 267 | This is for numeric value. 
268 | 
269 |     Accepted condition: min_val <= value <= max_val
270 |     """
271 | 
272 |     def __init__(self, min_val=None, max_val=None, is_int=False):
273 |         """
274 | 
275 |         :param min_val: if not None, it requires min_val <= value
276 |         :param max_val: if not None, it requires value <= max_val
277 |         :param is_int: the value should be integer or not
278 |         """
279 | 
280 |         assert self._check(min_val) and self._check(
281 |             max_val
282 |         ), "{min_val} & {max_val} should be numbers".format(
283 |             min_val=min_val,
284 |             max_val=max_val,
285 |         )
286 | 
287 |         super().__init__()
288 |         self._min_val = min_val
289 |         self._max_val = max_val
290 |         self._is_int = is_int
291 | 
292 |     def _check(self, val):
293 |         return val is None or isinstance(val, (int, float))
294 | 
295 |     def validate(self, value, data):
296 |         try:
297 |             value = int(value) if self._is_int else float(value)
298 |         except ValueError:
299 |             self.put_msg(
300 |                 "Invalid format for %s value"
301 |                 % ("integer" if self._is_int else "numeric")
302 |             )
303 |             return False
304 | 
305 |         msg = None
306 |         if not self._min_val and self._max_val and value > self._max_val:
307 |             msg = f"Value should be smaller than {self._max_val}"
308 |         elif not self._max_val and self._min_val and value < self._min_val:
309 |             msg = "Value should be no smaller than {min_val}".format(
310 |                 min_val=self._min_val
311 |             )
312 |         elif self._min_val and self._max_val:
313 |             if value < self._min_val or value > self._max_val:
314 |                 msg = "Value should be between {min_val} and {max_val}".format(
315 |                     min_val=self._min_val,
316 |                     max_val=self._max_val,
317 |                 )
318 |         if msg is not None:
319 |             self.put_msg(msg)
320 |             return False
321 |         return True
322 | 
323 | 
324 | class String(Validator):
325 |     """
326 |     A validator that accepts string values.
327 | 
328 |     Accepted condition: min_len <= len(value) <= max_len
329 |     """
330 | 
331 |     def __init__(self, min_len=None, max_len=None):
332 |         """
333 | 
334 |         :param min_len: If not None,
335 |             the value should be no shorter than ``min_len``
336 |         :param max_len: If not None,
337 |             the value should be no longer than ``max_len``
338 |         """
339 | 
340 |         assert self._check(min_len) and self._check(
341 |             max_len
342 |         ), "{min_len} & {max_len} should be numbers".format(
343 |             min_len=min_len,
344 |             max_len=max_len,
345 |         )
346 | 
347 |         super().__init__()
348 |         self._min_len, self._max_len = min_len, max_len
349 | 
350 |     def _check(self, val):
351 |         if val is None:
352 |             return True
353 |         return isinstance(val, int) and val >= 0
354 | 
355 |     def validate(self, value, data):
356 |         if not isinstance(value, str):
357 |             self.put_msg("Input value should be string")
358 |             return False
359 | 
360 |         str_len = len(value)
361 |         msg = None
362 | 
363 |         if not self._min_len and self._max_len and str_len > self._max_len:
364 |             msg = "String should be shorter than {max_len}".format(
365 |                 max_len=self._max_len
366 |             )
367 |         elif self._min_len and not self._max_len and str_len < self._min_len:
368 |             msg = "String should be no shorter than {min_len}".format(
369 |                 min_len=self._min_len
370 |             )
371 |         elif self._min_len and self._max_len:
372 |             if str_len < self._min_len or str_len > self._max_len:
373 |                 msg = "String length should be between {min_len} and {max_len}".format(
374 |                     min_len=self._min_len,
375 |                     max_len=self._max_len,
376 |                 )
377 |         if msg is not None:
378 |             self.put_msg(msg)
379 |             return False
380 |         return True
381 | 
382 | 
383 | class Datetime(Validator):
384 |     """
385 |     Date time validation.
386 |     """
387 | 
388 |     def __init__(self, datetime_format):
389 |         """
390 | 
391 |         :param datetime_format: Date time format,
392 |             e.g. %Y-%m-%dT%H:%M:%S.%f
393 |         """
394 |         super().__init__()
395 |         self._format = datetime_format
396 | 
397 |     def validate(self, value, data):
398 |         import datetime
399 | 
400 |         try:
401 |             datetime.datetime.strptime(value, self._format)
402 |         except ValueError as exc:
403 |             error = f'Wrong datetime with format "{self._format}": {str(exc)}'
404 |             self.put_msg(error)
405 |             return False
406 |         return True
407 | 
408 | 
409 | class Pattern(Validator):
410 |     """
411 |     A validator that accepts strings that match
412 |     a given regular expression.
413 |     """
414 | 
415 |     def __init__(self, regex, flags=0):
416 |         """
417 | 
418 |         :param regex: The regular expression (string or compiled)
419 |             to be matched.
420 |         :param flags: flags value for regular expression.
421 |         """
422 |         super().__init__()
423 |         self._regexp = re.compile(regex, flags=flags)
424 |         self.put_msg("Not matching the pattern: %s" % regex)
425 | 
426 |     def validate(self, value, data):
427 |         return self._regexp.match(value) and True or False
428 | 
429 | 
430 | class Host(Pattern):
431 |     """
432 |     A validator that accepts strings that represent a network hostname.
433 |     """
434 | 
435 |     def __init__(self):
436 |         regexp = (
437 |             r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*"
438 |             r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$"
439 |         )
440 |         super().__init__(regexp, flags=re.I)
441 |         self.put_msg("Invalid hostname")
442 | 
443 | 
444 | class Port(Number):
445 |     """
446 |     Port number.
447 |     """
448 | 
449 |     def __init__(self):
450 |         super().__init__(
451 |             min_val=0,
452 |             max_val=65536,
453 |             is_int=True,
454 |         )
455 |         self.put_msg(
456 |             "Invalid port number, it should be an integer between 0 and 65535",
457 |         )
458 | 
459 | 
460 | class Email(Pattern):
461 |     """
462 |     A validator that accepts strings that represent an email address.
463 |     """
464 | 
465 |     def __init__(self):
466 |         regexp = (
467 |             r"^[A-Z0-9][A-Z0-9._%+-]{0,63}@"
468 |             r"(?:[A-Z0-9](?:[A-Z0-9-]{0,62}[A-Z0-9])?\.){1,8}[A-Z]{2,63}$"
469 |         )
470 |         super().__init__(regexp, flags=re.I)
471 |         self.put_msg("Invalid email address")
472 | 
473 | 
474 | class JsonString(Validator):
475 |     """
476 |     Check if the given value is a valid JSON string.
477 |     """
478 | 
479 |     def validate(self, value, data):
480 |         try:
481 |             json.loads(value)
482 |         except ValueError:
483 |             self.put_msg("Invalid JSON string")
484 |             return False
485 |         return True
486 | 
--------------------------------------------------------------------------------
/splunktaucclib/rest_handler/entity.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2025 Splunk Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | #     http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | # 16 | 17 | 18 | from .eai import RestEAI 19 | 20 | __all__ = ["RestEntity"] 21 | 22 | 23 | class RestEntity: 24 | def __init__(self, name, content, model, user, app, acl=None): 25 | self.name = name 26 | self.content = content 27 | self.model = model 28 | self._eai = RestEAI(self.model, user, app, acl) 29 | 30 | @property 31 | def eai(self): 32 | return self._eai 33 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/error.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | """ 18 | Error Handling. 19 | """ 20 | 21 | 22 | __all__ = ["STATUS_CODES", "RestError"] 23 | 24 | 25 | # HTTP status codes 26 | STATUS_CODES = { 27 | 400: "Bad Request", 28 | 401: "Unauthorized", 29 | 402: "Payment Required", 30 | 403: "Forbidden", 31 | 404: "Not Found", 32 | 405: "Method Not Allowed", 33 | 406: "Not Acceptable", 34 | 407: "Proxy Authentication Required", 35 | 408: "Request Timeout", 36 | 409: "Conflict", 37 | 411: "Length Required", 38 | 500: "Internal Server Error", 39 | 503: "Service Unavailable", 40 | } 41 | 42 | 43 | class RestError(Exception): 44 | """ 45 | REST Error. 46 | """ 47 | 48 | def __init__(self, status, message): 49 | self.status = status 50 | self.reason = STATUS_CODES.get( 51 | status, 52 | "Unknown Error", 53 | ) 54 | self.message = message 55 | err_msg = "REST Error [{status}]: {reason} -- {message}".format( 56 | status=self.status, 57 | reason=self.reason, 58 | message=self.message, 59 | ) 60 | super().__init__(err_msg) 61 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/error_ctl.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | """Error control 18 | """ 19 | 20 | 21 | import logging 22 | import re 23 | import sys 24 | import traceback 25 | 26 | from splunk import RESTException 27 | 28 | import splunktaucclib.common.log as stulog 29 | 30 | __all__ = ["RestHandlerError", "ERROR_MAPPING"] 31 | 32 | 33 | # Errors mapping for add-on. 34 | # Edit it when you need to add new error type. 35 | ERROR_MAPPING = { 36 | # splunkd internal error, occurred while calling splunkd REST API. 
37 |     400: "Bad Request",
38 |     401: "Client is not authenticated",
39 |     402: "Current license does not allow the requested action",
40 |     403: "Unauthorized client for the requested action",
41 |     404: "Resource/Endpoint requested does not exist",
42 |     409: "Conflict occurred due to existing object with the same name",
43 |     500: "Splunkd internal error",
44 |     # Rest handler predefined error in add-on.
45 |     1000: "An Add-on Internal ERROR Occurred",
46 |     1001: "Fatal Error",
47 |     1002: "Some mandatory attributes are missing or unusable for the handler",
48 |     1020: "Fail to encrypt credential information",
49 |     1021: "Fail to decrypt the encrypted credential information",
50 |     1022: "Fail to delete the encrypted credential information",
51 |     1100: "Unsupported value in request arguments",
52 |     1101: "Unsupported action on the requested endpoint",
53 |     1102: "Failed to check object for _sync action",
54 |     1103: "Failed to teardown configurations",
55 |     1104: "Poster REST handler error",
56 | }
57 | 
58 | 
59 | class RestHandlerError:
60 |     """Control Error in Splunk Add-on REST API.
61 |     code-message mapping for errors:
62 |         code < 1000: splunkd internal error, occurred while
63 |             calling splunkd REST API,
64 |         code >= 1000: Rest handler predefined error in add-on,
65 |     """
66 | 
67 |     def __init__(self, code, msgx=""):
68 |         if code == -1:
69 |             self._conv(msgx)
70 |         else:
71 |             self._code = code
72 |             self._msgx = msgx
73 |             self._msg = RestHandlerError.map(code)
74 | 
75 |     def __str__(self):
76 |         msgx = (self._msgx and self._msgx != self._msg) and " - %s" % self._msgx or ""
77 |         return f"REST ERROR[{self._code}]: {self._msg}{msgx}"
78 | 
79 |     def _conv(self, exc):
80 |         """Convert an Exception from 'splunk.rest.simpleRequest'"""
81 |         if isinstance(exc, RESTException):
82 |             self._code = exc.statusCode
83 | 
84 |             try:
85 |                 self._msg = RestHandlerError.map(self._code)
86 |             except:
87 |                 self._msg = exc.get_message_text().strip()
88 | 
89 |             msgx = exc.get_extended_message_text().strip()
90 |             if self._msg == msgx:
91 |                 self._msg = "Undefined Error"
92 |             try:
93 |                 pattern = r"In handler \'\S+\': (?P<msgx>.*$)"
94 |                 m = re.match(pattern, msgx)
95 |                 groupDict = m.groupdict()
96 |                 self._msgx = groupDict["msgx"]
97 |             except:
98 |                 self._msgx = msgx
99 |         else:
100 |             self._code = 500
101 |             self._msg = RestHandlerError.map(self._code)
102 |             self._msgx = str(exc)
103 | 
104 |     @staticmethod
105 |     def map(code):
106 |         """Map error code to message. Raise an exception
107 |         if the code does not exist.
108 |         :param code: error code
109 |         :returns: error message for the input code
110 |         """
111 |         msg = ERROR_MAPPING.get(code)
112 |         assert msg, "Invalid error code is being used - code=%s" % code
113 |         return msg
114 | 
115 |     @staticmethod
116 |     def ctl(code, msgx="", logLevel=logging.ERROR, shouldPrint=True, shouldRaise=True):
117 |         """Control error, including printing out the error message,
118 |         logging it and raising an exception (BaseException).
119 | 120 | :param code: error code (it should be -1 121 | if 'msgx' is an splunkd internal error) 122 | :param msgx: extended message/detail, which will 123 | make it more clear (it is an exception of 124 | splunkd internal error if code=-1) 125 | :param logLevel: logging level (generally, it should be ` 126 | `ERROR`` for Add-on internal error/bug, 127 | ``INFO`` for client request error) 128 | :param shouldPrint: is it required to print error info 129 | (the printed content will be shown to user) 130 | :param shouldRaise: is it required to raise an exception 131 | (the process will be terminated 132 | if an exception raised) 133 | :return: error content 134 | 135 | Some Use Cases: 136 | 1. for splunkd internal exception/error (exc): 137 | ``RestHandlerError.ctl(code=-1, msgx=exc, logLevel=logging.INFO)`` 138 | 2. for bug in user-defined Rest handler in add-on: 139 | ``assert 'expression', \ 140 | RestHandlerError.ctl(code=1000, msgx='some detail...', 141 | shouldPrint=False, shouldRaise=False)`` 142 | 3. for client request error: 143 | RestHandlerError.ctl(code=1100, msgx='some detail...', 144 | logLevel=logging.INFO) 145 | """ 146 | err = RestHandlerError(code, msgx=msgx) 147 | tb = ( 148 | "\r\n" + ("".join(traceback.format_stack())) 149 | if logLevel >= logging.ERROR or isinstance(msgx, Exception) 150 | else "" 151 | ) 152 | 153 | stulog.logger.log(logLevel, f"{err}{tb}", exc_info=1) 154 | if shouldPrint: 155 | sys.stdout.write(str(err)) 156 | if shouldRaise: 157 | raise BaseException(err) 158 | return err 159 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/normaliser.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | """Normalisers 18 | """ 19 | 20 | 21 | __all__ = ["Normaliser", "Boolean", "StringLower", "StringUpper"] 22 | 23 | 24 | class Normaliser: 25 | """Base class of Normaliser.""" 26 | 27 | _name = None 28 | 29 | def __init__(self): 30 | pass 31 | 32 | def normalize(self, value): 33 | """Normalize a given value. 34 | 35 | :param value: value to normalize. 36 | :returns: normalized value. 37 | """ 38 | raise NotImplementedError 39 | 40 | @property 41 | def name(self): 42 | """name of normaliser.""" 43 | return self._name or self.__class__.__name__ 44 | 45 | 46 | class Userdefined(Normaliser): 47 | """A Normaliser that defined by user itself. 48 | 49 | The user-defined normaliser function should be in form: 50 | ``def fun(value, *args, **kwargs): ...`` 51 | It will return the original data if any exception occurred. 
52 | """ 53 | 54 | def __init__(self, normaliser, *args, **kwargs): 55 | """ 56 | :param values: The collection of valid values 57 | """ 58 | super().__init__() 59 | self._normaliser, self._args, self._kwargs = normaliser, args, kwargs 60 | 61 | def normalize(self, value): 62 | try: 63 | return self._normaliser(value, *self._args, **self._kwargs) 64 | except: 65 | return value 66 | 67 | 68 | class Boolean(Normaliser): 69 | """Normalize a boolean field. 70 | 71 | Normalize given value to boolean: ``0`` or ``1``. 72 | ``default`` means the return for unrecognizable input of boolean. 73 | """ 74 | 75 | def __init__(self, default=True): 76 | super().__init__() 77 | self._default = "1" if default else "0" 78 | 79 | def normalize(self, value): 80 | if isinstance(value, (bool, int)): 81 | return value and "1" or "0" 82 | if not isinstance(value, str): 83 | return self._default 84 | value = value.strip().lower() 85 | 86 | vals = { 87 | "1": {"true", "t", "1", "yes", "y"}, 88 | "0": {"false", "f", "0", "no", "n"}, 89 | } 90 | revDef = {"1": "0", "0": "1"}[self._default] 91 | return revDef if value in vals[revDef] else self._default 92 | 93 | 94 | class StringLower(Normaliser): 95 | """Normalize a string to all lower cases.""" 96 | 97 | def normalize(self, value): 98 | if isinstance(value, str): 99 | return value.strip().lower() 100 | return value 101 | 102 | 103 | class StringUpper(Normaliser): 104 | """Normalize a string to all upper cases.""" 105 | 106 | def normalize(self, value): 107 | if isinstance(value, str): 108 | return value.strip().upper() 109 | return value 110 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/schema.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | """ 18 | REST Schema 19 | """ 20 | 21 | 22 | from abc import abstractproperty 23 | 24 | __all__ = [ 25 | "RestSchemaError", 26 | "RestSchema", 27 | ] 28 | 29 | 30 | class RestSchemaError(Exception): 31 | pass 32 | 33 | 34 | class RestSchema: 35 | """ 36 | REST Scheme. 37 | """ 38 | 39 | def __init__(self, *args, **kwargs): 40 | pass 41 | 42 | @staticmethod 43 | def endpoint_name(name, namespace): 44 | return f"{namespace}_{name}" 45 | 46 | @abstractproperty 47 | def product(self): 48 | pass 49 | 50 | @abstractproperty 51 | def namespace(self): 52 | pass 53 | 54 | @abstractproperty 55 | def version(self): 56 | pass 57 | -------------------------------------------------------------------------------- /splunktaucclib/rest_handler/util.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | import os.path 18 | from typing import Any, Dict, Optional 19 | 20 | import solnlib.utils as utils 21 | 22 | from .error import RestError 23 | 24 | try: 25 | from splunk import admin 26 | except Exception: 27 | print("Some functions will not be available outside of a splunk hosted process") 28 | 29 | __all__ = [ 30 | "get_base_app_name", 31 | "remove_http_proxy_env_vars", 32 | "makeConfItem", 33 | "getBaseAppName", 34 | ] 35 | 36 | 37 | def get_appname_from_path(absolute_path): 38 | absolute_path = os.path.normpath(absolute_path) 39 | parts = absolute_path.split(os.path.sep) 40 | parts.reverse() 41 | for key in ("apps", "slave-apps", "master-apps"): 42 | try: 43 | idx = parts.index(key) 44 | except ValueError: 45 | continue 46 | else: 47 | try: 48 | if parts[idx + 1] == "etc": 49 | return parts[idx - 1] 50 | except IndexError: 51 | pass 52 | continue 53 | return "-" 54 | 55 | 56 | def getBaseAppName(): 57 | """Base App name, which this script belongs to.""" 58 | appName = util.get_appname_from_path(__file__) 59 | if appName is None: 60 | raise Exception("Cannot get app name from file: %s" % __file__) 61 | return appName 62 | 63 | 64 | def makeConfItem(name, entity, confInfo, user="nobody", app="-"): 65 | confItem = confInfo[name] 66 | for key, val in list(entity.items()): 67 | if key not in ("eai:attributes", "eai:userName", "eai:appName"): 68 | confItem[key] = val 69 | confItem["eai:userName"] = entity.get("eai:userName") or user 70 | confItem["eai:appName"] = entity.get("eai:appName") or app 71 | confItem.setMetadata( 72 | admin.EAI_ENTRY_ACL, 73 | entity.get(admin.EAI_ENTRY_ACL) 74 | or { 75 | "owner": user, 76 | "app": app, 77 | "global": 1, 78 | "can_write": 1, 79 | "modifiable": 1, 80 | "removable": 1, 81 | "sharing": "global", 82 | "perms": {"read": ["*"], "write": ["admin"]}, 83 | }, 84 | ) 85 | return confItem 86 | 87 | 88 | def get_base_app_name(): 89 | """ 90 | Base App name, which this script belongs to. 
91 |     """
92 |     import __main__
93 | 
94 |     main_name = __main__.__file__
95 |     absolute_path = os.path.normpath(main_name)
96 |     parts = absolute_path.split(os.path.sep)
97 |     parts.reverse()
98 |     for key in ("apps", "peer-apps", "manager-apps"):
99 |         try:
100 |             idx = parts.index(key)
101 |             if parts[idx + 1] == "etc":
102 |                 return parts[idx - 1]
103 |         except (ValueError, IndexError):
104 |             pass
105 |     raise RestError(status=500, message="Cannot get app name from file: %s" % main_name)
106 | 
107 | 
108 | def remove_http_proxy_env_vars():
109 |     for k in ("http_proxy", "https_proxy"):
110 |         if k in os.environ:
111 |             del os.environ[k]
112 |         elif k.upper() in os.environ:
113 |             del os.environ[k.upper()]
114 | 
115 | 
116 | def get_proxy_uri(proxy: Dict[str, Any]) -> Optional[str]:
117 |     """
118 |     :param proxy: dict-like proxy information in the following
119 |         format {
120 |             "proxy_url": zz,
121 |             "proxy_port": aa,
122 |             "proxy_username": bb,
123 |             "proxy_password": cc,
124 |             "proxy_type": http, socks4 or socks5,
125 |             "proxy_rdns": 0 or 1,
126 |         }
127 |     :return: proxy uri or None
128 |     """
129 |     uri = None
130 |     if proxy and proxy.get("proxy_url") and proxy.get("proxy_type"):
131 |         uri = proxy["proxy_url"]
132 |         # socks5 causes the DNS resolution to happen on the client
133 |         # socks5h causes the DNS resolution to happen on the proxy server
134 |         if proxy.get("proxy_type") == "socks5" and utils.is_true(
135 |             proxy.get("proxy_rdns")
136 |         ):
137 |             proxy["proxy_type"] = "socks5h"
138 |         # setting default value of proxy_type to "http" if
139 |         # its value is not from ["http", "socks4", "socks5"]
140 |         if proxy.get("proxy_type") not in ["http", "socks4", "socks5"]:
141 |             proxy["proxy_type"] = "http"
142 |         if proxy.get("proxy_port"):
143 |             uri = "{}:{}".format(uri, proxy.get("proxy_port"))
144 |         if proxy.get("proxy_username") and proxy.get("proxy_password"):
145 |             uri = "{}://{}:{}@{}/".format(
146 |                 proxy["proxy_type"],
147 |                 proxy["proxy_username"],
148 |                 proxy["proxy_password"],
149 |                 uri,
150 |             )
151 |         else:
152 |             uri = "{}://{}".format(proxy["proxy_type"], uri)
153 | 
154 |     return uri
155 | 
--------------------------------------------------------------------------------
/splunktaucclib/splunk_aoblib/__init__.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2025 Splunk Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | #     http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 | 
--------------------------------------------------------------------------------
/splunktaucclib/splunk_aoblib/rest_helper.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2025 Splunk Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | import requests 18 | 19 | 20 | class TARestHelper: 21 | def __init__(self, logger=None): 22 | self.logger = logger 23 | self.http_session = None 24 | self.requests_proxy = None 25 | 26 | def _init_request_session(self, proxy_uri=None): 27 | self.http_session = requests.Session() 28 | self.http_session.mount("http://", requests.adapters.HTTPAdapter(max_retries=3)) 29 | self.http_session.mount( 30 | "https://", requests.adapters.HTTPAdapter(max_retries=3) 31 | ) 32 | if proxy_uri: 33 | self.requests_proxy = {"http": proxy_uri, "https": proxy_uri} 34 | 35 | def send_http_request( 36 | self, 37 | url, 38 | method, 39 | parameters=None, 40 | payload=None, 41 | headers=None, 42 | cookies=None, 43 | verify=True, 44 | cert=None, 45 | timeout=None, 46 | proxy_uri=None, 47 | ): 48 | if self.http_session is None: 49 | self._init_request_session(proxy_uri) 50 | requests_args = {"timeout": (10.0, 5.0), "verify": verify} 51 | if parameters: 52 | requests_args["params"] = parameters 53 | if payload: 54 | if isinstance(payload, (dict, list)): 55 | requests_args["json"] = payload 56 | else: 57 | requests_args["data"] = str(payload) 58 | if headers: 59 | requests_args["headers"] = headers 60 | if cookies: 61 | requests_args["cookies"] = cookies 62 | if cert: 63 | requests_args["cert"] = cert 64 | if timeout is not None: 65 | requests_args["timeout"] = timeout 66 | if self.requests_proxy: 67 | requests_args["proxies"] = self.requests_proxy 68 | return self.http_session.request(method, url, **requests_args) 69 | -------------------------------------------------------------------------------- /splunktaucclib/splunk_aoblib/rest_migration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | import json 18 | import traceback 19 | from urllib.parse import urlparse 20 | 21 | from solnlib.conf_manager import ConfManager 22 | from solnlib.splunk_rest_client import SplunkRestClient 23 | from solnlib.splunkenv import get_splunkd_uri 24 | 25 | from splunktaucclib.rest_handler import util 26 | from splunktaucclib.rest_handler.admin_external import AdminExternalHandler 27 | from splunktaucclib.rest_handler.error import RestError 28 | 29 | 30 | def _migrate_error_handle(func): 31 | def handle(*args, **kwargs): 32 | try: 33 | return func(*args, **kwargs) 34 | except: 35 | raise RestError(500, "Migrating failed. 
%s" % traceback.format_exc()) 36 | 37 | return handle 38 | 39 | 40 | class ConfigMigrationHandler(AdminExternalHandler): 41 | """ 42 | REST handler, which will migrate configuration 43 | from add-on built by previous version of TAB (v2.0.0). 44 | """ 45 | 46 | def handleList(self, confInfo): 47 | self._migrate() 48 | # use classic inheritance to be compatible for 49 | # old version of Splunk private SDK 50 | AdminExternalHandler.handleList(self, confInfo) 51 | 52 | @_migrate_error_handle 53 | def _migrate(self): 54 | internal_endpoint = self.endpoint.internal_endpoint 55 | if not ( 56 | internal_endpoint.endswith("settings") 57 | or internal_endpoint.endswith("account") 58 | ): 59 | return 60 | 61 | splunkd_info = urlparse(get_splunkd_uri()) 62 | self.base_app_name = util.get_base_app_name() 63 | self.conf_mgr = ConfManager( 64 | self.getSessionKey(), 65 | self.base_app_name, 66 | scheme=splunkd_info.scheme, 67 | host=splunkd_info.hostname, 68 | port=splunkd_info.port, 69 | ) 70 | self.client = SplunkRestClient( 71 | self.getSessionKey(), 72 | self.base_app_name, 73 | scheme=splunkd_info.scheme, 74 | host=splunkd_info.hostname, 75 | port=splunkd_info.port, 76 | ) 77 | self.legacy_passwords = None 78 | 79 | # migration legacy configuration in related conf files 80 | if internal_endpoint.endswith("settings"): 81 | self._migrate_conf() 82 | self._migrate_conf_customized() 83 | elif internal_endpoint.endswith("account"): 84 | self._migrate_conf_credential() 85 | 86 | def get_legacy_passwords(self): 87 | if self.legacy_passwords is None: 88 | self.legacy_passwords = {} 89 | for pwd in self.client.storage_passwords.list(count=-1): 90 | if pwd.realm == self.base_app_name: 91 | self.legacy_passwords[pwd.username] = pwd 92 | return self.legacy_passwords 93 | 94 | def _migrate_conf(self): 95 | """ 96 | Migrate from .conf to _settings.conf 97 | :return: 98 | """ 99 | if self.callerArgs.id not in ("logging", "proxy"): 100 | return 101 | conf_file_name = self.base_app_name 102 | conf_file, stanzas = self._load_conf(conf_file_name) 103 | if not stanzas: 104 | return 105 | 106 | # migrate: global_settings ==> logging 107 | if "global_settings" in stanzas and self.callerArgs.id == "logging": 108 | stanza = stanzas["global_settings"] 109 | if "log_level" in stanza: 110 | stanza["loglevel"] = stanza["log_level"] 111 | del stanza["log_level"] 112 | name = "logging" 113 | response = self.handler.update( 114 | name, 115 | self._filter_stanza(name, stanza), 116 | ) 117 | self._loop_response(response) 118 | # delete legacy configuration 119 | self._delete_legacy(conf_file, {"global_settings": None}) 120 | 121 | # migrate: proxy_settings ==> proxy 122 | if "proxy_settings" in stanzas and self.callerArgs.id == "proxy": 123 | name = "proxy" 124 | response = self.handler.update( 125 | name, 126 | self._filter_stanza(name, stanzas["proxy_settings"]), 127 | ) 128 | self._loop_response(response) 129 | # delete legacy configuration 130 | self._delete_legacy(conf_file, {"proxy_settings": None}) 131 | 132 | def _migrate_conf_customized(self): 133 | """ 134 | Migrate from _customized.conf to _settings.conf 135 | :return: 136 | """ 137 | if self.callerArgs.id != "additional_parameters": 138 | return 139 | 140 | conf_file_name = self.base_app_name + "_customized" 141 | conf_file, stanzas = self._load_conf(conf_file_name) 142 | if not stanzas: 143 | return 144 | 145 | additional_parameters = {} 146 | for stanza_name, stanza in list(stanzas.items()): 147 | for key, val in list(stanza.items()): 148 | if key == "type": 149 | 
continue 150 | else: 151 | additional_parameter = val 152 | break 153 | else: 154 | continue 155 | if additional_parameter: 156 | additional_parameters[stanza_name] = additional_parameter 157 | 158 | name = "additional_parameters" 159 | response = self.handler.update( 160 | name, 161 | self._filter_stanza(name, additional_parameters), 162 | ) 163 | self._loop_response(response) 164 | 165 | # delete legacy configuration 166 | self._delete_legacy(conf_file, stanzas) 167 | 168 | def _migrate_conf_credential(self): 169 | """ 170 | Migrate from _credential.conf to _account.conf 171 | :return: 172 | """ 173 | conf_file_name = self.base_app_name + "_credential" 174 | conf_file, stanzas = self._load_conf(conf_file_name) 175 | 176 | for stanza_name, stanza in list(stanzas.items()): 177 | stanza["username"] = stanza_name 178 | response = self.handler.create( 179 | stanza_name, 180 | stanza, 181 | ) 182 | self._loop_response(response) 183 | 184 | # delete legacy configuration 185 | self._delete_legacy(conf_file, stanzas) 186 | 187 | def _load_conf(self, conf_file_name): 188 | if conf_file_name not in self.client.confs: 189 | return None, {} 190 | conf_file = self.conf_mgr.get_conf(conf_file_name) 191 | stanzas = conf_file.get_all() 192 | for stanza_name, stanza in list(stanzas.items()): 193 | pwd = self.get_legacy_passwords().get(stanza_name) 194 | if pwd: 195 | pwd_cont = json.loads(pwd.clear_password) 196 | stanza.update(pwd_cont) 197 | for key in list(stanza.keys()): 198 | if key.startswith("eai:") or key == "disabled": 199 | del stanza[key] 200 | 201 | return conf_file, stanzas 202 | 203 | def _delete_legacy(self, conf_file, stanzas): 204 | for stanza_name, _ in list(stanzas.items()): 205 | try: 206 | # delete stanza from related conf file 207 | conf_file.delete(stanza_name) 208 | except Exception: 209 | pass 210 | 211 | pwd = self.get_legacy_passwords().get(stanza_name) 212 | try: 213 | # delete password from passwords.conf 214 | if pwd: 215 | pwd.delete() 216 | except Exception: 217 | pass 218 | 219 | def _filter_stanza(self, stanza_name, stanza): 220 | model = self.endpoint.model(stanza_name, stanza) 221 | stanza_new = {f.name: stanza[f.name] for f in model.fields if f.name in stanza} 222 | return stanza_new 223 | 224 | @classmethod 225 | def _loop_response(cls, response): 226 | for _ in response: 227 | pass 228 | -------------------------------------------------------------------------------- /splunktaucclib/splunk_aoblib/setup_util.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | 17 | import json 18 | import os 19 | 20 | import solnlib.utils as utils 21 | 22 | from splunktaucclib.global_config import GlobalConfig, GlobalConfigSchema 23 | 24 | """ 25 | Usage Examples: 26 | setup_util = Setup_Util(uri, session_key) 27 | setup_util.get_log_level() 28 | setup_util.get_proxy_settings() 29 | setup_util.get_credential_account("my_account_name") 30 | setup_util.get_customized_setting("my_customized_field_name") 31 | """ 32 | 33 | """ 34 | setting object structure. 35 | It is stored in self.__cached_global_settings 36 | Note, this structure is only maintained in this util. 37 | setup_util transforms global settings in os environment or from ucc into this structure. 38 | { 39 | "proxy_settings": { 40 | "proxy_enabled": False/True, 41 | "proxy_url": "example.com", 42 | "proxy_port": "1234", 43 | "proxy_username": "", 44 | "proxy_password": "", 45 | "proxy_type": "http", 46 | "proxy_rdns": False/True 47 | }, 48 | "log_settings": { 49 | "loglevel": "DEBUG" 50 | }, 51 | "credential_settings": [{ 52 | "name": "account_id", 53 | "username": "example_account", 54 | "password": "example_password" 55 | }, { # supported by ucc, not seen any usage in AoB 56 | "api_key": "admin", 57 | "api_uuid": "admin", 58 | "endpoint": "some url", 59 | "name": "account1" 60 | }], 61 | "customized_settings": { 62 | "text_name": "content", 63 | "pass_name": "password", 64 | "checkbox": 0/1 65 | } 66 | } 67 | """ 68 | 69 | GLOBAL_SETTING_KEY = "global_settings" 70 | AOB_TEST_FLAG = "AOB_TEST" 71 | 72 | PROXY_SETTINGS = "proxy_settings" 73 | LOG_SETTINGS = "log_settings" 74 | CREDENTIAL_SETTINGS = "credential_settings" 75 | CUSTOMIZED_SETTINGS = "customized_settings" 76 | 77 | UCC_PROXY = "proxy" 78 | UCC_LOGGING = "logging" 79 | UCC_CUSTOMIZED = "additional_parameters" 80 | UCC_CREDENTIAL = "account" 81 | 82 | CONFIGS = [CREDENTIAL_SETTINGS] 83 | SETTINGS = [PROXY_SETTINGS, LOG_SETTINGS, CUSTOMIZED_SETTINGS] 84 | 85 | PROXY_ENABLE_KEY = "proxy_enabled" 86 | PROXY_RDNS_KEY = "proxy_rdns" 87 | LOG_LEVEL_KEY = "loglevel" 88 | LOG_LEVEL_KEY_ENV = "log_level" 89 | 90 | TYPE_CHECKBOX = "checkbox" 91 | ALL_SETTING_TYPES = [ 92 | "text", 93 | "password", 94 | "checkbox", 95 | "dropdownlist", 96 | "multi_dropdownlist", 97 | "radiogroup", 98 | ] 99 | 100 | 101 | def get_schema_path(): 102 | dirname = os.path.dirname 103 | basedir = dirname(dirname(dirname(dirname(__file__)))) 104 | return os.path.join( 105 | basedir, "appserver", "static", "js", "build", "globalConfig.json" 106 | ) 107 | 108 | 109 | class Setup_Util: 110 | def __init__(self, uri, session_key, logger=None): 111 | self.__uri = uri 112 | self.__session_key = session_key 113 | self.__logger = logger 114 | self.scheme, self.host, self.port = utils.extract_http_scheme_host_port( 115 | self.__uri 116 | ) 117 | self.__cached_global_settings = {} 118 | self.__global_config = None 119 | 120 | def init_global_config(self): 121 | if self.__global_config is not None: 122 | return 123 | schema_file = get_schema_path() 124 | if not os.path.isfile(schema_file): 125 | self.log_error("Global config JSON file not found!") 126 | self.__global_config = None 127 | else: 128 | with open(get_schema_path()) as f: 129 | json_schema = "".join([l for l in f]) 130 | self.__global_config = GlobalConfig( 131 | self.__uri, 132 | self.__session_key, 133 | GlobalConfigSchema(json.loads(json_schema)), 134 | ) 135 | 136 | def log_error(self, msg): 137 | if self.__logger: 138 | self.__logger.error(msg) 139 | 140 | def log_info(self, msg): 141 | if self.__logger: 142 | 
self.__logger.info(msg) 143 | 144 | def log_debug(self, msg): 145 | if self.__logger: 146 | self.__logger.debug(msg) 147 | 148 | def _parse_conf(self, key): 149 | if os.environ.get(AOB_TEST_FLAG, "false") == "true": 150 | global_settings = self._parse_conf_from_env( 151 | json.loads(os.environ.get(GLOBAL_SETTING_KEY, "{}")) 152 | ) 153 | return global_settings.get(key) 154 | else: 155 | return self._parse_conf_from_global_config(key) 156 | 157 | def _parse_conf_from_env(self, global_settings): 158 | """ 159 | this is run in test env 160 | """ 161 | if not self.__cached_global_settings: 162 | # format the settings, the setting from env is from global_setting 163 | # meta 164 | self.__cached_global_settings = {} 165 | for s_k, s_v in list(global_settings.items()): 166 | if s_k == PROXY_SETTINGS: 167 | proxy_enabled = s_v.get(PROXY_ENABLE_KEY) 168 | proxy_rdns = s_v.get(PROXY_RDNS_KEY) 169 | if type(proxy_enabled) != bool: 170 | s_v[PROXY_ENABLE_KEY] = utils.is_true(proxy_enabled) 171 | if type(proxy_rdns) != bool: 172 | s_v[PROXY_RDNS_KEY] = utils.is_true(proxy_rdns) 173 | self.__cached_global_settings[PROXY_SETTINGS] = s_v 174 | elif s_k == LOG_SETTINGS: 175 | self.__cached_global_settings[LOG_SETTINGS] = { 176 | LOG_LEVEL_KEY: s_v.get(LOG_LEVEL_KEY_ENV) 177 | } 178 | elif s_k == CREDENTIAL_SETTINGS: 179 | # add account id to accounts 180 | for i in range(0, len(s_v)): 181 | s_v[i]["name"] = "account" + str(i) 182 | self.__cached_global_settings[CREDENTIAL_SETTINGS] = s_v 183 | else: # should be customized settings 184 | self.__cached_global_settings[CUSTOMIZED_SETTINGS] = {} 185 | for s in s_v: 186 | field_type = s.get("type") 187 | if not field_type: 188 | self.log_error(f"unknown type for customized var:{s}") 189 | continue 190 | self.__cached_global_settings["customized_settings"][ 191 | s.get("name", "") 192 | ] = self._transform(s.get("value", ""), field_type) 193 | 194 | return self.__cached_global_settings 195 | 196 | def _parse_conf_from_global_config(self, key): 197 | if self.__cached_global_settings and key in self.__cached_global_settings: 198 | return self.__cached_global_settings.get(key) 199 | self.init_global_config() 200 | if self.__global_config is None: 201 | return None 202 | if key in CONFIGS: 203 | accounts = self.__global_config.configs.load().get(UCC_CREDENTIAL, []) 204 | if accounts: 205 | for account in accounts: 206 | if "disabled" in account: 207 | del account["disabled"] 208 | self.__cached_global_settings[CREDENTIAL_SETTINGS] = accounts 209 | elif key in SETTINGS: 210 | settings = self.__global_config.settings.load() 211 | self.__cached_global_settings.update( 212 | {UCC_PROXY: None, UCC_LOGGING: None, UCC_CUSTOMIZED: None} 213 | ) 214 | customized_setting = {} 215 | for setting in settings.get("settings", []): 216 | # filter out disabled setting page and 'disabled' field 217 | if setting.get("disabled", False): 218 | continue 219 | if setting["name"] == UCC_LOGGING: 220 | self.__cached_global_settings[LOG_SETTINGS] = { 221 | LOG_LEVEL_KEY: setting.get(LOG_LEVEL_KEY) 222 | } 223 | elif setting["name"] == UCC_PROXY: 224 | if "disabled" in setting: 225 | del setting["disabled"] 226 | setting[PROXY_ENABLE_KEY] = utils.is_true( 227 | setting.get(PROXY_ENABLE_KEY, "0") 228 | ) 229 | setting[PROXY_RDNS_KEY] = utils.is_true( 230 | setting.get(PROXY_RDNS_KEY, "0") 231 | ) 232 | self.__cached_global_settings[PROXY_SETTINGS] = setting 233 | else: # should be customized settings 234 | if "disabled" in setting: 235 | del setting["disabled"] 236 | 
customized_setting.update(setting) 237 | self.__cached_global_settings[CUSTOMIZED_SETTINGS] = customized_setting 238 | 239 | return self.__cached_global_settings.get(key) 240 | 241 | def get_log_level(self): 242 | log_level = "INFO" 243 | log_settings = self._parse_conf(LOG_SETTINGS) 244 | if log_settings is None: 245 | self.log_debug("Log level is not set, use default INFO") 246 | else: 247 | log_level = log_settings.get(LOG_LEVEL_KEY, None) 248 | if not log_level: 249 | self.log_debug("Log level is not set, use default INFO") 250 | log_level = "INFO" 251 | return log_level 252 | 253 | def get_proxy_settings(self): 254 | proxy_settings = self._parse_conf(PROXY_SETTINGS) 255 | if proxy_settings is None: 256 | self.log_debug("Proxy is not set!") 257 | return {} 258 | proxy_enabled = proxy_settings.get(PROXY_ENABLE_KEY) 259 | if not proxy_enabled: 260 | self.log_debug("Proxy is not enabled!") 261 | return {} 262 | proxy_settings = { 263 | "proxy_url": proxy_settings.get("proxy_url", ""), 264 | "proxy_port": proxy_settings.get("proxy_port", None), 265 | "proxy_username": proxy_settings.get("proxy_username", ""), 266 | "proxy_password": proxy_settings.get("proxy_password", ""), 267 | "proxy_type": proxy_settings.get("proxy_type", ""), 268 | "proxy_rdns": proxy_settings.get("proxy_rdns"), 269 | } 270 | self._validate_proxy_settings(proxy_settings) 271 | return proxy_settings 272 | 273 | def get_credential_by_id(self, account_id): 274 | credential_settings = self._parse_conf(CREDENTIAL_SETTINGS) 275 | for account in credential_settings: 276 | if account.get("name", None) == account_id: 277 | return account 278 | self.log_error( 279 | f"Credential account with account id {account_id} can not be found" 280 | ) 281 | return None 282 | 283 | def get_credential_by_username(self, username): 284 | credential_settings = self._parse_conf(CREDENTIAL_SETTINGS) 285 | for account in credential_settings: 286 | if account.get("username", None) == username: 287 | return account 288 | self.log_error(f"Credential account with username {username} can not be found") 289 | return None 290 | 291 | def get_customized_setting(self, key): 292 | customized_settings = self._parse_conf(CUSTOMIZED_SETTINGS) 293 | if customized_settings is None: 294 | self.log_info("Customized setting is not set") 295 | return None 296 | if key not in customized_settings: 297 | self.log_info("Customized key can not be found") 298 | return None 299 | customized_setting = customized_settings.get(key, None) 300 | if customized_setting is None: 301 | self.log_error("Cannot find customized setting with key %s" % key) 302 | return customized_setting 303 | 304 | def _validate_proxy_settings(self, proxy_settings): 305 | if proxy_settings: 306 | if proxy_settings.get("proxy_url") == "": 307 | raise Exception("Proxy host must not be empty!") 308 | proxy_port = proxy_settings.get("proxy_port") 309 | if proxy_port is None or not proxy_port.isdigit(): 310 | raise Exception("Proxy port must be a number!") 311 | 312 | def _transform(self, value, field_type): 313 | """ 314 | This is method is only used when parsing customized global params from env. 315 | Only checkbox type needs transform. Other types will be extracted automatically when apply json.loads. 
316 | :param value: 317 | :param field_type: can be checkbox, text, password, dropdownlist, multi_dropdownlist, radiogroup 318 | :return: 319 | """ 320 | if field_type == TYPE_CHECKBOX: 321 | return utils.is_true(value) 322 | elif field_type in ALL_SETTING_TYPES: 323 | return value 324 | else: 325 | raise Exception( 326 | "Type of this customized setting is corrupted. Value: {}, type: {}".format( 327 | value, field_type 328 | ) 329 | ) 330 | 331 | """ 332 | # the following methods is used by AoB internally 333 | # user should not use this 334 | # These methods returns the similiar structure like ucc libs 335 | 336 | the output of config is like 337 | { 338 | "account": [ 339 | { 340 | "username": "admin", 341 | "credential": "a", 342 | "name": "ddddd", 343 | "disabled": false 344 | } 345 | ] 346 | } 347 | 348 | the output of settings is like 349 | { 350 | "settings": [ 351 | { 352 | "additional_parameters": { 353 | "checkbox": "1", 354 | "text": "msn", 355 | "disabled": false 356 | } 357 | }, 358 | { 359 | "proxy": { 360 | "proxy_type": "http", 361 | "proxy_port": "9999", 362 | "proxy_url": "localhost", 363 | "proxy_rdns": "1", 364 | "disabled": false, 365 | "proxy_password": "a", 366 | "proxy_username": "admin", 367 | "proxy_enabled": "1" 368 | } 369 | }, 370 | { 371 | "logging": { 372 | "loglevel": "ERROR", 373 | "disabled": false 374 | } 375 | } 376 | ] 377 | } 378 | """ 379 | 380 | def get_ucc_log_setting(self): 381 | return {UCC_LOGGING: self._parse_conf(LOG_SETTINGS)} 382 | 383 | def get_ucc_proxy_setting(self): 384 | p = dict(self.get_proxy_settings()) 385 | p[PROXY_ENABLE_KEY] = True if p else False 386 | return {UCC_PROXY: p} 387 | 388 | def get_ucc_customized_setting(self): 389 | customized_settings = self._parse_conf(CUSTOMIZED_SETTINGS) 390 | if customized_settings: 391 | return {UCC_CUSTOMIZED: customized_settings} 392 | else: 393 | return {} 394 | 395 | # account belongs to the configs 396 | def get_ucc_account_config(self): 397 | return {UCC_CREDENTIAL: self._parse_conf(CREDENTIAL_SETTINGS)} 398 | -------------------------------------------------------------------------------- /splunktaucclib/splunk_aoblib/utility.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2025 Splunk Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | # 16 | 17 | # encoding = utf-8 18 | 19 | import logging 20 | import sys 21 | 22 | 23 | def get_stderr_stream_logger(logger_name=None, log_level=logging.INFO): 24 | if logger_name is None: 25 | logger_name = "aob_default_logger" 26 | logger = logging.getLogger(logger_name) 27 | formatter = logging.Formatter( 28 | "%(asctime)s - %(name)s - [%(levelname)s] - %(message)s" 29 | ) 30 | stderr_handler = logging.StreamHandler(stream=sys.stderr) 31 | stderr_handler.setLevel(logging.DEBUG) 32 | stderr_handler.setFormatter(formatter) 33 | logger.addHandler(stderr_handler) 34 | logger.setLevel(log_level) 35 | return logger 36 | -------------------------------------------------------------------------------- /tests/integration/demo/README.md: -------------------------------------------------------------------------------- 1 | # demo 2 | 3 | ## To run locally 4 | 5 | ```bash 6 | ./scripts/build-demo-addon.sh 7 | ``` 8 | 9 | Then you will see an archive in the root directory which you can install through Splunk UI. 10 | -------------------------------------------------------------------------------- /tests/integration/demo/additional_packaging.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from textwrap import dedent 3 | 4 | 5 | def cleanup_output_files(output_path: str, ta_name: str) -> None: 6 | """Helper function used by UCC. It is here used to register a new handler not defined in the globalConfig.""" 7 | web_conf = Path(output_path) / ta_name / "default" / "web.conf" 8 | restmap_conf = Path(output_path) / ta_name / "default" / "restmap.conf" 9 | 10 | assert web_conf.exists() 11 | assert restmap_conf.exists() 12 | 13 | web_conf.write_text( 14 | web_conf.read_text() 15 | + dedent( 16 | """ 17 | [expose:demo_test_reload_override] 18 | pattern = demo_test_reload_override 19 | methods = POST, GET 20 | 21 | [expose:demo_test_reload_override_specified] 22 | pattern = demo_test_reload_override/* 23 | methods = POST, GET, DELETE 24 | """ 25 | ) 26 | ) 27 | 28 | restmap_conf.write_text( 29 | restmap_conf.read_text().replace( 30 | "members =", "members = demo_test_reload_override," 31 | ) 32 | + dedent( 33 | """ 34 | [admin_external:demo_test_reload_override] 35 | handlertype = python 36 | python.version = python3 37 | handlerfile = demo_rh_test_reload_override.py 38 | handleractions = create, edit, list, remove 39 | handlerpersistentmode = true 40 | """ 41 | ) 42 | ) 43 | -------------------------------------------------------------------------------- /tests/integration/demo/globalConfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "pages": { 3 | "configuration": { 4 | "tabs": [ 5 | { 6 | "type": "loggingTab", 7 | "levels": [ 8 | "DEBUG", 9 | "INFO", 10 | "WARN", 11 | "ERROR", 12 | "CRITICAL" 13 | ] 14 | } 15 | ], 16 | "title": "Configuration", 17 | "description": "Set up your add-on" 18 | }, 19 | "inputs": { 20 | "services": [ 21 | { 22 | "name": "demo", 23 | "entity": [ 24 | { 25 | "type": "text", 26 | "label": "Name", 27 | "validators": [ 28 | { 29 | "type": "regex", 30 | "errorMsg": "Input Name must begin with a letter and consist exclusively of alphanumeric characters and underscores.", 31 | "pattern": "^[a-dA-D]\\w*$" 32 | }, 33 | { 34 | "type": "string", 35 | "errorMsg": "Length of input name should be between 1 and 100", 36 | "minLength": 1, 37 | "maxLength": 100 38 | } 39 | ], 40 | "field": "name", 41 | "help": "A unique name for the data input.", 42 | "required": true 43 | 
}, 44 | { 45 | "type": "interval", 46 | "field": "interval", 47 | "label": "Interval", 48 | "help": "Time interval of the data input, in seconds.", 49 | "required": true, 50 | "defaultValue": "300" 51 | } 52 | ], 53 | "title": "Demo" 54 | } 55 | ], 56 | "title": "Inputs", 57 | "description": "Manage your data inputs", 58 | "table": { 59 | "actions": [ 60 | "edit", 61 | "delete", 62 | "clone" 63 | ], 64 | "header": [ 65 | { 66 | "label": "Name", 67 | "field": "name" 68 | }, 69 | { 70 | "label": "Interval", 71 | "field": "interval" 72 | }, 73 | { 74 | "label": "Index", 75 | "field": "index" 76 | }, 77 | { 78 | "label": "Status", 79 | "field": "disabled" 80 | } 81 | ], 82 | "moreInfo": [ 83 | { 84 | "label": "Name", 85 | "field": "name" 86 | }, 87 | { 88 | "label": "Interval", 89 | "field": "interval" 90 | }, 91 | { 92 | "label": "Index", 93 | "field": "index" 94 | }, 95 | { 96 | "label": "Status", 97 | "field": "disabled", 98 | "mapping": { 99 | "true": "Disabled", 100 | "false": "Enabled" 101 | } 102 | } 103 | ] 104 | } 105 | } 106 | }, 107 | "meta": { 108 | "name": "demo", 109 | "restRoot": "demo", 110 | "version": "0.0.1", 111 | "displayName": "Demo", 112 | "schemaVersion": "0.0.9" 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /tests/integration/demo/package/LICENSES/Apache-2.0.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2022 Splunk, Inc 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /tests/integration/demo/package/README.txt: -------------------------------------------------------------------------------- 1 | demo -------------------------------------------------------------------------------- /tests/integration/demo/package/README/inputs.conf.spec: -------------------------------------------------------------------------------- 1 | [demo://] 2 | interval = 3 | index = 4 | -------------------------------------------------------------------------------- /tests/integration/demo/package/app.manifest: -------------------------------------------------------------------------------- 1 | { 2 | "schemaVersion": "2.0.0", 3 | "info": { 4 | "title": "demo", 5 | "id": { 6 | "group": null, 7 | "name": "demo", 8 | "version": "0.0.1" 9 | }, 10 | "author": [ 11 | { 12 | "name": "Artem Rys", 13 | "email": "email@example.com", 14 | "company": "Splunk, Inc" 15 | } 16 | ], 17 | "releaseDate": null, 18 | "description": "Demo", 19 | "classification": { 20 | "intendedAudience": "IT Professionals", 21 | "categories": [ 22 | "Security, Fraud & Compliance" 23 | ], 24 | "developmentStatus": "Production/Stable" 25 | }, 26 | "commonInformationModels": null, 27 | "license": { 28 | "name": null, 29 | "text": "LICENSES/Apache-2.0.txt", 30 | "uri": null 31 | }, 32 | "privacyPolicy": { 33 | "name": null, 34 | "text": null, 35 | "uri": null 36 | }, 37 | "releaseNotes": { 38 | "name": "README", 39 | "text": "README.txt", 40 | "uri": "" 41 | } 42 | }, 43 | "dependencies": null, 44 | "tasks": null, 45 | "inputGroups": null, 46 | "incompatibleApps": null, 47 | "platformRequirements": null, 48 | "supportedDeployments": [ 49 | "_standalone", 50 | "_distributed", 51 | "_search_head_clustering" 52 | ], 53 | "targetWorkloads": [ 54 | "_search_heads", 55 | "_indexers" 56 | ] 57 | } 58 | -------------------------------------------------------------------------------- /tests/integration/demo/package/bin/demo.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | import import_declare_test 4 | from splunklib import modularinput as smi 5 | 6 | 7 | class Input(smi.Script): 8 | def __init__(self): 9 | super().__init__() 10 | 11 | def get_scheme(self): 12 | scheme = smi.Scheme("demo") 13 | scheme.description = "demo input" 14 | scheme.use_external_validation = True 15 | scheme.streaming_mode_xml = True 16 | scheme.use_single_instance = False 17 | scheme.add_argument( 18 | smi.Argument( 19 | "name", title="Name", description="Name", required_on_create=True 20 | ) 21 | ) 22 | return scheme 23 | 24 | def validate_input(self, definition): 25 | return 26 | 27 | def stream_events(self, inputs: smi.InputDefinition, event_writer: smi.EventWriter): 28 | event = smi.Event( 29 | data="test data", 30 | sourcetype="test-sourcetype", 31 | ) 32 | event_writer.write_event(event) 33 | 34 | 35 | if __name__ == "__main__": 36 | exit_code = Input().run(sys.argv) 37 | sys.exit(exit_code) 38 | -------------------------------------------------------------------------------- /tests/integration/demo/package/bin/demo_rh_demo.py: -------------------------------------------------------------------------------- 1 | import import_declare_test 2 | 3 | from splunktaucclib.rest_handler.endpoint import ( 4 | field, 5 | validator, 6 | RestModel, 7 | DataInputModel, 8 | ) 9 | from splunktaucclib.rest_handler import admin_external, util 10 | from splunktaucclib.rest_handler.admin_external import AdminExternalHandler 
11 | import logging 12 | 13 | util.remove_http_proxy_env_vars() 14 | 15 | 16 | special_fields = [ 17 | field.RestField( 18 | "name", 19 | required=True, 20 | encrypted=False, 21 | default=None, 22 | validator=validator.AllOf( 23 | validator.Pattern( 24 | regex=r"""^[a-dA-D]\w*$""", 25 | ), 26 | validator.String( 27 | max_len=100, 28 | min_len=1, 29 | ), 30 | ), 31 | ) 32 | ] 33 | 34 | fields = [ 35 | field.RestField( 36 | "interval", 37 | required=True, 38 | encrypted=False, 39 | default="300", 40 | validator=validator.Pattern( 41 | regex=r"""^(?:-1|\d+(?:\.\d+)?)$""", 42 | ), 43 | ), 44 | field.RestField("disabled", required=False, validator=None), 45 | ] 46 | model = RestModel(fields, name=None, special_fields=special_fields) 47 | 48 | 49 | endpoint = DataInputModel( 50 | "demo", 51 | model, 52 | ) 53 | 54 | 55 | if __name__ == "__main__": 56 | logging.getLogger().addHandler(logging.NullHandler()) 57 | admin_external.handle( 58 | endpoint, 59 | handler=AdminExternalHandler, 60 | ) 61 | -------------------------------------------------------------------------------- /tests/integration/demo/package/bin/demo_rh_settings.py: -------------------------------------------------------------------------------- 1 | import import_declare_test 2 | 3 | from splunktaucclib.rest_handler.endpoint import ( 4 | field, 5 | validator, 6 | RestModel, 7 | MultipleModel, 8 | ) 9 | from splunktaucclib.rest_handler import admin_external, util 10 | from splunktaucclib.rest_handler.admin_external import AdminExternalHandler 11 | import logging 12 | 13 | util.remove_http_proxy_env_vars() 14 | 15 | 16 | special_fields = [] 17 | 18 | fields_logging = [ 19 | field.RestField( 20 | "loglevel", required=True, encrypted=False, default="INFO", validator=None 21 | ) 22 | ] 23 | model_logging = RestModel(fields_logging, name="logging", special_fields=special_fields) 24 | 25 | 26 | endpoint = MultipleModel( 27 | "demo_settings", 28 | models=[model_logging], 29 | ) 30 | 31 | 32 | if __name__ == "__main__": 33 | logging.getLogger().addHandler(logging.NullHandler()) 34 | admin_external.handle( 35 | endpoint, 36 | handler=AdminExternalHandler, 37 | ) 38 | -------------------------------------------------------------------------------- /tests/integration/demo/package/bin/demo_rh_test_reload_override.py: -------------------------------------------------------------------------------- 1 | import import_declare_test 2 | 3 | from splunktaucclib.rest_handler.endpoint import ( 4 | RestModel, 5 | SingleModel, 6 | ) 7 | from splunktaucclib.rest_handler import admin_external, util 8 | from splunktaucclib.rest_handler.admin_external import AdminExternalHandler 9 | import logging 10 | 11 | util.remove_http_proxy_env_vars() 12 | 13 | 14 | special_fields = [] 15 | fields_logging = [] 16 | model = RestModel(fields_logging, name=None, special_fields=special_fields) 17 | 18 | endpoint = SingleModel( 19 | "demo_test_reload_override", 20 | model=model, 21 | need_reload=True, 22 | ) 23 | 24 | 25 | if __name__ == "__main__": 26 | logging.getLogger().addHandler(logging.NullHandler()) 27 | admin_external.handle( 28 | endpoint, 29 | handler=AdminExternalHandler, 30 | ) 31 | -------------------------------------------------------------------------------- /tests/integration/demo/package/default/app.conf: -------------------------------------------------------------------------------- 1 | [install] 2 | is_configured = false 3 | state = enabled 4 | build = 1 5 | 6 | [launcher] 7 | author = Splunk 8 | description = Demo 9 | version = 0.0.1 10 | 11 | [ui] 12 | 
is_visible = true 13 | label = Demo 14 | 15 | [package] 16 | id = demo 17 | 18 | [id] 19 | name = demo 20 | version = 0.0.1 21 | -------------------------------------------------------------------------------- /tests/integration/demo/package/default/inputs.conf: -------------------------------------------------------------------------------- 1 | [demo] 2 | python.version = python3 3 | 4 | [demo://test_input] 5 | interval = 1200 6 | name = demo 7 | disabled = 1 -------------------------------------------------------------------------------- /tests/unit/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | from unittest.mock import MagicMock 3 | 4 | import pytest 5 | 6 | from tests.unit.fake_module import mock_splunk_module 7 | 8 | 9 | mock_splunk_module() 10 | 11 | 12 | @pytest.fixture 13 | def admin(monkeypatch) -> MagicMock: 14 | from splunktaucclib.rest_handler import admin_external 15 | 16 | new_admin = mock_splunk_module() 17 | monkeypatch.setattr(admin_external, "admin", new_admin) 18 | 19 | return new_admin 20 | 21 | 22 | @pytest.fixture 23 | def client_mock(): 24 | return MagicMock() 25 | 26 | 27 | @pytest.fixture(autouse=True) 28 | def setup(monkeypatch, client_mock): 29 | from splunktaucclib.rest_handler import credentials 30 | from splunktaucclib.rest_handler import handler 31 | 32 | monkeypatch.setitem(os.environ, "SPLUNKD_URI", "https://localhost:1234") 33 | monkeypatch.setattr(credentials, "get_base_app_name", lambda: "splunk_ta_test") 34 | monkeypatch.setattr( 35 | handler, "SplunkRestClient", MagicMock(return_value=client_mock) 36 | ) 37 | -------------------------------------------------------------------------------- /tests/unit/fake_module.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import types 3 | from collections import namedtuple 4 | from typing import Any, Optional 5 | from unittest.mock import MagicMock 6 | 7 | 8 | CallerArgs = namedtuple("CallerArgs", ["id", "data"]) 9 | 10 | ACTION_LIST = 0 11 | ACTION_REMOVE = 1 12 | ACTION_CREATE = 2 13 | ACTION_EDIT = 3 14 | 15 | 16 | class MConfigHandler: 17 | def __init__(self, action: int, caller_args: CallerArgs): 18 | self.requestedAction = action 19 | self.callerArgs = caller_args 20 | 21 | def handleList(self, confInfo: Any): 22 | raise NotImplementedError() 23 | 24 | def handleCreate(self, confInfo: Any): 25 | raise NotImplementedError() 26 | 27 | def handleEdit(self, confInfo: Any): 28 | raise NotImplementedError() 29 | 30 | def handleRemove(self, confInfo: Any): 31 | raise NotImplementedError() 32 | 33 | def getSessionKey(self): 34 | return "abcd" 35 | 36 | @classmethod 37 | def get(cls, name: Optional[str] = None): 38 | return cls(ACTION_LIST, CallerArgs(name, {})).handleList(MagicMock()) 39 | 40 | 41 | def mock_splunk_module() -> MagicMock: 42 | sys.modules["splunk"] = types.ModuleType("splunk") 43 | 44 | admin = MagicMock() 45 | sys.modules["splunk.admin"] = admin 46 | 47 | admin.MConfigHandler = MConfigHandler 48 | 49 | for action in ("ACTION_LIST", "ACTION_REMOVE", "ACTION_CREATE", "ACTION_EDIT"): 50 | setattr(admin, action, globals()[action]) 51 | 52 | return admin 53 | -------------------------------------------------------------------------------- /tests/unit/test_admin_external.py: -------------------------------------------------------------------------------- 1 | import json 2 | from collections import namedtuple 3 | from io import StringIO 4 | 5 | import pytest 6 | 7 | from 
splunktaucclib.rest_handler import admin_external 8 | from splunktaucclib.rest_handler.admin_external import AdminExternalHandler 9 | from splunktaucclib.rest_handler.endpoint import RestModel, SingleModel, MultipleModel 10 | 11 | Response = namedtuple("Response", ["body", "status"]) 12 | 13 | 14 | def eai_response(value, status, name="test"): 15 | return Response( 16 | body=StringIO( 17 | json.dumps({"entry": [{"content": value, "name": name, "acl": "acl"}]}) 18 | ), 19 | status=status, 20 | ) 21 | 22 | 23 | @pytest.mark.parametrize("need_reload", [True, False]) 24 | @pytest.mark.parametrize("cls", [SingleModel, MultipleModel]) 25 | def test_handle_single_model_reload(admin, client_mock, need_reload, cls, monkeypatch): 26 | def _get(path, *args, **kwargs): 27 | _get.call_count += 1 28 | _get.paths.append(path) 29 | 30 | status = 200 31 | 32 | if path.startswith("configs/conf-_TA_config"): 33 | status = 404 34 | 35 | return eai_response({"key": "value"}, status) 36 | 37 | _get.call_count = 0 38 | _get.paths = [] 39 | 40 | monkeypatch.setattr(client_mock, "get", _get) 41 | 42 | model = RestModel([], name=None, special_fields=[]) 43 | 44 | if cls is MultipleModel: 45 | model = [model, RestModel([], name="test", special_fields=[])] 46 | 47 | endpoint = cls( 48 | "demo_reload", 49 | model, 50 | app="fake_app", 51 | need_reload=need_reload, 52 | ) 53 | 54 | admin_external.handle( 55 | endpoint, 56 | handler=AdminExternalHandler, 57 | ) 58 | 59 | assert admin.init.call_count == 1 60 | 61 | handler: AdminExternalHandler = admin.init.call_args[0][0] 62 | 63 | for _ in range(3): 64 | handler.get() 65 | 66 | if need_reload: 67 | assert client_mock.get.call_count == 9 68 | assert client_mock.get.paths == [ 69 | "configs/conf-_TA_config/config", 70 | "configs/conf-demo_reload/_reload", 71 | "configs/conf-demo_reload", 72 | "configs/conf-_TA_config/config", 73 | "configs/conf-demo_reload/_reload", 74 | "configs/conf-demo_reload", 75 | "configs/conf-_TA_config/config", 76 | "configs/conf-demo_reload/_reload", 77 | "configs/conf-demo_reload", 78 | ] 79 | else: 80 | assert client_mock.get.call_count == 6 81 | assert client_mock.get.paths == [ 82 | "configs/conf-_TA_config/config", 83 | "configs/conf-demo_reload", 84 | "configs/conf-_TA_config/config", 85 | "configs/conf-demo_reload", 86 | "configs/conf-_TA_config/config", 87 | "configs/conf-demo_reload", 88 | ] 89 | 90 | 91 | @pytest.mark.parametrize("override", [True, False]) 92 | def test_handle_single_model_reload_override(admin, client_mock, monkeypatch, override): 93 | def _get(path, *args, **kwargs): 94 | _get.call_count += 1 95 | _get.paths.append(path) 96 | 97 | status = 200 98 | value = {"key": "value"} 99 | name = "test" 100 | 101 | if path == f"configs/conf-_TA_config/config": 102 | value = {"need_reload": override} 103 | name = "config" 104 | elif path.startswith("configs/conf-_TA_config"): 105 | status = 404 106 | 107 | return eai_response(value, status, name) 108 | 109 | _get.call_count = 0 110 | _get.paths = [] 111 | 112 | monkeypatch.setattr(client_mock, "get", _get) 113 | 114 | model = RestModel([], name=None, special_fields=[]) 115 | 116 | endpoint = SingleModel( 117 | "demo_reload", 118 | model, 119 | app="fake_app", 120 | need_reload=True, 121 | ) 122 | 123 | admin_external.handle( 124 | endpoint, 125 | handler=AdminExternalHandler, 126 | ) 127 | 128 | assert admin.init.call_count == 1 129 | 130 | handler: AdminExternalHandler = admin.init.call_args[0][0] 131 | 132 | for _ in range(2): 133 | handler.get() 134 | 135 | if override: 136 | 
assert client_mock.get.call_count == 6 137 | assert client_mock.get.paths == [ 138 | "configs/conf-_TA_config/config", 139 | "configs/conf-demo_reload/_reload", 140 | "configs/conf-demo_reload", 141 | "configs/conf-_TA_config/config", 142 | "configs/conf-demo_reload/_reload", 143 | "configs/conf-demo_reload", 144 | ] 145 | 146 | if not override: 147 | assert client_mock.get.call_count == 4 148 | assert client_mock.get.paths == [ 149 | "configs/conf-_TA_config/config", 150 | "configs/conf-demo_reload", 151 | "configs/conf-_TA_config/config", 152 | "configs/conf-demo_reload", 153 | ] 154 | -------------------------------------------------------------------------------- /tests/unit/test_rest_handler_error.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from splunktaucclib.rest_handler import error, handler 4 | from splunklib import binding 5 | from splunklib.data import record 6 | 7 | 8 | def make_response_record(body, status=200): 9 | class _MocBufReader: 10 | def __init__(self, buf): 11 | if isinstance(buf, str): 12 | self._buf = buf.encode("utf-8") 13 | else: 14 | self._buf = buf 15 | 16 | def read(self, size=None): 17 | return self._buf 18 | 19 | return record( 20 | { 21 | "body": binding.ResponseReader(_MocBufReader(body)), 22 | "status": status, 23 | "reason": "", 24 | "headers": None, 25 | } 26 | ) 27 | 28 | 29 | @pytest.mark.parametrize( 30 | "status_code,message,expected_message", 31 | [ 32 | (999, "message", "REST Error [999]: Unknown Error -- message"), 33 | (400, "message", "REST Error [400]: Bad Request -- message"), 34 | (401, "message", "REST Error [401]: Unauthorized -- message"), 35 | (402, "message", "REST Error [402]: Payment Required -- message"), 36 | (403, "message", "REST Error [403]: Forbidden -- message"), 37 | (404, "message", "REST Error [404]: Not Found -- message"), 38 | (405, "message", "REST Error [405]: Method Not Allowed -- message"), 39 | (406, "message", "REST Error [406]: Not Acceptable -- message"), 40 | (407, "message", "REST Error [407]: Proxy Authentication Required -- message"), 41 | (408, "message", "REST Error [408]: Request Timeout -- message"), 42 | (409, "message", "REST Error [409]: Conflict -- message"), 43 | (411, "message", "REST Error [411]: Length Required -- message"), 44 | (500, "message", "REST Error [500]: Internal Server Error -- message"), 45 | (503, "message", "REST Error [503]: Service Unavailable -- message"), 46 | ], 47 | ) 48 | def test_rest_error(status_code, message, expected_message): 49 | with pytest.raises(Exception) as exc_info: 50 | raise error.RestError(status_code, "message") 51 | assert str(exc_info.value) == expected_message 52 | 53 | 54 | def test_parse_err_msg_xml_forbidden(): 55 | original_err_msg = """<?xml version="1.0" encoding="UTF-8"?>\n<response>\n  <messages>\n    <msg type="ERROR">\ 56 | You (user=user) do not have permission to perform this operation (requires capability: \ 57 | list_storage_passwords OR edit_storage_passwords OR admin_all_objects).</msg>\n  </messages>\n</response>\n""" 58 | expected_err_msg = "This operation is forbidden."
59 | err = binding.HTTPError(make_response_record(original_err_msg, status=403)) 60 | result = handler._parse_error_msg(err) 61 | assert result == expected_err_msg 62 | 63 | 64 | def test_parse_err_msg_xml_forbidden_invalid(): 65 | original_err_msg = "Error message - wrong format" 66 | err = binding.HTTPError(make_response_record(original_err_msg, status=403)) 67 | result = handler._parse_error_msg(err) 68 | assert result == original_err_msg 69 | 70 | 71 | def test_parse_err_msg_json_forbidden(): 72 | original_err_msg = """{"messages":[{"type":"ERROR","text":"You (user=user) do not have permission to \ 73 | perform this operation (requires capability: admin_all_objects)."}]}""" 74 | expected_err_msg = "This operation is forbidden." 75 | err = binding.HTTPError(make_response_record(original_err_msg, status=403)) 76 | result = handler._parse_error_msg(err) 77 | assert result == expected_err_msg 78 | 79 | 80 | def test_parse_err_msg_json_forbidden_invalid(): 81 | original_err_msg = """{"messages":{"type":"ERROR","text":"You (user=user) do not have permission to \ 82 | perform this operation (requires capability: admin_all_objects)."}}""" 83 | err = binding.HTTPError(make_response_record(original_err_msg, status=400)) 84 | result = handler._parse_error_msg(err) 85 | assert result == original_err_msg 86 | 87 | 88 | def test_parse_err_msg_json_bad_request(): 89 | original_err_msg = """{"messages":[{"type":"ERROR","text":"\ 90 | Object id=demo://test_input cannot be deleted in config=inputs."}]}""" 91 | expected_err_msg = "Object id=demo://test_input cannot be deleted in config=inputs." 92 | err = binding.HTTPError(make_response_record(original_err_msg, status=400)) 93 | result = handler._parse_error_msg(err) 94 | assert result == expected_err_msg 95 | --------------------------------------------------------------------------------
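
A note on the error-handling contract exercised by the tests above: `splunktaucclib.rest_handler.error.RestError(status, message)` renders as `REST Error [<status>]: <reason> -- <message>`, and `handler._parse_error_msg` maps Splunk's 403 permission-denied responses (XML or JSON) to the generic "This operation is forbidden." text while returning unrecognized payloads unchanged. The sketch below is not part of the repository; it only illustrates, under those assumptions, how add-on handler code typically surfaces a validation failure through `RestError`. The `check_interval` helper and its message are hypothetical.

```python
# Minimal sketch (not part of this repository): raising RestError from add-on
# handler code. RestError(400, msg) renders as
# "REST Error [400]: Bad Request -- <msg>", as asserted in
# tests/unit/test_rest_handler_error.py.
from splunktaucclib.rest_handler import error


def check_interval(interval: str) -> None:
    # Hypothetical validation helper: reject non-numeric intervals with a 400.
    if not interval.isdigit():
        raise error.RestError(400, "interval must be a whole number of seconds")
```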