├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── pull_request_template.md └── workflows │ ├── ci.yml │ ├── cla.yml │ └── release.yml ├── Dockerfile ├── LICENSE ├── README.md ├── faros_event.sh ├── resources ├── events-1.png ├── events-2.png └── events-3.jpg ├── sonar-project.properties └── test ├── .shellspec ├── Dockerfile ├── demo.sh └── spec ├── faros_event_community_edition_spec.sh ├── faros_event_spec.sh └── spec_helper.sh /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Report a defect or a problem in feature 4 | title: "[Bug]: " 5 | labels: 'Type: Bug' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug 🐛** 11 | 12 | 13 | **Steps to reproduce the behavior 🕹** 14 | 19 | 20 | **Expected behavior 🤖** 21 | 22 | 23 | **Screenshots 🔦** 24 | 25 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Create a new feature description 4 | title: '' 5 | labels: 'Type: Feature' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Feature description 📝** 11 | 12 | 13 | 14 | **Acceptance criteria ✅** 15 | 16 | 17 | * 18 | 19 | **Additional context 🔦** 20 | 21 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Closes # 2 | 3 | ## About 4 | 8 | 9 | 10 | ## Screenshots 11 | 12 | 13 | 14 | ## Extras 15 | 16 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths-ignore: 8 | - '**.md' 9 | pull_request: 10 | branches: 11 | - main 12 | 13 | jobs: 14 | test: 15 | name: Test 16 | runs-on: ubuntu-latest 17 | timeout-minutes: 5 18 | env: 19 | FORCE_COLOR: true 20 | steps: 21 | - uses: actions/checkout@v2 22 | # Based on https://github.com/snyk/snyk/blob/master/.github/workflows/smoke-tests.yml 23 | 24 | # Shellspec base image does not have curl, jq, awk, sed 25 | - name: Build Shellspec image 26 | working-directory: test 27 | run: | 28 | docker build . 
-t 'farosai/shellspec:kcov' 29 | 30 | - name: Run ShellSpec tests 31 | run: | 32 | docker run --rm \ 33 | -v "$PWD/test:/src" \ 34 | -v "$PWD/faros_event.sh:/faros_event.sh" \ 35 | farosai/shellspec:kcov 36 | 37 | - name: Check for style & bugs 38 | uses: ludeeus/action-shellcheck@1.1.0 39 | with: 40 | ignore: test 41 | env: 42 | SHELLCHECK_OPTS: -s bash -e SC2059 -e SC2237 -e SC2004 -e SC2001 -e SC2317 43 | 44 | - name: SonarCloud Scan 45 | uses: sonarsource/sonarcloud-github-action@master 46 | env: 47 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 48 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} 49 | -------------------------------------------------------------------------------- /.github/workflows/cla.yml: -------------------------------------------------------------------------------- 1 | name: "CLA Assistant" 2 | on: 3 | issue_comment: 4 | types: [created] 5 | pull_request_target: 6 | types: [opened,closed,synchronize] 7 | permissions: 8 | contents: read 9 | pull-requests: write 10 | jobs: 11 | CLAssistant: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: "CLA Assistant" 15 | if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target' 16 | # Beta Release 17 | uses: contributor-assistant/github-action@v2.2.0 18 | env: 19 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 20 | # the below token should have repo scope and must be manually added by you in the repository's secret 21 | PERSONAL_ACCESS_TOKEN : ${{ secrets.CLA_BOT_GITHUB_TOKEN }} 22 | with: 23 | path-to-signatures: 'signatures/v1/cla.json' 24 | path-to-document: 'https://docs.faros.ai/docs/individual-cla' 25 | # branch should not be protected 26 | branch: 'main' 27 | allowlist: cjwooo,tovbinm,thomas-gerber,ypc-faros,dependabot*,sonarcloud* 28 | 29 | #below are the optional inputs - If the optional inputs are not given, then default values will be taken 30 | remote-organization-name: faros-ai 31 | remote-repository-name: cla 32 | create-file-commit-message: 'Creating/updating file for storing CLA Signatures' 33 | signed-commit-message: '$contributorName has signed the CLA in #$pullRequestNo' 34 | custom-notsigned-prcomment: 'Thank you for your submission, we really appreciate it. Faros can accept contrbutions only from authors who accepted and signed [Contributor License Agreement](https://docs.faros.ai/docs/individual-cla). Please kindly read it and accept by posting the following comment on this PR:' 35 | custom-pr-sign-comment: 'I have read the CLA Document and I hereby sign the CLA' 36 | #custom-allsigned-prcomment: 'pull request comment when all contributors has signed, defaults to **CLA Assistant Lite bot** All Contributors have signed the CLA.' 
37 | lock-pullrequest-aftermerge: true 38 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | jobs: 8 | publish-events-cli: 9 | name: Publish Events CLI image 10 | runs-on: ubuntu-latest 11 | timeout-minutes: 5 12 | env: 13 | FAROS_DEV_API_URL: https://dev.api.faros.ai 14 | FAROS_PROD_API_URL: https://prod.api.faros.ai 15 | FAROS_PIPELINE_ID: faros-events-cli-release 16 | FAROS_GRAPH: faros 17 | steps: 18 | - name: Check out 19 | uses: actions/checkout@v2.3.4 20 | 21 | - name: Set CI start time # https://serverfault.com/a/151112 22 | run: echo "CI_STARTED_AT=$(date +%s%N | cut -b1-13)" >> $GITHUB_ENV 23 | 24 | # GITHUB_REF looks like: refs/tags/v0.2.5 25 | # ${GITHUB_REF:10} ignores the first 10 characters leaving v0.2.5 26 | - name: Generate image tags 27 | id: gen-tags 28 | env: 29 | EVENTS_CLI_IMAGE: farosai/faros-events-cli 30 | COMMIT_SHA: ${{ github.sha }} 31 | run: | 32 | TAG="${GITHUB_REF:10}" 33 | TAG_VERSION="${TAG//v/}" 34 | echo "EVENTS_CLI_LATEST_TAG=$EVENTS_CLI_IMAGE:latest" >> "$GITHUB_ENV" 35 | echo "EVENTS_CLI_TAG=$EVENTS_CLI_IMAGE:$TAG" >> "$GITHUB_ENV" 36 | echo "EVENTS_CLI_VERSION_TAG=$EVENTS_CLI_IMAGE:$TAG_VERSION" >> "$GITHUB_ENV" 37 | echo "EVENTS_CLI_COMMIT_SHA_TAG=$EVENTS_CLI_IMAGE:$COMMIT_SHA" >> "$GITHUB_ENV" 38 | echo "EVENTS_CLI_ARTIFACT=Docker://$EVENTS_CLI_IMAGE/$TAG_VERSION" >> "$GITHUB_ENV" 39 | 40 | - name: Docker login 41 | uses: docker/login-action@v1.10.0 42 | with: 43 | username: ${{ secrets.DOCKERHUB_USERNAME }} 44 | password: ${{ secrets.DOCKERHUB_TOKEN }} 45 | 46 | - name: Publish Events CLI image 47 | run: | 48 | docker build . 
-t $EVENTS_CLI_LATEST_TAG \ 49 | -t $EVENTS_CLI_TAG \ 50 | -t $EVENTS_CLI_VERSION_TAG \ 51 | -t $EVENTS_CLI_COMMIT_SHA_TAG \ 52 | --label "faros-build-id=$GITHUB_RUN_ID" \ 53 | --label "faros-ci-pipeline=$FAROS_PIPELINE_ID" \ 54 | --label "faros-ci-org=faros-ai" \ 55 | --label "faros-ci-source=GitHub" \ 56 | --label "faros-commit-sha=$GITHUB_SHA" \ 57 | --label "faros-vcs-repo=faros-events-cli" \ 58 | --label "faros-vcs-org=faros-ai" \ 59 | --label "faros-vcs-source=GitHub" && \ 60 | docker push $EVENTS_CLI_LATEST_TAG && \ 61 | docker push $EVENTS_CLI_COMMIT_SHA_TAG && \ 62 | docker push $EVENTS_CLI_TAG && \ 63 | docker push $EVENTS_CLI_VERSION_TAG 64 | 65 | - name: Set CI end time 66 | run: echo "CI_ENDED_AT=$(date +%s%N | cut -b1-13)" >> $GITHUB_ENV 67 | 68 | - name: Set run status 69 | run: | 70 | if [[ ${{ job.status }} == success ]]; then 71 | echo "FAROS_RUN_STATUS=Success" >> $GITHUB_ENV 72 | else 73 | echo "FAROS_RUN_STATUS=Failed" >> $GITHUB_ENV 74 | fi 75 | 76 | - name: Send CI event to Faros - prod 77 | if: ${{ always() }} 78 | run: | 79 | ./faros_event.sh CI -k ${{ secrets.HEPHAESTUS_PROD_FAROS_API_KEY }} \ 80 | -u ${{ env.FAROS_PROD_API_URL }} \ 81 | -g ${{ env.FAROS_GRAPH }} \ 82 | --artifact ${{ env.EVENTS_CLI_ARTIFACT }} \ 83 | --commit GitHub://faros-ai/faros-events-cli/${{ github.sha }} \ 84 | --run GitHub://faros-ai/${{ env.FAROS_PIPELINE_ID }}/$GITHUB_RUN_ID \ 85 | --run_status ${{ env.FAROS_RUN_STATUS }} \ 86 | --run_status_details ${{ job.status }} \ 87 | --run_start_time ${{ env.CI_STARTED_AT }} \ 88 | --run_end_time ${{ env.CI_ENDED_AT }} 89 | 90 | - name: Send CI event to Faros - dev 91 | if: ${{ always() }} 92 | run: | 93 | ./faros_event.sh CI -k ${{ secrets.HEPHAESTUS_DEV_FAROS_API_KEY }} \ 94 | -u ${{ env.FAROS_DEV_API_URL }} \ 95 | -g ${{ env.FAROS_GRAPH }} \ 96 | --artifact ${{ env.EVENTS_CLI_ARTIFACT }} \ 97 | --commit GitHub://faros-ai/faros-events-cli/${{ github.sha }} \ 98 | --run GitHub://faros-ai/${{ env.FAROS_PIPELINE_ID }}/$GITHUB_RUN_ID \ 99 | --run_status ${{ env.FAROS_RUN_STATUS }} \ 100 | --run_status_details ${{ job.status }} \ 101 | --run_start_time ${{ env.CI_STARTED_AT }} \ 102 | --run_end_time ${{ env.CI_ENDED_AT }} 103 | 104 | - name: Send CD event to Faros - prod 105 | if: ${{ always() }} 106 | run: | 107 | ./faros_event.sh CD -k ${{ secrets.HEPHAESTUS_PROD_FAROS_API_KEY }} \ 108 | -u ${{ env.FAROS_PROD_API_URL }} \ 109 | -g ${{ env.FAROS_GRAPH }} \ 110 | --artifact ${{ env.EVENTS_CLI_ARTIFACT }} \ 111 | --deploy GitHub://faros-events-cli/Prod/$GITHUB_RUN_ID \ 112 | --deploy_status ${{ env.FAROS_RUN_STATUS }} \ 113 | --deploy_start_time ${{ env.CI_ENDED_AT }} \ 114 | --deploy_end_time ${{ env.CI_ENDED_AT }} \ 115 | --run GitHub://faros-ai/${{ env.FAROS_PIPELINE_ID }}/$GITHUB_RUN_ID \ 116 | --run_status ${{ env.FAROS_RUN_STATUS }} \ 117 | --run_status_details ${{ job.status }} \ 118 | --run_start_time ${{ env.CI_STARTED_AT }} \ 119 | --run_end_time ${{ env.CI_ENDED_AT }} 120 | 121 | - name: Send CD event to Faros - dev 122 | if: ${{ always() }} 123 | run: | 124 | ./faros_event.sh CD -k ${{ secrets.HEPHAESTUS_DEV_FAROS_API_KEY }} \ 125 | -u ${{ env.FAROS_DEV_API_URL }} \ 126 | -g ${{ env.FAROS_GRAPH }} \ 127 | --artifact ${{ env.EVENTS_CLI_ARTIFACT }} \ 128 | --deploy GitHub://faros-events-cli/Prod/$GITHUB_RUN_ID \ 129 | --deploy_status ${{ env.FAROS_RUN_STATUS }} \ 130 | --deploy_start_time ${{ env.CI_ENDED_AT }} \ 131 | --deploy_end_time ${{ env.CI_ENDED_AT }} \ 132 | --run GitHub://faros-ai/${{ env.FAROS_PIPELINE_ID }}/$GITHUB_RUN_ID \ 133 
| --run_status ${{ env.FAROS_RUN_STATUS }} \ 134 | --run_status_details ${{ job.status }} \ 135 | --run_start_time ${{ env.CI_STARTED_AT }} \ 136 | --run_end_time ${{ env.CI_ENDED_AT }} 137 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:latest 2 | RUN apk add --no-cache curl jq gawk bash 3 | RUN mkdir -p /faros 4 | RUN adduser -D faros 5 | COPY faros_event.sh /faros 6 | RUN chmod +x /faros/faros_event.sh 7 | USER faros 8 | ENTRYPOINT ["/faros/faros_event.sh"] 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # :computer: Faros Events CLI [![CI](https://github.com/faros-ai/faros-events-cli/actions/workflows/ci.yml/badge.svg)](https://github.com/faros-ai/faros-events-cli/actions/workflows/ci.yml) [![Latest Release](https://img.shields.io/github/v/release/faros-ai/faros-events-cli?label=latest%20version&logo=latest%20version&style=plastic)](https://github.com/faros-ai/faros-events-cli/releases/latest) 2 | 3 | CLI for reporting events to Faros platform. 4 | 5 | - [Installation](#installation) 6 | - [Using Docker](#using-docker) 7 | - [Using Bash](#using-bash) 8 | - [Instrumenting CI pipelines](#instrumenting-ci-pipelines) 9 | - [Reporting builds and build steps in parts](#reporting-builds-and-build-steps-in-parts) 10 | - [Reporting test execution results](#reporting-test-execution-results) 11 | - [Reporting deployments](#reporting-deployments) 12 | - [Arguments](#arguments) 13 | - [Passing arguments: flags or environment variables](#passing-arguments-flags-or-environment-variables) 14 | - [General arguments](#general-arguments) 15 | - [CI arguments](#ci-arguments) 16 | - [CD arguments](#cd-arguments) 17 | - [Test Execution arguments](#test-execution-arguments) 18 | - [URI arguments alternative](#uri-arguments-alternative) 19 | - [Additional arguments](#additional-arguments) 20 | - [Tips](#tips) 21 | - [Validating your command](#validating-your-command) 22 | - [Usage with Faros Community Edition](#usage-with-faros-community-edition) 23 | - [Development](#hammer-development) 24 | 25 | ## Installation 26 | 27 | ### Using Docker 28 | 29 | **Requirements**: `docker` 30 | 31 | ```sh 32 | docker pull farosai/faros-events-cli:v0.6.12 && docker run farosai/faros-events-cli:v0.6.12 help 33 | ``` 34 | 35 | ### Using Bash 36 | 37 | **Requirements**: `curl`, `jq` (1.6+), `sed`, `awk` (we recommend `gawk`). 38 | 39 | Either [download the script manually](https://raw.githubusercontent.com/faros-ai/faros-events-cli/v0.6.12/faros_event.sh) or invoke the script directly with curl: 40 | 41 | ```sh 42 | bash <(curl -s https://raw.githubusercontent.com/faros-ai/faros-events-cli/v0.6.12/faros_event.sh) help 43 | ``` 44 | 45 | 46 | ## Instrumenting CI pipelines 47 | Report CI events to the Faros platform if you would like to analyze success/failure rates of your CI pipelines and how long different stages take. 48 | 49 | This CI event reports a successful build event where an artifact is built from a commit: 50 | ```sh 51 | ./faros_event.sh CI -k "" \ 52 | --commit ":////" \ 53 | --artifact ":////" \ 54 | --run ":////" \ 55 | --run_status "Success" \ 56 | --run_start_time "2021-07-20T18:05:46.019Z" \ 57 | --run_end_time "2021-07-20T18:08:42.024Z" 58 | ``` 59 | 60 | Example usage: 61 | 62 | ```sh 63 | ./faros_event.sh CI -k "" \ 64 | --commit "GitHub://faros-ai/faros-events-cli/4414ad2b3b13b17055171678437a92e5d788cad1" \ 65 | --artifact "Docker://farosai/faros-events-cli/v0.6.12" \ 66 | --run "Jenkins://faros-ai/faros-events-cli/168_1700016590" \ 67 | --run_status "Success" \ 68 | --run_start_time "2023-11-14T18:05:46.019Z" \ 69 | --run_end_time "2023-11-14T18:08:42.024Z" 70 | ``` 71 | 72 | > :exclamation: The `run_status` is an enum. Read the documentation on arguments [here](#ci-arguments) for accepted values. 
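
If you installed the CLI [via Docker](#using-docker) rather than downloading the script, the same CI event can be sent by passing the arguments to `docker run`: the image's entrypoint is `faros_event.sh`, so everything after the image name is forwarded to the script unchanged. A minimal sketch, assuming `<api_key>` is replaced with your Faros API key and the placeholder URIs are replaced with your own values:

```sh
# Send a CI event using the published Docker image instead of the raw script.
docker run --rm farosai/faros-events-cli:v0.6.12 CI \
  -k "<api_key>" \
  --commit "source://organization/repository/commit_sha" \
  --artifact "source://organization/repository/artifact_id" \
  --run "source://organization/pipeline/run_id" \
  --run_status "Success" \
  --run_start_time "2021-07-20T18:05:46.019Z" \
  --run_end_time "2021-07-20T18:08:42.024Z"
```

Any of the flag examples in this README can be used this way, since the container simply invokes `faros_event.sh` with the arguments you supply.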
73 | 74 | > :exclamation: If your CI pipeline does not build artifacts, omit the `--artifact` parameter, and be sure to add the `--no-artifact` flag. 75 | 76 | ### Reporting builds and build steps in parts 77 | 78 | In addition to tracking build outcomes, you can also instrument specific steps in your build processes, and report on information in parts, as it becomes available. 79 | 80 | For example, after reporting the start of a build: 81 | 82 | ```sh 83 | ./faros_event.sh CI -k "" \ 84 | --commit ":////" \ 85 | --artifact ":////" \ 86 | --run ":////" \ 87 | --run_start_time "Now" 88 | ``` 89 | 90 | You can report the start of a specific build step: 91 | 92 | ```sh 93 | ./faros_event.sh CI -k "" \ 94 | --run ":////" \ 95 | --run_step_id "" \ 96 | --run_step_start_time "Now" 97 | ``` 98 | 99 | Then report its outcome and end time: 100 | 101 | ```sh 102 | ./faros_event.sh CI -k "" \ 103 | --run ":////" \ 104 | --run_step_id "" \ 105 | --run_step_status "Success" \ 106 | --run_step_end_time "Now" 107 | ``` 108 | 109 | Don't forget to report the end of the build itself! 110 | 111 | ```sh 112 | ./faros_event.sh CI -k "" \ 113 | --run ":////" \ 114 | --run_status "Success" \ 115 | --run_end_time "Now" 116 | ``` 117 | 118 | 119 | ## Reporting test execution results 120 | 121 | Use this event type if you would like to analyze success/failure rates and execution times of Test Suites 122 | 123 | > :exclamation: `--full` flag must be provided with TestExecution event 124 | 125 | This event reports a successful test suite invocation: 126 | 127 | ```sh 128 | ./faros_event.sh TestExecution -k "" \ 129 | --commit ":////" \ 130 | --test_id "" \ 131 | --test_source "" \ 132 | --test_type "Functional" \ 133 | --test_status "Success" \ 134 | --test_suite "" \ 135 | --test_stats "failure=0,success=18,skipped=3,unknown=0,custom=2,total=23" \ 136 | --test_start_time "2021-07-20T18:05:46.019Z" \ 137 | --test_end_time "2021-07-20T18:08:42.024Z" \ 138 | --full 139 | ``` 140 | 141 | ## Reporting deployments 142 | 143 | Send CD events to the Faros platform if you would like to analyze your deploy frequency and lead time metrics. 144 | 145 | **Option 1**: 146 | If information about the specific commit that is being deployed is available at the time of deployment, use this CD event to report the successful deployment of an application to an environment: 147 | 148 | ```sh 149 | ./faros_event.sh CD -k "" \ 150 | --commit ":////" \ 151 | --deploy ":////" \ 152 | --deploy_status "Success" \ 153 | --deploy_start_time "2021-07-20T18:05:46.019Z" \ 154 | --deploy_end_time "2021-07-20T18:08:42.024Z" 155 | ``` 156 | 157 | **Option 2**: 158 | If commit information is not readily available at the time of deployment, but you do have artifact information, you can reference the artifact instead of the commit. 159 | In such a scenario, you must also spearately report CI events as described [above](#instrumenting-ci-pipelines), and the Faros Platform will do the work of figuring out what commit got deployed. 160 | 161 | ```sh 162 | ./faros_event.sh CD -k "" \ 163 | --artifact ":////" \ 164 | --deploy ":////" \ 165 | --deploy_status "Success" \ 166 | --deploy_start_time "2021-07-20T18:05:46.019Z" \ 167 | --deploy_end_time "2021-07-20T18:08:42.024Z" 168 | ``` 169 | 170 | > :exclamation: If choosing Option 2 to report your deployment events, the `--artifact` parameter in the CD event should exactly match the artifact parameter in the CI event. 171 | 172 | > :exclamation: The `deploy_status` is an enum. 
Read the documentation on arguments [here](#cd-arguments) for accepted values. 173 | 174 | > :exclamation: The `deploy_environment` is also an enum. Read the documentation on arguments [here](#cd-arguments) for accepted values. 175 | 176 | 177 | ## Arguments 178 | 179 | ### Passing arguments: flags or environment variables 180 | 181 | There are two ways that arguments can be passed into the script. The first, is via flags. The second is via environment variables. You may use a combination of these two options. If both are set, flags will take precedence over environment variables. 182 | 183 | :pencil: **Note**: By convention, you can switch between using a flag or an environment variable by simply capitalizing the argument name and prefixing it with `FAROS_`. For example, `--commit` becomes `FAROS_COMMIT`, `--artifact` becomes `FAROS_ARTIFACT`. 184 | 185 | ### General arguments 186 | 187 | | Argument | Description | Required | Default | 188 | |---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|-----------------------------------------------------------------------------------| 189 | | -k, --api_key | Your Faros API key. See the documentation for more information on [obtaining an api key](https://docs.faros.ai/#/api?id=getting-access). | Yes (not required when `--community_edition` flag is present) | | 190 | | -u, --url | The Faros API url to send the event to. | | `https://prod.api.faros.ai` (`http://localhost:8080` for Faros Community Edition) | 191 | | -g, --graph | The graph(s) that the event should be sent to. If specifying more than one graph, they should be provided as a comma separated array (e.g. `graph_1,graph_2`) | | "default" | 192 | | --validate_only | Event will not be consumed but instead will only be validated against event schema. | | | 193 | | --dry_run | Print the event instead of sending. | | | 194 | | --community_edition | Events will be formatted and sent to [Faros Community Edition](https://github.com/faros-ai/faros-community-edition). | | | 195 | 196 | ### CI arguments 197 | 198 | | Argument | Description | Dependency | 199 | |---------------------------|--------------------------------------------------------------------------------------------------------------------------------------------|------------| 200 | | --run | The URI of the job run that built the code. (`:////`) | | 201 | | --run_status | The status of the job run that built the code. (Allowed values: `Success`, `Failed`, `Canceled`, `Queued`, `Running`, `Unknown`, `Custom`) | --run | 202 | | --run_status_details | Any extra details about the status of the job run. | --run | 203 | | --run_start_time | The start time of the job run in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --run | 204 | | --run_end_time | The end time of the job run in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --run | 205 | | --run_name | The name of the job run that built the code. | --run | 206 | | --commit | The URI of the commit. (`:////`) | --run | 207 | | --artifact | The URI of the artifact. (`:////`) | --commit | 208 | | --pull_request_number | The pull request number of the commit. (e.g. `123`). | --commit | 209 | | --run_step_id | The id of the job run step. (e.g. `123`). 
| --run | 210 | | --run_step_name | The name of the job run step (e.g. `Lint`). | --run | 211 | | --run_step_status | The status of the job run step. (Allowed values: `Success`, `Failed`, `Canceled`, `Queued`, `Running`, `Unknown`, `Custom`) | --run | 212 | | --run_step_status_details | Any extra details about the status of the job run step. | --run | 213 | | --run_step_type | The type of the job run step. (Allowed values: `Script`, `Manual`, `Custom`) | --run | 214 | | --run_step_type_details | Any extra details about the type of the job run step. | --run | 215 | | --run_step_start_time | The start time of the job run step in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --run | 216 | | --run_step_end_time | The end time of the job run step in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --run | 217 | | --run_step_command | The command executed by the job run step. | --run | 218 | | --run_step_url | The url to the job run step. | --run | 219 | 220 | ### CD arguments 221 | 222 | | Argument | Description | Dependency | 223 | |-------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| 224 | | --deploy | The URI of the deployment. (`:////`) (`` allowed values: `Prod`, `Staging`, `QA`, `Dev`, `Sandbox`, `Canary`, `Custom`) | | 225 | | --deploy_start_time | The start time of the deployment in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --deploy | 226 | | --deploy_end_time | The end time of the deployment in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --deploy | 227 | | --deploy_requested_at | The time the deployment was requested in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --deploy | 228 | | --deploy_status | The status of the deployment. (Allowed values: `Success`, `Failed`, `Canceled`, `Queued`, `Running`, `RolledBack`, `Custom`) | --deploy | 229 | | --deploy_status_details | Any extra details about the status of the deployment. | --deploy | 230 | | --deploy_url | The url of the deployment. | --deploy | 231 | | --deploy_app_platform | The compute platform that runs the application. | --deploy | 232 | | --deploy_app_tags | A comma separated array of `key:value` application tags. (e.g. `key1:value1,key2:value2`) | --deploy | 233 | | --deploy_app_paths | A comma separated array of application slash separated paths. (e.g. `aws/us-east-1/eks-001,aws/us-west-2/eks-002`) | --deploy | 234 | | --deploy_env_details | Any extra details about the deployment environment. | --deploy | 235 | | --deploy_tags | A comma separated array of `key:value` deploy tags. (e.g. `key1:value1,key2:value2`) | --deploy | 236 | | --commit | The URI of the commit. If you specify `--artifact` in your CI events, you should use `--artifact` in your CD events. Otherwise, use `--commit`. (`:////`) | --deploy | 237 | | --artifact | The URI of the artifact. If you specify `--artifact` in your CI events, you should use `--artifact` in your CD events. Otherwise, use `--commit`. (`:////`) | --deploy | 238 | | --pull_request_number | The pull request number of the commit. (e.g. 123). 
Used only if --commit is included | --commit | 239 | | --run | The URI of the job run executing the deployment. (`:////` e.g. `Jenkins://faros-ai/my-pipeline/1234`) | | 240 | | --run_status | The status of the job run executing the deployment. (Allowed values: `Success`, `Failed`, `Canceled`, `Queued`, `Running`, `Unknown`, `Custom`) | --run | 241 | | --run_status_details | Any extra details about the status of the job run executing the deployment. | --run | 242 | | --run_start_time | The start time of the job run in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --run | 243 | | --run_end_time | The end time of the job run in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --run | 244 | 245 | ### Test Execution arguments 246 | 247 | | Argument | Description | Required | 248 | |-----------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------|----------| 249 | | --commit | The URI of the commit. (`:////` e.g. `GitHub://faros-ai/my-repo/da500aa4f54cbf8f3eb47a1dc2c136715c9197b9`) | Yes | 250 | | --pull_request_number | The pull request number of the commit. (e.g. 123). | | 251 | | --test_id | The unique identifier of the test within the test source system. | Yes | 252 | | --test_source | The test source system. (e.g. `Jenkins`) | Yes | 253 | | --test_type | The type of the test that was executed: (Allowed values: `Functional`, `Integration`, `Manual`, `Performance`, `Regression`, `Security`, `Unit`, `Custom`) | Yes | 254 | | --test_type_details | Additional details about the type of the test that was executed. | | 255 | | --test_status | The outcome status of the test execution. (Allowed values: `Success`, `Failure`, `Custom`, `Skipped`, `Unknown`) | Yes | 256 | | --test_status_details | Additional details about the status of the outcome status of the test. | | 257 | | --test_suite | The name of the test suite. | Yes | 258 | | --test_stats | The stats of the test outcome as a string of comma separated `key=value` pairs. (e.g. `failure=0,success=18,skipped=3,unknown=0,custom=2,total=23`) | | 259 | | --test_start_time | The start time of the test in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | | 260 | | --test_end_time | The end time of the test in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | | 261 | | --test_tags | A string of comma separated tags to associate with the test. (e.g. `tag1,tag2`) | | 262 | | --environments | A string of comma separated environments to associate with the test. (e.g. `env1,env2`) | | 263 | | --device_name | The name of the device on which the test was executed. (e.g. `MacBook`) | | 264 | | --device_os | The operating system of the device on which the test was executed. (e.g. `OSX`) | | 265 | | --device_browser | The browser on which the test was executed. (e.g. `Chrome`) | | 266 | | --device_type | The type of the device on which the test was executed. | 267 | | --task_source | The type of TMS (Task Management System). (e.g. `Jira`, `Shortcut`, `GitHub`) | If --test_task, --defect_task, --test_suite_task, or --test_execution_task provided | 268 | | --test_task | A comma separated array of one or many unique identifiers of test tasks within the TMS (Task Management System). 
The outcome of a specific test for this execution can be provided as a `key=value` pair (e.g. `TEST-123=Success,TEST-456=Failure` with allowed statuses: `Success`, `Failure`, `Custom`, `Skipped`, `Unknown` ) | | 269 | | --defect_task | The unique identifier of the defect task within the TMS (Task Management System). | | 270 | | --test_suite_task | The unique identifier of the test suite task within the TMS (Task Management System). | | 271 | | --test_execution_task | The unique identifier of the test execution task within the TMS (Task Management System). | | 272 | | --run | The URI of the job run executing the test. (`:////` e.g. `Jenkins://faros-ai/my-pipeline/1234`) | | 273 | | --run_status | The status of the job run executing the test. (Allowed values: `Success`, `Failed`, `Canceled`, `Queued`, `Running`, `Unknown`, `Custom`) | --run | 274 | | --run_status_details | Any extra details about the status of the job run executing the test. | --run | 275 | | --run_start_time | The start time of the job run in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --run | 276 | | --run_end_time | The end time of the job run in milliseconds since the epoch, ISO-8601, or `Now`. (e.g. `1626804346019`, `2021-07-20T18:05:46.019Z`) | --run | 277 | 278 | ### URI arguments alternative 279 | 280 | Sometimes using the URI format required by `--run`, `--commit`, `--artifact`, or `--deploy` arguments gets in the way. For example when your commit repository has a `/` in the name. Here is how you can supply the required information as individual fields instead. Each alternative requires **all** listed fields. 281 | 282 | #### `--run` argument alternative (**all** listed fields are required) 283 | 284 | | Argument | Description | 285 | |----------------|------------------------------| 286 | | --run_id | The id of the run | 287 | | --run_pipeline | The pipeline of the run | 288 | | --run_org | The organization of the run | 289 | | --run_source | The source system of the run | 290 | 291 | #### `--deploy` argument alternative (**all** listed fields are required) 292 | 293 | | Argument | Description | 294 | |-----------------|-------------------------------------------------------------------------------------------------------------------| 295 | | --deploy_id | The id of the deployment | 296 | | --deploy_env | The environment of the deployment (allowed values: `Prod`, `Staging`, `QA`, `Dev`, `Sandbox`, `Canary`, `Custom`) | 297 | | --deploy_app | The application being deployed | 298 | | --deploy_source | The source system of the deployment | 299 | 300 | #### `--commit` argument alternative (**all** listed fields are required) 301 | 302 | | Argument | Description | 303 | |-----------------|--------------------------------------| 304 | | --commit_sha | The SHA of the commit | 305 | | --commit_repo | The repository of the commit | 306 | | --commit_org | The organization of the commit | 307 | | --commit_source | The source system storing the commit | 308 | 309 | #### `--artifact` argument alternative (**all** listed fields are required) 310 | 311 | | Argument | Description | 312 | |-------------------|----------------------------------------| 313 | | --artifact_id | The id of the artifact | 314 | | --artifact_repo | The repository of the artifact | 315 | | --artifact_org | The organization of the artifact | 316 | | --artifact_source | The source system storing the artifact | 317 | 318 | ### Additional arguments 319 | 320 | | Argument | Description | Default | 321 | 
|-----------------------|---------------------------------------------------------------------|----------------------| 322 | | --origin | The origin of the event that is being sent to Faros. | "Faros_Script_Event" | 323 | | --full | The event being sent should be validated as a full event. | | 324 | | --silent | Unexceptional output will be silenced. | | 325 | | --debug | Helpful information will be printed. | | 326 | | --skip_saving_run | Do not include `cicd_Build` in the event. | | 327 | | --no_lowercase_vcs | Do not lowercase commit_organization and commit_repo. | | 328 | | --hasura_admin_secret | The Hasura Admin Secret. Only used with `‑‑community_edition` flag. | "admin" | 329 | | --max_time | The time in seconds allowed for each retry attempt. | 10 | 330 | | --retry | The number of allowed retry attempts. | 3 | 331 | | --retry_delay | The delay in seconds between each retry attempt. | 1 | 332 | | --retry_max_time | The total time in seconds the request with retries can take. | 40 | 333 | 334 | --- 335 | 336 | ## Tips 337 | 338 | ### Validating your command 339 | 340 | As you are iterating on instrumentation you can use the `--validate_only` flag to test before you are ready to send actual data: 341 | 342 | ```sh 343 | ./faros_event.sh <...your command arguments...> --validate_only 344 | ``` 345 | 346 | ### Usage with Faros Community Edition 347 | 348 | > :exclamation: Sending events in parts is not currently supported 349 | > :exclamation: Build steps in CI events are not currently supported 350 | > :exclamation: Test Execution events are not currently supported 351 | 352 | When using Faros Community Edition, you can use the tool in exactly the same way as described above. Just include the `--community_edition` flag. The Faros API key is not needed, since the tool will call your locally deployed Hasura to perform mutations derived from the events. See the [Faros Community Edition repo](https://github.com/faros-ai/faros-community-edition) for more details. 353 | 354 | ```sh 355 | ./faros_event.sh <...your command arguments...> --community_edition 356 | ``` 357 | 358 | ## :hammer: Development 359 | 360 | ### :white_check_mark: Testing & Checking for Bugs 361 | 362 | We use [ShellSpec](https://github.com/shellspec/shellspec) to test our scripts and [ShellCheck](https://www.shellcheck.net/) to check for potential bugs. 363 | 364 | #### Install using Homebrew 365 | 366 | ```sh 367 | brew tap shellspec/shellspec 368 | brew install shellspec shellcheck 369 | ``` 370 | 371 | #### Running the tests 372 | 373 | Move to the `/test` directory and execute `shellspec` 374 | 375 | ```sh 376 | cd test && shellspec 377 | ``` 378 | 379 | #### Checking for bugs 380 | 381 | Go to root directory and execute: 382 | 383 | ```sh 384 | shellcheck -s bash faros_event.sh 385 | ``` 386 | -------------------------------------------------------------------------------- /faros_event.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This is used for testing purposes. It is a noop unless under testing with shellspec 4 | # See https://github.com/shellspec/shellspec#intercepting for details. 5 | test || __() { :; } 6 | 7 | set -eo pipefail 8 | 9 | version="0.6.12" 10 | canonical_model_version="0.15.9" 11 | github_url="https://github.com/faros-ai/faros-events-cli" 12 | 13 | declare -a arr=("curl" "jq" "sed" "awk") 14 | for i in "${arr[@]}"; do 15 | which "$i" &> /dev/null || 16 | { echo "Error: $i is required." 
&& missing_require=1; } 17 | done 18 | 19 | if ((${missing_require:-0})); then 20 | echo "Please ensure curl, jq (1.6+), sed, and an implementation of awk (we recommend gawk) are available before running the script." 21 | exit 1 22 | fi 23 | 24 | # Defaults 25 | FAROS_GRAPH_DEFAULT="default" 26 | FAROS_URL_DEFAULT="https://prod.api.faros.ai" 27 | FAROS_ORIGIN_DEFAULT="Faros_Script_Event" 28 | FAROS_MAX_TIME_DEFAULT="10" 29 | FAROS_RETRY_DEFAULT="3" 30 | FAROS_RETRY_DELAY_DEFAULT="1" 31 | FAROS_RETRY_MAX_TIME_DEFAULT="40" 32 | HASURA_URL_DEFAULT="http://localhost:8080" 33 | HASURA_ADMIN_SECRET_DEFAULT="admin" 34 | 35 | declare -a ENVS=("Prod" "Staging" "QA" "Dev" "Sandbox" "Canary" "Custom") 36 | envs=$(printf '%s\n' "$(IFS=,; printf '%s' "${ENVS[*]}")") 37 | declare -a BUILD_STATUSES=("Success" "Failed" "Canceled" "Queued" "Running" "Unknown" "Custom") 38 | run_statuses=$(printf '%s\n' "$(IFS=,; printf '%s' "${BUILD_STATUSES[*]}")") 39 | declare -a DEPLOY_STATUSES=("Success" "Failed" "Canceled" "Queued" "Running" "RolledBack" "Custom") 40 | deploy_statuses=$(printf '%s\n' "$(IFS=,; printf '%s' "${DEPLOY_STATUSES[*]}")") 41 | declare -a TEST_TYPES=("Functional" "Integration" "Manual" "Performance" "Regression" "Security" "Unit" "Custom") 42 | test_types=$(printf '%s\n' "$(IFS=,; printf '%s' "${TEST_TYPES[*]}")") 43 | declare -a TEST_STATUSES=("Custom" "Failure" "Skipped" "Success" "Unknown") 44 | test_statuses=$(printf '%s\n' "$(IFS=,; printf '%s' "${TEST_STATUSES[*]}")") 45 | 46 | commit_uri_form="source://organization/repository/commit_sha" 47 | artifact_uri_form="source://organization/repository/artifact_id" 48 | run_uri_form="source://organization/pipeline/run_id" 49 | deploy_uri_form="source://application/environment/deploy_id" 50 | 51 | # Script settings' defaults 52 | dry_run=${FAROS_DRY_RUN:-0} 53 | silent=${FAROS_SILENT:-0} 54 | debug=${FAROS_DEBUG:-0} 55 | no_format=${FAROS_NO_FORMAT:-0} 56 | community_edition=${FAROS_COMMUNITY_EDITION:-0} 57 | 58 | # Theme 59 | RED='\033[0;31m' 60 | BLUE='\033[0;34m' 61 | YELLOW='\033[0;33m' 62 | GREEN='\033[0;32m' 63 | NC='\033[0m' # No Color 64 | 65 | function help() { 66 | printf "${BLUE} _____ ${RED} _ ___\\n" 67 | printf "${BLUE} | ___|__ _ _ __ ___ ___ ${RED} / \\ |_ _| (v$version)\\n" 68 | printf "${BLUE} | |_ / _\` || '__|/ _ \\ / __| ${RED} / _ \\ | |\\n" 69 | printf "${BLUE} | _|| (_| || | | (_) |\\__ \\ ${RED} / ___ \\ | |\\n" 70 | printf "${BLUE} |_| \\__,_||_| \\___/ |___/ ${RED}/_/ \\_\\|___|\\n" 71 | printf "${NC}\\n" 72 | printf "${RED}Canonical Model Version: v$canonical_model_version ${NC}\\n" 73 | echo 74 | echo "This script sends information to Faros." 75 | echo "There are multiple event types that can be used, each with a set of required" 76 | echo "and optional fields." 77 | echo 78 | printf "${RED}Event Types:${NC}\\n" 79 | echo "CI" 80 | echo "CD" 81 | echo "TestExecution" 82 | echo 83 | printf "${BLUE}Example Event:${NC}\\n" 84 | echo "./faros_event.sh CD -k \"\" \\" 85 | echo "--artifact \"$artifact_uri_form\" \\" 86 | echo "--deploy \"$deploy_uri_form\" \\" 87 | echo "--deploy_status \"Success\"" 88 | echo 89 | printf "${RED}Arguments:${NC}\\n" 90 | echo "Arguments can be provided either by flag or by environment variable." 91 | echo "By convention, you can switch to using environment variables by prefixing the" 92 | echo "flag name with 'FAROS_'. 
For example, --commit becomes FAROS_COMMIT and" 93 | echo "--deploy becomes FAROS_DEPLOY" 94 | echo "-----------------------------------------------------------------------------" 95 | echo "Argument | Req | Default Value" 96 | echo "-----------------------------------------------------------------------------" 97 | echo "-k / --api_key | *1 |" 98 | echo "-u / --url | | $FAROS_URL_DEFAULT ($HASURA_URL_DEFAULT if --community_edition specified)" 99 | echo "--hasura_admin_secret | | \"$HASURA_ADMIN_SECRET_DEFAULT\" (only used if --community_edition specified)" 100 | echo "-g / --graph | | \"$FAROS_GRAPH_DEFAULT\"" 101 | echo "--origin | | \"$FAROS_ORIGIN_DEFAULT\"" 102 | echo "*1 Unless --community_edition specified" 103 | echo 104 | printf "${BLUE}CI Event Arguments:${NC}\\n" 105 | echo "-----------------------------------------------------------------------------" 106 | echo "Argument | Req | Allowed Values / URI form" 107 | echo "-----------------------------------------------------------------------------" 108 | echo "--commit | Yes | $commit_uri_form" 109 | echo "--pull_request_number | | e.g. 123 (should be a number)" 110 | echo "--artifact | | $artifact_uri_form" 111 | echo "--run | | $run_uri_form" 112 | echo "--run_status | *1 | $run_statuses" 113 | echo "--run_status_details | |" 114 | echo "--run_name | |" 115 | echo "--run_start_time | | e.g. 1626804346019 (milliseconds since epoch)" 116 | echo "--run_end_time | | e.g. 1626804346019 (milliseconds since epoch)" 117 | echo "--no_artifact | |Use if CI event does not generate artifact. (Do not specify the --artifact param)" 118 | echo "*1 If --run included" 119 | echo 120 | printf "${BLUE}CD Event Arguments:${NC}\\n" 121 | echo "-----------------------------------------------------------------------------" 122 | echo "Argument | Req | Allowed Values / URI form" 123 | echo "-----------------------------------------------------------------------------" 124 | echo "--deploy | Yes | $deploy_uri_form *1" 125 | echo "--deploy_status | Yes | $deploy_statuses" 126 | echo "--artifact | *2 | $artifact_uri_form" 127 | echo "--commit | *2 | $commit_uri_form" 128 | echo "--deploy_url | |" 129 | echo "--deploy_status_details | |" 130 | echo "--deploy_env_details | |" 131 | echo "--deploy_app_platform | |" 132 | echo "--deploy_app_tags | | e.g. key1:value1,key2:value2" 133 | echo "--deploy_app_paths | | e.g. path1,path2" 134 | echo "--deploy_requested_at | | e.g. 1626804346019 (milliseconds since epoch)" 135 | echo "--deploy_start_time | | e.g. 1626804346019 (milliseconds since epoch)" 136 | echo "--deploy_end_time | | e.g. 1626804346019 (milliseconds since epoch)" 137 | echo "--deploy_tags | | e.g. tag1:value1,tag2:value2" 138 | echo "--pull_request_number | | *3 e.g. 123 (should be a number)" 139 | echo "--run | | $run_uri_form" 140 | echo "--run_status | *4 | $run_statuses" 141 | echo "--run_status_details | |" 142 | echo "--run_name | |" 143 | echo "--run_start_time | | e.g. 1626804346019 (milliseconds since epoch)" 144 | echo "--run_end_time | | e.g. 
1626804346019 (milliseconds since epoch)" 145 | echo "*1 environment must be: $envs" 146 | echo "*2 Either --artifact or --commit required" 147 | echo "*3 Used only if --commit is included" 148 | echo "*4 If --run included" 149 | echo 150 | printf "${BLUE}Test Execution Event Arguments:${NC}\\n" 151 | echo "-----------------------------------------------------------------------------" 152 | echo "Argument | Req | Allowed Values / URI form" 153 | echo "-----------------------------------------------------------------------------" 154 | echo "--commit | Yes | $commit_uri_form" 155 | echo "--pull_request_number | | e.g. 123 (should be a number)" 156 | echo "--test_id | Yes |" 157 | echo "--test_source | Yes |" 158 | echo "--test_type | Yes | $test_types" 159 | echo "--test_type_details | |" 160 | echo "--test_status | Yes | $test_statuses" 161 | echo "--test_status_details | |" 162 | echo "--test_suite | Yes | e.g. My test suite name" 163 | echo "--test_stats | | e.g. failure=N,success=N,skipped=N,unknown=N,custom=N,total=N" 164 | echo "--test_tags | | e.g. tag1,tag2,tag3" 165 | echo "--environments | | e.g. env1,env2,env3" 166 | echo "--device_name | | e.g. MacBook" 167 | echo "--device_os | | e.g. OSX" 168 | echo "--device_browser | | e.g. Chrome" 169 | echo "--device_type | |" 170 | echo "--test_start_time | | e.g. 1626804346019 (milliseconds since epoch)" 171 | echo "--test_end_time | | e.g. 1626804346019 (milliseconds since epoch)" 172 | echo "--test_task | | e.g. TEST-123=Success,TEST-456 *2" 173 | echo "--defect_task | | e.g. TEST-123" 174 | echo "--test_suite_task | | e.g. TEST-123" 175 | echo "--test_execution_task | | e.g. TEST-123" 176 | echo "--task_source | *1 | e.g. Jira" 177 | echo "*1 If --test_task, --defect_task, --test_suite_task, or --test_execution_task included" 178 | echo "*2 Allowed statuses: $test_statuses" 179 | echo 180 | echo "Additional Settings:" 181 | echo "--dry_run Do not send the event." 182 | echo "--silent Unexceptional output will be silenced." 183 | echo "--debug Helpful information will be printed." 184 | echo "--no_format Log formatting will be turned off." 185 | echo "--full Event should be validated as a full event." 186 | echo "--skip-saving-run Do not include a cicd_Build in event." 187 | echo "--validate_only Only validate event body against event api." 188 | echo "--community_edition Format and send event to Faros Community Edition." 
189 | echo "--max-time The time in seconds allowed for each retry attempt" 190 | echo "--retry The number of allowed retry attempts" 191 | echo "--retry-delay The delay in seconds between each retry attempt" 192 | echo "--retry-max-time The total time in seconds the request with retries can take" 193 | echo 194 | echo "For more usage information please visit: $github_url" 195 | } 196 | 197 | function parseControls() { 198 | while (($#)); do 199 | case "$1" in 200 | -k|--api_key) api_key="$2" && shift 2 ;; 201 | -g|--graph) graph="$2" && shift 2 ;; 202 | --origin) origin="$2" && shift 2 ;; 203 | -u|--url) url="$2" && shift 2 ;; 204 | --hasura_admin_secret) hasura_admin_secret="$2" && shift 2 ;; 205 | --dry_run) dry_run=1 && shift ;; 206 | --full) full="true" && shift ;; 207 | --no_build_object) 208 | warn "no_build_object flag is deprecated, use skip_saving_run" 209 | skip_saving_run="true" 210 | shift ;; 211 | --skip_saving_run) skip_saving_run="true" && shift ;; 212 | --no_artifact) no_artifact="true" && shift ;; 213 | --validate_only) validate_only="true" && shift ;; 214 | -s|--silent) silent=1 && shift ;; 215 | --max_time) max_time="$2" && shift 2 ;; 216 | --retry) retry="$2" && shift 2 ;; 217 | --retry_delay) retry_delay="$2" && shift 2 ;; 218 | --retry_max_time) retry_max_time="$2" && shift 2 ;; 219 | --debug) debug=1 && shift ;; 220 | --no_format) no_format=1 && shift ;; 221 | --community_edition) community_edition=1 && shift ;; 222 | --help) help exit 0 ;; 223 | -v|--version) echo "$version" exit 0 ;; 224 | *) 225 | FLAGS+=("$1") # save it in an array for later 226 | shift ;; 227 | esac 228 | done 229 | } 230 | 231 | function parseFlags() { 232 | while (($#)); do 233 | case "$1" in 234 | --artifact) setFlag "$1" artifact_uri "$2" && shift 2 ;; 235 | --artifact_id) setFlag "$1" artifact_id "$2" && shift 2 ;; 236 | --artifact_repo) setFlag "$1" artifact_repo "$2" && shift 2 ;; 237 | --artifact_org) setFlag "$1" artifact_org "$2" && shift 2 ;; 238 | --artifact_source) setFlag "$1" artifact_source "$2" && shift 2 ;; 239 | --commit) setFlag "$1" commit_uri "$2" && shift 2 ;; 240 | --commit_sha) setFlag "$1" commit_sha "$2" && shift 2 ;; 241 | --commit_repo) setFlag "$1" commit_repo "$2" && shift 2 ;; 242 | --commit_org) setFlag "$1" commit_org "$2" && shift 2 ;; 243 | --commit_source) setFlag "$1" commit_source "$2" && shift 2 ;; 244 | --branch) setFlag "$1" branch "$2" && shift 2 ;; 245 | --pull_request_number) setFlag "$1" pull_request_number "$2" && shift 2 ;; 246 | --deploy) setFlag "$1" deploy_uri "$2" && shift 2 ;; 247 | --deploy_id) setFlag "$1" deploy_id "$2" && shift 2 ;; 248 | --deploy_env) setFlag "$1" deploy_env "$2" && shift 2 ;; 249 | --deploy_app) setFlag "$1" deploy_app "$2" && shift 2 ;; 250 | --deploy_source) setFlag "$1" deploy_source "$2" && shift 2 ;; 251 | --deploy_app_platform) setFlag "$1" deploy_app_platform "$2" && shift 2 ;; 252 | --deploy_app_tags) setFlag "$1" deploy_app_tags "$2" && shift 2 ;; 253 | --deploy_app_paths) setFlag "$1" deploy_app_paths "$2" && shift 2 ;; 254 | --deploy_url) setFlag "$1" deploy_url "$2" && shift 2 ;; 255 | --deploy_env_details) setFlag "$1" deploy_env_details "$2" && shift 2 ;; 256 | --deploy_status) setFlag "$1" deploy_status "$2" && shift 2 ;; 257 | --deploy_status_details) setFlag "$1" deploy_status_details "$2" && shift 2 ;; 258 | --deploy_requested_at) setFlag "$1" deploy_requested_at "$2" && shift 2 ;; 259 | --deploy_start_time) setFlag "$1" deploy_start_time "$2" && shift 2 ;; 260 | --deploy_end_time) setFlag "$1" 
deploy_end_time "$2" && shift 2 ;; 261 | --deploy_tags) setFlag "$1" deploy_tags "$2" && shift 2 ;; 262 | --test_id) setFlag "$1" test_id "$2" && shift 2 ;; 263 | --test_source) setFlag "$1" test_source "$2" && shift 2 ;; 264 | --test_type) setFlag "$1" test_type "$2" && shift 2 ;; 265 | --test_type_details) setFlag "$1" test_type_details "$2" && shift 2 ;; 266 | --test_status) setFlag "$1" test_status "$2" && shift 2 ;; 267 | --test_status_details) setFlag "$1" test_status_details "$2" && shift 2 ;; 268 | --test_suite) setFlag "$1" test_suite "$2" && shift 2 ;; 269 | --test_stats) setFlag "$1" test_stats "$2" && shift 2 ;; 270 | --test_tags) setFlag "$1" test_tags "$2" && shift 2 ;; 271 | --environments) setFlag "$1" environments "$2" && shift 2 ;; 272 | --device_name) setFlag "$1" device_name "$2" && shift 2 ;; 273 | --device_os) setFlag "$1" device_os "$2" && shift 2 ;; 274 | --device_browser) setFlag "$1" device_browser "$2" && shift 2 ;; 275 | --device_type) setFlag "$1" device_type "$2" && shift 2 ;; 276 | --test_start_time) setFlag "$1" test_start_time "$2" && shift 2 ;; 277 | --test_end_time) setFlag "$1" test_end_time "$2" && shift 2 ;; 278 | --test_task) setFlag "$1" test_task "$2" && shift 2 ;; 279 | --defect_task) setFlag "$1" defect_task "$2" && shift 2 ;; 280 | --test_suite_task) setFlag "$1" test_suite_task "$2" && shift 2 ;; 281 | --test_execution_task) setFlag "$1" test_execution_task "$2" && shift 2 ;; 282 | --task_source) setFlag "$1" task_source "$2" && shift 2 ;; 283 | --run) setFlag "$1" run_uri "$2" && shift 2 ;; 284 | --run_id) setFlag "$1" run_id "$2" && shift 2 ;; 285 | --run_pipeline) setFlag "$1" run_pipeline "$2" && shift 2 ;; 286 | --run_org) setFlag "$1" run_org "$2" && shift 2 ;; 287 | --run_source) setFlag "$1" run_source "$2" && shift 2 ;; 288 | --run_name) setFlag "$1" run_name "$2" && shift 2 ;; 289 | --run_status) setFlag "$1" run_status "$2" && shift 2 ;; 290 | --run_status_details) setFlag "$1" run_status_details "$2" && shift 2 ;; 291 | --run_start_time) setFlag "$1" run_start_time "$2" && shift 2 ;; 292 | --run_end_time) setFlag "$1" run_end_time "$2" && shift 2 ;; 293 | --run_step_id) setFlag "$1" run_step_id "$2" && shift 2 ;; 294 | --run_step_name) setFlag "$1" run_step_name "$2" && shift 2 ;; 295 | --run_step_type) setFlag "$1" run_step_type "$2" && shift 2 ;; 296 | --run_step_type_details) setFlag "$1" run_step_type_details "$2" && shift 2 ;; 297 | --run_step_command) setFlag "$1" run_step_command "$2" && shift 2 ;; 298 | --run_step_start_time) setFlag "$1" run_step_start_time "$2" && shift 2 ;; 299 | --run_step_end_time) setFlag "$1" run_step_end_time "$2" && shift 2 ;; 300 | --run_step_status) setFlag "$1" run_step_status "$2" && shift 2 ;; 301 | --run_step_status_details) setFlag "$1" run_step_status_details "$2" && shift 2 ;; 302 | --run_step_url) setFlag "$1" run_step_url "$2" && shift 2 ;; 303 | *) 304 | POSITIONAL+=("$1") # save it in an array for later 305 | shift ;; 306 | esac 307 | done 308 | } 309 | 310 | function setFlag() { 311 | export "$2"="$3" 312 | debug "| $1 [ $3 ]" 313 | } 314 | 315 | # Determine which event types are present 316 | function processArgs() { 317 | # No positional arg passed - show help 318 | if ! 
(($#)) || [ "$1" == "help" ] || [ -z "$1" ]; then 319 | help 320 | fi 321 | 322 | ci_event=0 323 | cd_event=0 324 | test_execution_event=0 325 | 326 | # loop through positional args 327 | while (($#)); do 328 | case "$1" in 329 | CI) 330 | ci_event=1 331 | shift ;; 332 | CD) 333 | cd_event=1 334 | shift ;; 335 | TestExecution) 336 | test_execution_event=1 337 | shift ;; 338 | help) 339 | help ;; 340 | *) 341 | UNRECOGNIZED+=("$1") 342 | shift ;; 343 | esac 344 | done 345 | 346 | if [ -n "${UNRECOGNIZED:-}" ]; then 347 | err "Unrecognized arg(s): ${UNRECOGNIZED[*]}" 348 | fail 349 | fi 350 | } 351 | 352 | # Resolve input and populate event depending on present event types 353 | # Only one event should be considered per execution 354 | function processEventTypes() { 355 | if ((ci_event)); then 356 | event_type="CI" 357 | makeEvent 358 | resolveCIInput 359 | addArtifactToData 360 | addCommitToData 361 | addRunToData 362 | addRunStepToData 363 | elif ((cd_event)); then 364 | event_type="CD" 365 | makeEvent 366 | resolveCDInput 367 | addDeployToData 368 | addArtifactToData 369 | addCommitToData 370 | addRunToData 371 | elif ((test_execution_event)); then 372 | event_type="TestExecution" 373 | makeEvent 374 | resolveTestExecutionInput 375 | addTestToData 376 | addCommitToData 377 | addRunToData 378 | fi 379 | } 380 | 381 | function now_as_iso8601() { 382 | jq -nr 'now | todate' 383 | } 384 | 385 | # Attempt to convert to iso8601 format 386 | # Converts from Unix millis or the literal 'Now' 387 | # Anything else is returned unchanged 388 | function convert_to_iso8601() { 389 | if [[ "$1" =~ ^[0-9]+$ ]]; then 390 | jq -r '. / 1000 | todate' <<< "$1" 391 | elif [[ "$1" =~ ^Now$ ]]; then 392 | __ begin __ 393 | now_as_iso8601 394 | else 395 | echo "$1" 396 | fi 397 | } 398 | 399 | function make_commit_key() { 400 | jq '{data_commit_sha,data_commit_repository,data_commit_organization,data_commit_source}' <<< "$flat" 401 | } 402 | 403 | function make_artifact_key() { 404 | if [ -n "$has_artifact" ]; then 405 | keys_matching "$flat" "data_artifact_(id|repository|organization|source)" 406 | else 407 | jq -n \ 408 | --arg commit_sha "$commit_sha" \ 409 | --arg commit_repo "$commit_repo" \ 410 | --arg commit_org "$commit_org" \ 411 | --arg commit_source "$commit_source" \ 412 | '{ 413 | "data_artifact_id": $commit_sha, 414 | "data_artifact_repository": $commit_repo, 415 | "data_artifact_organization": $commit_org, 416 | "data_artifact_source": $commit_source, 417 | }' 418 | fi 419 | } 420 | 421 | function doPullRequestCommitMutation() { 422 | if [ -n "$has_commit" ] && [ -n "$pull_request_number" ]; then 423 | pull_request=$(jq -n \ 424 | --arg pull_request_number "$pull_request_number" \ 425 | '{ 426 | "data_pull_request_uid": $pull_request_number, 427 | "data_pull_request_number": $pull_request_number|tonumber, 428 | }' 429 | ) 430 | pull_request_commit=$(concat "$pull_request" "$commit_key") 431 | make_mutation vcs_pull_request_commit "$pull_request_commit" 432 | fi 433 | } 434 | 435 | function doCDMutations() { 436 | flat=$(flatten "$request_body") 437 | 438 | app_platform="${deploy_app_platform:-}" 439 | if [ -z "${app_platform}" ]; then 440 | app_uid="$deploy_app" 441 | else 442 | app_uid="${deploy_app}_${app_platform}" 443 | fi 444 | 445 | compute_Application=$(jq -n \ 446 | --arg name "$deploy_app" \ 447 | --arg platform "${app_platform}" \ 448 | --arg app_uid "${app_uid}" \ 449 | '{ 450 | "name": $name, 451 | "platform": $platform, 452 | "uid": $app_uid 453 | }' 454 | ) 455 | make_mutation 
compute_application "$compute_Application" 456 | 457 | cicd_Deployment_base=$(keys_matching "$flat" "data_deploy_(id|source)") 458 | status_env=$(jq -n \ 459 | --arg status_category "$deploy_status" \ 460 | --arg status_detail "${deploy_status_details:-}" \ 461 | --arg env_category "$deploy_env" \ 462 | --arg env_detail "${deploy_env_details:-}" \ 463 | --arg app_uid "${app_uid}" \ 464 | '{ 465 | "status": {"category" : $status_category, "detail" : $status_detail}, 466 | "env": {"category" : $env_category, "detail" : $env_detail}, 467 | "compute_Application": $app_uid 468 | }' 469 | ) 470 | cicd_Deployment_base=$(concat "$cicd_Deployment_base" "$status_env") 471 | if [ -n "$deploy_start_time" ] && [ -n "$deploy_end_time" ]; then 472 | start_end=$(jq -n \ 473 | --arg start_time "$deploy_start_time" \ 474 | --arg end_time "$deploy_end_time" \ 475 | '{ 476 | "deploy_start_time": $start_time, 477 | "deploy_end_time": $end_time, 478 | }' 479 | ) 480 | else 481 | start_end=$(jq -n \ 482 | '{ 483 | "deploy_start_time": null, 484 | "deploy_end_time": null, 485 | }' 486 | ) 487 | fi 488 | cicd_Deployment_with_start_end=$(concat "$cicd_Deployment_base" "$start_end") 489 | 490 | artifact_key=$(make_artifact_key) 491 | 492 | cicd_ArtifactDeployment=$(keys_matching "$flat" "data_deploy_(id|source)") 493 | cicd_ArtifactDeployment=$(concat "$cicd_ArtifactDeployment" "$artifact_key") 494 | make_mutation cicd_artifact_deployment "$cicd_ArtifactDeployment" 495 | 496 | if [ -n "$has_run" ]; then 497 | make_mutations_from_run 498 | 499 | cicd_Deployment=$(concat "$cicd_Deployment_with_start_end" "$buildKey") 500 | make_mutation cicd_deployment_with_build "$cicd_Deployment" 501 | else 502 | make_mutation cicd_deployment "$cicd_Deployment_with_start_end" 503 | fi 504 | 505 | if [ -z "$has_artifact" ]; then 506 | if [ -n "$has_run" ]; then 507 | cicd_Artifact_with_build=$(concat "$artifact_key" "$buildKey") 508 | make_mutation cicd_artifact_with_build "$cicd_Artifact_with_build" 509 | else 510 | make_mutation cicd_artifact "$artifact_key" 511 | fi 512 | 513 | commit_key=$(make_commit_key) 514 | cicd_ArtifactCommitAssociation=$(concat "$artifact_key" "$commit_key") 515 | make_mutation cicd_artifact_commit_association "$cicd_ArtifactCommitAssociation" 516 | fi 517 | 518 | doPullRequestCommitMutation 519 | } 520 | 521 | function make_mutations_from_run { 522 | buildKey=$(jq \ 523 | '{data_run_id,data_run_pipeline,data_run_organization,data_run_source}' <<< "$flat" 524 | ) 525 | if ! 
((skip_saving_run)); then 526 | if [ -z "$has_run_status" ]; then 527 | err "Please provide --run_status" 528 | fail 529 | fi 530 | if [ -n "$has_run_start_time" ] && [ -n "$has_run_end_time" ]; then 531 | cicd_Build_with_start_end=$(jq -n \ 532 | --arg run_status "$run_status" \ 533 | --arg run_status_details "$run_status_details" \ 534 | --arg run_start_time "$run_start_time" \ 535 | --arg run_end_time "$run_end_time" \ 536 | '{ 537 | "run_status": {"category": $run_status, "detail": $run_status_details}, 538 | "run_start_time": $run_start_time, 539 | "run_end_time": $run_end_time, 540 | }' 541 | ) 542 | cicd_Build_with_start_end=$(concat "$cicd_Build_with_start_end" "$buildKey") 543 | make_mutation cicd_build_with_start_end "$cicd_Build_with_start_end" 544 | else 545 | cicd_Build=$(jq -n \ 546 | --arg run_status "$run_status" \ 547 | --arg run_status_details "$run_status_details" \ 548 | '{ 549 | "run_status": {"category": $run_status, "detail": $run_status_details}, 550 | }' 551 | ) 552 | cicd_Build=$(concat "$cicd_Build" "$buildKey") 553 | make_mutation cicd_build "$cicd_Build" 554 | fi 555 | 556 | cicd_Pipeline=$(jq \ 557 | '{data_run_pipeline,data_run_organization,data_run_source}' <<< "$flat" 558 | ) 559 | make_mutation cicd_pipeline "$cicd_Pipeline" 560 | 561 | cicd_Organization_from_run=$(jq \ 562 | '{data_run_organization,data_run_source}' <<< "$flat" 563 | ) 564 | make_mutation cicd_organization_from_run "$cicd_Organization_from_run" 565 | fi 566 | } 567 | 568 | function doCIMutations() { 569 | flat=$(flatten "$request_body") 570 | 571 | artifact_key=$(make_artifact_key) 572 | commit_key=$(make_commit_key) 573 | 574 | if [ -n "$has_run" ]; then 575 | make_mutations_from_run 576 | 577 | cicd_Artifact_with_build=$(concat "$artifact_key" "$buildKey") 578 | make_mutation cicd_artifact_with_build "$cicd_Artifact_with_build" 579 | else 580 | make_mutation cicd_artifact "$artifact_key" 581 | fi 582 | 583 | cicd_ArtifactCommitAssociation=$(concat "$artifact_key" "$commit_key") 584 | make_mutation cicd_artifact_commit_association "$cicd_ArtifactCommitAssociation" 585 | 586 | cicd_Repository=$(jq \ 587 | '{data_artifact_repository,data_artifact_organization,data_artifact_source}' <<< "$artifact_key" 588 | ) 589 | make_mutation cicd_repository "$cicd_Repository" 590 | 591 | cicd_Organization=$(jq \ 592 | '{data_artifact_organization,data_artifact_source}' <<< "$artifact_key" 593 | ) 594 | make_mutation cicd_organization "$cicd_Organization" 595 | 596 | doPullRequestCommitMutation 597 | } 598 | 599 | function make_mutation() { 600 | entity_origin=$(jq -n \ 601 | --arg data_origin "$origin" \ 602 | '{"data_origin": $data_origin}' 603 | ) 604 | data=$(concat "$2" "$entity_origin") 605 | log Calling Hasura rest endpoint "$1" with payload "$data" 606 | 607 | if ! ((dry_run)); then 608 | log "Sending mutation to Hasura..." 609 | 610 | http_response=$(curl -s -S --retry 5 --retry-delay 5 \ 611 | --write-out "HTTPSTATUS:%{http_code}" -X POST \ 612 | "$url/api/rest/$1" \ 613 | -H "content-type: application/json" \ 614 | -H "X-Hasura-Admin-Secret: $hasura_admin_secret" \ 615 | -d "$data") 616 | 617 | http_response_status=$(echo "$http_response" | tr -d '\n' | sed -e 's/.*HTTPSTATUS://') 618 | http_response_body=$(echo "$http_response" | sed -e 's/HTTPSTATUS\:.*//g') 619 | 620 | if [ !
"$http_response_status" -eq 200 ]; then 621 | err "[HTTP status: $http_response_status]" 622 | err "Response Body:" 623 | err "$http_response_body" 624 | fail 625 | else 626 | log "[HTTP status OK: $http_response_status]" 627 | log "Response Body:" 628 | log "$http_response_body" 629 | fi 630 | else 631 | log "Dry run: Mutation NOT sent to Faros." 632 | fi 633 | } 634 | 635 | function keys_matching() { 636 | jq --arg regexp "$2" \ 637 | 'with_entries(if (.key|test($regexp)) then ( {key: .key, value: .value } ) else empty end )' <<< "$1" 638 | } 639 | 640 | function concat() { 641 | jq --argjson json_2 "$2" '.+=$json_2' <<< "$1" 642 | } 643 | 644 | function flatten() { 645 | jq '[paths(scalars) as $path | { ($path | map(tostring) | join("_")): getpath($path) } ] | add' <<< "$1" 646 | } 647 | 648 | function resolveDefaults() { 649 | FAROS_GRAPH=${FAROS_GRAPH:-$FAROS_GRAPH_DEFAULT} 650 | FAROS_URL=${FAROS_URL:-$FAROS_URL_DEFAULT} 651 | FAROS_ORIGIN=${FAROS_ORIGIN:-$FAROS_ORIGIN_DEFAULT} 652 | HASURA_URL=${HASURA_URL:-$HASURA_URL_DEFAULT} 653 | HASURA_ADMIN_SECRET=${HASURA_ADMIN_SECRET:-$HASURA_ADMIN_SECRET_DEFAULT} 654 | FAROS_MAX_TIME=${FAROS_MAX_TIME:-$FAROS_MAX_TIME_DEFAULT} 655 | FAROS_RETRY=${FAROS_RETRY:-$FAROS_RETRY_DEFAULT} 656 | FAROS_RETRY_DELAY=${FAROS_RETRY_DELAY:-$FAROS_RETRY_DELAY_DEFAULT} 657 | FAROS_RETRY_MAX_TIME=${FAROS_RETRY_MAX_TIME:-$FAROS_RETRY_MAX_TIME_DEFAULT} 658 | } 659 | 660 | function resolveControlInput() { 661 | # Required fields: 662 | if [ -n "${api_key+x}" ] || [ -n "${FAROS_API_KEY+x}" ]; then 663 | api_key=${api_key:-$FAROS_API_KEY} 664 | else 665 | if ! ((community_edition)); then 666 | err "A Faros API key must be provided" 667 | fail 668 | fi 669 | fi 670 | 671 | # Optional fields: 672 | resolveDefaults 673 | graph=${graph:-$FAROS_GRAPH} 674 | IFS=',' read -ra graphs <<< "$graph" 675 | 676 | origin=${origin:-$FAROS_ORIGIN} 677 | if ! 
((community_edition)); then 678 | url=${url:-$FAROS_URL} 679 | else 680 | url=${url:-$HASURA_URL} 681 | hasura_admin_secret=${hasura_admin_secret:-$HASURA_ADMIN_SECRET} 682 | fi 683 | 684 | # Curl settings 685 | max_time=${max_time:-$FAROS_MAX_TIME} 686 | retry=${retry:-$FAROS_RETRY} 687 | retry_delay=${retry_delay:-$FAROS_RETRY_DELAY} 688 | retry_max_time=${retry_max_time:-$FAROS_RETRY_MAX_TIME} 689 | 690 | # Optional script settings: If unset then false 691 | no_lowercase_vcs=${no_lowercase_vcs:-0} 692 | full=${full:-"false"} 693 | skip_saving_run=${skip_saving_run:-"false"} 694 | validate_only=${validate_only:-"false"} 695 | no_artifact=${no_artifact:-"false"} 696 | } 697 | 698 | function resolveCDInput() { 699 | resolveDeployInput 700 | resolveArtifactInput 701 | resolveCommitInput 702 | resolveRunInput 703 | } 704 | 705 | function resolveCIInput() { 706 | resolveArtifactInput 707 | resolveCommitInput 708 | resolveRunInput 709 | } 710 | 711 | function resolveTestExecutionInput() { 712 | resolveTestInput 713 | resolveCommitInput 714 | resolveRunInput 715 | } 716 | 717 | function resolveDeployInput() { 718 | deploy_uri=${deploy_uri:-$FAROS_DEPLOY} 719 | if ((community_edition)); then 720 | parseDeployUri 721 | fi 722 | deploy_id=${deploy_id:-$FAROS_DEPLOY_ID} 723 | deploy_app=${deploy_app:-$FAROS_DEPLOY_APP} 724 | deploy_url=${deploy_url:-$FAROS_DEPLOY_URL} 725 | deploy_env=${deploy_env:-$FAROS_DEPLOY_ENV} 726 | deploy_source=${deploy_source:-$FAROS_DEPLOY_SOURCE} 727 | deploy_status=${deploy_status:-$FAROS_DEPLOY_STATUS} 728 | deploy_app_platform=${deploy_app_platform:-$FAROS_DEPLOY_APP_PLATFORM} 729 | deploy_app_tags=${deploy_app_tags:-$FAROS_DEPLOY_APP_TAGS} 730 | deploy_app_paths=${deploy_app_paths:-$FAROS_DEPLOY_APP_PATHS} 731 | deploy_env_details=${deploy_env_details:-$FAROS_DEPLOY_ENV_DETAILS} 732 | deploy_status_details=${deploy_status_details:-$FAROS_DEPLOY_STATUS_DETAILS} 733 | deploy_requested_at=${deploy_requested_at:-$FAROS_DEPLOY_REQUESTED_AT} 734 | deploy_start_time=${deploy_start_time:-$FAROS_DEPLOY_START_TIME} 735 | deploy_end_time=${deploy_end_time:-$FAROS_DEPLOY_END_TIME} 736 | deploy_tags=${deploy_tags:-$FAROS_DEPLOY_TAGS} 737 | 738 | if [ -n "$deploy_requested_at" ]; then 739 | deploy_requested_at=$(convert_to_iso8601 "$deploy_requested_at") 740 | fi 741 | if [ -n "$deploy_start_time" ]; then 742 | deploy_start_time=$(convert_to_iso8601 "$deploy_start_time") 743 | fi 744 | if [ -n "$deploy_end_time" ]; then 745 | deploy_end_time=$(convert_to_iso8601 "$deploy_end_time") 746 | fi 747 | } 748 | 749 | function resolveArtifactInput() { 750 | artifact_uri=${artifact_uri:-$FAROS_ARTIFACT} 751 | if ((community_edition)); then 752 | parseArtifactUri 753 | fi 754 | artifact_id=${artifact_id:-$FAROS_ARTIFACT_ID} 755 | artifact_repo=${artifact_repo:-$FAROS_ARTIFACT_REPO} 756 | artifact_org=${artifact_org:-$FAROS_ARTIFACT_ORG} 757 | artifact_source=${artifact_source:-$FAROS_ARTIFACT_SOURCE} 758 | } 759 | 760 | function resolveCommitInput() { 761 | commit_uri=${commit_uri:-$FAROS_COMMIT} 762 | if ((community_edition)); then 763 | parseCommitUri 764 | fi 765 | commit_sha=${commit_sha:-$FAROS_COMMIT_SHA} 766 | commit_repo=${commit_repo:-$FAROS_COMMIT_REPO} 767 | commit_org=${commit_org:-$FAROS_COMMIT_ORG} 768 | commit_source=${commit_source:-$FAROS_COMMIT_SOURCE} 769 | branch=${branch:-$FAROS_BRANCH} 770 | pull_request_number=${pull_request_number:-$FAROS_PULL_REQUEST_NUMBER} 771 | } 772 | 773 | function resolveRunInput() { 774 | run_uri=${run_uri:-$FAROS_RUN} 775 | if 
((community_edition)); then 776 | parseRunUri 777 | fi 778 | run_id=${run_id:-$FAROS_RUN_ID} 779 | run_pipeline=${run_pipeline:-$FAROS_RUN_PIPELINE} 780 | run_org=${run_org:-$FAROS_RUN_ORG} 781 | run_source=${run_source:-$FAROS_RUN_SOURCE} 782 | run_status=${run_status:-$FAROS_RUN_STATUS} 783 | run_name=${run_name:-$FAROS_RUN_NAME} 784 | run_status_details=${run_status_details:-$FAROS_RUN_STATUS_DETAILS} 785 | run_start_time=${run_start_time:-$FAROS_RUN_START_TIME} 786 | run_end_time=${run_end_time:-$FAROS_RUN_END_TIME} 787 | run_step_id=${run_step_id:-$FAROS_RUN_STEP_ID} 788 | run_step_name=${run_step_name:-$FAROS_RUN_STEP_NAME} 789 | run_step_type=${run_step_type:-$FAROS_RUN_STEP_TYPE} 790 | run_step_type_details=${run_step_type_details:-$FAROS_RUN_STEP_TYPE_DETAILS} 791 | run_step_status=${run_step_status:-$FAROS_RUN_STEP_STATUS} 792 | run_step_status_details=${run_step_status_details:-$FAROS_RUN_STEP_STATUS_DETAILS} 793 | run_step_command=${run_step_command:-$FAROS_RUN_STEP_COMMAND} 794 | run_step_url=${run_step_url:-$FAROS_RUN_STEP_URL} 795 | run_step_start_time=${run_step_start_time:-$FAROS_RUN_STEP_START_TIME} 796 | run_step_end_time=${run_step_end_time:-$FAROS_RUN_STEP_END_TIME} 797 | 798 | if [ -n "$run_status" ]; then 799 | has_run_status=1 800 | fi 801 | if [ -n "$run_start_time" ]; then 802 | has_run_start_time=1 803 | run_start_time=$(convert_to_iso8601 "$run_start_time") 804 | fi 805 | if [ -n "$run_end_time" ]; then 806 | has_run_end_time=1 807 | run_end_time=$(convert_to_iso8601 "$run_end_time") 808 | fi 809 | if [ -n "$run_step_start_time" ]; then 810 | run_step_start_time=$(convert_to_iso8601 "$run_step_start_time") 811 | fi 812 | if [ -n "$run_step_end_time" ]; then 813 | run_step_end_time=$(convert_to_iso8601 "$run_step_end_time") 814 | fi 815 | } 816 | 817 | function resolveTestInput() { 818 | test_id=${test_id:-$FAROS_TEST_ID} 819 | test_source=${test_source:-$FAROS_TEST_SOURCE} 820 | test_type=${test_type:-$FAROS_TEST_TYPE} 821 | test_type_details=${test_type_details:-$FAROS_TEST_TYPE_DETAILS} 822 | test_status=${test_status:-$FAROS_TEST_STATUS} 823 | test_status_details=${test_status_details:-$FAROS_TEST_STATUS_DETAILS} 824 | test_suite=${test_suite:-$FAROS_TEST_SUITE} 825 | test_stats=${test_stats:-$FAROS_TEST_STATS} 826 | test_tags=${test_tags:-$FAROS_TEST_TAGS} 827 | environments=${environments:-$FAROS_ENVIRONMENTS} 828 | device_name=${device_name:-$FAROS_DEVICE_NAME} 829 | device_os=${device_os:-$FAROS_DEVICE_OS} 830 | device_browser=${device_browser:-$FAROS_DEVICE_BROWSER} 831 | device_type=${device_type:-$FAROS_DEVICE_TYPE} 832 | test_start_time=${test_start_time:-$FAROS_TEST_START_TIME} 833 | test_end_time=${test_end_time:-$FAROS_TEST_END_TIME} 834 | test_task=${test_task:-$FAROS_TEST_TASK} 835 | defect_task=${defect_task:-$FAROS_DEFECT_TASK} 836 | test_suite_task=${test_suite_task:-$FAROS_TEST_SUITE_TASK} 837 | test_execution_task=${test_execution_task:-$FAROS_TEST_EXECUTION_TASK} 838 | task_source=${task_source:-$FAROS_TASK_SOURCE} 839 | 840 | if [ -n "$test_start_time" ]; then 841 | test_start_time=$(convert_to_iso8601 "$test_start_time") 842 | fi 843 | if [ -n "$test_end_time" ]; then 844 | test_end_time=$(convert_to_iso8601 "$test_end_time") 845 | fi 846 | } 847 | 848 | # Parses a uri of the form: 849 | # value_A://value_B/value_C/value_D 850 | # arg1: The env var name in which to store value_A 851 | # arg2: The env var name in which to store value_B 852 | # arg3: The env var name in which to store value_C 853 | # arg4: The env var name in which to store 
value_D 854 | # arg5: The form of the URI to communicate when parsing fails 855 | # arg6: The uri to parse 856 | function parseUri() { 857 | valid_chars="a-zA-Z0-9_.<>-" 858 | uri_regex="^[$valid_chars]+:\/\/[$valid_chars]+\/[$valid_chars]+\/[$valid_chars]+$" 859 | if [ -n "$6" ]; then 860 | if [[ "$6" =~ $uri_regex ]]; then 861 | export "$1"="$(sed 's/:.*//' <<< "$6")" 862 | export "$2"="$(sed 's/.*:\/\/\(.*\)\/.*\/.*/\1/' <<< "$6")" 863 | export "$3"="$(sed 's/.*:\/\/.*\/\(.*\)\/.*/\1/' <<< "$6")" 864 | export "$4"="$(sed 's/.*:\/\/.*\/.*\///' <<< "$6")" 865 | else 866 | err "Resource URI could not be parsed: [$6] The URI should be of the form: $5" 867 | fail 868 | fi 869 | fi 870 | } 871 | 872 | function parseCommitUri() { 873 | parseUri "commit_source" "commit_org" "commit_repo" "commit_sha" "$commit_uri_form" "$commit_uri" 874 | if [ -n "$commit_source" ] && [ -n "$commit_org" ] && [ -n "$commit_repo" ] && [ -n "$commit_sha" ]; then 875 | has_commit=1 876 | fi 877 | } 878 | 879 | function parseRunUri() { 880 | parseUri "run_source" "run_org" "run_pipeline" "run_id" "$run_uri_form" "$run_uri" 881 | if [ -n "$run_source" ] && [ -n "$run_org" ] && [ -n "$run_pipeline" ] && [ -n "$run_id" ]; then 882 | has_run=1 883 | fi 884 | } 885 | 886 | function parseDeployUri() { 887 | parseUri "deploy_source" "deploy_app" "deploy_env" "deploy_id" $deploy_uri_form "$deploy_uri" 888 | } 889 | 890 | function parseArtifactUri() { 891 | parseUri "artifact_source" "artifact_org" "artifact_repo" "artifact_id" "$artifact_uri_form" "$artifact_uri" 892 | if [ -n "$artifact_source" ] && [ -n "$artifact_org" ] && [ -n "$artifact_repo" ] && [ -n "$artifact_id" ]; then 893 | has_artifact=1 894 | fi 895 | } 896 | 897 | function makeEvent() { 898 | request_body=$(jq -n \ 899 | --arg origin "$origin" \ 900 | --arg event_type "$event_type" \ 901 | '{ 902 | "type": $event_type, 903 | "version": "0.0.1", 904 | "origin": $origin, 905 | }' 906 | ) 907 | } 908 | 909 | function tryAddToEvent() { 910 | if [ -n "$2" ]; then 911 | request_body=$(jq \ 912 | --argjson path "$1" \ 913 | --arg val "$2" \ 914 | 'getpath($path) += $val' <<< "$request_body" 915 | ) 916 | fi 917 | } 918 | 919 | function addDeployToData() { 920 | tryAddToEvent '["data","deploy","uri"]' "$deploy_uri" 921 | tryAddToEvent '["data","deploy","id"]' "$deploy_id" 922 | tryAddToEvent '["data","deploy","url"]' "$deploy_url" 923 | tryAddToEvent '["data","deploy","environment"]' "$deploy_env" 924 | tryAddToEvent '["data","deploy","application"]' "$deploy_app" 925 | tryAddToEvent '["data","deploy","source"]' "$deploy_source" 926 | tryAddToEvent '["data","deploy","status"]' "$deploy_status" 927 | tryAddToEvent '["data","deploy","applicationPlatform"]' "$deploy_app_platform" 928 | tryAddToEvent '["data","deploy","statusDetails"]' "$deploy_status_details" 929 | tryAddToEvent '["data","deploy","environmentDetails"]' "$deploy_env_details" 930 | tryAddToEvent '["data","deploy","requestedAt"]' "$deploy_requested_at" 931 | tryAddToEvent '["data","deploy","startTime"]' "$deploy_start_time" 932 | tryAddToEvent '["data","deploy","endTime"]' "$deploy_end_time" 933 | tryAddToEvent '["data","deploy","applicationTags"]' "$deploy_app_tags" 934 | tryAddToEvent '["data","deploy","applicationPaths"]' "$deploy_app_paths" 935 | tryAddToEvent '["data","deploy","tags"]' "$deploy_tags" 936 | } 937 | 938 | function addCommitToData() { 939 | tryAddToEvent '["data","commit","uri"]' "$commit_uri" 940 | tryAddToEvent '["data","commit","sha"]' "$commit_sha" 941 | tryAddToEvent 
'["data","commit","repository"]' "$commit_repo" 942 | tryAddToEvent '["data","commit","organization"]' "$commit_org" 943 | tryAddToEvent '["data","commit","source"]' "$commit_source" 944 | tryAddToEvent '["data","commit","branch"]' "$branch" 945 | if [ -n "$pull_request_number" ]; then 946 | request_body=$(jq \ 947 | --arg pull_request_number "$pull_request_number" \ 948 | '.data.commit += 949 | { 950 | "pullRequestNumber": $pull_request_number|tonumber, 951 | }' <<< "$request_body" 952 | ) 953 | fi 954 | } 955 | 956 | function addArtifactToData() { 957 | tryAddToEvent '["data","artifact","uri"]' "$artifact_uri" 958 | tryAddToEvent '["data","artifact","id"]' "$artifact_id" 959 | tryAddToEvent '["data","artifact","repository"]' "$artifact_repo" 960 | tryAddToEvent '["data","artifact","organization"]' "$artifact_org" 961 | tryAddToEvent '["data","artifact","source"]' "$artifact_source" 962 | } 963 | 964 | function addRunToData() { 965 | tryAddToEvent '["data","run","uri"]' "$run_uri" 966 | tryAddToEvent '["data","run","id"]' "$run_id" 967 | tryAddToEvent '["data","run","pipeline"]' "$run_pipeline" 968 | tryAddToEvent '["data","run","organization"]' "$run_org" 969 | tryAddToEvent '["data","run","source"]' "$run_source" 970 | tryAddToEvent '["data","run","name"]' "$run_name" 971 | tryAddToEvent '["data","run","status"]' "$run_status" 972 | tryAddToEvent '["data","run","statusDetails"]' "$run_status_details" 973 | tryAddToEvent '["data","run","startTime"]' "$run_start_time" 974 | tryAddToEvent '["data","run","endTime"]' "$run_end_time" 975 | } 976 | 977 | function addRunStepToData() { 978 | tryAddToEvent '["data","run","step","id"]' "$run_step_id" 979 | tryAddToEvent '["data","run","step","name"]' "$run_step_name" 980 | tryAddToEvent '["data","run","step","type"]' "$run_step_type" 981 | tryAddToEvent '["data","run","step","typeDetails"]' "$run_step_type_details" 982 | tryAddToEvent '["data","run","step","status"]' "$run_step_status" 983 | tryAddToEvent '["data","run","step","statusDetails"]' "$run_step_status_details" 984 | tryAddToEvent '["data","run","step","command"]' "$run_step_command" 985 | tryAddToEvent '["data","run","step","url"]' "$run_step_url" 986 | tryAddToEvent '["data","run","step","startTime"]' "$run_step_start_time" 987 | tryAddToEvent '["data","run","step","endTime"]' "$run_step_end_time" 988 | } 989 | 990 | function addTestToData() { 991 | tryAddToEvent '["data","test","id"]' "$test_id" 992 | tryAddToEvent '["data","test","source"]' "$test_source" 993 | tryAddToEvent '["data","test","type"]' "$test_type" 994 | tryAddToEvent '["data","test","typeDetails"]' "$test_type_details" 995 | tryAddToEvent '["data","test","status"]' "$test_status" 996 | tryAddToEvent '["data","test","statusDetails"]' "$test_status_details" 997 | tryAddToEvent '["data","test","suite"]' "$test_suite" 998 | tryAddToEvent '["data","test","tags"]' "$test_tags" 999 | tryAddToEvent '["data","test","environments"]' "$environments" 1000 | tryAddToEvent '["data","test","deviceInfo","name"]' "$device_name" 1001 | tryAddToEvent '["data","test","deviceInfo","os"]' "$device_os" 1002 | tryAddToEvent '["data","test","deviceInfo","browser"]' "$device_browser" 1003 | tryAddToEvent '["data","test","deviceInfo","type"]' "$device_type" 1004 | tryAddToEvent '["data","test","testTask"]' "$test_task" 1005 | tryAddToEvent '["data","test","defectTask"]' "$defect_task" 1006 | tryAddToEvent '["data","test","suiteTask"]' "$test_suite_task" 1007 | tryAddToEvent '["data","test","executionTask"]' "$test_execution_task" 1008 | 
tryAddToEvent '["data","test","taskSource"]' "$task_source" 1009 | if [ -n "$test_stats" ]; then 1010 | IFS=',' read -ra ADDR <<< "$test_stats" 1011 | for i in "${ADDR[@]}"; do 1012 | IFS='=' read -r key value <<< "$i" 1013 | request_body=$(jq \ 1014 | --arg key "$key" \ 1015 | --arg value "$value" \ 1016 | '.data.test.stats[$key] += ($value|tonumber)' <<< "$request_body" 1017 | ) 1018 | done 1019 | fi 1020 | tryAddToEvent '["data","test","startTime"]' "$test_start_time" 1021 | tryAddToEvent '["data","test","endTime"]' "$test_end_time" 1022 | } 1023 | 1024 | function sendEventToFaros() { 1025 | log "Sending event to Faros (graph: $1)..." 1026 | 1027 | http_response=$(curl -s -S \ 1028 | --max-time "$max_time" \ 1029 | --retry "$retry" \ 1030 | --retry-delay "$retry_delay" \ 1031 | --retry-max-time "$retry_max_time" \ 1032 | --write-out "HTTPSTATUS:%{http_code}" -X POST \ 1033 | "$url/graphs/$1/events?validateOnly=$validate_only&skipSavingRun=$skip_saving_run&full=$full&noArtifact=$no_artifact" \ 1034 | -H "authorization: $api_key" \ 1035 | -H "content-type: application/json" \ 1036 | -d "$request_body" 1037 | ) 1038 | 1039 | http_response_status=$(echo "$http_response" | tr -d '\n' | sed -e 's/.*HTTPSTATUS://') 1040 | http_response_body=$(echo "$http_response" | sed -e 's/HTTPSTATUS\:.*//g') 1041 | } 1042 | 1043 | function fmtLog(){ 1044 | if ((no_format)); then 1045 | fmtLog="" 1046 | else 1047 | fmtTime="[$(jq -r -n 'now|strflocaltime("%Y-%m-%d %T")')]" 1048 | if [ "$1" == "error" ]; then 1049 | fmtLog="$fmtTime ${RED}ERROR${NC} " 1050 | elif [ "$1" == "warn" ]; then 1051 | fmtLog="$fmtTime ${YELLOW}WARN${NC} " 1052 | elif [ "$1" == "debug" ]; then 1053 | fmtLog="$fmtTime ${GREEN}DEBUG${NC} " 1054 | else 1055 | fmtLog="$fmtTime ${BLUE}INFO${NC} " 1056 | fi 1057 | fi 1058 | } 1059 | 1060 | function printLog() { 1061 | if jq -e . >/dev/null 2>&1 <<< "$1"; then 1062 | if ! ((no_format)); then 1063 | printf "$fmtLog \n" 1064 | echo "$*" | jq . 1065 | else 1066 | # Minify JSON 1067 | echo "$*" | jq -c . 1068 | fi 1069 | else 1070 | printf "$fmtLog" 1071 | printf "$* \n" 1072 | fi 1073 | } 1074 | 1075 | function debug() { 1076 | if ((debug)); then 1077 | fmtLog "debug" 1078 | printLog "$*" 1079 | fi 1080 | } 1081 | 1082 | function log() { 1083 | if ! ((silent)); then 1084 | fmtLog "info" 1085 | printLog "$*" 1086 | fi 1087 | } 1088 | 1089 | function warn() { 1090 | if ! ((silent)); then 1091 | fmtLog "warn" 1092 | printLog "$*" 1093 | fi 1094 | } 1095 | 1096 | function err() { 1097 | fmtLog "error" 1098 | printLog "$*" 1099 | } 1100 | 1101 | function fail() { 1102 | err "Failed." 
1103 | exit 1 1104 | } 1105 | 1106 | main() { 1107 | parseControls "$@" 1108 | set -- "${FLAGS[@]:-}" # Restore positional args 1109 | resolveControlInput # Resolve control fields 1110 | 1111 | debug "+=========================================================" 1112 | debug "| version: $version" 1113 | debug "| jq version: $(jq --version)" 1114 | debug "| url: $url" 1115 | debug "| graph: $graph" 1116 | debug "| origin: $origin" 1117 | debug "| dry run: $(if ((dry_run)); then echo true; else echo false; fi)" 1118 | debug "| validate only: $validate_only" 1119 | debug "| no artifact: $no_artifact" 1120 | debug "+---------------------------------------------------------" 1121 | 1122 | parseFlags "$@" 1123 | set -- "${POSITIONAL[@]:-}" # Restore positional args 1124 | 1125 | debug "+=========================================================" 1126 | 1127 | processArgs "$@" # Determine which event types are present 1128 | processEventTypes # Resolve input and populate event 1129 | 1130 | if ! ((community_edition)); then 1131 | log "Request Body:" 1132 | log "$request_body" 1133 | 1134 | if ! ((dry_run)); then 1135 | for i in "${graphs[@]}"; 1136 | do 1137 | sendEventToFaros "$i" 1138 | 1139 | # Log error response as an error and fail 1140 | if [ ! "$http_response_status" -eq 202 ]; then 1141 | err "[HTTP status: $http_response_status]" 1142 | err "Response Body:" 1143 | err "$http_response_body" 1144 | fail 1145 | else 1146 | log "[HTTP status ACCEPTED: $http_response_status]" 1147 | fi 1148 | done 1149 | else 1150 | log "Dry run: Event NOT sent to Faros." 1151 | fi 1152 | else 1153 | if ((ci_event)); then 1154 | doCIMutations 1155 | elif ((cd_event)); then 1156 | doCDMutations 1157 | else 1158 | err "Event type not supported for community edition." 1159 | fail 1160 | fi 1161 | fi 1162 | 1163 | log "Done." 1164 | exit 0 1165 | } 1166 | 1167 | main "$@" 1168 | -------------------------------------------------------------------------------- /resources/events-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faros-ai/faros-events-cli/150279b2499b78d23059209bef24c4d1c8bc2776/resources/events-1.png -------------------------------------------------------------------------------- /resources/events-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faros-ai/faros-events-cli/150279b2499b78d23059209bef24c4d1c8bc2776/resources/events-2.png -------------------------------------------------------------------------------- /resources/events-3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faros-ai/faros-events-cli/150279b2499b78d23059209bef24c4d1c8bc2776/resources/events-3.jpg -------------------------------------------------------------------------------- /sonar-project.properties: -------------------------------------------------------------------------------- 1 | sonar.projectKey=faros-ai_faros-events-cli 2 | sonar.organization=faros-ai -------------------------------------------------------------------------------- /test/.shellspec: -------------------------------------------------------------------------------- 1 | --require spec_helper 2 | --shell bash 3 | 4 | ## Default kcov (coverage) options 5 | # --kcov-options "--include-path=.
--path-strip-level=1" 6 | # --kcov-options "--include-pattern=.sh" 7 | # --kcov-options "--exclude-pattern=/.shellspec,/spec/,/coverage/,/report/" 8 | 9 | ## Example: Include script "myprog" with no extension 10 | # --kcov-options "--include-pattern=.sh,myprog" 11 | 12 | ## Example: Only specified files/directories 13 | # --kcov-options "--include-pattern=myprog,/lib/" 14 | -------------------------------------------------------------------------------- /test/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM shellspec/shellspec:kcov 2 | RUN apk add --no-cache curl jq gawk sed 3 | -------------------------------------------------------------------------------- /test/demo.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # To run this script: 4 | # 1) Allow this script to be executed by running the following: 5 | # $ chmod u+x ./demo.sh 6 | # 2) Uncomment the command you would like to demo 7 | # 3) Run the script: 8 | # $ ./demo.sh 9 | 10 | set -euo pipefail 11 | 12 | # Connecting to Faros 13 | # ============================================================================= 14 | # visit https://docs.faros.ai/#/api?id=getting-access for api access 15 | export FAROS_API_KEY="" 16 | export FAROS_DRY_RUN=1 # Set to 0 or comment out to send the event to Faros 17 | # ============================================================================= 18 | 19 | # # help 20 | # ../faros_event.sh --help 21 | 22 | # # CI (minimum) 23 | # ../faros_event.sh CI \ 24 | # --commit "vcs_source://vcs_org/vcs_repo/commit_sha" 25 | 26 | # # CI (with pull request) 27 | # ../faros_event.sh CI \ 28 | # --commit "vcs_source://vcs_org/vcs_repo/commit_sha" \ 29 | # --pull_request_number "1" 30 | 31 | # # CI Event 32 | # ../faros_event.sh CI \ 33 | # --artifact "artifact_source://artifact_org/artifact_repo/artifact_id" \ 34 | # --commit "vcs_source://vcs_org/vcs_repo/commit_sha" \ 35 | # --run "run_source://run_org/run_pipeline/run_id" \ 36 | # --run_status "Success" 37 | 38 | # # CI Event (with pull request) 39 | # ../faros_event.sh CI \ 40 | # --artifact "artifact_source://artifact_org/artifact_repo/artifact_id" \ 41 | # --commit "vcs_source://vcs_org/vcs_repo/commit_sha" \ 42 | # --pull_request_number "1" \ 43 | # --run "run_source://run_org/run_pipeline/run_id" \ 44 | # --run_status "Success" 45 | 46 | # # CI Event (without artifact information) 47 | # ../faros_event.sh CI \ 48 | # --commit "vcs_source://vcs_org/vcs_repo/commit_sha" \ 49 | # --run "run_source://run_org/run_pipeline/run_id" \ 50 | # --run_status "Success" 51 | 52 | # # CD Event (Minimum with --artifact) 53 | # ../faros_event.sh CD \ 54 | # --deploy "deploy_source://app/QA/deploy_id" \ 55 | # --artifact "artifact_source://artifact_org/artifact_repo/artifact_id" \ 56 | # --deploy_status "Success" 57 | 58 | # # CD Event (Minimum with --commit) 59 | # ../faros_event.sh CD \ 60 | # --deploy "deploy_source://app/QA/deploy_id" \ 61 | # --commit "vcs_source://vcs_org/vcs_repo/commit_sha" \ 62 | # --deploy_status "Success" 63 | 64 | # # CD Event (with run information) 65 | # ../faros_event.sh CD \ 66 | # --artifact "artifact_source://artifact_org/artifact_repo/artifact_id" \ 67 | # --run "run_source://run_org/run_pipeline/run_id" \ 68 | # --deploy "deploy_source://app/QA/deploy_id" \ 69 | # --deploy_status "Success" \ 70 | # --run_status "Success" 71 | 72 | # # CD Event (using commit) 73 | # ../faros_event.sh CD \ 74 | # --deploy
"deploy_source://app/QA/deploy_id" \ 75 | # --commit "vcs_source://vcs_org/vcs_repo/commit_sha" \ 76 | # --run "run_source://run_org/run_pipeline/run_id" \ 77 | # --deploy_status "Success" \ 78 | # --run_status "Success" 79 | 80 | # # CD Event (using commit and pull request) 81 | # ../faros_event.sh CD \ 82 | # --deploy "deploy_source://app/QA/deploy_id" \ 83 | # --commit "vcs_source://vcs_org/vcs_repo/commit_sha" \ 84 | # --pull_request_number "1" \ 85 | # --run "run_source://run_org/run_pipeline/run_id" \ 86 | # --deploy_status "Success" \ 87 | # --run_status "Success" 88 | 89 | # # CD Event (with --skip_saving_run) 90 | # ../faros_event.sh CD \ 91 | # --commit "vcs_source://vcs_org/vcs_repo/commit_sha" \ 92 | # --run "run_source://run_org/run_pipeline/run_id" \ 93 | # --run_status "Success" \ 94 | # --deploy "deploy_source://app/QA/deploy_id" \ 95 | # --deploy_status "Success" \ 96 | # --skip_saving_run 97 | 98 | # # CD Event (with --no_lowercase_vcs) 99 | # ../faros_event.sh CD \ 100 | # --deploy "deploy_source://app/QA/deploy_id" \ 101 | # --commit "vcs_source://VCS_ORG/VCS_REPO/commit_sha" \ 102 | # --run "run_source://run_org/run_pipeline/run_id" \ 103 | # --deploy_status "Success" \ 104 | # --run_status "Success" \ 105 | # --no_lowercase_vcs 106 | 107 | # # CD Event (with --no_format) 108 | # ../faros_event.sh CD \ 109 | # --deploy "deploy_source://app/QA/deploy_id" \ 110 | # --commit "vcs_source://VCS_ORG/VCS_REPO/commit_sha" \ 111 | # --run "run_source://run_org/run_pipeline/run_id" \ 112 | # --deploy_status "Success" \ 113 | # --run_status "Success" \ 114 | # --no_format 115 | -------------------------------------------------------------------------------- /test/spec/faros_event_community_edition_spec.sh: -------------------------------------------------------------------------------- 1 | Describe 'faros_event.sh (Community edition)' 2 | export FAROS_NO_FORMAT=1 3 | export FAROS_DRY_RUN=1 4 | 5 | Describe 'Community edition CI event' 6 | cicd_organization_from_run='Calling Hasura rest endpoint cicd_organization_from_run with payload { "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 7 | cicd_pipeline='Calling Hasura rest endpoint cicd_pipeline with payload { "data_run_pipeline": "", "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 8 | cicd_build_with_start_end='Calling Hasura rest endpoint cicd_build_with_start_end with payload { "run_status": { "category": "Success", "detail": "Some extra details" }, "run_start_time": "1970-01-01T00:00:01Z", "run_end_time": "1970-01-01T00:00:02Z", "data_run_id": "", "data_run_pipeline": "", "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 9 | cicd_artifact_with_build='Calling Hasura rest endpoint cicd_artifact_with_build with payload { "data_artifact_id": "", "data_artifact_repository": "", "data_artifact_organization": "", "data_artifact_source": "", "data_run_id": "", "data_run_pipeline": "", "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 10 | cicd_organization='Calling Hasura rest endpoint cicd_organization with payload { "data_artifact_organization": "", "data_artifact_source": "", "data_origin": "Faros_Script_Event" }' 11 | cicd_repository='Calling Hasura rest endpoint cicd_repository with payload { "data_artifact_repository": "", "data_artifact_organization": "", "data_artifact_source": "", "data_origin": "Faros_Script_Event" }' 12 | 
cicd_artifact_commit_association='Calling Hasura rest endpoint cicd_artifact_commit_association with payload { "data_artifact_id": "", "data_artifact_repository": "", "data_artifact_organization": "", "data_artifact_source": "", "data_commit_sha": "", "data_commit_repository": "", "data_commit_organization": "", "data_commit_source": "", "data_origin": "Faros_Script_Event" }' 13 | cicd_artifact='Calling Hasura rest endpoint cicd_artifact with payload { "data_artifact_id": "", "data_artifact_repository": "", "data_artifact_organization": "", "data_artifact_source": "", "data_origin": "Faros_Script_Event" }' 14 | cicd_build='Calling Hasura rest endpoint cicd_build with payload { "run_status": { "category": "Success", "detail": "Some extra details" }, "data_run_id": "", "data_run_pipeline": "", "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 15 | vcs_pull_request_commit='Calling Hasura rest endpoint vcs_pull_request_commit with payload { "data_pull_request_uid": "1", "data_pull_request_number": 1, "data_commit_sha": "", "data_commit_repository": "", "data_commit_organization": "", "data_commit_source": "", "data_origin": "Faros_Script_Event" }' 16 | 17 | It 'All data present' 18 | ci_event_test() { 19 | echo $( 20 | ../faros_event.sh CI \ 21 | --run ":////" \ 22 | --commit ":////" \ 23 | --pull_request_number 1 \ 24 | --artifact ":////" \ 25 | --run_status "Success" \ 26 | --run_status_details "Some extra details" \ 27 | --run_start_time "1000" \ 28 | --run_end_time "2000" \ 29 | --community_edition 30 | ) 31 | } 32 | When call ci_event_test 33 | The output should include "$cicd_organization_from_run" 34 | The output should include "$cicd_pipeline" 35 | The output should include "$cicd_build_with_start_end" 36 | The output should include "$cicd_artifact_with_build" 37 | The output should include "$cicd_organization" 38 | The output should include "$cicd_repository" 39 | The output should include "$cicd_artifact_commit_association" 40 | The output should include "$vcs_pull_request_commit" 41 | End 42 | vcs_pull_request_commit='Calling Hasura rest endpoint vcs_pull_request_commit with payload { "data_pull_request_uid": "1", "data_pull_request_number": 1, "data_commit_sha": "", "data_commit_repository": "", "data_commit_organization": "", "data_commit_source": "", "data_origin": "my_origin" }' 43 | 44 | It 'Uses origin from flag' 45 | ci_event_test() { 46 | echo $( 47 | ../faros_event.sh CI \ 48 | --run ":////" \ 49 | --commit ":////" \ 50 | --pull_request_number 1 \ 51 | --artifact ":////" \ 52 | --run_status "Success" \ 53 | --run_status_details "Some extra details" \ 54 | --run_start_time "1000" \ 55 | --run_end_time "2000" \ 56 | --origin my_origin \ 57 | --community_edition 58 | ) 59 | } 60 | When call ci_event_test 61 | The output should include "$vcs_pull_request_commit" 62 | End 63 | It 'Resolves literal Now and converts to iso8601 format' 64 | Intercept begin 65 | __begin__() { 66 | now_as_iso8601() { echo "2022-04-22T18:31:46Z"; } 67 | } 68 | 69 | When run source ../faros_event.sh CI \ 70 | --run ":////" \ 71 | --commit ":////" \ 72 | --artifact ":////" \ 73 | --run_status "Success" \ 74 | --run_status_details "Some extra details" \ 75 | --run_start_time "Now" \ 76 | --run_end_time "2000" \ 77 | --community_edition 78 | The output should include '"run_start_time": "2022-04-22T18:31:46Z"' 79 | End 80 | It 'Leaves time unchanged if not Unix millis or Now' 81 | ci_event_test() { 82 | echo $( 83 | ../faros_event.sh CI \ 84 | --run ":////" \ 85 | 
--commit ":////" \ 86 | --artifact ":////" \ 87 | --run_status "Success" \ 88 | --run_status_details "Some extra details" \ 89 | --run_start_time "2022-04-22T18:36:28Z" \ 90 | --run_end_time "2000" \ 91 | --community_edition 92 | ) 93 | } 94 | When call ci_event_test 95 | The output should include '"run_start_time": "2022-04-22T18:36:28Z"' 96 | End 97 | It 'No run data' 98 | ci_event_test() { 99 | echo $( 100 | ../faros_event.sh CI \ 101 | --commit ":////" \ 102 | --artifact ":////" \ 103 | --community_edition 104 | ) 105 | } 106 | When call ci_event_test 107 | The output should include "$cicd_organization" 108 | The output should include "$cicd_repository" 109 | The output should include "$cicd_artifact_commit_association" 110 | The output should include "$cicd_artifact" 111 | End 112 | It 'No run start/end time' 113 | ci_event_test() { 114 | echo $( 115 | ../faros_event.sh CI \ 116 | --run ":////" \ 117 | --commit ":////" \ 118 | --artifact ":////" \ 119 | --run_status "Success" \ 120 | --run_status_details "Some extra details" \ 121 | --community_edition 122 | ) 123 | } 124 | When call ci_event_test 125 | The output should include "$cicd_organization_from_run" 126 | The output should include "$cicd_pipeline" 127 | The output should include "$cicd_build" 128 | The output should include "$cicd_artifact_with_build" 129 | The output should include "$cicd_organization" 130 | The output should include "$cicd_repository" 131 | The output should include "$cicd_artifact_commit_association" 132 | End 133 | It 'All data present and skip_saving_run' 134 | ci_event_test() { 135 | echo $( 136 | ../faros_event.sh CI \ 137 | --run ":////" \ 138 | --commit ":////" \ 139 | --artifact ":////" \ 140 | --run_status "Success" \ 141 | --run_status_details "Some extra details" \ 142 | --run_start_time "1000" \ 143 | --run_end_time "2000" \ 144 | --community_edition \ 145 | --skip_saving_run 146 | ) 147 | } 148 | When call ci_event_test 149 | The output should include "$cicd_artifact_with_build" 150 | The output should include "$cicd_organization" 151 | The output should include "$cicd_repository" 152 | The output should include "$cicd_artifact_commit_association" 153 | End 154 | End 155 | Describe 'Community edition CD event' 156 | compute_application='Calling Hasura rest endpoint compute_application with payload { "name": "", "platform": "", "uid": "", "data_origin": "Faros_Script_Event" }' 157 | cicd_artifact_deployment='Calling Hasura rest endpoint cicd_artifact_deployment with payload { "data_deploy_id": "", "data_deploy_source": "", "data_artifact_id": "", "data_artifact_repository": "", "data_artifact_organization": "", "data_artifact_source": "", "data_origin": "Faros_Script_Event" }' 158 | cicd_build_with_start_end='Calling Hasura rest endpoint cicd_build_with_start_end with payload { "run_status": { "category": "Success", "detail": "Some extra details" }, "run_start_time": "1970-01-01T00:00:01Z", "run_end_time": "1970-01-01T00:00:02Z", "data_run_id": "", "data_run_pipeline": "", "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 159 | cicd_pipeline='Calling Hasura rest endpoint cicd_pipeline with payload { "data_run_pipeline": "", "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 160 | cicd_organization_from_run='Calling Hasura rest endpoint cicd_organization_from_run with payload { "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 161 | cicd_deployment_with_build='Calling Hasura 
rest endpoint cicd_deployment_with_build with payload { "data_deploy_id": "", "data_deploy_source": "", "status": { "category": "Success", "detail": "" }, "env": { "category": "", "detail": "" }, "compute_Application": "", "deploy_start_time": "1970-01-01T00:00:03Z", "deploy_end_time": "1970-01-01T00:00:04Z", "data_run_id": "", "data_run_pipeline": "", "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 162 | cicd_deployment='Calling Hasura rest endpoint cicd_deployment with payload { "data_deploy_id": "", "data_deploy_source": "", "status": { "category": "Success", "detail": "" }, "env": { "category": "", "detail": "" }, "compute_Application": "", "deploy_start_time": "1970-01-01T00:00:03Z", "deploy_end_time": "1970-01-01T00:00:04Z", "data_origin": "Faros_Script_Event" }' 163 | cicd_artifact_from_commit_info='Calling Hasura rest endpoint cicd_artifact_with_build with payload { "data_artifact_id": "", "data_artifact_repository": "", "data_artifact_organization": "", "data_artifact_source": "", "data_run_id": "", "data_run_pipeline": "", "data_run_organization": "", "data_run_source": "", "data_origin": "Faros_Script_Event" }' 164 | cicd_artifact_commit_association='Calling Hasura rest endpoint cicd_artifact_commit_association with payload { "data_artifact_id": "", "data_artifact_repository": "", "data_artifact_organization": "", "data_artifact_source": "", "data_commit_sha": "", "data_commit_repository": "", "data_commit_organization": "", "data_commit_source": "", "data_origin": "Faros_Script_Event" }' 165 | cicd_artifact_deployment_from_commit='Calling Hasura rest endpoint cicd_artifact_deployment with payload { "data_deploy_id": "", "data_deploy_source": "", "data_artifact_id": "", "data_artifact_repository": "", "data_artifact_organization": "", "data_artifact_source": "", "data_origin": "Faros_Script_Event" }' 166 | vcs_pull_request_commit='Calling Hasura rest endpoint vcs_pull_request_commit with payload { "data_pull_request_uid": "1", "data_pull_request_number": 1, "data_commit_sha": "", "data_commit_repository": "", "data_commit_organization": "", "data_commit_source": "", "data_origin": "Faros_Script_Event" }' 167 | 168 | It 'All data present' 169 | cd_event_test() { 170 | echo $( 171 | ../faros_event.sh CD \ 172 | --run ":////" \ 173 | --artifact ":////" \ 174 | --run_status "Success" \ 175 | --run_status_details "Some extra details" \ 176 | --run_start_time "1000" \ 177 | --run_end_time "2000" \ 178 | --deploy ":////" \ 179 | --deploy_status "Success" \ 180 | --deploy_start_time "3000" \ 181 | --deploy_end_time "4000" \ 182 | --community_edition 183 | ) 184 | } 185 | When call cd_event_test 186 | The output should include "$compute_application" 187 | The output should include "$cicd_artifact_deployment" 188 | The output should include "$cicd_build_with_start_end" 189 | The output should include "$cicd_pipeline" 190 | The output should include "$cicd_organization_from_run" 191 | The output should include "$cicd_deployment_with_build" 192 | End 193 | It 'Resolves literal Now and converts to iso8601 format' 194 | Intercept begin 195 | __begin__() { 196 | now_as_iso8601() { echo "2022-04-22T18:31:46Z"; } 197 | } 198 | 199 | When run source ../faros_event.sh CD \ 200 | --run ":////" \ 201 | --artifact ":////" \ 202 | --run_status "Success" \ 203 | --run_status_details "Some extra details" \ 204 | --run_start_time "1000" \ 205 | --run_end_time "2000" \ 206 | --deploy ":////" \ 207 | --deploy_status "Success" \ 208 | --deploy_start_time "Now" \ 
209 | --deploy_end_time "4000" \ 210 | --community_edition 211 | The output should include '"deploy_start_time": "2022-04-22T18:31:46Z"' 212 | End 213 | It 'Leaves time unchanged if not Unix millis or Now' 214 | cd_event_test() { 215 | echo $( 216 | ../faros_event.sh CD \ 217 | --run ":////" \ 218 | --artifact ":////" \ 219 | --run_status "Success" \ 220 | --run_status_details "Some extra details" \ 221 | --run_start_time "1000" \ 222 | --run_end_time "2000" \ 223 | --deploy ":////" \ 224 | --deploy_status "Success" \ 225 | --deploy_start_time "2022-04-22T18:31:46Z" \ 226 | --deploy_end_time "4000" \ 227 | --community_edition 228 | ) 229 | } 230 | When call cd_event_test 231 | The output should include '"deploy_start_time": "2022-04-22T18:31:46Z"' 232 | End 233 | It 'No run data' 234 | cd_event_test() { 235 | echo $( 236 | ../faros_event.sh CD \ 237 | --artifact ":////" \ 238 | --deploy ":////" \ 239 | --deploy_status "Success" \ 240 | --deploy_start_time "3000" \ 241 | --deploy_end_time "4000" \ 242 | --community_edition 243 | ) 244 | } 245 | When call cd_event_test 246 | The output should include "$compute_application" 247 | The output should include "$cicd_artifact_deployment" 248 | The output should include "$cicd_deployment" 249 | End 250 | It 'All data present and skip_saving_run' 251 | cd_event_test() { 252 | echo $( 253 | ../faros_event.sh CD \ 254 | --run ":////" \ 255 | --artifact ":////" \ 256 | --run_status "Success" \ 257 | --run_status_details "Some extra details" \ 258 | --run_start_time "1000" \ 259 | --run_end_time "2000" \ 260 | --deploy ":////" \ 261 | --deploy_status "Success" \ 262 | --deploy_start_time "3000" \ 263 | --deploy_end_time "4000" \ 264 | --skip_saving_run \ 265 | --community_edition 266 | ) 267 | } 268 | When call cd_event_test 269 | The output should include "$compute_application" 270 | The output should include "$cicd_artifact_deployment" 271 | The output should include "$cicd_deployment_with_build" 272 | End 273 | It 'Creates dummy cicd_Artifact from commit info' 274 | cd_event_test() { 275 | echo $( 276 | ../faros_event.sh CD \ 277 | --commit ":////" \ 278 | --run ":////" \ 279 | --run_status "Success" \ 280 | --run_status_details "Some extra details" \ 281 | --run_start_time "1000" \ 282 | --run_end_time "2000" \ 283 | --deploy ":////" \ 284 | --deploy_status "Success" \ 285 | --deploy_start_time "3000" \ 286 | --deploy_end_time "4000" \ 287 | --community_edition 288 | ) 289 | } 290 | When call cd_event_test 291 | The output should include "$compute_application" 292 | The output should include "$cicd_artifact_deployment_from_commit" 293 | The output should include "$cicd_deployment_with_build" 294 | The output should include "$cicd_artifact_commit_association" 295 | The output should include "$cicd_artifact_from_commit_info" 296 | End 297 | It 'Creates PR/commit association if PR number and commit data present' 298 | cd_event_test() { 299 | echo $( 300 | ../faros_event.sh CD \ 301 | --commit ":////" \ 302 | --pull_request_number 1 \ 303 | --run ":////" \ 304 | --run_status "Success" \ 305 | --run_status_details "Some extra details" \ 306 | --run_start_time "1000" \ 307 | --run_end_time "2000" \ 308 | --deploy ":////" \ 309 | --deploy_status "Success" \ 310 | --deploy_start_time "3000" \ 311 | --deploy_end_time "4000" \ 312 | --community_edition 313 | ) 314 | } 315 | When call cd_event_test 316 | The output should include "$vcs_pull_request_commit" 317 | End 318 | It 'Fails with malformed URI responds with parsing error' 319 | bad_input_test() { 320 | 
echo $( 321 | ../faros_event.sh CI -k "" \ 322 | --commit "$1" \ 323 | --community_edition 324 | ) 325 | } 326 | When call bad_input_test "bad://uri" 327 | The output should equal 'Resource URI could not be parsed: [bad://uri] The URI should be of the form: source://organization/repository/commit_sha Failed.' 328 | End 329 | End 330 | End 331 | -------------------------------------------------------------------------------- /test/spec/faros_event_spec.sh: -------------------------------------------------------------------------------- 1 | Describe 'faros_event.sh' 2 | export FAROS_NO_FORMAT=1 3 | export FAROS_DRY_RUN=1 4 | 5 | Describe 'CD event' 6 | CDAllFields='{"type":"CD","version":"0.0.1","origin":"Faros_Script_Event","data":{"deploy":{"uri":":///QA/","id":"","url":"","environment":"QA","application":"","source":"","status":"Success","applicationPlatform":"","statusDetails":"","environmentDetails":"","requestedAt":"1970-01-01T00:00:05Z","startTime":"1970-01-01T00:00:03Z","endTime":"1970-01-01T00:00:04Z","applicationTags":":,:","applicationPaths":",","tags":":,:"},"artifact":{"uri":":////","id":"","repository":"","organization":"","source":""},"commit":{"uri":":////","sha":"","repository":"","organization":"","source":"","branch":"","pullRequestNumber":101},"run":{"uri":":////","id":"","pipeline":"","organization":"","source":"","status":"Success","statusDetails":"","startTime":"1970-01-01T00:00:01Z","endTime":"1970-01-01T00:00:02Z"}}}' 7 | 8 | It 'populates all fields using flags' 9 | cd_event_test() { 10 | echo $( 11 | ../faros_event.sh CD -k "" \ 12 | --artifact ":////" \ 13 | --artifact_id "" \ 14 | --artifact_repo "" \ 15 | --artifact_org "" \ 16 | --artifact_source "" \ 17 | --commit ":////" \ 18 | --commit_sha "" \ 19 | --commit_repo "" \ 20 | --commit_org "" \ 21 | --commit_source "" \ 22 | --pull_request_number "101" \ 23 | --branch "" \ 24 | --run ":////" \ 25 | --run_id "" \ 26 | --run_pipeline "" \ 27 | --run_org "" \ 28 | --run_source "" \ 29 | --run_status "Success" \ 30 | --run_status_details "" \ 31 | --run_start_time "1000" \ 32 | --run_end_time "2000" \ 33 | --deploy ":///QA/" \ 34 | --deploy_id "" \ 35 | --deploy_env "QA" \ 36 | --deploy_app "" \ 37 | --deploy_source "" \ 38 | --deploy_url "" \ 39 | --deploy_app_platform "" \ 40 | --deploy_app_tags ":,:" \ 41 | --deploy_app_paths "," \ 42 | --deploy_env_details "" \ 43 | --deploy_status "Success" \ 44 | --deploy_status_details "" \ 45 | --deploy_requested_at "5000" \ 46 | --deploy_start_time "3000" \ 47 | --deploy_end_time "4000" \ 48 | --deploy_tags ":,:" 49 | ) 50 | } 51 | When call cd_event_test 52 | The output should include "$CDAllFields" 53 | End 54 | 55 | It 'populates all fields using environment variables' 56 | cd_event_test() { 57 | echo $( 58 | FAROS_API_KEY="" \ 59 | FAROS_ARTIFACT=":////" \ 60 | FAROS_ARTIFACT_ID="" \ 61 | FAROS_ARTIFACT_REPO="" \ 62 | FAROS_ARTIFACT_ORG="" \ 63 | FAROS_ARTIFACT_SOURCE="" \ 64 | FAROS_COMMIT=":////" \ 65 | FAROS_COMMIT_SHA="" \ 66 | FAROS_COMMIT_REPO="" \ 67 | FAROS_COMMIT_ORG="" \ 68 | FAROS_COMMIT_SOURCE="" \ 69 | FAROS_PULL_REQUEST_NUMBER="101" \ 70 | FAROS_BRANCH="" \ 71 | FAROS_RUN=":////" \ 72 | FAROS_RUN_ID="" \ 73 | FAROS_RUN_PIPELINE="" \ 74 | FAROS_RUN_ORG="" \ 75 | FAROS_RUN_SOURCE="" \ 76 | FAROS_RUN_STATUS="Success" \ 77 | FAROS_RUN_STATUS_DETAILS="" \ 78 | FAROS_RUN_START_TIME="1000" \ 79 | FAROS_RUN_END_TIME="2000" \ 80 | FAROS_DEPLOY=":///QA/" \ 81 | FAROS_DEPLOY_ID="" \ 82 | FAROS_DEPLOY_ENV="QA" \ 83 | FAROS_DEPLOY_APP="" \ 84 | FAROS_DEPLOY_SOURCE="" \ 
85 | FAROS_DEPLOY_URL="" \ 86 | FAROS_DEPLOY_APP_PLATFORM="" \ 87 | FAROS_DEPLOY_APP_TAGS=":,:" \ 88 | FAROS_DEPLOY_APP_PATHS="," \ 89 | FAROS_DEPLOY_ENV_DETAILS="" \ 90 | FAROS_DEPLOY_STATUS="Success" \ 91 | FAROS_DEPLOY_STATUS_DETAILS="" \ 92 | FAROS_DEPLOY_REQUESTED_AT="5000" \ 93 | FAROS_DEPLOY_START_TIME="3000" \ 94 | FAROS_DEPLOY_END_TIME="4000" \ 95 | FAROS_DEPLOY_TAGS=":,:" \ 96 | ../faros_event.sh CD 97 | ) 98 | } 99 | When call cd_event_test 100 | The output should include "$CDAllFields" 101 | End 102 | 103 | It 'resolves literal Now and converts to iso8601 format' 104 | Intercept begin 105 | __begin__() { 106 | now_as_iso8601() { echo "2022-04-22T18:31:46Z"; } 107 | } 108 | 109 | When run source ../faros_event.sh CD -k "" \ 110 | --artifact ":////" \ 111 | --run ":////" \ 112 | --run_status "Success" \ 113 | --run_status_details "" \ 114 | --run_start_time "Now" \ 115 | --run_end_time "Now" \ 116 | --deploy ":///QA/" \ 117 | --deploy_app_platform "" \ 118 | --deploy_env_details "" \ 119 | --deploy_status "Success" \ 120 | --deploy_status_details "" \ 121 | --deploy_start_time "Now" \ 122 | --deploy_end_time "Now" 123 | The output should include '"startTime":"2022-04-22T18:31:46Z"' 124 | The output should include '"endTime":"2022-04-22T18:31:46Z"' 125 | End 126 | 127 | It 'leaves time unchanged if not Unix millis or Now' 128 | ci_event_test() { 129 | echo $( 130 | ../faros_event.sh CD -k "" \ 131 | --artifact ":////" \ 132 | --run ":////" \ 133 | --run_status "Success" \ 134 | --run_status_details "" \ 135 | --run_start_time "2022-04-22T18:36:01Z" \ 136 | --run_end_time "2022-04-22T18:36:02Z" \ 137 | --deploy ":///QA/" \ 138 | --deploy_app_platform "" \ 139 | --deploy_env_details "" \ 140 | --deploy_status "Success" \ 141 | --deploy_status_details "" \ 142 | --deploy_start_time "2022-04-22T18:36:03Z" \ 143 | --deploy_end_time "2022-04-22T18:36:04Z" 144 | ) 145 | } 146 | When call ci_event_test 147 | The output should include '"startTime":"2022-04-22T18:36:01Z"' 148 | The output should include '"endTime":"2022-04-22T18:36:02Z"' 149 | The output should include '"startTime":"2022-04-22T18:36:03Z"' 150 | The output should include '"endTime":"2022-04-22T18:36:04Z"' 151 | End 152 | End 153 | 154 | Describe 'CI event' 155 | 156 | CIAllFields='{"type":"CI","version":"0.0.1","origin":"Faros_Script_Event","data":{"artifact":{"uri":":////","id":"","repository":"","organization":"","source":""},"commit":{"uri":":////","sha":"","repository":"","organization":"","source":"","branch":"","pullRequestNumber":101},"run":{"uri":":////","id":"","pipeline":"","organization":"","source":"","status":"Success","statusDetails":"","startTime":"1970-01-01T00:00:01Z","endTime":"1970-01-01T00:00:02Z","step":{"id":"","name":"","type":"","typeDetails":"","status":"","statusDetails":"","command":"","url":"","startTime":"1970-01-01T00:00:03Z","endTime":"1970-01-01T00:00:04Z"}}}}' 157 | 158 | It 'populates all fields using flags' 159 | ci_event_test() { 160 | echo $( 161 | ../faros_event.sh CI -k "" \ 162 | --artifact ":////" \ 163 | --artifact_id "" \ 164 | --artifact_repo "" \ 165 | --artifact_org "" \ 166 | --artifact_source "" \ 167 | --commit ":////" \ 168 | --commit_sha "" \ 169 | --commit_repo "" \ 170 | --commit_org "" \ 171 | --commit_source "" \ 172 | --pull_request_number "101" \ 173 | --branch "" \ 174 | --run ":////" \ 175 | --run_id "" \ 176 | --run_pipeline "" \ 177 | --run_org "" \ 178 | --run_source "" \ 179 | --run_status "Success" \ 180 | --run_status_details "" \ 181 | --run_start_time "1000" 
\ 182 | --run_end_time "2000" \ 183 | --run_step_id "" \ 184 | --run_step_name "" \ 185 | --run_step_type "" \ 186 | --run_step_type_details "" \ 187 | --run_step_status "" \ 188 | --run_step_status_details "" \ 189 | --run_step_command "" \ 190 | --run_step_url "" \ 191 | --run_step_start_time "3000" \ 192 | --run_step_end_time "4000" 193 | ) 194 | } 195 | When call ci_event_test 196 | The output should include "$CIAllFields" 197 | End 198 | 199 | It 'populates all fields using environment variables' 200 | ci_event_test() { 201 | echo $( 202 | FAROS_API_KEY="" \ 203 | FAROS_ARTIFACT=":////" \ 204 | FAROS_ARTIFACT_ID="" \ 205 | FAROS_ARTIFACT_REPO="" \ 206 | FAROS_ARTIFACT_ORG="" \ 207 | FAROS_ARTIFACT_SOURCE="" \ 208 | FAROS_COMMIT=":////" \ 209 | FAROS_COMMIT_SHA="" \ 210 | FAROS_COMMIT_REPO="" \ 211 | FAROS_COMMIT_ORG="" \ 212 | FAROS_COMMIT_SOURCE="" \ 213 | FAROS_PULL_REQUEST_NUMBER="101" \ 214 | FAROS_BRANCH="" \ 215 | FAROS_RUN=":////" \ 216 | FAROS_RUN_ID="" \ 217 | FAROS_RUN_PIPELINE="" \ 218 | FAROS_RUN_ORG="" \ 219 | FAROS_RUN_SOURCE="" \ 220 | FAROS_RUN_STATUS="Success" \ 221 | FAROS_RUN_STATUS_DETAILS="" \ 222 | FAROS_RUN_START_TIME="1000" \ 223 | FAROS_RUN_END_TIME="2000" \ 224 | FAROS_RUN_STEP_ID="" \ 225 | FAROS_RUN_STEP_NAME="" \ 226 | FAROS_RUN_STEP_TYPE="" \ 227 | FAROS_RUN_STEP_TYPE_DETAILS="" \ 228 | FAROS_RUN_STEP_STATUS="" \ 229 | FAROS_RUN_STEP_STATUS_DETAILS="" \ 230 | FAROS_RUN_STEP_COMMAND="" \ 231 | FAROS_RUN_STEP_URL="" \ 232 | FAROS_RUN_STEP_START_TIME="3000" \ 233 | FAROS_RUN_STEP_END_TIME="4000" \ 234 | ../faros_event.sh CI 235 | ) 236 | } 237 | When call ci_event_test 238 | The output should include "$CIAllFields" 239 | End 240 | 241 | CIWithPullRequestExpectedOutput='{"type":"CI","version":"0.0.1","origin":"Faros_Script_Event","data":{"commit":{"pullRequestNumber":101}}}' 242 | 243 | It 'populates pull request when provided as number input' 244 | ci_event_test() { 245 | echo $( 246 | ../faros_event.sh CI -k "" \ 247 | --pull_request_number 101 248 | ) 249 | } 250 | When call ci_event_test 251 | The output should include "$CIWithPullRequestExpectedOutput" 252 | End 253 | End 254 | 255 | Describe 'TaskExecution event' 256 | 257 | TestExecutionAllFields='{"type":"TestExecution","version":"0.0.1","origin":"Faros_Script_Event","data":{"test":{"id":"","source":"","type":"","typeDetails":"","status":"","statusDetails":"","suite":"","tags":"","environments":"","deviceInfo":{"name":"","os":"","browser":"","type":""},"testTask":"","defectTask":"","suiteTask":"","executionTask":"","taskSource":"","stats":{"failure":1,"success":2,"skipped":3,"unknown":4,"custom":5,"total":15},"startTime":"1970-01-01T00:00:01Z","endTime":"1970-01-01T00:00:02Z"},"commit":{"uri":":////","sha":"","repository":"","organization":"","source":"","branch":"","pullRequestNumber":101},"run":{"uri":":////","id":"","pipeline":"","organization":"","source":""}}}' 258 | 259 | It 'populates all fields using flags' 260 | test_execution_event_test() { 261 | echo $( 262 | ../faros_event.sh TestExecution -k "" \ 263 | --test_id "" \ 264 | --test_source "" \ 265 | --test_type "" \ 266 | --test_type_details "" \ 267 | --test_status "" \ 268 | --test_status_details "" \ 269 | --test_suite "" \ 270 | --test_stats "failure=1,success=2,skipped=3,unknown=4,custom=5,total=15" \ 271 | --test_tags "" \ 272 | --environments "" \ 273 | --device_name "" \ 274 | --device_os "" \ 275 | --device_browser "" \ 276 | --device_type "" \ 277 | --test_start_time "1000" \ 278 | --test_end_time "2000" \ 279 | --test_task "" \ 
280 | --defect_task "" \ 281 | --test_suite_task "" \ 282 | --test_execution_task "" \ 283 | --task_source "" \ 284 | --commit ":////" \ 285 | --commit_sha "" \ 286 | --commit_repo "" \ 287 | --commit_org "" \ 288 | --commit_source "" \ 289 | --pull_request_number "101" \ 290 | --branch "" \ 291 | --run ":////" \ 292 | --run_id "" \ 293 | --run_pipeline "" \ 294 | --run_org "" \ 295 | --run_source "" 296 | ) 297 | } 298 | When call test_execution_event_test 299 | The output should include "$TestExecutionAllFields" 300 | End 301 | 302 | It 'populates all fields using environment variables' 303 | test_execution_event_test() { 304 | echo $( 305 | FAROS_API_KEY="" \ 306 | FAROS_TEST_ID="" \ 307 | FAROS_TEST_SOURCE="" \ 308 | FAROS_TEST_TYPE="" \ 309 | FAROS_TEST_TYPE_DETAILS="" \ 310 | FAROS_TEST_STATUS="" \ 311 | FAROS_TEST_STATUS_DETAILS="" \ 312 | FAROS_TEST_SUITE="" \ 313 | FAROS_TEST_STATS="failure=1,success=2,skipped=3,unknown=4,custom=5,total=15" \ 314 | FAROS_TEST_TAGS="" \ 315 | FAROS_ENVIRONMENTS="" \ 316 | FAROS_DEVICE_NAME="" \ 317 | FAROS_DEVICE_OS="" \ 318 | FAROS_DEVICE_BROWSER="" \ 319 | FAROS_DEVICE_TYPE="" \ 320 | FAROS_TEST_START_TIME="1000" \ 321 | FAROS_TEST_END_TIME="2000" \ 322 | FAROS_TEST_TASK="" \ 323 | FAROS_DEFECT_TASK="" \ 324 | FAROS_TEST_SUITE_TASK="" \ 325 | FAROS_TEST_EXECUTION_TASK="" \ 326 | FAROS_TASK_SOURCE="" \ 327 | FAROS_COMMIT=":////" \ 328 | FAROS_COMMIT_SHA="" \ 329 | FAROS_COMMIT_REPO="" \ 330 | FAROS_COMMIT_ORG="" \ 331 | FAROS_COMMIT_SOURCE="" \ 332 | FAROS_PULL_REQUEST_NUMBER="101" \ 333 | FAROS_BRANCH="" \ 334 | FAROS_RUN=":////" \ 335 | FAROS_RUN_ID="" \ 336 | FAROS_RUN_PIPELINE="" \ 337 | FAROS_RUN_ORG="" \ 338 | FAROS_RUN_SOURCE="" \ 339 | ../faros_event.sh TestExecution 340 | ) 341 | } 342 | When call test_execution_event_test 343 | The output should include "$TestExecutionAllFields" 344 | End 345 | End 346 | 347 | Describe 'bad input' 348 | 349 | It 'responds with bad input' 350 | bad_input_test() { 351 | echo $( 352 | ../faros_event.sh $1 \ 353 | -k "" \ 354 | --run "" \ 355 | $2 356 | ) 357 | } 358 | When call bad_input_test "Bad_Input" "Also_Bad" 359 | The output should equal 'Unrecognized arg(s): Bad_Input Also_Bad Failed.' 360 | End 361 | End 362 | 363 | Describe 'Common Tests - ' 364 | It 'Multiple graphs send multiple events' 365 | multi_graph_test() { 366 | echo $( 367 | ../faros_event.sh CD -k "" \ 368 | --graph test_1,test_2 \ 369 | --deploy ":///QA/" \ 370 | --debug 371 | ) 372 | } 373 | When call multi_graph_test 374 | The output should include 'test_1' 375 | The output should include 'test_2' 376 | End 377 | End 378 | End 379 | -------------------------------------------------------------------------------- /test/spec/spec_helper.sh: -------------------------------------------------------------------------------- 1 | # shellcheck shell=sh 2 | 3 | # Defining variables and functions here will affect all specfiles. 4 | # Changing shell options inside a function may cause different behavior, 5 | # so it is better to set them here. 6 | # set -eu 7 | 8 | # This callback function will be invoked only once before loading specfiles. 9 | spec_helper_precheck() { 10 | # Available functions: info, warn, error, abort, setenv, unsetenv 11 | # Available variables: VERSION, SHELL_TYPE, SHELL_VERSION 12 | : minimum_version "0.28.1" 13 | } 14 | 15 | # This callback function will be invoked after a specfile has been loaded. 16 | spec_helper_loaded() { 17 | : 18 | } 19 | 20 | # This callback function will be invoked after core modules have been loaded.
21 | spec_helper_configure() { 22 | # Available functions: import, before_each, after_each, before_all, after_all 23 | : import 'support/custom_matcher' 24 | } 25 | --------------------------------------------------------------------------------
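As a quick point of reference, the specs above run faros_event.sh with FAROS_DRY_RUN=1, so the assembled event is printed instead of being sent to the Faros API; the same setup works for a local smoke test from the repository root. The snippet below is a minimal sketch built only from flags and environment variables exercised in these specs; the API key is a placeholder, and the deploy URI shape shown (source://application/environment/deploy_id) is inferred from the deploy field mapping in the CD spec rather than documented in this file.

# Minimal dry-run smoke test (sketch only; placeholder values).
# FAROS_DRY_RUN=1 prints the assembled event rather than sending it,
# mirroring how these specs capture output for their assertions.
FAROS_DRY_RUN=1 FAROS_NO_FORMAT=1 \
  ./faros_event.sh CD -k "<api_key>" \
    --deploy "<cd_source>://<app>/QA/<deploy_id>" \
    --deploy_status "Success" \
    --deploy_start_time "Now" \
    --deploy_end_time "Now" \
    --debug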