├── tests ├── __init__.py ├── utils_test.py └── process_test.py ├── .shellcheckrc ├── requirements.txt ├── doc └── flow.png ├── .gitignore ├── .dockerignore ├── requirements_dev.txt ├── setup.cfg ├── .pylintrc ├── .mergify.yml ├── .tekton ├── tekton.yaml ├── run.yaml └── pipeline.yaml ├── Makefile ├── triggers ├── bindings.yaml ├── eventlistener.yaml ├── template.yaml └── role.yaml ├── tektonasacode ├── __init__.py ├── config.py ├── cli.py ├── utils.py ├── github.py ├── process_templates.py └── main.py ├── Dockerfile ├── .yamllint ├── misc ├── tkaac-add-secret ├── tkaac-grab-parameters ├── tkaac-status └── send-slack-notifications.py ├── pipeline └── pipeline.yaml ├── tasks └── tekton-asa-code.yaml ├── setup.py ├── deploy.sh ├── Flow.md └── README.md /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.shellcheckrc: -------------------------------------------------------------------------------- 1 | disable=SC2046,SC2086 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pyyaml 2 | tektonbundle>=0.3.0 3 | -------------------------------------------------------------------------------- /doc/flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chmouel/tekton-asa-code/HEAD/doc/flow.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .eggs/ 2 | tmp/* 3 | **/__pycache__ 4 | **/*.pyc 5 | .venv/ 6 | htmlcov/ 7 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | **/__pycache__/ 2 | **/*.pyc 3 | .venv 4 | .git 5 | **/.mypy_cache 6 | **/.pytest_cache 7 | -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | pip==19.2.3 2 | 3 | pylint==2.6.0 4 | pytest==4.6.5 5 | pytest-runner==5.1 6 | pytest-cov 7 | yapf 8 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal = 1 3 | 4 | [flake8] 5 | exclude = docs 6 | 7 | [aliases] 8 | test = pytest 9 | 10 | [tool:pytest] 11 | addopts = --cov-report html -ra -q 12 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MESSAGES CONTROL] 2 | enable=c-extension-no-member 3 | disable=C0301,R0914,W0511,too-many-branches,too-many-statements,too-many-branches,broad-except,too-many-arguments,bad-continuation,unsubscriptable-object 4 | -------------------------------------------------------------------------------- /.mergify.yml: -------------------------------------------------------------------------------- 1 | pull_request_rules: 2 | - name: Automatic merge on approval 3 | conditions: 4 | - status-success="Tekton CI" 5 | - label!=work-in-progress 6 | actions: 7 | merge: 8 | method: merge 9 | 
-------------------------------------------------------------------------------- /.tekton/tekton.yaml: -------------------------------------------------------------------------------- 1 | # Tekton yaml 2 | owners: 3 | - "@openshift-pipelines" 4 | 5 | # Install tasks from catalog 6 | tasks: 7 | - git-clone 8 | - yaml-lint 9 | - pylint 10 | - shellcheck 11 | 12 | # Apply those files in order in the .tekton directory 13 | files: 14 | - pipeline.yaml 15 | - run.yaml 16 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | IMAGE_NAME = quay.io/chmouel/tekton-asa-code 2 | 3 | image: 4 | @docker build -t $(IMAGE_NAME) . 5 | 6 | push: 7 | @docker push $(IMAGE_NAME) 8 | 9 | imagepush: image push 10 | 11 | lint: ## check style with flake8 12 | @pylint tektonasacode tests 13 | 14 | coverage: 15 | @pytest --cov-report html --cov=tektonasacode tests/ 16 | -------------------------------------------------------------------------------- /triggers/bindings.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: triggers.tekton.dev/v1alpha1 3 | kind: TriggerBinding 4 | metadata: 5 | name: tekton-asa-code-bindings 6 | spec: 7 | params: 8 | - name: github_json 9 | value: $(body) 10 | - name: installation_id 11 | value: $(body.installation.id) 12 | - name: application_id 13 | value: {{application_id}} 14 | -------------------------------------------------------------------------------- /.tekton/run.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: tekton.dev/v1beta1 3 | kind: PipelineRun 4 | metadata: 5 | name: tekton-asa-code-check-pr 6 | spec: 7 | pipelineRef: 8 | name: tekton-asa-code-check 9 | serviceAccountName: 'builder' 10 | params: 11 | - name: repo_url 12 | value: {{repo_url}} 13 | - name: revision 14 | value: {{revision}} 15 | workspaces: 16 | - name: source 17 | volumeClaimTemplate: 18 | spec: 19 | accessModes: 20 | - ReadWriteOnce 21 | resources: 22 | requests: 23 | storage: 1Gi 24 | -------------------------------------------------------------------------------- /triggers/eventlistener.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: triggers.tekton.dev/v1alpha1 2 | kind: EventListener 3 | metadata: 4 | name: tekton-asa-code-listener-interceptor 5 | spec: 6 | serviceAccountName: tekton-triggers-github-sa 7 | triggers: 8 | - name: github-listener 9 | bindings: 10 | - ref: tekton-asa-code-bindings 11 | interceptors: 12 | - github: 13 | eventTypes: 14 | - pull_request 15 | - issue_comment 16 | - cel: 17 | filter: "body.action in ['created', 'opened', 'synchronize'] && 'installation' in body" 18 | template: 19 | ref: tekton-asa-code-template 20 | -------------------------------------------------------------------------------- /tektonasacode/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Chmouel Boudjnah 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 5 | # not use this file except in compliance with the License. 
You may obtain 6 | # a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 12 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 13 | # License for the specific language governing permissions and limitations 14 | # under the License. 15 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM registry.access.redhat.com/ubi8/ubi:8.2 2 | ARG TKN_VERSION=0.15.0 3 | 4 | COPY . /code 5 | WORKDIR /code 6 | 7 | RUN curl -sL -o- https://mirror.openshift.com/pub/openshift-v4/clients/oc/latest/linux/oc.tar.gz | \ 8 | tar xvzf - -C /usr/local/bin oc kubectl && \ 9 | chmod +x /usr/local/bin/{kubectl,oc} 10 | 11 | RUN curl -Ls -o- https://github.com/tektoncd/cli/releases/download/v${TKN_VERSION}/tkn_${TKN_VERSION}_Linux_x86_64.tar.gz | tar zxf - -C /usr/local/bin && \ 12 | chmod +x /usr/local/bin/tkn 13 | 14 | 15 | RUN INSTALL_PKGS="git python38" && \ 16 | yum -y --setopt=tsflags=nodocs install $INSTALL_PKGS && \ 17 | rpm -V $INSTALL_PKGS && \ 18 | yum -y clean all --enablerepo='*' 19 | 20 | RUN pip3 install -r requirements.txt -e. 21 | ENTRYPOINT ["tekton-asa-code"] 22 | -------------------------------------------------------------------------------- /.yamllint: -------------------------------------------------------------------------------- 1 | ignore: | 2 | /vendor 3 | 4 | rules: 5 | braces: enable 6 | brackets: enable 7 | colons: enable 8 | commas: enable 9 | comments: 10 | level: warning 11 | comments-indentation: 12 | level: warning 13 | document-end: disable 14 | document-start: disable 15 | empty-lines: enable 16 | empty-values: enable 17 | hyphens: enable 18 | key-duplicates: enable 19 | key-ordering: disable 20 | line-length: disable 21 | new-line-at-end-of-file: disable 22 | new-lines: enable 23 | octal-values: enable 24 | quoted-strings: disable 25 | trailing-spaces: enable 26 | truthy: 27 | level: warning 28 | 29 | # accept both key: 30 | # - item 31 | # 32 | # and key: 33 | # - item 34 | indentation: 35 | indent-sequences: whatever 36 | -------------------------------------------------------------------------------- /misc/tkaac-add-secret: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | TARGET_NAMESPACE="tekton-asa-code" 3 | secrettype=${1} 4 | repoorguser=${2} 5 | secret=${3} 6 | 7 | [[ -z ${3} ]] && { 8 | echo "tkaac-add-secret secrettype repo/org key=value" 9 | } 10 | 11 | [[ -z ${secrettype} ]] && { echo "I need a secrettype: i.e: coverage or docker or whatever string to differentiate the puprose of the secret." 
;exit 1;} 12 | [[ -z ${repoorguser} ]] && { echo "I need a repoorguser user i.e: openshift/origin"; exit 1 ;} 13 | [[ -z ${secret} ]] && { echo "I need a secret i.e: token=secret"; exit 1 ;} 14 | 15 | orguser=${repoorguser%/*} 16 | repo=${repoorguser#*/} 17 | 18 | kubectl -n ${TARGET_NAMESPACE} create secret generic ${orguser}-${repo} --from-literal="${secret}" 19 | kubectl label secret ${orguser}-${repo} tekton/asa-code-repoorgusersitory-owner="${orguser}" tekton/asa-code-repoorgusersitory-name="${repo}" -------------------------------------------------------------------------------- /pipeline/pipeline.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: tekton.dev/v1beta1 3 | kind: Pipeline 4 | metadata: 5 | name: tekton-asa-code 6 | spec: 7 | params: 8 | - name: application_id 9 | - name: installation_id 10 | - name: github_json 11 | workspaces: 12 | - name: secrets 13 | tasks: 14 | - name: get-token 15 | taskRef: 16 | name: github-app-token 17 | params: 18 | - name: application_id 19 | value: $(params.application_id) 20 | - name: installation_id 21 | value: $(params.installation_id) 22 | workspaces: 23 | - name: secrets 24 | workspace: secrets 25 | - name: tekton-asa-code 26 | runAfter: [get-token] 27 | taskRef: 28 | name: tekton-asa-code 29 | params: 30 | - name: github_token 31 | value: "$(tasks.get-token.results.token)" 32 | - name: github_json 33 | value: "$(params.github_json)" 34 | -------------------------------------------------------------------------------- /triggers/template.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: triggers.tekton.dev/v1alpha1 3 | kind: TriggerTemplate 4 | metadata: 5 | name: tekton-asa-code-template 6 | annotations: 7 | triggers.tekton.dev/old-escape-quotes: "true" 8 | spec: 9 | params: 10 | - name: installation_id 11 | - name: application_id 12 | - name: github_json 13 | resourcetemplates: 14 | - apiVersion: tekton.dev/v1beta1 15 | kind: PipelineRun 16 | metadata: 17 | generateName: tekton-asa-code-run- 18 | labels: 19 | tekton.dev/pipeline: tekton-asa-code 20 | spec: 21 | serviceAccountName: tkn-aac-sa 22 | params: 23 | - name: application_id 24 | value: $(tt.params.application_id) 25 | - name: installation_id 26 | value: $(tt.params.installation_id) 27 | - name: github_json 28 | value: $(tt.params.github_json) 29 | pipelineRef: 30 | name: tekton-asa-code 31 | workspaces: 32 | - name: secrets 33 | secret: 34 | secretName: github-app-secret 35 | -------------------------------------------------------------------------------- /tasks/tekton-asa-code.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: tekton.dev/v1beta1 2 | kind: Task 3 | metadata: 4 | name: tekton-asa-code 5 | spec: 6 | description: >- 7 | This task will take a PR and checks which files are contained in there. 8 | 9 | The github-check-pr-files will take a GitHUB PR as an argument check what 10 | files are contained. 
11 | 12 | params: 13 | - name: github_json 14 | type: string 15 | description: the full json received from json 16 | 17 | - name: github_token 18 | type: string 19 | description: the github token used for github operation 20 | 21 | steps: 22 | - name: apply-and-launch 23 | env: 24 | - name: TKC_PIPELINERUN 25 | valueFrom: 26 | fieldRef: 27 | fieldPath: metadata.labels['tekton.dev/pipelineRun'] 28 | - name: TKC_NAMESPACE 29 | valueFrom: 30 | fieldRef: 31 | fieldPath: metadata.namespace 32 | - name: PYTHONUNBUFFERED 33 | value: "true" 34 | image: quay.io/chmouel/tekton-asa-code:latest 35 | args: 36 | - "$(params.github_json)" 37 | - "$(params.github_token)" 38 | -------------------------------------------------------------------------------- /tektonasacode/config.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Chmouel Boudjnah 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 5 | # not use this file except in compliance with the License. You may obtain 6 | # a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 12 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 13 | # License for the specific language governing permissions and limitations 14 | # under the License. 15 | """Constant stuff""" 16 | import os 17 | 18 | TEKTON_ASA_CODE_DIR = os.environ.get("TEKTON_ASA_CODE_DIR", ".tekton") 19 | 20 | REPOSITORY_DIR = "/tmp/repository" 21 | 22 | GITHUB_RAW_URL = "https://raw.githubusercontent.com/tektoncd/catalog/main/task" 23 | GITHUB_API_URL = "https://api.github.com" 24 | 25 | COMMENT_ALLOWED_STRING = "/ok-to-test" 26 | COMMENT_RETEST_STRING = "/retest" 27 | 28 | TEKTON_CATALOG_REPOSITORY = "tektoncd/catalog" 29 | 30 | ALLOW_PRERUNS_CMD = False 31 | -------------------------------------------------------------------------------- /tektonasacode/cli.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # Author: Chmouel Boudjnah 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. You may obtain 7 | # a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 14 | # License for the specific language governing permissions and limitations 15 | # under the License. 
16 | """Tekton asa Code""" 17 | import argparse 18 | import sys 19 | 20 | from tektonasacode import main 21 | 22 | 23 | def run(): 24 | """Console script for tektonasacode.""" 25 | parser = argparse.ArgumentParser() 26 | parser.add_argument('github_json', help="The full json from Github") 27 | parser.add_argument('github_token', 28 | help="The Github token to do operation with") 29 | 30 | args = parser.parse_args() 31 | tkaac = main.TektonAsaCode(args.github_token, args.github_json) 32 | tkaac.runwrap() 33 | 34 | 35 | if __name__ == "__main__": 36 | sys.exit(run()) # pragma: no cover 37 | -------------------------------------------------------------------------------- /misc/tkaac-grab-parameters: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Take the last pipelineRun (or the pipelinerun you specify as argument) 3 | # and dump the github_json and token to /tmp, you can then launch manually the 4 | # tkaac cli with the argument to debug the code : 5 | # 6 | # tkaac-grab-parameters && python tektonasacode/cli.py "$(cat /tmp/pr.json)" $(cat /tmp/pr.token) 7 | set -e 8 | namespace="tekton-asa-code" 9 | 10 | type -p jq >/dev/null || { echo "need jq installed"; exit 1 ;} 11 | type -p tkn >/dev/null || { echo "need tkn installed"; exit 1 ;} 12 | R=/tmp/.pr.json 13 | P=/tmp/tekton-asa-code-lastrun.json 14 | T=/tmp/tekton-asa-code-lastrun.token 15 | 16 | latest=$1 17 | [[ -z ${1} ]] && { 18 | latest="$(tkn pr -n ${namespace} ls --limit=1 --no-headers -o name|sed 's/.*\///')" 19 | echo -n "Using parameters from ${latest}: " 20 | } 21 | 22 | kubectl -n ${namespace} get pr ${latest} -o json > ${R} 23 | 24 | jq -r '.spec.params[] | select(.name=="github_json").value' "${R}"|tr '\n' ' '|tr '\r' ' ' > "${P}" 25 | jq -r '.status.taskRuns[].status.taskResults[]? | select(.name=="token").value' ${R} > "${T}" 26 | 27 | echo "success." 
28 | echo "github_json and github_token are saved in ${P} and ${T}" 29 | 30 | echo -e "\nPR Detail:\n---------\n" 31 | echo -n "Title: " 32 | jq -r '.pull_request.title' ${P} 33 | echo -n "URL: " 34 | jq -r '.repository.html_url + "/pull/" + (.number | tostring)' ${P} 35 | -------------------------------------------------------------------------------- /.tekton/pipeline.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: tekton.dev/v1beta1 3 | kind: Pipeline 4 | metadata: 5 | name: tekton-asa-code-check 6 | spec: 7 | params: 8 | - name: repo_url 9 | - name: revision 10 | workspaces: 11 | - name: source 12 | tasks: 13 | - name: fetch 14 | taskRef: 15 | name: git-clone 16 | params: 17 | - name: url 18 | value: $(params.repo_url) 19 | - name: revision 20 | value: $(params.revision) 21 | workspaces: 22 | - name: output 23 | workspace: source 24 | - name: yaml-lint 25 | runAfter: [fetch] 26 | taskRef: 27 | name: yaml-lint 28 | params: 29 | - name: args 30 | value: ["."] 31 | workspaces: 32 | - name: shared-workspace 33 | workspace: source 34 | - name: pthon-lint 35 | runAfter: [fetch] 36 | taskRef: 37 | name: pylint 38 | params: 39 | - name: path 40 | value: "tektonasacode/" 41 | - name: args 42 | value: ["-r", "y"] 43 | workspaces: 44 | - name: source 45 | workspace: source 46 | - name: shellcheck 47 | runAfter: [fetch] 48 | taskRef: 49 | name: shellcheck 50 | params: 51 | - name: args 52 | value: ["-s", "bash", "./deploy.sh"] 53 | workspaces: 54 | - name: shared-workspace 55 | workspace: source 56 | -------------------------------------------------------------------------------- /triggers/role.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: tekton-triggers-github-sa 5 | --- 6 | apiVersion: rbac.authorization.k8s.io/v1 7 | kind: RoleBinding 8 | metadata: 9 | name: tekton-triggers-github-binding 10 | subjects: 11 | - kind: ServiceAccount 12 | name: tekton-triggers-github-sa 13 | roleRef: 14 | apiGroup: rbac.authorization.k8s.io 15 | kind: Role 16 | name: tekton-triggers-github-minimal 17 | --- 18 | kind: Role 19 | apiVersion: rbac.authorization.k8s.io/v1 20 | metadata: 21 | name: tekton-triggers-github-minimal 22 | rules: 23 | # Permissions for every EventListener deployment to function 24 | - apiGroups: ["triggers.tekton.dev"] 25 | resources: ["eventlisteners", "triggerbindings", "triggertemplates", "triggers"] 26 | verbs: ["get", "list", "watch"] 27 | - apiGroups: [""] 28 | # secrets are only needed for Github/Gitlab interceptors, serviceaccounts only for per trigger authorization 29 | resources: ["configmaps", "secrets"] 30 | verbs: ["get", "list", "watch"] 31 | # Permissions to create resources in associated TriggerTemplates 32 | - apiGroups: ["tekton.dev"] 33 | resources: ["pipelineruns", "pipelineresources", "taskruns"] 34 | verbs: ["create"] 35 | - apiGroups: [""] 36 | resources: ["serviceaccounts"] 37 | verbs: ["impersonate"] 38 | - apiGroups: ["policy"] 39 | resources: ["podsecuritypolicies"] 40 | resourceNames: ["tekton-triggers"] 41 | verbs: ["use"] 42 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """The setup script.""" 3 | 4 | from setuptools import setup, find_packages 5 | 6 | with open('README.md') as readme_file: 7 | readme = readme_file.read() 8 | 9 | 
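# Runtime dependencies (pyyaml, tektonbundle) are tracked in requirements.txt
# and are not wired into install_requires below.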
requirements = [] 10 | 11 | setup_requirements = [ 12 | 'pytest-runner', 13 | ] 14 | 15 | test_requirements = [ 16 | 'pytest>=3', 17 | ] 18 | 19 | setup( 20 | author="Chmouel Boudjnah", 21 | author_email='chmouel@chmouel.com', 22 | python_requires='>=3.5', 23 | classifiers=[ 24 | 'Development Status :: 2 - Pre-Alpha', 25 | 'Intended Audience :: Developers', 26 | 'License :: OSI Approved :: MIT License', 27 | 'Natural Language :: English', 28 | 'Programming Language :: Python :: 3', 29 | 'Programming Language :: Python :: 3.5', 30 | 'Programming Language :: Python :: 3.6', 31 | 'Programming Language :: Python :: 3.7', 32 | 'Programming Language :: Python :: 3.8', 33 | ], 34 | description="Tekton asa CODE", 35 | entry_points={ 36 | 'console_scripts': [ 37 | 'tekton-asa-code=tektonasacode.cli:run', 38 | ], 39 | }, 40 | install_requires=requirements, 41 | license="MIT license", 42 | long_description=readme, 43 | long_description_content_type='text/markdown', 44 | include_package_data=True, 45 | keywords='Tekton', 46 | name='tektonasacode', 47 | packages=find_packages(include=['tektonasacode', 'tektonasacode.*']), 48 | setup_requires=setup_requirements, 49 | test_suite='tests', 50 | tests_require=test_requirements, 51 | url='https://github.com/chmouel/tekton-asa-code', 52 | version='0.1.0', 53 | zip_safe=False, 54 | ) 55 | -------------------------------------------------------------------------------- /tests/utils_test.py: -------------------------------------------------------------------------------- 1 | """Test when processing templates""" 2 | # pylint: disable=redefined-outer-name,too-few-public-methods 3 | import subprocess 4 | 5 | import yaml 6 | from tektonasacode import utils 7 | 8 | 9 | def test_kapply(): 10 | """Test kapply utils""" 11 | tests = [ 12 | ("""foo: {{allo}}""", { 13 | 'allo': 'maman' 14 | }, "foo: maman"), 15 | ("""foo: {{allo.maman}}""", { 16 | 'allo': { 17 | 'maman': 'bobo' 18 | } 19 | }, "foo: bobo"), 20 | ("""foo: {{allo.maman}}""", { 21 | 'allo': { 22 | 'maman': ['jai', 'bobo'] 23 | } 24 | }, "foo: ['jai', 'bobo']"), 25 | ("""foo: {{allo.maman}}""", { 26 | 'allo': { 27 | 'maman': [{ 28 | 'jai': 'bobo', 29 | 'jveux': 'manger' 30 | }] 31 | } 32 | }, "foo: [{'jai': 'bobo', 'jveux': 'manger'}]"), 33 | ] 34 | for test in tests: 35 | tools = utils.Utils() 36 | _, res = tools.kapply(test[0], test[1], [], name="test") 37 | assert res == test[2] 38 | 39 | 40 | def test_get_errors(): 41 | """Test get_errors""" 42 | tools = utils.Utils() 43 | 44 | text = """I have failed to do 45 | what my love would want 46 | my error my mistake""" 47 | output = tools.get_errors(text) 48 | assert "**failed**" in output 49 | assert "**error**" in output 50 | assert "my love" not in output 51 | 52 | assert not tools.get_errors("Happy as a cucumber") 53 | 54 | 55 | def test_kubectl_get(): 56 | """Test kubectl_get""" 57 | tools = utils.Utils() 58 | 59 | # pylint: disable=unused-argument 60 | def my_execute(command, check_error=""): 61 | item = yaml.safe_dump({ 62 | "items": [{ 63 | "metadata": { 64 | "namespace": "random", 65 | "name": "hello" 66 | } 67 | }] 68 | }) 69 | return subprocess.run(f"""echo "{item}" """, 70 | shell=True, 71 | check=True, 72 | capture_output=True) 73 | 74 | tools.execute = my_execute 75 | output = tools.kubectl_get(obj="none", output_type="yaml") 76 | assert 'items' in output 77 | assert 'namespace' not in output['items'][0]['metadata'] 78 | -------------------------------------------------------------------------------- /misc/tkaac-status: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # jq, tkn, pip3 install --home arrow 3 | NAMESPACE="tekton-asa-code" 4 | TMPF=$(mktemp /tmp/.mm.XXXXXX) 5 | TMPF2=$(mktemp /tmp/.mm.XXXXXX) 6 | clean() { rm -f ${TMPF2} ${TMPF}; } 7 | trap clean EXIT 8 | 9 | function c() { 10 | BOLD='\033[1m' 11 | ITALIC='\033[3m' 12 | UNDERLINE='\033[4m' 13 | NONE='\033[00m' 14 | RED='\033[01;31m' 15 | GREEN='\033[01;32m' 16 | YELLOW='\033[01;33m' 17 | BLUE='\033[01;34m' 18 | 19 | case $1 in 20 | yellow) 21 | color=${BOLD}${YELLOW} 22 | ;; 23 | italic) 24 | color=${ITALIC} 25 | ;; 26 | underline) 27 | color=${UNDERLINE} 28 | ;; 29 | bold) 30 | color=$BOLD 31 | ;; 32 | normal) 33 | color=$NONE 34 | ;; 35 | green) 36 | color=$BOLD$GREEN 37 | ;; 38 | blue) 39 | color=$BOLD$BLUE 40 | ;; 41 | red) 42 | color=$BOLD$RED 43 | ;; 44 | *) 45 | 46 | esac 47 | printf "%b" "${color}$2${NONE} " 48 | } 49 | 50 | IFS=" 51 | " 52 | 53 | SEENS=() 54 | 55 | in_array() { 56 | for i in "${SEENS[@]}"; do 57 | if [[ "$i" = ${1} ]]; then 58 | return 0 59 | fi 60 | done 61 | return 1 62 | } 63 | 64 | printf "%-30s%s\n" "$(c underline 'TIME AGO')" 65 | while :;do 66 | for line in $(tkn pr -n ${NAMESPACE} ls --no-headers --limit=${1:-5});do 67 | pr=$(echo ${line}|awk '{print $1}'|tr -d '\n') 68 | status=$(echo ${line}|awk '{print $NF}'|tr -d '\n') 69 | kubectl get -n ${NAMESPACE} pr -o json ${pr} | tee ${TMPF2} | jq -r '.spec.params[] | select(.name == "github_json").value'|tr -d '\n'|tr -d ' '|jq '.' > ${TMPF} 70 | timeago=$(jq .metadata.creationTimestamp ${TMPF2}|python3 -c 'import sys,arrow;print(arrow.get(sys.stdin.read()).humanize())'|sed 's/ago//') 71 | url=$(jq -r '.pull_request.html_url' ${TMPF}) 72 | sha=$(jq -r '.pull_request.head.sha' ${TMPF}|cut -c1-4) 73 | loginuser=$(jq -r '.pull_request.user.login' ${TMPF}) 74 | 75 | in_array ${sha}${status} && { read -t 3 -n1 ;break ;} 76 | 77 | SEENS+=(${sha}${status}) 78 | 79 | case ${status} in 80 | Succeeded) 81 | status=$(c green ${status}) 82 | ;; 83 | Failed) 84 | status=$(c red ${status}) 85 | ;; 86 | Running) 87 | status=$(c blue ${status}) 88 | ;; 89 | esac 90 | printf "%-30s%s %-10s %s %s %-30s %s %s\n" "$(c yellow ${timeago})" "$(c none)" ${loginuser} ${sha} ${pr} ${status} "$(c underline $url)" 91 | done 92 | done 93 | -------------------------------------------------------------------------------- /deploy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Configure this to your own route 3 | PUBLIC_ROUTE_HOSTNAME=${PUBLIC_ROUTE_HOSTNAME:-tektonic.apps.psipipelines.devcluster.openshift.com} 4 | 5 | GITHUB_APP_PRIVATE_KEY=${GITHUB_APP_PRIVATE_KEY:-./tmp/github.app.key} 6 | GITHUB_APP_ID=${GITHUB_APP_ID:-"81262"} 7 | GITHUB_WEBHOOK_SECRET=${GITHUB_WEBHOOK_SECRET:-} 8 | 9 | SERVICE=tekton-asa-code-listener-interceptor 10 | TARGET_NAMESPACE=tekton-asa-code 11 | SERVICE_ACCOUNT=tkn-aac-sa 12 | if type -p oc >/dev/null 2>/dev/null;then 13 | DEFAULT_KB=oc 14 | elif type -p kubectl >/dev/null 2>/dev/null;then 15 | DEFAULT_KB=kubectl 16 | fi 17 | 18 | [[ -e ${GITHUB_APP_PRIVATE_KEY} ]] || { 19 | echo "I could not find a private key in ${GITHUB_APP_PRIVATE_KEY} please install it from your github app" 20 | exit 1 21 | } 22 | 23 | if ! 
type -p ${KB} >/dev/null;then 24 | echo "Couldn't find a ${DEFAULT_KB} in the path, please set the kubectl or oc binary accordingly " 25 | exit 1 26 | fi 27 | 28 | set -e 29 | 30 | EXTERNAL_TASKS="https://raw.githubusercontent.com/tektoncd/catalog/main/task/github-app-token/0.1/github-app-token.yaml" 31 | 32 | TMPFILE=$(mktemp /tmp/.mm.XXXXXX) 33 | clean() { rm -f ${TMPFILE}; } 34 | trap clean EXIT 35 | 36 | while getopts "rn:" o; do 37 | case "${o}" in 38 | n) 39 | TARGET_NAMESPACE=${OPTARG}; 40 | ;; 41 | r) 42 | recreate=yes 43 | ;; 44 | *) 45 | echo "Invalid option"; exit 1; 46 | ;; 47 | esac 48 | done 49 | shift $((OPTIND-1)) 50 | 51 | KB="${KUBECTL_BINARY:-${DEFAULT_KB}} -n ${TARGET_NAMESPACE}" 52 | 53 | 54 | ${KB} get namespace ${TARGET_NAMESPACE} >/dev/null 2>/dev/null || ${KB} create namespace ${TARGET_NAMESPACE} || true 55 | github_webhook_secret=$(${KB} get secret github-webhook-secret -o jsonpath='{.data.token}' 2>/dev/null || true) 56 | 57 | if [[ -n ${github_webhook_secret} ]];then 58 | github_webhook_secret=$(echo ${github_webhook_secret}|base64 --decode) 59 | else 60 | if [[ -n ${GITHUB_WEBHOOK_SECRET} ]];then 61 | github_webhook_secret=${GITHUB_WEBHOOK_SECRET} 62 | echo "Using Github Webhook scret provided: ${GITHUB_WEBHOOK_SECRET}" 63 | else 64 | github_webhook_secret=${GITHUB_WEBHOOK_SECRET:-$(openssl rand -hex 20|tr -d '\n')} 65 | echo "Password for Github Webhook secret generated is: ${github_webhook_secret}" 66 | fi 67 | ${KB} create secret generic github-webhook-secret --from-literal token="${github_webhook_secret}" 68 | fi 69 | 70 | function k() { 71 | for file in "$@";do 72 | [[ -n ${recreate} ]] && { 73 | ${KB} delete -f ${file} 74 | } 75 | if [[ "$(basename ${file})" == bindings.yaml ]];then 76 | sed "s/{{application_id}}/\"${GITHUB_APP_ID}\"/" ${file} > ${TMPFILE} 77 | file=${TMPFILE} 78 | fi 79 | ${KB} apply -f ${file} 80 | done 81 | } 82 | 83 | function openshift_expose_service () { 84 | local s=${1} 85 | local n=${2} 86 | ${KB} delete route ${s} 2>/dev/null >/dev/null || true 87 | [[ -n ${n} ]] && n="--hostname=${n}" 88 | 89 | while true;do 90 | if ${KB} get service ${s} 2>/dev/null >/dev/null;then 91 | break 92 | fi 93 | [[ ${max} == 12 ]] && { echo "cannot find ${s}"; exit 1 ;} 94 | sleep 10 95 | (( max++ )) 96 | done 97 | 98 | ${KB} expose service ${s} ${n} && \ 99 | ${KB} apply -f <(${KB} get route ${s} -o json |jq -r '.spec |= . 
+ {tls: {"insecureEdgeTerminationPolicy": "Redirect", "termination": "edge"}}') >/dev/null && \ 100 | echo "Webhook URL: https://$(${KB} get route ${s} -o jsonpath='{.spec.host}')" 101 | } 102 | 103 | function create_secret() { 104 | local s=${1} 105 | local literal=${2} 106 | ${KB} delete secret ${s} || true 107 | ${KB} get secret ${s} >/dev/null 2>/dev/null || \ 108 | ${KB} create secret generic ${s} --from-literal "${literal}" 109 | } 110 | 111 | function give_cluster_admin() { 112 | #TODO: not ideal 113 | cat </dev/null 2>/dev/null ;then 152 | openshift_expose_service el-${SERVICE} ${PUBLIC_ROUTE_HOSTNAME} || true 153 | fi 154 | echo "Webhook secret: ${github_webhook_secret}" 155 | -------------------------------------------------------------------------------- /Flow.md: -------------------------------------------------------------------------------- 1 | # Tekton asa code flow 2 | 3 | ## Init 4 | 5 | * Get user token from app token and installation_id 6 | 7 | * Extract variables path from webhook payload : 8 | 9 | - pull_request.base.repo.full_name 10 | - pull_request.head.sha 11 | - pull_request.number 12 | - pull_request.user.login 13 | - repository.full_name 14 | - repository.html_url 15 | - repository.owner.login 16 | 17 | * Create temporary namespace 18 | 19 | * Set static parameters for easy substition in template : 20 | 21 | - revision: pull_request.head.sha 22 | - repo_url: repo_html_url, 23 | - repo_owner: repo_owner_login, 24 | - namespace: current_namespace, 25 | - openshift_console_pipelinerun_href: console_pipelinerun_link, 26 | 27 | * Create a check run - https://docs.github.com/en/developers/apps/creating-ci-tests-with-the-checks-api 28 | 29 | * Checkout GIT repo on FileSystem for the pull_request_sha 30 | 31 | - Create dir 32 | - `git init` 33 | - `git remote add -f origin https://$repo_owner_login:$github.token@$repo_html_url` 34 | - `git fetch origin refs/pull/$pull_request_number/head` 35 | - `git reset --hard {pull_request_sha}` 36 | 37 | * Check if there is a .tekton directory, 38 | - If not, set GitHUB check status as conclusion=neutral, skipping the PR 39 | 40 | 41 | ## Process .tekton directory 42 | 43 | Start processing tekton directory templates 44 | 45 | * If there is a .tekton/tekton.yaml files parse it : 46 | 47 | ### Access control for running the CI 48 | 49 | * if the owner of the repo is the submitted of the PR, always allows her to run the CI run. 50 | * If the submmitter of the PR is in the contributors of the REPO always allow her to run the PR -- https://docs.github.com/en/rest/reference/repos#list-repository-contributors 51 | 52 | * Grab the file from the `master_branch` (getting `master_branch` of the repo via GitHUB) : 53 | 54 | - Check if there is `allowed` key in tekton.yaml 55 | - if the submmitter user is in the allowed list allow her. 56 | - if the item start with a `@` followed by a string, assume that string is a 57 | GitHUB organisation and check if the submmitter user is part of this 58 | organisation and it she does then allow her to run the CI. 59 | 60 | * If the user is not allowed then exit with setting strtus of the checks as denied. 61 | 62 | ### Prerun commands (TODO: to remove) 63 | 64 | - if there is a `prerun` key in tekton.yaml run the command in the items before 65 | doing anything. 66 | - This should be removed, since this could be a security issue. 
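
As a rough illustration of the access control rules above, the decision could be sketched as below. This is a minimal sketch with hypothetical helper names, not the actual functions from `process_templates.py` / `github.py`:

```python
def is_allowed_to_run(submitter, repo_owner, contributors, owners, is_org_member):
    """Decide whether the PR submitter may trigger the CI run.

    submitter:     pull_request.user.login
    repo_owner:    repository.owner.login
    contributors:  logins from the "list repository contributors" API
    owners:        the `owners`/`allowed` entries read from tekton.yaml (or
                   OWNERS) on the default branch, never from the PR itself
    is_org_member: callable checking membership of a GitHub organisation
    """
    if submitter == repo_owner or submitter in contributors:
        return True
    for entry in owners:
        if entry.startswith("@"):
            # "@myorg" style entries name a GitHub organisation
            if is_org_member(entry.lstrip("@"), submitter):
                return True
        elif entry == submitter:
            return True
    return False
```

Reading the list from the default branch rather than from the PR head is what prevents a submitter from simply adding themselves to `owners` inside the PR.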
67 | 68 | ### Tasks auto install 69 | 70 | * Check if there is a `tasks` key in tekton.yaml and : 71 | - If task start with *http* or *https* then grab it remotely 72 | - If task doesnt start with http/https : 73 | - if the name of the task finishes by a version number (i.e: *0.2*) grab that version from the tekton catalog repository. 74 | - if the name of the task finishes by **:latest**, grab the latest version of that task from the tekton catalog repository. 75 | 76 | ### Secrets 77 | 78 | - Check if there is an `secret` key in tekton.yaml 79 | - Check if there is a secret specified in the item in the main tekton asa code 80 | namespace with the same name and has the labels : 81 | 82 | `tekton/asa-code-repository-name: $repository.name` 83 | `tekton/asa-code-repository-owner: $repository.owner_or_organisation` 84 | 85 | Apply that secret to the temporary namespace. 86 | 87 | - This to avoid hijacking of other tekton.yaml repositories to make sure it 88 | only belongs to that `user/repository`. 89 | 90 | - Installing secret is a pre-ci run step, where the admin would create those 91 | secrets with the right labels in the tekton-asa-code namespace. 92 | 93 | 94 | #### Files in tekton directory 95 | 96 | - If there is a `files` key in tekton.yaml, use this as the list of files to 97 | apply from the checked out repository. 98 | 99 | - If there isn't `files` key, go in order in every files in the `.tekton` directory finishes by `yaml` or `yml` excluding the `tekton.yaml` file. 100 | 101 | - Apply all files with the variable substitions, where user can specify the static parameters i.e: 102 | 103 | {{revision}} 104 | 105 | To get the revision from Webhook payload 106 | 107 | Or access directly to a json key from the payload i.e: 108 | 109 | {{repository.full_name}} 110 | 111 | ## Follow logs and set status 112 | 113 | - When all templates are applied in the temporary namespace, we grab the last 114 | version of the pipelinerun from there and follow the logs with : 115 | 116 | `tkn pr logs -n {namespace} --follow --last` 117 | 118 | - Print it to the output of the current pipelinerun. 119 | 120 | - Which mean we don't support multiple pipelineruns setup. 121 | 122 | - Which mean we don't support tekton asa code with something else than pipeline. 123 | 124 | - When pipeline is finished execute a : 125 | 126 | `tkn pr describe -n {namespace} --follow --last` 127 | 128 | - Grab status of the task run in PR, to detect which one **succeeded** or which 129 | one has **failed** and the time it took to run. 130 | 131 | - Set the github check run according to the status of the pipelinerun (success or failed). 132 | 133 | - Add a list of all the task status and how long it took to the github check. 134 | 135 | - Add a link to the PipelineRun on openshift console. 136 | 137 | - Exit the task accoding to the exit of the PR in the temporary namespace. 138 | -------------------------------------------------------------------------------- /misc/send-slack-notifications.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # Author: Chmouel Boudjnah 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 
You may obtain 7 | # a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 14 | # License for the specific language governing permissions and limitations 15 | # under the License. 16 | """Script to send a slack notification to be plugged in a finally task""" 17 | import argparse 18 | import json 19 | import os 20 | import subprocess 21 | import sys 22 | import typing 23 | import urllib.request 24 | 25 | 26 | class SlackNotificationError(Exception): 27 | """Custom exception when we fail""" 28 | 29 | 30 | def get_openshift_console_url(namespace: str) -> str: 31 | """Get the openshift console url for a namespace""" 32 | cmd = ( 33 | "kubectl get route -n openshift-console console -o jsonpath='{.spec.host}'", 34 | ) 35 | ret = subprocess.run(cmd, shell=True, check=True, capture_output=True) 36 | if ret.returncode != 0: 37 | raise SlackNotificationError( 38 | "Could not detect the location of openshift console url: {ret.stdout.decode()}" 39 | ) 40 | return f"https://{ret.stdout.decode()}/k8s/ns/{namespace}/tekton.dev~v1beta1~PipelineRun/" 41 | 42 | 43 | def check_label(label_eval: str, label_to_check: str) -> bool: 44 | """Check a label: if you get a string that has all the labels as specified 45 | by github, it will eval it and check if one contains the label_to_check""" 46 | return bool([x for x in eval(label_eval) if x['name'] == label_to_check]) # pylint: disable=eval-used 47 | 48 | 49 | def get_json_of_pipelinerun(pipelinerun: str) -> typing.Dict[str, typing.Dict]: 50 | """Find which namespace where we are running currently by checking the 51 | pipelinerun namespace""" 52 | cmd = f"kubectl get pipelinerun {pipelinerun} -o json" 53 | ret = subprocess.run(cmd, shell=True, check=True, capture_output=True) 54 | if ret.returncode != 0: 55 | raise SlackNotificationError(f"Could not run command: {cmd}") 56 | return json.loads(ret.stdout) 57 | 58 | 59 | def check_status_of_pipelinerun( 60 | jeez: typing.Dict[str, typing.Dict]) -> typing.List[str]: 61 | """Check status of a pipelinerun using kubectl, we avoid the the Running 62 | ones since we run in finally, it will have a running ones""" 63 | task_runs = jeez['status']['taskRuns'] 64 | failed = [] 65 | 66 | pname = jeez['metadata']['name'] 67 | for task in task_runs.keys(): 68 | bname = task.replace(pname + "-", '') 69 | bname = bname.replace("-" + bname.split("-")[-1], '') 70 | if bool([ 71 | x['message'] for x in task_runs[task]['status']['conditions'] 72 | if x['status'] != 'Running' and x['status'] == 'False' 73 | ]): 74 | failed.append(bname) 75 | return failed 76 | 77 | 78 | def send_slack_message(webhook_url: str, subject: str, text: str, 79 | icon: str) -> str: 80 | """Send a slack message""" 81 | msg = { 82 | "text": 83 | subject, 84 | "attachments": [{ 85 | "blocks": [ 86 | { 87 | "type": "section", 88 | "text": { 89 | "type": "mrkdwn", 90 | "text": text, 91 | }, 92 | "accessory": { 93 | "type": "image", 94 | "image_url": icon, 95 | "alt_text": "From tekton with love" 96 | } 97 | }, 98 | ] 99 | }] 100 | } 101 | 102 | req = urllib.request.Request(webhook_url, 103 | data=json.dumps(msg).encode(), 104 | headers={"Content-type": "application/json"}, 105 | method="POST") 106 | # TODO: Handle error? 
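    # (urllib.request.urlopen raises urllib.error.HTTPError on a non-2xx
    #  response, so a failed webhook call currently surfaces as an uncaught
    #  exception rather than being reported gracefully.)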
107 | return urllib.request.urlopen(req).read().decode() 108 | 109 | 110 | def main() -> int: 111 | """Main""" 112 | parser = argparse.ArgumentParser() 113 | parser.add_argument("--label-to-check", help="Label to check") 114 | parser.add_argument( 115 | "--failure-url-icon", 116 | default=os.environ.get( 117 | "FAILURE_URL_ICON", 118 | "https://publicdomainvectors.org/photos/21826-REMIX-ARRET.png"), 119 | help="The icon of failure") 120 | 121 | parser.add_argument( 122 | "--success-url-icon", 123 | default=os.environ.get( 124 | "SUCCESS_URL_ICON", 125 | "https://publicdomainvectors.org/photos/Checkmark.png"), 126 | help="The icon of success") 127 | 128 | parser.add_argument("--failure-subject", 129 | help="The subject of the slack message when failure", 130 | default=os.environ.get("FAILURE_SUBJECT", 131 | "CI has failed :cry:")) 132 | 133 | parser.add_argument( 134 | "--success-subject", 135 | default=os.environ.get("SUCCESS_SUBJECT", 136 | "CI has succeeded :thumbsup:"), 137 | help="The subject of the slack message when succes", 138 | ) 139 | 140 | parser.add_argument("--log-url", 141 | default=os.environ.get("LOG_URL"), 142 | help="Link to the log url") 143 | 144 | parser.add_argument( 145 | "--github-pull-label", 146 | default=os.environ.get("GITHUB_PULL_LABEL"), 147 | help="pull_request.labels dict as get from tekton asa code") 148 | 149 | parser.add_argument("--pipelinerun", 150 | default=os.environ.get("PIPELINERUN"), 151 | help="The pipelinerun to check the status on") 152 | 153 | parser.add_argument("--slack-webhook-url", 154 | default=os.environ.get("SLACK_WEBHOOK_URL"), 155 | help="Slack webhook URL") 156 | 157 | args = parser.parse_args() 158 | if args.label_to_check and args.github_pull_label: 159 | if not check_label(args.github_pull_label, args.label_to_check): 160 | print( 161 | f"Pull request doesn't have the label {args.label_to_check} skipping." 162 | ) 163 | return 0 164 | 165 | if not args.pipelinerun: 166 | print( 167 | "error --pipelinerun need to be set via env env variable or other means." 168 | ) 169 | return 1 170 | 171 | if not args.slack_webhook_url: 172 | print( 173 | "error --slack-webhook-url need to be set via env variable or other means." 
174 | ) 175 | return 1 176 | 177 | jeez = get_json_of_pipelinerun(args.pipelinerun) 178 | failures = check_status_of_pipelinerun(jeez) 179 | if failures: 180 | slack_icon = args.failure_url_icon 181 | slack_subject = args.failure_subject 182 | slack_text = f"""• *Failed Tasks*: {", ".join(failures)}\n""" 183 | else: 184 | slack_icon = args.success_url_icon 185 | slack_subject = args.success_subject 186 | slack_text = "\n" 187 | 188 | if args.log_url and args.log_url == "openshift": 189 | # TODO: Add tekton dashboard if we can find this automatically 190 | args.log_url = get_openshift_console_url(jeez['metadata']['namespace']) + \ 191 | args.pipelinerun + "/logs" 192 | 193 | if args.log_url: 194 | slack_text += f"• *PipelineRun logs*: {args.log_url}" 195 | 196 | ret = send_slack_message(args.slack_webhook_url, slack_subject, slack_text, 197 | slack_icon) 198 | if ret: 199 | print(ret) 200 | 201 | return 0 202 | 203 | 204 | if __name__ == '__main__': 205 | sys.exit(main()) 206 | -------------------------------------------------------------------------------- /tektonasacode/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Chmouel Boudjnah 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 5 | # not use this file except in compliance with the License. You may obtain 6 | # a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 12 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 13 | # License for the specific language governing permissions and limitations 14 | # under the License. 
15 | """Dropzone of stuff""" 16 | import datetime 17 | import io 18 | import json 19 | import os 20 | import re 21 | import subprocess 22 | import sys 23 | import time 24 | import urllib.error 25 | import urllib.request 26 | from typing import Dict, Optional 27 | 28 | import yaml 29 | 30 | 31 | # pylint: disable=unnecessary-pass 32 | class CouldNotFindConfigKeyException(Exception): 33 | """Raise an exception when we cannot find the key string in json""" 34 | 35 | pass 36 | 37 | 38 | class Utils: 39 | """Tools for running tekton as a code""" 40 | @staticmethod 41 | def execute(command, check_error=""): 42 | """Execute commmand""" 43 | result = "" 44 | try: 45 | result = subprocess.run(["/bin/sh", "-c", command], 46 | stdout=subprocess.PIPE, 47 | stderr=subprocess.STDOUT, 48 | check=True) 49 | except subprocess.CalledProcessError as exception: 50 | if check_error: 51 | print(check_error) 52 | print( 53 | f"Status code: {exception.returncode}: Output: \n{exception.output}" 54 | ) 55 | raise exception 56 | return result 57 | 58 | @staticmethod 59 | def process_pipelineresult(jeez): 60 | """Take a pipelinerun and output nicely which task fails from that 61 | pieplinerun""" 62 | ret = [] 63 | pname = jeez['metadata']['name'] 64 | if 'taskRuns' not in jeez['status']: 65 | return [ 66 | f"• {cond['message']}" 67 | for cond in jeez['status']['conditions'] 68 | ] 69 | for task in jeez['status']['taskRuns']: 70 | result = jeez['status']['taskRuns'][task]['status'] 71 | elapsed = "N/A" 72 | if 'completionTime' in result and 'startTime' in result: 73 | elapsed = str( 74 | datetime.datetime.strptime(result['completionTime'], 75 | '%Y-%m-%dT%H:%M:%SZ') - 76 | datetime.datetime.strptime(result['startTime'], 77 | '%Y-%m-%dT%H:%M:%SZ')) 78 | emoji = "✅" 79 | for condition in result['conditions']: 80 | if condition['status'] != 'True': 81 | emoji = "❌" 82 | 83 | bname = task.replace(pname + '-', '') 84 | bname = bname.replace("-" + bname.split("-")[-1], '') 85 | ret.append(f"{emoji} {elapsed} {bname}") 86 | return ret 87 | 88 | def kubectl_get(self, 89 | obj: str, 90 | output_type: str = "yaml", 91 | raw: bool = False, 92 | namespace: str = "", 93 | labels: Optional[dict] = None) -> Dict: 94 | """Get an object""" 95 | output_str = '' 96 | label_str = '' 97 | ret = '' 98 | if labels: 99 | label_str = " ".join( 100 | [f"-l {label}={labels[label]}" for label in labels]) 101 | if output_type: 102 | output_str = f"-o {output_type}" 103 | namespace_str = f"-n {namespace}" if namespace else "" 104 | _out = self.execute( 105 | f"kubectl get {namespace_str} {obj} {output_str} {label_str}", 106 | check_error=f"Cannot run kubectl get {obj} {output_str} {label_str}" 107 | ) 108 | if _out.returncode != 0: 109 | return {} 110 | out = _out.stdout.decode() 111 | if raw or not output_type: 112 | return out 113 | if output_type == "yaml": 114 | ret = yaml.safe_load(out) 115 | if output_type == "json": 116 | ret = json.loads(out) 117 | 118 | # Cleanup namespaces from all 119 | for index in range(0, len(ret['items'])): 120 | if 'metadata' in ret['items'][index] and 'namespace' in ret[ 121 | 'items'][index]['metadata']: 122 | del ret['items'][index]['metadata']['namespace'] 123 | return ret 124 | 125 | @staticmethod 126 | def retrieve_url(url): 127 | """Retrieve an URL""" 128 | try: 129 | url_retrieved, _ = urllib.request.urlretrieve(url) 130 | except urllib.error.HTTPError as http_error: 131 | msg = f"Cannot retrieve remote task {url} as specified in install.map: {http_error}" 132 | print(msg) 133 | raise http_error 134 | 
return url_retrieved 135 | 136 | def get_openshift_console_url(self, namespace: str) -> str: 137 | """Get the openshift console url for a namespace""" 138 | openshift_console_url = self.execute( 139 | "kubectl get route -n openshift-console console -o jsonpath='{.spec.host}'", 140 | check_error="cannot openshift-console route", 141 | ) 142 | return f"https://{openshift_console_url.stdout.decode()}/k8s/ns/{namespace}/tekton.dev~v1beta1~PipelineRun/" \ 143 | if openshift_console_url.returncode == 0 else "" 144 | 145 | # https://stackoverflow.com/a/18422264 146 | @staticmethod 147 | def stream(command, filename, check_error=""): 148 | """Stream command""" 149 | with io.open(filename, "wb") as writer, io.open(filename, "rb", 150 | 0) as reader: 151 | try: 152 | process = subprocess.Popen(command.split(" "), stdout=writer) 153 | except subprocess.CalledProcessError as exception: 154 | print(check_error) 155 | raise exception 156 | 157 | while process.poll() is None: 158 | sys.stdout.write(reader.read().decode()) 159 | time.sleep(0.5) 160 | # Read the remaining 161 | sys.stdout.write(reader.read().decode()) 162 | 163 | @staticmethod 164 | def get_key(key, jeez, error=True): 165 | """Get key as a string like foo.bar.blah in dict => [foo][bar][blah] """ 166 | curr = jeez 167 | for k in key.split("."): 168 | if k not in curr: 169 | if error: 170 | raise CouldNotFindConfigKeyException( 171 | f"Could not find key {key} in json while parsing file") 172 | return "" 173 | curr = curr[k] 174 | if not isinstance(curr, str): 175 | curr = str(curr) 176 | return curr 177 | 178 | @staticmethod 179 | def get_errors(text): 180 | """ Get all errors coming from """ 181 | errorstrings = r"(error|fail(ed)?)" 182 | errorre = re.compile("^(.*%s.*)$" % (errorstrings), 183 | re.IGNORECASE | re.MULTILINE) 184 | ret = "" 185 | for i in errorre.findall(text): 186 | i = re.sub(errorstrings, r"**\1**", i[0], flags=re.IGNORECASE) 187 | ret += f" * *{i}*\n" 188 | 189 | if not ret: 190 | return "" 191 | return f""" 192 |
193 | <details><summary>Errors detected</summary> 194 | 
 {ret}
 195 | </details>
196 | """ 197 | 198 | def kapply(self, yaml_string_or_file, jeez, parameters_extras, name=None): 199 | """Apply kubernetes yaml template in a namespace with simple transformations 200 | from a dict""" 201 | if os.path.exists(yaml_string_or_file): 202 | yaml_string = open(yaml_string_or_file, 'r').read() 203 | elif isinstance(yaml_string_or_file, str): 204 | yaml_string = yaml_string_or_file 205 | else: 206 | return ("", "") 207 | 208 | def tpl_apply(param): 209 | if param in parameters_extras: 210 | return parameters_extras[param] 211 | if self.get_key(param, jeez, error=False): 212 | return self.get_key(param, jeez) 213 | 214 | return "{{%s}}" % (param) 215 | 216 | if os.path.exists(yaml_string_or_file) and not name: 217 | name = yaml_string_or_file 218 | 219 | content = re.sub( 220 | r"\{\{([_a-zA-Z0-9\.]*)\}\}", 221 | lambda m: tpl_apply(m.group(1)), 222 | yaml_string, 223 | ) 224 | return (name, content) 225 | -------------------------------------------------------------------------------- /tektonasacode/github.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Chmouel Boudjnah 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 5 | # not use this file except in compliance with the License. You may obtain 6 | # a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 12 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 13 | # License for the specific language governing permissions and limitations 14 | # under the License. 15 | """Github Stuff""" 16 | import base64 17 | import datetime 18 | import http.client 19 | import json 20 | import urllib.parse 21 | from typing import Any, Dict, Tuple, List 22 | 23 | import pkg_resources 24 | 25 | from tektonasacode import config 26 | 27 | 28 | class GithubEventNotProcessed(Exception): 29 | """Raised when the event is not processed.""" 30 | 31 | 32 | class GitHUBAPIException(Exception): 33 | """Exceptions when GtiHUB API fails""" 34 | status = None 35 | 36 | def __init__(self, status, message): 37 | self.status = status 38 | super().__init__(message) 39 | 40 | 41 | class Github: 42 | """Github operations""" 43 | 44 | def __init__(self, token): 45 | self.token = token 46 | self.github_api_url = config.GITHUB_API_URL 47 | 48 | def request(self, 49 | method: str, 50 | url: str, 51 | headers=None, 52 | data=None, 53 | params=None) -> (Tuple[http.client.HTTPResponse, Any]): 54 | """Execute a request to the GitHUB API, handling redirect""" 55 | if not url.startswith("http"): 56 | if url[0] == "/": 57 | url = url[1:] 58 | url = f"{self.github_api_url}/{url}" 59 | 60 | if not headers: 61 | headers = {} 62 | headers.update({ 63 | "User-Agent": "TektonCD, the peaceful cat", 64 | "Authorization": f"Bearer {self.token}", 65 | }) 66 | url_parsed = urllib.parse.urlparse(url) 67 | url_path = url_parsed.path 68 | if params: 69 | url_path += "?" 
+ urllib.parse.urlencode(params) 70 | data = data and json.dumps(data) 71 | hostname = str(url_parsed.hostname) 72 | conn = http.client.HTTPSConnection(hostname) 73 | conn.request(method, url_path, body=data, headers=headers) 74 | response = conn.getresponse() 75 | 76 | if response.status == 302: 77 | return self.request(method, response.headers["Location"]) 78 | 79 | if response.status >= 400: 80 | headers.pop("Authorization", None) 81 | raise GitHUBAPIException( 82 | response.status, 83 | f"Error: {response.status} - {json.loads(response.read())} - {method} - {url} - {data} - {headers}" 84 | ) 85 | 86 | return (response, json.loads(response.read().decode())) 87 | 88 | def filter_event_json(self, event_json): 89 | """Filter the json received if it's a comment add the pull request 90 | information into it. If there is nothing then return an execption 91 | NotProcessed""" 92 | if "pull_request" in event_json: 93 | return event_json 94 | # Check if the event has a /retest in a pull_request comment, it can be 95 | # any line. 96 | if all([ 97 | "issue" in event_json, "pull_request" in event_json["issue"], 98 | "comment" in event_json, config.COMMENT_RETEST_STRING 99 | in event_json["comment"]["body"].split("\n") 100 | ]): 101 | response, pull_request = self.request( 102 | "GET", event_json["issue"]["pull_request"]["url"]) 103 | if response.status >= 400: 104 | raise GithubEventNotProcessed( 105 | f'Error loading {event_json["issue"]["pull_request"]["url"]}' 106 | ) 107 | event_json["pull_request"] = pull_request 108 | return event_json 109 | 110 | raise GithubEventNotProcessed("Not processing this GitHUB event") 111 | 112 | def get_file_content(self, owner_repo: str, path: str) -> bytes: 113 | """Get file path contents from GITHUB API""" 114 | try: 115 | _, content = self.request("GET", 116 | f"/repos/{owner_repo}/contents/{path}") 117 | except GitHUBAPIException as error: 118 | if error.status and error.status == 404: 119 | return b"" 120 | raise error 121 | return base64.b64decode(content['content']) 122 | 123 | def get_task_latest_version(self, repository: str, task: str) -> str: 124 | """Use the github api to retrieve the latest task verison from a repository""" 125 | error = None 126 | catalog = None 127 | # TODO: Get default_branch from github api instead of mucking around with this 128 | # See https://stackoverflow.com/a/16501903 129 | for tip_branch in ('main', 'master'): 130 | try: 131 | _, catalog = self.request( 132 | "GET", 133 | f"{self.github_api_url}/repos/{repository}/git/trees/{tip_branch}", 134 | params={ 135 | "recursive": "true", 136 | }, 137 | ) 138 | if catalog: 139 | break 140 | except Exception as exc: 141 | error = exc 142 | 143 | if error: 144 | raise error 145 | 146 | version = ("0.0", None) 147 | for tree in catalog["tree"]: 148 | path = tree["path"] 149 | if path.startswith(f"task/{task}") and path.endswith( 150 | f"{task}.yaml"): 151 | splitted = path.split("/") 152 | if pkg_resources.parse_version( 153 | splitted[2]) > pkg_resources.parse_version(version[0]): 154 | version = (path.split("/")[2], tree["url"]) 155 | 156 | if not version[1]: 157 | raise GitHUBAPIException( 158 | message=f"I could not find a task in '{repository}' for '{task}' ", 159 | status=404, 160 | ) 161 | 162 | print(f"💡 Task {task} in {repository} latest version is {version[0]}") 163 | 164 | return version[0] 165 | 166 | def check_organization_of_user( 167 | self, 168 | organization: str, 169 | pull_request_user_login: str, 170 | ) -> bool: 171 | """Check if a user is part of an organization 
an deny her, unless a approved 172 | member leaves a /tekton ok-to-test comments""" 173 | _, _orgs = self.request( 174 | "GET", 175 | f"{self.github_api_url}/users/{pull_request_user_login}/orgs", 176 | ) 177 | organizations = [user["login"] for user in _orgs] 178 | if organization in organizations: 179 | return True 180 | return False 181 | 182 | def get_repo_contributors( 183 | self, 184 | repo_full_name: str, 185 | ) -> List[str]: 186 | """Get contributors of a repo via API""" 187 | _, contributors = self.request( 188 | "GET", 189 | f"{self.github_api_url}/repos/{repo_full_name}/contributors", 190 | ) 191 | return [x['login'] for x in contributors] 192 | 193 | def set_status( 194 | self, 195 | repository_full_name: str, 196 | check_run_id: int, 197 | target_url: str, 198 | conclusion: str, 199 | output: Dict[str, str], 200 | status: str, 201 | ) -> str: 202 | """ 203 | Set status on the GitHUB Check 204 | """ 205 | 206 | data = { 207 | "name": "Tekton CI", 208 | "status": status, 209 | "conclusion": conclusion, 210 | "completed_at": 211 | datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ"), 212 | "output": output, 213 | } 214 | if target_url: 215 | data["details_url"] = target_url 216 | 217 | _, jeez = self.request( 218 | "PATCH", 219 | f"/repos/{repository_full_name}/check-runs/{check_run_id}", 220 | headers={"Accept": "application/vnd.github.antiope-preview+json"}, 221 | data=data, 222 | ) 223 | 224 | return jeez 225 | 226 | def create_check_run(self, 227 | repository_full_name, 228 | target_url, 229 | head_sha, 230 | status='in_progress', 231 | started_at=""): 232 | """Create a check run id for a repository""" 233 | date_now = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") 234 | data = { 235 | "name": "Tekton CI", 236 | "details_url": target_url, 237 | "status": status, 238 | "head_sha": head_sha, 239 | "started_at": started_at and started_at or date_now, 240 | } 241 | _, jeez = self.request( 242 | "POST", 243 | f"{self.github_api_url}/repos/{repository_full_name}/check-runs", 244 | headers={"Accept": "application/vnd.github.antiope-preview+json"}, 245 | data=data, 246 | ) 247 | 248 | return jeez 249 | -------------------------------------------------------------------------------- /tests/process_test.py: -------------------------------------------------------------------------------- 1 | """Test when processing templates""" 2 | # pylint: disable=redefined-outer-name,too-few-public-methods 3 | 4 | import copy 5 | import os 6 | from typing import Optional 7 | 8 | import pytest 9 | from tektonasacode import config 10 | from tektonasacode import process_templates as pt 11 | from tektonasacode import utils 12 | 13 | github_json_pr = { 14 | 'pull_request': { 15 | 'user': { 16 | 'login': 'foo', 17 | }, 18 | "base": { 19 | "repo": { 20 | "full_name": "https://github.com/border/land" 21 | } 22 | } 23 | }, 24 | "repository": { 25 | "owner": { 26 | "login": "bar" 27 | } 28 | } 29 | } 30 | 31 | 32 | @pytest.fixture 33 | def fixtrepo(tmpdir): 34 | """Create temporary tekton repository""" 35 | repo = tmpdir.mkdir("repository") 36 | 37 | tektondir = repo.mkdir(".tekton") 38 | 39 | pipeline = tektondir.join("pipeline.yaml") 40 | pipeline.write("""--- \n 41 | apiVersion: tekton.dev/v1beta1 42 | kind: PipelineRun 43 | metadata: 44 | name: pipelinespec-taskspecs-embedded 45 | spec: 46 | pipelineSpec: 47 | tasks: 48 | - name: hello1 49 | taskSpec: 50 | steps: 51 | - name: hello-moto 52 | image: scratch 53 | - name: hello2 54 | taskRef: 55 | name: task-hello-moto2 56 | """) 57 | task = 
tektondir.join("task.yaml") 58 | task.write("""--- 59 | apiVersion: tekton.dev/v1beta1 60 | kind: Task 61 | metadata: 62 | name: task-hello-moto2 63 | spec: 64 | steps: 65 | - name: hello-moto2 66 | image: scratch2 67 | """) 68 | configmap = tektondir.join("configmap.yaml") 69 | configmap.write("""--- 70 | apiVersion: v1 71 | kind: Configmap 72 | metadata: 73 | name: configmap 74 | data: 75 | hello: "moto" 76 | """) 77 | yield repo 78 | 79 | 80 | def test_process_not_allowed_no_owner_not_same_submitter_owner(fixtrepo): 81 | """Test processing tempaltes not allowed because submitter is not the same as repo owner""" 82 | class FakeGithub: 83 | """Fake Github Class""" 84 | def get_file_content(self, owner_repo, path): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 85 | return b'' 86 | 87 | # Add a file to make sure we check we skip those files that are not ending in yaml or are OWNERS files 88 | fixtrepo.join(config.TEKTON_ASA_CODE_DIR, "README.md").write("Hello Moto") 89 | # Make sure we skip tekton.yaml and only parsing if needed (empty here) 90 | fixtrepo.join(config.TEKTON_ASA_CODE_DIR, "tekton.yaml").write("---") 91 | 92 | process = pt.Process(FakeGithub()) 93 | process.checked_repo = fixtrepo 94 | 95 | ret = process.process_tekton_dir(github_json_pr, {}) 96 | assert not ret["allowed"] 97 | 98 | 99 | def test_process_allowed_same_owner_submitter(fixtrepo): 100 | """Test processing allowed because submitter is the same as repo owner""" 101 | class FakeGithub: 102 | """Fake Github Class""" 103 | def get_file_content(self, owner_repo, path): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 104 | return b'' 105 | 106 | process = pt.Process(FakeGithub()) 107 | process.checked_repo = fixtrepo 108 | 109 | jeez = copy.deepcopy(github_json_pr) 110 | jeez['pull_request']['user']['login'] = jeez['repository']['owner'][ 111 | 'login'] 112 | 113 | ret = process.process_tekton_dir(jeez, {}) 114 | assert ret["allowed"] 115 | 116 | 117 | def test_process_allowed_owner_file(fixtrepo): 118 | """Allowed user via the OWNER file in github repo from parent branch.""" 119 | class FakeGithub: 120 | """Fake Github Class""" 121 | def get_file_content(self, owner_repo, path): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 122 | return b'foo' 123 | 124 | process = pt.Process(FakeGithub()) 125 | process.checked_repo = fixtrepo 126 | ret = process.process_tekton_dir(github_json_pr, {}) 127 | assert ret["allowed"] 128 | 129 | 130 | def test_process_allowed_tekton_yaml(fixtrepo): 131 | """Allowed user via the owner section of tekton.yaml in github repo parent branch.""" 132 | class FakeGithub: 133 | """Fake Github Class""" 134 | def get_file_content(self, owner_repo, path): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 135 | if path == os.path.join(config.TEKTON_ASA_CODE_DIR, "tekton.yaml"): 136 | return """--- 137 | owners: 138 | - foo 139 | """ 140 | return b'' 141 | 142 | process = pt.Process(FakeGithub()) 143 | process.checked_repo = fixtrepo 144 | ret = process.process_tekton_dir(github_json_pr, {}) 145 | assert ret["allowed"] 146 | 147 | 148 | def test_process_via_moulinette(fixtrepo): 149 | """Test that the moulinette is working (via tektonbundle)""" 150 | class FakeGithub: 151 | """fake Github like a champ""" 152 | def get_file_content(self, owner_repo, path): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 153 | return b'' 154 | 155 | (fixtrepo / config.TEKTON_ASA_CODE_DIR / 
"tekton.yaml").write("""--- 156 | bundled: true 157 | """) 158 | process = pt.Process(FakeGithub()) 159 | process.checked_repo = fixtrepo 160 | ret = process.process_tekton_dir(github_json_pr, {}) 161 | assert 'bundled-file.yaml' in ret['templates'] 162 | 163 | 164 | def test_process_allowed_organizations(fixtrepo): 165 | """Allowed user via the owner section of tekton.yaml where the user belong to allowed org.""" 166 | class FakeGithubTektonYaml: 167 | """Fake Github Class""" 168 | def get_file_content(self, owner_repo, path): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 169 | if path == os.path.join(config.TEKTON_ASA_CODE_DIR, "tekton.yaml"): 170 | return b"""--- 171 | owners: 172 | - "@fakeorg" 173 | """ 174 | return b'' 175 | 176 | def check_organization_of_user(self, org, pruserlogin): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 177 | return True 178 | 179 | process = pt.Process(FakeGithubTektonYaml()) 180 | process.checked_repo = fixtrepo 181 | 182 | ret = process.process_tekton_dir(github_json_pr, {}) 183 | assert ret["allowed"] 184 | 185 | class FakeGithubOwners: 186 | """Fake Github Class""" 187 | def get_file_content(self, owner_repo, path): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 188 | if path == os.path.join(config.TEKTON_ASA_CODE_DIR, "OWNERS"): 189 | return b"""@fakeorg""" 190 | return b"" 191 | 192 | def check_organization_of_user(self, org, pruserlogin): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 193 | return True 194 | 195 | process = pt.Process(FakeGithubOwners()) 196 | process.checked_repo = fixtrepo 197 | ret = process.process_tekton_dir(github_json_pr, {}) 198 | assert ret["allowed"] 199 | 200 | 201 | def test_process_yaml_ini(tmp_path, fixtrepo): 202 | """Test processing all fields in tekton.yaml""" 203 | class FakeGithub: 204 | """Fake Github class""" 205 | def get_task_latest_version(self, repo, name): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 206 | return "0.0.7" 207 | 208 | def get_file_content(self, owner_repo, path): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 209 | return b'' 210 | 211 | class FakeUtils(utils.Utils): 212 | """Fake Utils class""" 213 | @staticmethod 214 | def retrieve_url(url): 215 | """ 216 | Retrieve a fake url 217 | """ 218 | taskname = tmp_path / os.path.basename(url).replace(".yaml", "") 219 | taskname.write_text("""--- 220 | kind: Pipeline 221 | metadata: 222 | name: fakepipeline 223 | """) 224 | return taskname 225 | 226 | def kubectl_get(self, 227 | obj: str, 228 | output_type: str = "yaml", 229 | raw: bool = False, 230 | namespace: str = "", 231 | labels: Optional[dict] = None): # pylint: disable=unused-argument,missing-function-docstring,no-self-use 232 | return {"items": [{"metadata": {"name": "shuss"}}]} 233 | 234 | (fixtrepo / config.TEKTON_ASA_CODE_DIR / "pr_use_me.yaml").write("--- ") 235 | tektonyaml = tmp_path / "tekton.yaml" 236 | tektonyaml.write_text("""--- 237 | tasks: 238 | - task1 239 | - task2:latest 240 | - task3:0.2 241 | - https://this.is.not/a/repo/a.xml 242 | 243 | secrets: 244 | - shuss 245 | 246 | files: 247 | - pr_use_me.yaml 248 | """) 249 | process = pt.Process(FakeGithub()) 250 | process.checked_repo = fixtrepo 251 | process.utils = FakeUtils() 252 | processed = process.process_yaml_ini(tektonyaml, github_json_pr, {}) 253 | 254 | # Assert tasks processing 255 | tasks = [ 256 | os.path.dirname(x.replace(config.GITHUB_RAW_URL + "/", 
'')) 257 | for x in list(processed['templates']) 258 | ] 259 | assert tasks[0] == "task1/0.0.7" 260 | assert tasks[1] == "task2/0.0.7" 261 | assert tasks[2] == "task3/0.2" 262 | assert tasks[3].startswith("https") 263 | assert list(processed['templates'])[4] == "shuss.secret.yaml" 264 | assert os.path.basename(list( 265 | processed['templates'])[5]) == "pr_use_me.yaml" 266 | -------------------------------------------------------------------------------- /tektonasacode/process_templates.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Chmouel Boudjnah 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 5 | # not use this file except in compliance with the License. You may obtain 6 | # a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 12 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 13 | # License for the specific language governing permissions and limitations 14 | # under the License. 15 | """Do some processing of the templates""" 16 | import os 17 | import tempfile 18 | from typing import Dict 19 | 20 | import yaml 21 | from tektonbundle import tektonbundle 22 | 23 | from tektonasacode import config, utils 24 | 25 | 26 | class Process: 27 | """Main processing class""" 28 | 29 | def __init__(self, github_cls): 30 | self.utils = utils.Utils() 31 | self.github = github_cls 32 | self.checked_repo = config.REPOSITORY_DIR 33 | self.moulinette = False 34 | 35 | def apply(self, processed_templates, namespace): 36 | """Apply templates from a dict of filename=>content""" 37 | for filename in processed_templates: 38 | print(f"🌊 Processing {filename} in {namespace}") 39 | content = processed_templates[filename] 40 | tmpfile = tempfile.NamedTemporaryFile(delete=False).name 41 | open(tmpfile, "w").write(content) 42 | self.utils.execute( 43 | f"kubectl create -f {tmpfile} -n {namespace}", 44 | f"Cannot create {filename} in {namespace}", 45 | ) 46 | os.remove(tmpfile) 47 | 48 | def process_owner_section_or_file(self, jeez): 49 | """Process the owner section from config or a file on the tip branch""" 50 | pr_login = self.utils.get_key("pull_request.user.login", jeez) 51 | repo_owner = self.utils.get_key("repository.owner.login", jeez) 52 | owner_repo = self.utils.get_key("pull_request.base.repo.full_name", 53 | jeez) 54 | repo_full_name = self.utils.get_key("repository.full_name", jeez) 55 | 56 | # Always allow the repo owner to submit. 
57 | if repo_owner == pr_login: 58 | return True 59 | 60 | owners_allowed = [] 61 | owner_content = self.github.get_file_content( 62 | owner_repo, os.path.join(config.TEKTON_ASA_CODE_DIR, "OWNERS")) 63 | if owner_content: 64 | owners_allowed = [ 65 | x.strip() for x in owner_content.decode("utf8").split("\n") 66 | if x != "" 67 | ] 68 | else: 69 | owner_content = yaml.safe_load( 70 | self.github.get_file_content( 71 | owner_repo, 72 | os.path.join(config.TEKTON_ASA_CODE_DIR, "tekton.yaml"))) 73 | if owner_content and 'owners' in owner_content: 74 | owners_allowed = owner_content['owners'] 75 | # By default we deny unless explictely allowed 76 | allowed = False 77 | 78 | for owner in owners_allowed: 79 | # If the line starts with a @ it means it's a github 80 | # organization, check if the user is part of it 81 | if owner[0] == "@": 82 | allowed = self.github.check_organization_of_user( 83 | owner[1:], pr_login) 84 | else: 85 | if owner == pr_login: 86 | allowed = True 87 | 88 | if pr_login in self.github.get_repo_contributors(repo_full_name): 89 | allowed = True 90 | 91 | return allowed 92 | 93 | def process_yaml_ini(self, yaml_file, jeez, parameters_extras): 94 | """Process yaml ini files""" 95 | cfg = yaml.safe_load(open(yaml_file, 'r')) 96 | if not cfg: 97 | return {'allowed': False, 'templates': []} 98 | 99 | processed = {'templates': {}} 100 | owner_repo = self.utils.get_key("pull_request.base.repo.full_name", 101 | jeez) 102 | 103 | if 'bundled' in cfg and cfg['bundled']: 104 | self.moulinette = True 105 | 106 | if 'tasks' in cfg: 107 | for task in cfg['tasks']: 108 | if 'http://' in task or 'https://' in task: 109 | url = task 110 | else: 111 | if ':' in task and not task.endswith(":latest"): 112 | name, version = task.split(":") 113 | else: 114 | name = task.replace( 115 | ":latest", 116 | "") if task.endswith(":latest") else task 117 | version = self.github.get_task_latest_version( 118 | config.TEKTON_CATALOG_REPOSITORY, name) 119 | url = f"{config.GITHUB_RAW_URL}/{name}/{version}/{name}.yaml" 120 | ret = self.utils.kapply(self.utils.retrieve_url(url), 121 | jeez, 122 | parameters_extras, 123 | name=url) 124 | processed['templates'][ret[0]] = ret[1] 125 | 126 | processed['allowed'] = self.process_owner_section_or_file(jeez) 127 | 128 | # Only get secrets that belong to that owner/repo, so malicious user 129 | # cannot get things they should not. 130 | if 'secrets' in cfg: 131 | # Do not pass input from cfg to kubectl_get or this could get 132 | # exploited. 133 | all_secrets_for_repo = self.utils.kubectl_get( 134 | "secret", 135 | output_type="yaml", 136 | labels={ 137 | "tekton/asa-code-repository-name": 138 | owner_repo.split("/")[1], 139 | "tekton/asa-code-repository-owner": 140 | owner_repo.split("/")[0] 141 | }) 142 | for secret in cfg['secrets']: 143 | for allsecretin in all_secrets_for_repo['items']: 144 | secretname = allsecretin['metadata']['name'] 145 | if secretname == secret: 146 | processed['templates'][ 147 | f"{secretname}.secret.yaml"] = yaml.safe_dump( 148 | allsecretin) 149 | 150 | # TODO: i don't like this, i probably goign to remove it 151 | # we just need this temporary because of the operator and 152 | # pipelines-catalog are not in synchronize. 
153 | processed['prerun'] = [] 154 | if 'prerun' in cfg: 155 | processed['prerun'] = cfg['prerun'] 156 | 157 | if 'files' in cfg: 158 | for filepath in cfg['files']: 159 | fpath = os.path.join(self.checked_repo, 160 | config.TEKTON_ASA_CODE_DIR, filepath) 161 | if not os.path.exists(fpath): 162 | raise Exception( 163 | f"{filepath} does not exists in {config.TEKTON_ASA_CODE_DIR} directory" 164 | ) 165 | ret = self.utils.kapply(fpath, jeez, parameters_extras) 166 | processed['templates'][ret[0]] = ret[1] 167 | else: 168 | processed['templates'].update( 169 | self.process_all_yaml_in_dir(jeez, 170 | parameters_extras)['templates']) 171 | return processed 172 | 173 | def process_all_yaml_in_dir(self, jeez, parameters_extras): 174 | """Process directory directly, not caring about stuff just getting every 175 | yaml files in there""" 176 | processed = {'templates': {}} 177 | processed['allowed'] = self.process_owner_section_or_file(jeez) 178 | 179 | for filename in os.listdir( 180 | os.path.join(self.checked_repo, config.TEKTON_ASA_CODE_DIR)): 181 | if filename.split(".")[-1] not in ["yaml", "yml"]: 182 | continue 183 | if filename == "tekton.yaml": 184 | continue 185 | filename = os.path.join(self.checked_repo, 186 | config.TEKTON_ASA_CODE_DIR, filename) 187 | ret = self.utils.kapply(filename, jeez, parameters_extras) 188 | processed['templates'][ret[0]] = ret[1] 189 | 190 | return processed 191 | 192 | def mouline_this(self, templates: Dict[str, str]): # pylint: disable=no-self-use 193 | """Takes the templates""" 194 | bundled = [] 195 | print("🍝 Files bundled: ") 196 | for template in templates: 197 | if template.startswith("https://"): 198 | local_path = os.path.join(config.REPOSITORY_DIR, 199 | config.TEKTON_ASA_CODE_DIR, 200 | os.path.basename(template)) 201 | open(local_path, 'w').write(templates[template]) 202 | bundled.append(local_path) 203 | print(" • " + 204 | template.replace(config.GITHUB_RAW_URL + "/", "")) 205 | else: 206 | print(" • " + 207 | template.replace(config.REPOSITORY_DIR + "/", "")) 208 | bundled.append(template) 209 | bundled = tektonbundle.parse(bundled, parameters=[], skip_inlining=[]) 210 | thebundle = f"--- \n{bundled['bundle']}--- \n" 211 | thebundle += "--- \n".join(bundled['ignored_not_k8'] + 212 | bundled['ignored_not_tekton']) 213 | return {'bundled-file.yaml': thebundle} 214 | 215 | def process_tekton_dir(self, jeez, parameters_extras): 216 | """Apply templates according, check first for tekton.yaml and then 217 | process all yaml files in directory""" 218 | ret = {} 219 | 220 | if os.path.exists( 221 | f"{self.checked_repo}/{config.TEKTON_ASA_CODE_DIR}/tekton.yaml" 222 | ): 223 | processed_yaml_ini = self.process_yaml_ini( 224 | f"{self.checked_repo}/{config.TEKTON_ASA_CODE_DIR}/tekton.yaml", 225 | jeez, parameters_extras) 226 | if processed_yaml_ini: 227 | ret = processed_yaml_ini 228 | else: 229 | ret = self.process_all_yaml_in_dir(jeez, parameters_extras) 230 | 231 | if self.moulinette and ret['templates']: 232 | ret['templates'] = self.mouline_this(ret['templates']) 233 | 234 | return ret 235 | -------------------------------------------------------------------------------- /tektonasacode/main.py: -------------------------------------------------------------------------------- 1 | # coding=utf8 2 | """ 3 | Tekton as a CODE: Main script 4 | """ 5 | import json 6 | import os 7 | import random 8 | import re 9 | import string 10 | import sys 11 | import tempfile 12 | import time 13 | import traceback 14 | 15 | from tektonasacode import config, github, 
process_templates, utils 16 | 17 | 18 | class TektonAsaCode: 19 | """Tekton as a Code main class""" 20 | 21 | def __init__(self, github_token, github_json): 22 | self.utils = utils.Utils() 23 | self.github = github.Github(github_token) 24 | self.pcs = process_templates.Process(self.github) 25 | self.check_run_id = None 26 | self.repo_full_name = "" 27 | self.github_json = github_json.replace("\n", " ").replace("\r", " ") 28 | self.console_pipelinerun_link = f"{self.utils.get_openshift_console_url(os.environ.get('TKC_NAMESPACE'))}{os.environ.get('TKC_PIPELINERUN')}/logs/tekton-asa-code" 29 | 30 | def github_checkout_pull_request(self, repo_owner_login, repo_html_url, 31 | pull_request_number, pull_request_sha): 32 | """Checkout a pull request from github""" 33 | if not os.path.exists(config.REPOSITORY_DIR): 34 | os.makedirs(config.REPOSITORY_DIR) 35 | os.chdir(config.REPOSITORY_DIR) 36 | 37 | exec_init = self.utils.execute("git init") 38 | if exec_init.returncode != 0: 39 | print( 40 | "😞 Error creating a GitHUB repo in {config.REPOSITORY_DIR}" 41 | ) 42 | print(exec_init.stdout.decode()) 43 | print(exec_init.stderr.decode()) 44 | else: 45 | os.chdir(config.REPOSITORY_DIR) 46 | exec_init = self.utils.execute("git remote remove origin") 47 | 48 | cmds = [ 49 | f"git remote add -f origin https://{repo_owner_login}:{self.github.token}@{repo_html_url.replace('https://', '')}", 50 | f"git fetch origin refs/pull/{pull_request_number}/head", 51 | f"git reset --hard {pull_request_sha}", 52 | ] 53 | for cmd in cmds: 54 | self.utils.execute( 55 | cmd, 56 | "Error checking out the GitHUB repo %s to the branch %s" % 57 | (repo_html_url, pull_request_sha), 58 | ) 59 | 60 | def create_temporary_namespace(self, namespace, repo_full_name, 61 | pull_request_number): 62 | """Create a temporary namespace and labels""" 63 | self.utils.execute(f"kubectl create ns {namespace}", 64 | "Cannot create a temporary namespace") 65 | print(f"🚜 Namespace {namespace} has been created") 66 | 67 | # Apply label! 68 | self.utils.execute( 69 | f'kubectl label namespace {namespace} tekton.dev/generated-by="tekton-asa-code"' 70 | ) 71 | self.utils.execute( 72 | f'kubectl label namespace {namespace} tekton.dev/pr="{repo_full_name.replace("/", "-")}-{pull_request_number}"' 73 | ) 74 | 75 | def grab_output(self, namespace): 76 | """Grab output of the last pipelinerun in a namespace""" 77 | output_file = tempfile.NamedTemporaryFile(delete=False).name 78 | self.utils.stream( 79 | f"tkn pr logs -n {namespace} --follow --last", 80 | output_file, 81 | f"Cannot show Pipelinerun log in {namespace}", 82 | ) 83 | output = open(output_file).read() 84 | 85 | # TODO: Need a better way! 86 | tkn_describe_output = self.utils.execute( 87 | f"tkn pr describe -n {namespace} --last").stdout.decode() 88 | regexp = re.compile(r"^STARTED\s*DURATION\s*STATUS\n(.*)$", 89 | re.MULTILINE) 90 | status = regexp.findall(tkn_describe_output)[0].split(" ")[-1] 91 | 92 | pipelinerun_jeez = self.utils.kubectl_get("pipelinerun", 93 | output_type="json", 94 | namespace=namespace) 95 | pipelinerun_status = "\n".join( 96 | self.utils.process_pipelineresult(pipelinerun_jeez['items'][0])) 97 | 98 | report = f"""{pipelinerun_status} 99 | 100 | {self.utils.get_errors(output)} 101 | 102 |
<details> 103 | <summary>More detailed status</summary> 104 | <pre>{tkn_describe_output}</pre> 105 | </details>
106 | 107 | """ 108 | status_emoji = "❌" if "failed" in status.lower() else "✅" 109 | report_output = { 110 | "title": "CI Run: Report", 111 | "summary": f"{status_emoji} CI has **{status}**", 112 | "text": report 113 | } 114 | 115 | return status, tkn_describe_output, report_output 116 | 117 | def main(self): 118 | """main function""" 119 | jeez = self.github.filter_event_json(json.loads(self.github_json)) 120 | self.repo_full_name = self.utils.get_key("repository.full_name", jeez) 121 | random_str = "".join( 122 | random.choices(string.ascii_letters + string.digits, k=2)).lower() 123 | pull_request_sha = self.utils.get_key("pull_request.head.sha", jeez) 124 | pull_request_number = self.utils.get_key("pull_request.number", jeez) 125 | repo_owner_login = self.utils.get_key("repository.owner.login", jeez) 126 | repo_html_url = self.utils.get_key("repository.html_url", jeez) 127 | namespace = f"pull-{pull_request_number}-{pull_request_sha[:5]}-{random_str}" 128 | 129 | # Extras template parameters to add aside of the stuff from json 130 | parameters_extras = { 131 | "revision": pull_request_sha, 132 | "repo_url": repo_html_url, 133 | "repo_owner": repo_owner_login, 134 | "namespace": namespace, 135 | "openshift_console_pipelinerun_href": 136 | self.console_pipelinerun_link, 137 | } 138 | 139 | target_url = self.utils.get_openshift_console_url(namespace) 140 | 141 | check_run = self.github.create_check_run(self.repo_full_name, 142 | target_url, pull_request_sha) 143 | 144 | self.check_run_id = check_run['id'] 145 | 146 | self.github_checkout_pull_request(repo_owner_login, repo_html_url, 147 | pull_request_number, 148 | pull_request_sha) 149 | 150 | # Exit if there is not tekton directory 151 | if not os.path.exists(config.TEKTON_ASA_CODE_DIR): 152 | # Set status as pending 153 | self.github.set_status( 154 | self.repo_full_name, 155 | check_run['id'], 156 | "https://tenor.com/search/sad-cat-gifs", 157 | conclusion='neutral', 158 | status="completed", 159 | output={ 160 | "title": 161 | "CI Run: Skipped", 162 | "summary": 163 | "Skipping this check 🤷🏻‍♀️", 164 | "text": 165 | f"No tekton-asa-code directory '{config.TEKTON_ASA_CODE_DIR}' has been found in this repository 😿", 166 | }) 167 | print("😿 No tekton directory has been found") 168 | sys.exit(0) 169 | 170 | processed = self.pcs.process_tekton_dir(jeez, parameters_extras) 171 | if processed['allowed']: 172 | print( 173 | f"✅ User {self.utils.get_key('pull_request.user.login', jeez)} is allowed to run this PR" 174 | ) 175 | else: 176 | message = f"❌👮‍♂️ Skipping running the CI since the user **{self.utils.get_key('pull_request.user.login', jeez)}** is not in the owner file or section" 177 | self.github.set_status( 178 | self.repo_full_name, 179 | check_run['id'], 180 | "https://tenor.com/search/police-gifs", 181 | conclusion="neutral", 182 | output={ 183 | "title": "CI Run: Denied", 184 | "summary": "Skipping checking this repository 🤷🏻‍♀️", 185 | "text": message, 186 | }, 187 | status="completed", 188 | ) 189 | raise Exception(message) 190 | 191 | self.create_temporary_namespace(namespace, self.repo_full_name, 192 | pull_request_number) 193 | self.pcs.apply(processed['templates'], namespace) 194 | 195 | if config.ALLOW_PRERUNS_CMD and 'prerun' in processed: 196 | for cmd in processed['prerun']: 197 | _, cmd_processed = self.utils.kapply(cmd, 198 | jeez, 199 | parameters_extras, 200 | name="command") 201 | print(f"⚙️ Running prerun command {cmd_processed}") 202 | self.utils.execute( 203 | cmd_processed, 204 | check_error=f"Cannot run 
command '{cmd_processed}'") 205 | 206 | time.sleep(2) 207 | 208 | status, describe_output, report_output = self.grab_output(namespace) 209 | print(describe_output) 210 | 211 | # Set final status 212 | self.github.set_status( 213 | self.repo_full_name, 214 | check_run["id"], 215 | # Only set target_url which goest to the namespace in case of failure, 216 | # since we delete the namespace in case of success. 217 | ("failed" in status.lower() and target_url 218 | or self.console_pipelinerun_link), 219 | ("failed" in status.lower() and "failure" or "success"), 220 | report_output, 221 | status="completed") 222 | 223 | if "failed" in status.lower(): 224 | sys.exit(1) 225 | 226 | # Delete the namespace on success ,since this consumes too much 227 | # resources to be kept. Maybe do this as variable? 228 | self.utils.execute( 229 | f"echo kubectl delete ns {namespace}", 230 | "Cannot delete temporary namespace {namespace}", 231 | ) 232 | 233 | def runwrap(self): 234 | """Wrap main() and catch errors to report if we can""" 235 | try: 236 | self.main() 237 | except github.GithubEventNotProcessed: 238 | return 239 | except Exception as err: 240 | exc_type, exc_value, exc_tb = sys.exc_info() 241 | tracebackerr = traceback.format_exception(exc_type, exc_value, 242 | exc_tb) 243 | if self.check_run_id: 244 | self.github.set_status( 245 | repository_full_name=self.repo_full_name, 246 | check_run_id=self.check_run_id, 247 | target_url=self.console_pipelinerun_link, 248 | conclusion="failure", 249 | output={ 250 | "title": "CI Run: Failure", 251 | "summary": "Tekton asa code has failed 💣", 252 | "text": f'
{"
".join(tracebackerr)}
', 253 | }, 254 | status="completed") 255 | raise err 256 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ⚠️ This project has been deprecated ⚠️ 2 | 3 | ## Please use [openshift-pipelines/pipelines-as-code](https://github.com/openshift-pipelines/pipelines-as-code/) 4 | 5 | [![Container Repository on Quay](https://quay.io/repository/chmouel/tekton-asa-code/status "Container Repository on Quay")](https://quay.io/repository/chmouel/tekton-asa-code) 6 | 7 | 355 | --------------------------------------------------------------------------------
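For reference, the `.tekton/tekton.yaml` file parsed by `Process.process_yaml_ini` in `tektonasacode/process_templates.py` understands a small set of keys: `owners`, `bundled`, `tasks`, `secrets`, `files` and `prerun`. The sketch below is illustrative only — the organization, secret name, task names and versions are made-up assumptions, not values taken from this repository.

```yaml
# Hypothetical .tekton/tekton.yaml — key names follow process_yaml_ini;
# the concrete values (@exampleorg, example-registry-secret, versions) are assumptions.
owners:
  - "@exampleorg"            # an entry starting with @ is checked as a GitHub organization

bundled: true                # bundle every template into a single file via tektonbundle

tasks:
  - git-clone                # bare name: the latest catalog version is resolved
  - pylint:latest            # ':latest' is treated like a bare name
  - yaml-lint:0.1            # 'name:version' pins a catalog version
  # a full http(s):// URL would be fetched as-is

secrets:
  - example-registry-secret  # only secrets labelled for this owner/repo are applied

files:                       # applied from the .tekton directory, in this order
  - pipeline.yaml
  - run.yaml
```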