├── .github └── workflows │ ├── alibabacloud.yml │ ├── aws.yml │ ├── azure-functions-app-python.yml │ ├── azure-webapps-python.yml │ ├── codeql.yml │ ├── dart.yml │ ├── django.yml │ ├── generator-generic-ossf-slsa3-publish.yml │ ├── google.yml │ ├── gradle.yml │ ├── greetings.yml │ ├── ibm.yml │ ├── jekyll.yml │ ├── label.yml │ ├── manual.yml │ ├── msbuild.yml │ ├── pylint.yml │ ├── python-app.yml │ ├── python-package-conda.yml │ ├── python-package.yml │ ├── python-publish.yml │ ├── stale.yml │ └── tencent.yml ├── .whitesource ├── LICENSE.md ├── README.md ├── pi_python.py ├── pi_python_test.py └── src └── constants.py /.github/workflows/alibabacloud.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build and push a new container image to Alibaba Cloud Container Registry (ACR), 2 | # and then will deploy it to Alibaba Cloud Container Service for Kubernetes (ACK), when there is a push to the "main" branch. 3 | # 4 | # To use this workflow, you will need to complete the following set-up steps: 5 | # 6 | # 1. Create an ACR repository to store your container images. 7 | # You can use ACR EE instance for more security and better performance. 8 | # For instructions see https://www.alibabacloud.com/help/doc-detail/142168.htm 9 | # 10 | # 2. Create an ACK cluster to run your containerized application. 11 | # You can use ACK Pro cluster for more security and better performance. 12 | # For instructions see https://www.alibabacloud.com/help/doc-detail/95108.htm 13 | # 14 | # 3. Store your AccessKey pair in GitHub Actions secrets named `ACCESS_KEY_ID` and `ACCESS_KEY_SECRET`. 15 | # For instructions on setting up secrets see: https://developer.github.com/actions/managing-workflows/storing-secrets/ 16 | # 17 | # 4. Change the values for the REGION_ID, REGISTRY, NAMESPACE, IMAGE, ACK_CLUSTER_ID, and ACK_DEPLOYMENT_NAME. 18 | # 19 | 20 | name: Build and Deploy to ACK 21 | 22 | on: 23 | push: 24 | branches: [ "main" ] 25 | 26 | # Environment variables available to all jobs and steps in this workflow. 27 | env: 28 | REGION_ID: cn-hangzhou 29 | REGISTRY: registry.cn-hangzhou.aliyuncs.com 30 | NAMESPACE: namespace 31 | IMAGE: repo 32 | TAG: ${{ github.sha }} 33 | ACK_CLUSTER_ID: clusterID 34 | ACK_DEPLOYMENT_NAME: nginx-deployment 35 | 36 | ACR_EE_REGISTRY: myregistry.cn-hangzhou.cr.aliyuncs.com 37 | ACR_EE_INSTANCE_ID: instanceID 38 | ACR_EE_NAMESPACE: namespace 39 | ACR_EE_IMAGE: repo 40 | ACR_EE_TAG: ${{ github.sha }} 41 | 42 | permissions: 43 | contents: read 44 | 45 | jobs: 46 | build: 47 | runs-on: ubuntu-latest 48 | environment: production 49 | 50 | steps: 51 | - name: Checkout 52 | uses: actions/checkout@v3 53 | 54 | # 1.1 Login to ACR 55 | - name: Login to ACR with the AccessKey pair 56 | uses: aliyun/acr-login@v1 57 | with: 58 | region-id: "${{ env.REGION_ID }}" 59 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 60 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 61 | 62 | # 1.2 Build and push image to ACR 63 | - name: Build and push image to ACR 64 | run: | 65 | docker build --tag "$REGISTRY/$NAMESPACE/$IMAGE:$TAG" .
66 | docker push "$REGISTRY/$NAMESPACE/$IMAGE:$TAG" 67 | 68 | # 1.3 Scan image in ACR 69 | - name: Scan image in ACR 70 | uses: aliyun/acr-scan@v1 71 | with: 72 | region-id: "${{ env.REGION_ID }}" 73 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 74 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 75 | repository: "${{ env.NAMESPACE }}/${{ env.IMAGE }}" 76 | tag: "${{ env.TAG }}" 77 | 78 | # 2.1 (Optional) Login to ACR EE 79 | - uses: actions/checkout@v3 80 | - name: Login to ACR EE with the AccessKey pair 81 | uses: aliyun/acr-login@v1 82 | with: 83 | login-server: "https://${{ env.ACR_EE_REGISTRY }}" 84 | region-id: "${{ env.REGION_ID }}" 85 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 86 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 87 | instance-id: "${{ env.ACR_EE_INSTANCE_ID }}" 88 | 89 | # 2.2 (Optional) Build and push image to ACR EE 90 | - name: Build and push image to ACR EE 91 | run: | 92 | docker build -t "$ACR_EE_REGISTRY/$ACR_EE_NAMESPACE/$ACR_EE_IMAGE:$TAG" . 93 | docker push "$ACR_EE_REGISTRY/$ACR_EE_NAMESPACE/$ACR_EE_IMAGE:$TAG" 94 | # 2.3 (Optional) Scan image in ACR EE 95 | - name: Scan image in ACR EE 96 | uses: aliyun/acr-scan@v1 97 | with: 98 | region-id: "${{ env.REGION_ID }}" 99 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 100 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 101 | instance-id: "${{ env.ACR_EE_INSTANCE_ID }}" 102 | repository: "${{ env.ACR_EE_NAMESPACE}}/${{ env.ACR_EE_IMAGE }}" 103 | tag: "${{ env.ACR_EE_TAG }}" 104 | 105 | # 3.1 Set ACK context 106 | - name: Set K8s context 107 | uses: aliyun/ack-set-context@v1 108 | with: 109 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 110 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 111 | cluster-id: "${{ env.ACK_CLUSTER_ID }}" 112 | 113 | # 3.2 Deploy the image to the ACK cluster 114 | - name: Set up Kustomize 115 | run: |- 116 | curl -s "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" | bash /dev/stdin 3.8.6 117 | - name: Deploy 118 | run: |- 119 | ./kustomize edit set image REGISTRY/NAMESPACE/IMAGE:TAG=$REGISTRY/$NAMESPACE/$IMAGE:$TAG 120 | ./kustomize build . | kubectl apply -f - 121 | kubectl rollout status deployment/$ACK_DEPLOYMENT_NAME 122 | kubectl get services -o wide 123 | -------------------------------------------------------------------------------- /.github/workflows/aws.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build and push a new container image to Amazon ECR, 2 | # and then will deploy a new task definition to Amazon ECS, when there is a push to the "main" branch. 3 | # 4 | # To use this workflow, you will need to complete the following set-up steps: 5 | # 6 | # 1. Create an ECR repository to store your images. 7 | # For example: `aws ecr create-repository --repository-name my-ecr-repo --region us-east-2`. 8 | # Replace the value of the `ECR_REPOSITORY` environment variable in the workflow below with your repository's name. 9 | # Replace the value of the `AWS_REGION` environment variable in the workflow below with your repository's region. 10 | # 11 | # 2. Create an ECS task definition, an ECS cluster, and an ECS service. 12 | # For example, follow the Getting Started guide on the ECS console: 13 | # https://us-east-2.console.aws.amazon.com/ecs/home?region=us-east-2#/firstRun 14 | # Replace the value of the `ECS_SERVICE` environment variable in the workflow below with the name you set for the Amazon ECS service.
15 | # Replace the value of the `ECS_CLUSTER` environment variable in the workflow below with the name you set for the cluster. 16 | # 17 | # 3. Store your ECS task definition as a JSON file in your repository. 18 | # The format should follow the output of `aws ecs register-task-definition --generate-cli-skeleton`. 19 | # Replace the value of the `ECS_TASK_DEFINITION` environment variable in the workflow below with the path to the JSON file. 20 | # Replace the value of the `CONTAINER_NAME` environment variable in the workflow below with the name of the container 21 | # in the `containerDefinitions` section of the task definition. 22 | # 23 | # 4. Store an IAM user access key in GitHub Actions secrets named `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`. 24 | # See the documentation for each action used below for the recommended IAM policies for this IAM user, 25 | # and best practices on handling the access key credentials. 26 | 27 | name: Deploy to Amazon ECS 28 | 29 | on: 30 | push: 31 | branches: [ "main" ] 32 | 33 | env: 34 | AWS_REGION: MY_AWS_REGION # set this to your preferred AWS region, e.g. us-west-1 35 | ECR_REPOSITORY: MY_ECR_REPOSITORY # set this to your Amazon ECR repository name 36 | ECS_SERVICE: MY_ECS_SERVICE # set this to your Amazon ECS service name 37 | ECS_CLUSTER: MY_ECS_CLUSTER # set this to your Amazon ECS cluster name 38 | ECS_TASK_DEFINITION: MY_ECS_TASK_DEFINITION # set this to the path to your Amazon ECS task definition 39 | # file, e.g. .aws/task-definition.json 40 | CONTAINER_NAME: MY_CONTAINER_NAME # set this to the name of the container in the 41 | # containerDefinitions section of your task definition 42 | 43 | permissions: 44 | contents: read 45 | 46 | jobs: 47 | deploy: 48 | name: Deploy 49 | runs-on: ubuntu-latest 50 | environment: production 51 | 52 | steps: 53 | - name: Checkout 54 | uses: actions/checkout@v3 55 | 56 | - name: Configure AWS credentials 57 | uses: aws-actions/configure-aws-credentials@v1 58 | with: 59 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 60 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 61 | aws-region: ${{ env.AWS_REGION }} 62 | 63 | - name: Login to Amazon ECR 64 | id: login-ecr 65 | uses: aws-actions/amazon-ecr-login@v1 66 | 67 | - name: Build, tag, and push image to Amazon ECR 68 | id: build-image 69 | env: 70 | ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} 71 | IMAGE_TAG: ${{ github.sha }} 72 | run: | 73 | # Build a docker container and 74 | # push it to ECR so that it can 75 | # be deployed to ECS. 76 | docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . 
77 | docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG 78 | echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT 79 | 80 | - name: Fill in the new image ID in the Amazon ECS task definition 81 | id: task-def 82 | uses: aws-actions/amazon-ecs-render-task-definition@v1 83 | with: 84 | task-definition: ${{ env.ECS_TASK_DEFINITION }} 85 | container-name: ${{ env.CONTAINER_NAME }} 86 | image: ${{ steps.build-image.outputs.image }} 87 | 88 | - name: Deploy Amazon ECS task definition 89 | uses: aws-actions/amazon-ecs-deploy-task-definition@v1 90 | with: 91 | task-definition: ${{ steps.task-def.outputs.task-definition }} 92 | service: ${{ env.ECS_SERVICE }} 93 | cluster: ${{ env.ECS_CLUSTER }} 94 | wait-for-service-stability: true 95 | -------------------------------------------------------------------------------- /.github/workflows/azure-functions-app-python.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a Python app and deploy it to an Azure Functions App on Linux when a commit is pushed to your default branch. 2 | # 3 | # This workflow assumes you have already created the target Azure Functions app. 4 | # For instructions see https://learn.microsoft.com/en-us/azure/azure-functions/create-first-function-vs-code-python?pivots=python-mode-configuration 5 | # 6 | # To configure this workflow: 7 | # 1. Set up the following secrets in your repository: 8 | # - AZURE_FUNCTIONAPP_PUBLISH_PROFILE 9 | # 2. Change env variables for your configuration. 10 | # 11 | # For more information on: 12 | # - GitHub Actions for Azure: https://github.com/Azure/Actions 13 | # - Azure Functions Action: https://github.com/Azure/functions-action 14 | # - Publish Profile: https://github.com/Azure/functions-action#using-publish-profile-as-deployment-credential-recommended 15 | # - Azure Service Principal for RBAC: https://github.com/Azure/functions-action#using-azure-service-principal-for-rbac-as-deployment-credential 16 | # 17 | # For more samples to get started with GitHub Action workflows to deploy to Azure: https://github.com/Azure/actions-workflow-samples/tree/master/FunctionApp 18 | 19 | name: Deploy Python project to Azure Function App 20 | 21 | on: 22 | push: 23 | branches: ["main"] 24 | 25 | env: 26 | AZURE_FUNCTIONAPP_NAME: 'your-app-name' # set this to your function app name on Azure 27 | AZURE_FUNCTIONAPP_PACKAGE_PATH: '.' # set this to the path to your function app project, defaults to the repository root 28 | PYTHON_VERSION: '3.9' # set this to the python version to use (e.g. 
'3.6', '3.7', '3.8') 29 | 30 | jobs: 31 | build-and-deploy: 32 | runs-on: ubuntu-latest 33 | environment: dev 34 | steps: 35 | - name: 'Checkout GitHub Action' 36 | uses: actions/checkout@v3 37 | 38 | # If you want to use Azure RBAC instead of Publish Profile, then uncomment the task below 39 | # - name: 'Login via Azure CLI' 40 | # uses: azure/login@v1 41 | # with: 42 | # creds: ${{ secrets.AZURE_RBAC_CREDENTIALS }} # set up AZURE_RBAC_CREDENTIALS secrets in your repository 43 | 44 | - name: Setup Python ${{ env.PYTHON_VERSION }} Environment 45 | uses: actions/setup-python@v4 46 | with: 47 | python-version: ${{ env.PYTHON_VERSION }} 48 | 49 | - name: 'Resolve Project Dependencies Using Pip' 50 | shell: bash 51 | run: | 52 | pushd './${{ env.AZURE_FUNCTIONAPP_PACKAGE_PATH }}' 53 | python -m pip install --upgrade pip 54 | pip install -r requirements.txt --target=".python_packages/lib/site-packages" 55 | popd 56 | 57 | - name: 'Run Azure Functions Action' 58 | uses: Azure/functions-action@v1 59 | id: fa 60 | with: 61 | app-name: ${{ env.AZURE_FUNCTIONAPP_NAME }} 62 | package: ${{ env.AZURE_FUNCTIONAPP_PACKAGE_PATH }} 63 | publish-profile: ${{ secrets.AZURE_FUNCTIONAPP_PUBLISH_PROFILE }} # Remove publish-profile to use Azure RBAC 64 | scm-do-build-during-deployment: true 65 | enable-oryx-build: true 66 | -------------------------------------------------------------------------------- /.github/workflows/azure-webapps-python.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build and push a Python application to an Azure Web App when a commit is pushed to your default branch. 2 | # 3 | # This workflow assumes you have already created the target Azure App Service web app. 4 | # For instructions see https://docs.microsoft.com/en-us/azure/app-service/quickstart-python?tabs=bash&pivots=python-framework-flask 5 | # 6 | # To configure this workflow: 7 | # 8 | # 1. Download the Publish Profile for your Azure Web App. You can download this file from the Overview page of your Web App in the Azure Portal. 9 | # For more information: https://docs.microsoft.com/en-us/azure/app-service/deploy-github-actions?tabs=applevel#generate-deployment-credentials 10 | # 11 | # 2. Create a secret in your repository named AZURE_WEBAPP_PUBLISH_PROFILE, paste the publish profile contents as the value of the secret. 12 | # For instructions on obtaining the publish profile see: https://docs.microsoft.com/azure/app-service/deploy-github-actions#configure-the-github-secret 13 | # 14 | # 3. Change the value for the AZURE_WEBAPP_NAME. Optionally, change the PYTHON_VERSION environment variables below. 
15 | # 16 | # For more information on GitHub Actions for Azure: https://github.com/Azure/Actions 17 | # For more information on the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy 18 | # For more samples to get started with GitHub Action workflows to deploy to Azure: https://github.com/Azure/actions-workflow-samples 19 | 20 | name: Build and deploy Python app to Azure Web App 21 | 22 | env: 23 | AZURE_WEBAPP_NAME: your-app-name # set this to the name of your Azure Web App 24 | PYTHON_VERSION: '3.8' # set this to the Python version to use 25 | 26 | on: 27 | push: 28 | branches: [ "main" ] 29 | workflow_dispatch: 30 | 31 | permissions: 32 | contents: read 33 | 34 | jobs: 35 | build: 36 | runs-on: ubuntu-latest 37 | 38 | steps: 39 | - uses: actions/checkout@v3 40 | 41 | - name: Set up Python version 42 | uses: actions/setup-python@v3.0.0 43 | with: 44 | python-version: ${{ env.PYTHON_VERSION }} 45 | cache: 'pip' 46 | 47 | - name: Create and start virtual environment 48 | run: | 49 | python -m venv venv 50 | source venv/bin/activate 51 | 52 | - name: Install dependencies 53 | run: pip install -r requirements.txt 54 | 55 | # Optional: Add step to run tests here (PyTest, Django test suites, etc.) 56 | 57 | - name: Upload artifact for deployment jobs 58 | uses: actions/upload-artifact@v3 59 | with: 60 | name: python-app 61 | path: | 62 | . 63 | !venv/ 64 | 65 | deploy: 66 | permissions: 67 | contents: none 68 | runs-on: ubuntu-latest 69 | needs: build 70 | environment: 71 | name: 'Development' 72 | url: ${{ steps.deploy-to-webapp.outputs.webapp-url }} 73 | 74 | steps: 75 | - name: Download artifact from build job 76 | uses: actions/download-artifact@v3 77 | with: 78 | name: python-app 79 | path: . 80 | 81 | - name: 'Deploy to Azure Web App' 82 | id: deploy-to-webapp 83 | uses: azure/webapps-deploy@v2 84 | with: 85 | app-name: ${{ env.AZURE_WEBAPP_NAME }} 86 | publish-profile: ${{ secrets.AZURE_WEBAPP_PUBLISH_PROFILE }} 87 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ "main" ] 17 | pull_request: 18 | branches: [ "main" ] 19 | schedule: 20 | - cron: '23 2 * * 5' 21 | 22 | jobs: 23 | analyze: 24 | name: Analyze 25 | # Runner size impacts CodeQL analysis time. To learn more, please see: 26 | # - https://gh.io/recommended-hardware-resources-for-running-codeql 27 | # - https://gh.io/supported-runners-and-hardware-resources 28 | # - https://gh.io/using-larger-runners 29 | # Consider using larger runners for possible analysis time improvements. 
30 | runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} 31 | timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} 32 | permissions: 33 | # required for all workflows 34 | security-events: write 35 | 36 | # only required for workflows in private repositories 37 | actions: read 38 | contents: read 39 | 40 | strategy: 41 | fail-fast: false 42 | matrix: 43 | language: [ 'python' ] 44 | # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ] 45 | # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both 46 | # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both 47 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 48 | 49 | steps: 50 | - name: Checkout repository 51 | uses: actions/checkout@v4 52 | 53 | # Initializes the CodeQL tools for scanning. 54 | - name: Initialize CodeQL 55 | uses: github/codeql-action/init@v3 56 | with: 57 | languages: ${{ matrix.language }} 58 | # If you wish to specify custom queries, you can do so here or in a config file. 59 | # By default, queries listed here will override any specified in a config file. 60 | # Prefix the list here with "+" to use these queries and those in the config file. 61 | 62 | # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 63 | # queries: security-extended,security-and-quality 64 | 65 | 66 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). 67 | # If this step fails, then you should remove it and run the build manually (see below) 68 | - name: Autobuild 69 | uses: github/codeql-action/autobuild@v3 70 | 71 | # ℹ️ Command-line programs to run using the OS shell. 72 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 73 | 74 | # If the Autobuild fails above, remove it and uncomment the following three lines. 75 | # Modify them (or add more) to build your code; refer to the EXAMPLE below for guidance. 76 | 77 | # - run: | 78 | # echo "Run, Build Application using script" 79 | # ./location_of_script_within_repo/buildscript.sh 80 | 81 | - name: Perform CodeQL Analysis 82 | uses: github/codeql-action/analyze@v3 83 | with: 84 | category: "/language:${{matrix.language}}" 85 | -------------------------------------------------------------------------------- /.github/workflows/dart.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | 6 | name: Dart 7 | 8 | on: 9 | push: 10 | branches: [ "main" ] 11 | pull_request: 12 | branches: [ "main" ] 13 | 14 | jobs: 15 | build: 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@v3 20 | 21 | # Note: This workflow uses the latest stable version of the Dart SDK.
22 | # You can specify other versions if desired, see documentation here: 23 | # https://github.com/dart-lang/setup-dart/blob/main/README.md 24 | # - uses: dart-lang/setup-dart@v1 25 | - uses: dart-lang/setup-dart@9a04e6d73cca37bd455e0608d7e5092f881fd603 26 | 27 | - name: Install dependencies 28 | run: dart pub get 29 | 30 | # Uncomment this step to verify the use of 'dart format' on each commit. 31 | # - name: Verify formatting 32 | # run: dart format --output=none --set-exit-if-changed . 33 | 34 | # Consider passing '--fatal-infos' for slightly stricter analysis. 35 | - name: Analyze project source 36 | run: dart analyze 37 | 38 | # Your project will need to have tests in test/ and a dependency on 39 | # package:test for this step to succeed. Note that Flutter projects will 40 | # want to change this to 'flutter test'. 41 | - name: Run tests 42 | run: dart test 43 | -------------------------------------------------------------------------------- /.github/workflows/django.yml: -------------------------------------------------------------------------------- 1 | name: Django CI 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ubuntu-latest 13 | strategy: 14 | max-parallel: 4 15 | matrix: 16 | python-version: [3.7, 3.8, 3.9] 17 | 18 | steps: 19 | - uses: actions/checkout@v3 20 | - name: Set up Python ${{ matrix.python-version }} 21 | uses: actions/setup-python@v3 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | - name: Install Dependencies 25 | run: | 26 | python -m pip install --upgrade pip 27 | pip install -r requirements.txt 28 | - name: Run Tests 29 | run: | 30 | python manage.py test 31 | -------------------------------------------------------------------------------- /.github/workflows/generator-generic-ossf-slsa3-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | 6 | # This workflow lets you generate SLSA provenance file for your project. 7 | # The generation satisfies level 3 for the provenance requirements - see https://slsa.dev/spec/v0.1/requirements 8 | # The project is an initiative of the OpenSSF (openssf.org) and is developed at 9 | # https://github.com/slsa-framework/slsa-github-generator. 10 | # The provenance file can be verified using https://github.com/slsa-framework/slsa-verifier. 11 | # For more information about SLSA and how it improves the supply-chain, visit slsa.dev. 12 | 13 | name: SLSA generic generator 14 | on: 15 | workflow_dispatch: 16 | release: 17 | types: [created] 18 | 19 | jobs: 20 | build: 21 | runs-on: ubuntu-latest 22 | outputs: 23 | digests: ${{ steps.hash.outputs.digests }} 24 | 25 | steps: 26 | - uses: actions/checkout@v3 27 | 28 | # ======================================================== 29 | # 30 | # Step 1: Build your artifacts. 31 | # 32 | # ======================================================== 33 | - name: Build artifacts 34 | run: | 35 | # These are some amazing artifacts. 36 | echo "artifact1" > artifact1 37 | echo "artifact2" > artifact2 38 | 39 | # ======================================================== 40 | # 41 | # Step 2: Add a step to generate the provenance subjects 42 | # as shown below. 
Update the sha256 sum arguments 43 | # to include all binaries that you generate 44 | # provenance for. 45 | # 46 | # ======================================================== 47 | - name: Generate subject for provenance 48 | id: hash 49 | run: | 50 | set -euo pipefail 51 | 52 | # List the artifacts the provenance will refer to. 53 | files=$(ls artifact*) 54 | # Generate the subjects (base64 encoded). 55 | echo "hashes=$(sha256sum $files | base64 -w0)" >> "${GITHUB_OUTPUT}" 56 | 57 | provenance: 58 | needs: [build] 59 | permissions: 60 | actions: read # To read the workflow path. 61 | id-token: write # To sign the provenance. 62 | contents: write # To add assets to a release. 63 | uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.4.0 64 | with: 65 | base64-subjects: "${{ needs.build.outputs.digests }}" 66 | upload-assets: true # Optional: Upload to a new release 67 | -------------------------------------------------------------------------------- /.github/workflows/google.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a docker container, publish it to Google Container Registry, and deploy it to GKE when there is a push to the "main" branch. 2 | # 3 | # To configure this workflow: 4 | # 5 | # 1. Ensure that your repository contains the necessary configuration for your Google Kubernetes Engine cluster, including deployment.yml, kustomization.yml, service.yml, etc. 6 | # 7 | # 2. Create and configure a Workload Identity Provider for GitHub (https://github.com/google-github-actions/auth#setting-up-workload-identity-federation) 8 | # 9 | # 3. Change the values for the GAR_LOCATION, GKE_ZONE, GKE_CLUSTER, IMAGE, REPOSITORY and DEPLOYMENT_NAME environment variables (below). 10 | # 11 | # For more support on how to run the workflow, please visit https://github.com/google-github-actions/setup-gcloud/tree/master/example-workflows/gke-kustomize 12 | 13 | name: Build and Deploy to GKE 14 | 15 | on: 16 | push: 17 | branches: [ "main" ] 18 | 19 | env: 20 | PROJECT_ID: ${{ secrets.GKE_PROJECT }} 21 | GAR_LOCATION: us-central1 # TODO: update region of the Artifact Registry 22 | GKE_CLUSTER: cluster-1 # TODO: update to cluster name 23 | GKE_ZONE: us-central1-c # TODO: update to cluster zone 24 | DEPLOYMENT_NAME: gke-test # TODO: update to deployment name 25 | REPOSITORY: samples # TODO: update to Artifact Registry docker repository 26 | IMAGE: static-site 27 | 28 | jobs: 29 | setup-build-publish-deploy: 30 | name: Setup, Build, Publish, and Deploy 31 | runs-on: ubuntu-latest 32 | environment: production 33 | 34 | permissions: 35 | contents: 'read' 36 | id-token: 'write' 37 | 38 | steps: 39 | - name: Checkout 40 | uses: actions/checkout@v3 41 | 42 | # Configure Workload Identity Federation and generate an access token. 
43 | - id: 'auth' 44 | name: 'Authenticate to Google Cloud' 45 | uses: 'google-github-actions/auth@v0' 46 | with: 47 | token_format: 'access_token' 48 | workload_identity_provider: 'projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider' 49 | service_account: 'my-service-account@my-project.iam.gserviceaccount.com' 50 | 51 | # Alternative option - authentication via credentials json 52 | # - id: 'auth' 53 | # uses: 'google-github-actions/auth@v0' 54 | # with: 55 | # credentials_json: '${{ secrets.GCP_CREDENTIALS }}' 56 | 57 | - name: Docker configuration 58 | run: |- 59 | echo ${{steps.auth.outputs.access_token}} | docker login -u oauth2accesstoken --password-stdin https://$GAR_LOCATION-docker.pkg.dev 60 | # Get the GKE credentials so we can deploy to the cluster 61 | - name: Set up GKE credentials 62 | uses: google-github-actions/get-gke-credentials@v0 63 | with: 64 | cluster_name: ${{ env.GKE_CLUSTER }} 65 | location: ${{ env.GKE_ZONE }} 66 | 67 | # Build the Docker image 68 | - name: Build 69 | run: |- 70 | docker build \ 71 | --tag "$GAR_LOCATION-docker.pkg.dev/$PROJECT_ID/$REPOSITORY/$IMAGE:$GITHUB_SHA" \ 72 | --build-arg GITHUB_SHA="$GITHUB_SHA" \ 73 | --build-arg GITHUB_REF="$GITHUB_REF" \ 74 | . 75 | # Push the Docker image to Google Artifact Registry 76 | - name: Publish 77 | run: |- 78 | docker push "$GAR_LOCATION-docker.pkg.dev/$PROJECT_ID/$REPOSITORY/$IMAGE:$GITHUB_SHA" 79 | # Set up kustomize 80 | - name: Set up Kustomize 81 | run: |- 82 | curl -sfLo kustomize https://github.com/kubernetes-sigs/kustomize/releases/download/v3.1.0/kustomize_3.1.0_linux_amd64 83 | chmod u+x ./kustomize 84 | # Deploy the Docker image to the GKE cluster 85 | - name: Deploy 86 | run: |- 87 | # replacing the image name in the k8s template 88 | ./kustomize edit set image LOCATION-docker.pkg.dev/PROJECT_ID/REPOSITORY/IMAGE:TAG=$GAR_LOCATION-docker.pkg.dev/$PROJECT_ID/$REPOSITORY/$IMAGE:$GITHUB_SHA 89 | ./kustomize build . | kubectl apply -f - 90 | kubectl rollout status deployment/$DEPLOYMENT_NAME 91 | kubectl get services -o wide 92 | -------------------------------------------------------------------------------- /.github/workflows/gradle.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | # This workflow will build a Java project with Gradle and cache/restore any dependencies to improve the workflow execution time 6 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-java-with-gradle 7 | 8 | name: Java CI with Gradle 9 | 10 | on: 11 | push: 12 | branches: [ "main" ] 13 | pull_request: 14 | branches: [ "main" ] 15 | 16 | jobs: 17 | build: 18 | 19 | runs-on: ubuntu-latest 20 | permissions: 21 | contents: read 22 | 23 | steps: 24 | - uses: actions/checkout@v4 25 | - name: Set up JDK 17 26 | uses: actions/setup-java@v4 27 | with: 28 | java-version: '17' 29 | distribution: 'temurin' 30 | 31 | # Configure Gradle for optimal use in GitHub Actions, including caching of downloaded dependencies.
32 | # See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md 33 | - name: Setup Gradle 34 | uses: gradle/actions/setup-gradle@ec92e829475ac0c2315ea8f9eced72db85bb337a # v3.0.0 35 | 36 | - name: Build with Gradle Wrapper 37 | run: ./gradlew build 38 | 39 | # NOTE: The Gradle Wrapper is the default and recommended way to run Gradle (https://docs.gradle.org/current/userguide/gradle_wrapper.html). 40 | # If your project does not have the Gradle Wrapper configured, you can use the following configuration to run Gradle with a specified version. 41 | # 42 | # - name: Setup Gradle 43 | # uses: gradle/actions/setup-gradle@ec92e829475ac0c2315ea8f9eced72db85bb337a # v3.0.0 44 | # with: 45 | # gradle-version: '8.5' 46 | # 47 | # - name: Build with Gradle 8.5 48 | # run: gradle build 49 | 50 | dependency-submission: 51 | 52 | runs-on: ubuntu-latest 53 | permissions: 54 | contents: write 55 | 56 | steps: 57 | - uses: actions/checkout@v4 58 | - name: Set up JDK 17 59 | uses: actions/setup-java@v4 60 | with: 61 | java-version: '17' 62 | distribution: 'temurin' 63 | 64 | # Generates and submits a dependency graph, enabling Dependabot Alerts for all project dependencies. 65 | # See: https://github.com/gradle/actions/blob/main/dependency-submission/README.md 66 | - name: Generate and submit dependency graph 67 | uses: gradle/actions/dependency-submission@ec92e829475ac0c2315ea8f9eced72db85bb337a # v3.0.0 68 | -------------------------------------------------------------------------------- /.github/workflows/greetings.yml: -------------------------------------------------------------------------------- 1 | name: Greetings 2 | 3 | on: [pull_request_target, issues] 4 | 5 | jobs: 6 | greeting: 7 | runs-on: ubuntu-latest 8 | permissions: 9 | issues: write 10 | pull-requests: write 11 | steps: 12 | - uses: actions/first-interaction@v1 13 | with: 14 | repo-token: ${{ secrets.GITHUB_TOKEN }} 15 | issue-message: "Message that will be displayed on users' first issue" 16 | pr-message: "Message that will be displayed on users' first pull request" 17 | -------------------------------------------------------------------------------- /.github/workflows/ibm.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a docker container, publish it to IBM Container Registry, and deploy it to IKS when there is a push to the "main" branch. 2 | # 3 | # To configure this workflow: 4 | # 5 | # 1. Ensure that your repository contains a Dockerfile 6 | # 2. Setup secrets in your repository by going to settings: Create ICR_NAMESPACE and IBM_CLOUD_API_KEY 7 | # 3. 
Change the values for the IBM_CLOUD_REGION, REGISTRY_HOSTNAME, IMAGE_NAME, IKS_CLUSTER, DEPLOYMENT_NAME, and PORT 8 | 9 | name: Build and Deploy to IKS 10 | 11 | on: 12 | push: 13 | branches: [ "main" ] 14 | 15 | # Environment variables available to all jobs and steps in this workflow 16 | env: 17 | GITHUB_SHA: ${{ github.sha }} 18 | IBM_CLOUD_API_KEY: ${{ secrets.IBM_CLOUD_API_KEY }} 19 | IBM_CLOUD_REGION: us-south 20 | ICR_NAMESPACE: ${{ secrets.ICR_NAMESPACE }} 21 | REGISTRY_HOSTNAME: us.icr.io 22 | IMAGE_NAME: iks-test 23 | IKS_CLUSTER: example-iks-cluster-name-or-id 24 | DEPLOYMENT_NAME: iks-test 25 | PORT: 5001 26 | 27 | jobs: 28 | setup-build-publish-deploy: 29 | name: Setup, Build, Publish, and Deploy 30 | runs-on: ubuntu-latest 31 | environment: production 32 | steps: 33 | 34 | - name: Checkout 35 | uses: actions/checkout@v3 36 | 37 | # Download and Install IBM Cloud CLI 38 | - name: Install IBM Cloud CLI 39 | run: | 40 | curl -fsSL https://clis.cloud.ibm.com/install/linux | sh 41 | ibmcloud --version 42 | ibmcloud config --check-version=false 43 | ibmcloud plugin install -f kubernetes-service 44 | ibmcloud plugin install -f container-registry 45 | 46 | # Authenticate with IBM Cloud CLI 47 | - name: Authenticate with IBM Cloud CLI 48 | run: | 49 | ibmcloud login --apikey "${IBM_CLOUD_API_KEY}" -r "${IBM_CLOUD_REGION}" -g default 50 | ibmcloud cr region-set "${IBM_CLOUD_REGION}" 51 | ibmcloud cr login 52 | 53 | # Build the Docker image 54 | - name: Build with Docker 55 | run: | 56 | docker build -t "$REGISTRY_HOSTNAME"/"$ICR_NAMESPACE"/"$IMAGE_NAME":"$GITHUB_SHA" \ 57 | --build-arg GITHUB_SHA="$GITHUB_SHA" \ 58 | --build-arg GITHUB_REF="$GITHUB_REF" . 59 | 60 | # Push the image to IBM Container Registry 61 | - name: Push the image to ICR 62 | run: | 63 | docker push $REGISTRY_HOSTNAME/$ICR_NAMESPACE/$IMAGE_NAME:$GITHUB_SHA 64 | 65 | # Deploy the Docker image to the IKS cluster 66 | - name: Deploy to IKS 67 | run: | 68 | ibmcloud ks cluster config --cluster $IKS_CLUSTER 69 | kubectl config current-context 70 | kubectl create deployment $DEPLOYMENT_NAME --image=$REGISTRY_HOSTNAME/$ICR_NAMESPACE/$IMAGE_NAME:$GITHUB_SHA --dry-run -o yaml > deployment.yaml 71 | kubectl apply -f deployment.yaml 72 | kubectl rollout status deployment/$DEPLOYMENT_NAME 73 | kubectl create service loadbalancer $DEPLOYMENT_NAME --tcp=80:$PORT --dry-run -o yaml > service.yaml 74 | kubectl apply -f service.yaml 75 | kubectl get services -o wide 76 | -------------------------------------------------------------------------------- /.github/workflows/jekyll.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | 6 | # Sample workflow for building and deploying a Jekyll site to GitHub Pages 7 | name: Deploy Jekyll site to Pages 8 | 9 | on: 10 | # Runs on pushes targeting the default branch 11 | push: 12 | branches: ["main"] 13 | 14 | # Allows you to run this workflow manually from the Actions tab 15 | workflow_dispatch: 16 | 17 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages 18 | permissions: 19 | contents: read 20 | pages: write 21 | id-token: write 22 | 23 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. 
24 | # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. 25 | concurrency: 26 | group: "pages" 27 | cancel-in-progress: false 28 | 29 | jobs: 30 | # Build job 31 | build: 32 | runs-on: ubuntu-latest 33 | steps: 34 | - name: Checkout 35 | uses: actions/checkout@v4 36 | - name: Setup Ruby 37 | uses: ruby/setup-ruby@8575951200e472d5f2d95c625da0c7bec8217c42 # v1.161.0 38 | with: 39 | ruby-version: '3.1' # Not needed with a .ruby-version file 40 | bundler-cache: true # runs 'bundle install' and caches installed gems automatically 41 | cache-version: 0 # Increment this number if you need to re-download cached gems 42 | - name: Setup Pages 43 | id: pages 44 | uses: actions/configure-pages@v4 45 | - name: Build with Jekyll 46 | # Outputs to the './_site' directory by default 47 | run: bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}" 48 | env: 49 | JEKYLL_ENV: production 50 | - name: Upload artifact 51 | # Automatically uploads an artifact from the './_site' directory by default 52 | uses: actions/upload-pages-artifact@v3 53 | 54 | # Deployment job 55 | deploy: 56 | environment: 57 | name: github-pages 58 | url: ${{ steps.deployment.outputs.page_url }} 59 | runs-on: ubuntu-latest 60 | needs: build 61 | steps: 62 | - name: Deploy to GitHub Pages 63 | id: deployment 64 | uses: actions/deploy-pages@v4 65 | -------------------------------------------------------------------------------- /.github/workflows/label.yml: -------------------------------------------------------------------------------- 1 | # This workflow will triage pull requests and apply a label based on the 2 | # paths that are modified in the pull request. 3 | # 4 | # To use this workflow, you will need to set up a .github/labeler.yml 5 | # file with configuration. For more information, see: 6 | # https://github.com/actions/labeler 7 | 8 | name: Labeler 9 | on: [pull_request_target] 10 | 11 | jobs: 12 | label: 13 | 14 | runs-on: ubuntu-latest 15 | permissions: 16 | contents: read 17 | pull-requests: write 18 | 19 | steps: 20 | - uses: actions/labeler@v4 21 | with: 22 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 23 | -------------------------------------------------------------------------------- /.github/workflows/manual.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow that is manually triggered 2 | 3 | name: Manual workflow 4 | 5 | # Controls when the action will run. Workflow runs when manually triggered using the UI 6 | # or API. 7 | on: 8 | workflow_dispatch: 9 | # Inputs the workflow accepts. 
10 | inputs: 11 | name: 12 | # Friendly description to be shown in the UI instead of 'name' 13 | description: 'Person to greet' 14 | # Default value if no value is explicitly provided 15 | default: 'World' 16 | # Input has to be provided for the workflow to run 17 | required: true 18 | # The data type of the input 19 | type: string 20 | 21 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 22 | jobs: 23 | # This workflow contains a single job called "greet" 24 | greet: 25 | # The type of runner that the job will run on 26 | runs-on: ubuntu-latest 27 | 28 | # Steps represent a sequence of tasks that will be executed as part of the job 29 | steps: 30 | # Runs a single command using the runners shell 31 | - name: Send greeting 32 | run: echo "Hello ${{ inputs.name }}" 33 | -------------------------------------------------------------------------------- /.github/workflows/msbuild.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | 6 | name: MSBuild 7 | 8 | on: 9 | push: 10 | branches: [ "main" ] 11 | pull_request: 12 | branches: [ "main" ] 13 | 14 | env: 15 | # Path to the solution file relative to the root of the project. 16 | SOLUTION_FILE_PATH: . 17 | 18 | # Configuration type to build. 19 | # You can convert this to a build matrix if you need coverage of multiple configuration types. 20 | # https://docs.github.com/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix 21 | BUILD_CONFIGURATION: Release 22 | 23 | permissions: 24 | contents: read 25 | 26 | jobs: 27 | build: 28 | runs-on: windows-latest 29 | 30 | steps: 31 | - uses: actions/checkout@v3 32 | 33 | - name: Add MSBuild to PATH 34 | uses: microsoft/setup-msbuild@v1.0.2 35 | 36 | - name: Restore NuGet packages 37 | working-directory: ${{env.GITHUB_WORKSPACE}} 38 | run: nuget restore ${{env.SOLUTION_FILE_PATH}} 39 | 40 | - name: Build 41 | working-directory: ${{env.GITHUB_WORKSPACE}} 42 | # Add additional options to the MSBuild command line here (like platform or verbosity level). 
43 | # See https://docs.microsoft.com/visualstudio/msbuild/msbuild-command-line-reference 44 | run: msbuild /m /p:Configuration=${{env.BUILD_CONFIGURATION}} ${{env.SOLUTION_FILE_PATH}} 45 | -------------------------------------------------------------------------------- /.github/workflows/pylint.yml: -------------------------------------------------------------------------------- 1 | name: Pylint 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | python-version: ["3.8", "3.9", "3.10"] 11 | steps: 12 | - uses: actions/checkout@v3 13 | - name: Set up Python ${{ matrix.python-version }} 14 | uses: actions/setup-python@v3 15 | with: 16 | python-version: ${{ matrix.python-version }} 17 | - name: Install dependencies 18 | run: | 19 | python -m pip install --upgrade pip 20 | pip install pylint 21 | - name: Analysing the code with pylint 22 | run: | 23 | pylint $(git ls-files '*.py') 24 | -------------------------------------------------------------------------------- /.github/workflows/python-app.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python application 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | build: 17 | 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v3 22 | - name: Set up Python 3.10 23 | uses: actions/setup-python@v3 24 | with: 25 | python-version: "3.10" 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install flake8 pytest 30 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 31 | - name: Lint with flake8 32 | run: | 33 | # stop the build if there are Python syntax errors or undefined names 34 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 35 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 36 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 37 | - name: Test with pytest 38 | run: | 39 | pytest 40 | -------------------------------------------------------------------------------- /.github/workflows/python-package-conda.yml: -------------------------------------------------------------------------------- 1 | name: Python Package using Conda 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build-linux: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | max-parallel: 5 10 | 11 | steps: 12 | - uses: actions/checkout@v3 13 | - name: Set up Python 3.10 14 | uses: actions/setup-python@v3 15 | with: 16 | python-version: '3.10' 17 | - name: Add conda to system path 18 | run: | 19 | # $CONDA is an environment variable pointing to the root of the miniconda directory 20 | echo $CONDA/bin >> $GITHUB_PATH 21 | - name: Install dependencies 22 | run: | 23 | conda env update --file environment.yml --name base 24 | - name: Lint with flake8 25 | run: | 26 | conda install flake8 27 | # stop the build if there are Python syntax errors or undefined names 28 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 29 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 30 | flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 31 | - name: Test with pytest 32 | run: | 33 | conda install pytest 34 | pytest 35 | -------------------------------------------------------------------------------- /.github/workflows/python-package.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python package 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | python-version: ["3.9", "3.10", "3.11"] 20 | 21 | steps: 22 | - uses: actions/checkout@v3 23 | - name: Set up Python ${{ matrix.python-version }} 24 | uses: actions/setup-python@v3 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | - name: Install dependencies 28 | run: | 29 | python -m pip install --upgrade pip 30 | python -m pip install flake8 pytest 31 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 32 | - name: Lint with flake8 33 | run: | 34 | # stop the build if there are Python syntax errors or undefined names 35 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 36 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 37 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 38 | - name: Test with pytest 39 | run: | 40 | pytest 41 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Set up Python 26 | uses: actions/setup-python@v3 27 | with: 28 | python-version: '3.x' 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install build 33 | - name: Build package 34 | run: python -m build 35 | - name: Publish package 36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 37 | with: 38 | user: __token__ 39 | password: ${{ secrets.PYPI_API_TOKEN }} 40 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | # This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time. 2 | # 3 | # You can adjust the behavior by modifying this file. 
4 | # For more information, see: 5 | # https://github.com/actions/stale 6 | name: Mark stale issues and pull requests 7 | 8 | on: 9 | schedule: 10 | - cron: '27 18 * * *' 11 | 12 | jobs: 13 | stale: 14 | 15 | runs-on: ubuntu-latest 16 | permissions: 17 | issues: write 18 | pull-requests: write 19 | 20 | steps: 21 | - uses: actions/stale@v5 22 | with: 23 | repo-token: ${{ secrets.GITHUB_TOKEN }} 24 | stale-issue-message: 'Stale issue message' 25 | stale-pr-message: 'Stale pull request message' 26 | stale-issue-label: 'no-issue-activity' 27 | stale-pr-label: 'no-pr-activity' 28 | -------------------------------------------------------------------------------- /.github/workflows/tencent.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a docker container, publish and deploy it to Tencent Kubernetes Engine (TKE) when there is a push to the "main" branch. 2 | # 3 | # To configure this workflow: 4 | # 5 | # 1. Ensure that your repository contains the necessary configuration for your Tencent Kubernetes Engine cluster, 6 | # including deployment.yml, kustomization.yml, service.yml, etc. 7 | # 8 | # 2. Set up secrets in your workspace: 9 | # - TENCENT_CLOUD_SECRET_ID with Tencent Cloud secret id 10 | # - TENCENT_CLOUD_SECRET_KEY with Tencent Cloud secret key 11 | # - TENCENT_CLOUD_ACCOUNT_ID with Tencent Cloud account id 12 | # - TKE_REGISTRY_PASSWORD with TKE registry password 13 | # 14 | # 3. Change the values for the TKE_IMAGE_URL, TKE_REGION, TKE_CLUSTER_ID and DEPLOYMENT_NAME environment variables (below). 15 | 16 | name: Tencent Kubernetes Engine 17 | 18 | on: 19 | push: 20 | branches: [ "main" ] 21 | 22 | # Environment variables available to all jobs and steps in this workflow 23 | env: 24 | TKE_IMAGE_URL: ccr.ccs.tencentyun.com/demo/mywebapp 25 | TKE_REGION: ap-guangzhou 26 | TKE_CLUSTER_ID: cls-mywebapp 27 | DEPLOYMENT_NAME: tke-test 28 | 29 | permissions: 30 | contents: read 31 | 32 | jobs: 33 | setup-build-publish-deploy: 34 | name: Setup, Build, Publish, and Deploy 35 | runs-on: ubuntu-latest 36 | environment: production 37 | steps: 38 | 39 | - name: Checkout 40 | uses: actions/checkout@v3 41 | 42 | # Build 43 | - name: Build Docker image 44 | run: | 45 | docker build -t ${TKE_IMAGE_URL}:${GITHUB_SHA} . 46 | 47 | - name: Login TKE Registry 48 | run: | 49 | docker login -u ${{ secrets.TENCENT_CLOUD_ACCOUNT_ID }} -p '${{ secrets.TKE_REGISTRY_PASSWORD }}' ${TKE_IMAGE_URL} 50 | 51 | # Push the Docker image to TKE Registry 52 | - name: Publish 53 | run: | 54 | docker push ${TKE_IMAGE_URL}:${GITHUB_SHA} 55 | 56 | - name: Set up Kustomize 57 | run: | 58 | curl -o kustomize --location https://github.com/kubernetes-sigs/kustomize/releases/download/v3.1.0/kustomize_3.1.0_linux_amd64 59 | chmod u+x ./kustomize 60 | 61 | - name: Set up ~/.kube/config for connecting TKE cluster 62 | uses: TencentCloud/tke-cluster-credential-action@v1 63 | with: 64 | secret_id: ${{ secrets.TENCENT_CLOUD_SECRET_ID }} 65 | secret_key: ${{ secrets.TENCENT_CLOUD_SECRET_KEY }} 66 | tke_region: ${{ env.TKE_REGION }} 67 | cluster_id: ${{ env.TKE_CLUSTER_ID }} 68 | 69 | - name: Switch to TKE context 70 | run: | 71 | kubectl config use-context ${TKE_CLUSTER_ID}-context-default 72 | 73 | # Deploy the Docker image to the TKE cluster 74 | - name: Deploy 75 | run: | 76 | ./kustomize edit set image ${TKE_IMAGE_URL}:${GITHUB_SHA} 77 | ./kustomize build . 
| kubectl apply -f - 78 | kubectl rollout status deployment/${DEPLOYMENT_NAME} 79 | kubectl get services -o wide 80 | -------------------------------------------------------------------------------- /.whitesource: -------------------------------------------------------------------------------- 1 | { 2 | "scanSettings": { 3 | "baseBranches": [] 4 | }, 5 | "checkRunSettings": { 6 | "vulnerableCheckRunConclusionLevel": "failure", 7 | "displayMode": "diff", 8 | "useMendCheckNames": true 9 | }, 10 | "issueSettings": { 11 | "minSeverityLevel": "LOW", 12 | "issueType": "DEPENDENCY" 13 | } 14 | } -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | PiOS License 2 | 3 | Copyright (C) 2023 César Cordero Rodríguez 4 | 5 | Permission is hereby granted by the application software developer (“Software Developer”), free 6 | of charge, to any person obtaining a copy of this application, software and associated 7 | documentation files (the “Software”), which was developed by the Software Developer for use on 8 | Pi Network, whereby the purpose of this license is to permit the development of derivative works 9 | based on the Software, including the right to use, copy, modify, merge, publish, distribute, 10 | sub-license, and/or sell copies of such derivative works and any Software components incorporated 11 | therein, and to permit persons to whom such derivative works are furnished to do so, in each case, 12 | solely to develop, use and market applications for the official Pi Network. For purposes of this 13 | license, Pi Network shall mean any application, software, or other present or future platform 14 | developed, owned or managed by Pi Community Company, and its parents, affiliates or subsidiaries, 15 | for which the Software was developed, or on which the Software continues to operate. However, 16 | you are prohibited from using any portion of the Software or any derivative works thereof in any 17 | manner (a) which infringes on any Pi Network intellectual property rights, (b) to hack any of Pi 18 | Network’s systems or processes or (c) to develop any product or service which is competitive with 19 | the Pi Network. 20 | 21 | The above copyright notice and this permission notice shall be included in all copies or 22 | substantial portions of the Software. 23 | 24 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 25 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE 26 | AND NON-INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS, PUBLISHERS, OR COPYRIGHT HOLDERS OF THIS 27 | SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL 28 | DAMAGES (INCLUDING, BUT NOT LIMITED TO BUSINESS INTERRUPTION, LOSS OF USE, DATA OR PROFITS) 29 | HOWEVER CAUSED AND UNDER ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR 30 | TORT (INCLUDING NEGLIGENCE) ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE 31 | OR OTHER DEALINGS IN THE SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 32 | 33 | Pi, Pi Network and the Pi logo are trademarks of the Pi Community Company. 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Pi Network - Python server-side package 2 | 3 | This is a user-generated, Python-based solution that you can use to integrate the Pi Network apps platform with a Python backend application. 4 | 5 | pi_python.py is the library, and pi_python_test.py tests the library. 6 | 7 | 8 | ## Example 9 | 10 | 1. Initialize the SDK and enter your secret data 11 | ```python 12 | """ Secret Data """ 13 | api_key = "Enter Here Your API Key" 14 | wallet_private_seed = "SecretWalletSeed" 15 | 16 | """ Initialization """ 17 | pi = PiNetwork() 18 | pi.initialize(api_key, wallet_private_seed, "Pi Testnet") 19 | ``` 20 | 21 | 2. Create an A2U payment 22 | 23 | Make sure to store your payment data in your database. Here's an example of how you could keep track of the data. 24 | Consider this a database table example. 25 | 26 | | uid | product_id | amount | memo | payment_id | txid | 27 | | :---: | :---: | :---: | :---: | :---: | :---: | 28 | | `user_uid` | apple-pie-1 | 3.14 | Refund for apple pie | NULL | NULL | 29 | 30 | ```python 31 | user_uid = "GET-THIS-SECRET-DATA-FROMFRONTEND" # unique for every user 32 | 33 | 34 | """ Build your payment """ 35 | payment_data = { 36 | "amount": 3.14, 37 | "memo": "Test - Greetings from MyApp", 38 | "metadata": {"product_id": "apple-pie-1"}, 39 | "uid": user_uid 40 | } 41 | 42 | """ 43 | Create a payment 44 | It is critical that you store the paymentId in your database 45 | so that you can keep track of the payment and don't double-pay the same user. 46 | 47 | """ 48 | payment_id = pi.create_payment(payment_data) 49 | ``` 50 | 51 | 3. Store the `paymentId` in your database 52 | 53 | After creating the payment, you'll get a `paymentId`, which you should store in your database. 54 | 55 | | uid | product_id | amount | memo | payment_id | txid | 56 | | :---: | :---: | :---: | :---: | :---: | :---: | 57 | | `user_uid` | apple-pie-1 | 3.14 | Refund for apple pie | `paymentId` | NULL | 58 | 59 | 4. Submit the payment to the Pi Blockchain 60 | ```python 61 | """It is strongly recommended that you store the txid along with the paymentId you stored earlier for your reference.""" 62 | txid = pi.submit_payment(payment_id, False) 63 | ``` 64 | 65 | 5. Store the txid in your database 66 | 67 | As you did in step 3, store the txid along with the other data. 68 | 69 | | uid | product_id | amount | memo | payment_id | txid | 70 | | :---: | :---: | :---: | :---: | :---: | :---: | 71 | | `user_uid` | apple-pie-1 | 3.14 | Refund for apple pie | `paymentId` | `txid` | 72 | 73 | 6. Complete the payment 74 | ```python 75 | payment = pi.complete_payment(payment_id, txid) 76 | ``` 77 | 78 | ## Overall flow for A2U (App-to-User) payment 79 | 80 | To create an A2U payment using the Pi Python SDK, here is the overall flow you need to follow: 81 | 82 | 1. Initialize the SDK 83 | > You'll initialize the SDK with the Pi API Key of your app and the Private Seed of your app wallet. 84 | 85 | 2. Create an A2U payment 86 | > You can create an A2U payment using the `createPayment` method. This method returns a payment identifier (payment id). 87 | 88 | 3. Store the payment id in your database 89 | > It is critical that you store the payment id, returned by the `createPayment` method, in your database so that you can keep track of the payment and don't double-pay the same user. 90 | 91 | 4.
Submit the payment to the Pi Blockchain 92 | > You can submit the payment to the Pi Blockchain using the `submitPayment` method. This method builds a payment transaction and submits it to the Pi Blockchain for you. Once submitted, the method returns a transaction identifier (txid). 93 | 94 | 5. Store the txid in your database 95 | > It is strongly recommended that you store the txid along with the payment id you stored earlier for your reference. 96 | 97 | 6. Complete the payment 98 | > After checking the transaction with the txid you obtained, you must complete the payment, which you can do with the `completePayment` method. Upon completion, the method returns the payment object. Check the `status` field to make sure everything looks correct. 99 | 100 | ## SDK Reference 101 | 102 | This section shows you a list of available methods. 103 | ### `createPayment` 104 | 105 | This method creates an A2U payment. 106 | 107 | - Required parameter: `PaymentArgs` 108 | 109 | You need to provide 4 different pieces of data and pass them as a single object to this method. 110 | ```typescript 111 | type PaymentArgs = { 112 | amount: number // the amount of Pi you're paying to your user 113 | memo: string // a short memo that describes what the payment is about 114 | metadata: object // an arbitrary object that you can attach to this payment. This is for your own use. You should use this object as a way to link this payment with your internal business logic. 115 | uid: string // a user uid of your app. You should have access to this value if a user has authenticated on your app. 116 | } 117 | ``` 118 | 119 | - Return value: `a payment identifier (paymentId: string)` 120 | 121 | ### `submitPayment` 122 | 123 | This method creates a payment transaction and submits it to the Pi Blockchain. 124 | 125 | - Required parameter: `paymentId, pending_payment` 126 | - Return value: `a transaction identifier (txid: string)` 127 | 128 | ### `completePayment` 129 | 130 | This method completes the payment in the Pi server. 131 | 132 | - Required parameter: `paymentId, txid` 133 | - Return value: `a payment object (payment: PaymentDTO)` 134 | 135 | The method returns a payment object with the following fields: 136 | 137 | ```typescript 138 | payment: PaymentDTO = { 139 | // Payment data: 140 | identifier: string, // payment identifier 141 | user_uid: string, // user's app-specific ID 142 | amount: number, // payment amount 143 | memo: string, // a string provided by the developer, shown to the user 144 | metadata: object, // an object provided by the developer for their own usage 145 | from_address: string, // sender address of the blockchain transaction 146 | to_address: string, // recipient address of the blockchain transaction 147 | direction: Direction, // direction of the payment ("user_to_app" | "app_to_user") 148 | created_at: string, // payment's creation timestamp 149 | network: string, // a network of the payment ("Pi Network" | "Pi Testnet") 150 | // Status flags representing the current state of this payment 151 | status: { 152 | developer_approved: boolean, // Server-Side Approval (automatically approved for A2U payment) 153 | transaction_verified: boolean, // blockchain transaction verified 154 | developer_completed: boolean, // Server-Side Completion (handled by the completePayment method) 155 | cancelled: boolean, // cancelled by the developer or by Pi Network 156 | user_cancelled: boolean, // cancelled by the user 157 | }, 158 | // Blockchain transaction data: 159 | transaction: null | { // This is null if no transaction has been made yet 160 | txid: string, // id of the blockchain transaction 161 | verified: boolean, // true if the transaction matches the payment, false otherwise 162 | _link: string, // a link to the operation on the Pi Blockchain API 163 | } 164 | } 165 | ``` 166 | 
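Since `completePayment` returns this object, you can gate your own fulfillment logic on its status flags. The helper below is a minimal, hypothetical sketch (the function name is not part of the SDK) that treats a payment dict shaped like the `PaymentDTO` above as settled only when the expected flags are set:

```python
# Hypothetical helper (not part of the SDK): decide whether an A2U payment,
# represented as a PaymentDTO-shaped dict, can be considered settled.
def payment_is_settled(payment: dict) -> bool:
    status = payment.get("status", {})
    if status.get("cancelled") or status.get("user_cancelled"):
        return False  # cancelled by the developer, Pi Network, or the user
    # Require both blockchain verification and server-side completion
    # before delivering the product or refund on your side.
    return bool(status.get("transaction_verified") and status.get("developer_completed"))
```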
167 | ### `getPayment` 168 | 169 | This method returns a payment object if it exists. 170 | 171 | - Required parameter: `paymentId` 172 | - Return value: `a payment object (payment: PaymentDTO)` 173 | 174 | ### `cancelPayment` 175 | 176 | This method cancels the payment in the Pi server. 177 | 178 | - Required parameter: `paymentId` 179 | - Return value: `a payment object (payment: PaymentDTO)` 180 | 181 | ### `getIncompleteServerPayments` 182 | 183 | This method returns the latest incomplete payment that your app has created, if one exists. Use this method to troubleshoot the following error: "You need to complete the ongoing payment first to create a new one." 184 | 185 | - Required parameter: `none` 186 | - Return value: `an array which contains 0 or 1 payment object (payments: Array)` 187 | 188 | If a payment is returned by this method, you must choose one of the following 3 options: 189 | 190 | 1. cancel the payment, if it is not linked with a blockchain transaction and you don't want to submit the transaction anymore 191 | 192 | 2. submit the transaction and complete the payment 193 | 194 | 3. if a blockchain transaction has already been made, complete the payment 195 | 196 | If you do not know what this payment maps to in your business logic, you may use its `metadata` property to determine which business logic item it relates to. Remember that `metadata` is a required argument when creating a payment, and should be used as a way to link the payment to an item of your business logic. 197 | 198 | ## Troubleshooting 199 | 200 | ### Error when creating a payment: "You need to complete the ongoing payment first to create a new one." 201 | 202 | See the documentation for `getIncompleteServerPayments` above, and the recovery sketch below. 
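A minimal sketch of that recovery flow, assuming `pi` is a `PiNetwork` instance initialized as shown in the example above (this mirrors what `pi_python_test.py` does, using the snake_case method names from `pi_python.py`):

```python
# Resolve any incomplete payment before creating a new one.
incomplete_payments = pi.get_incomplete_server_payments()
for p in incomplete_payments:  # the server returns at most one payment
    if p["transaction"] is None:
        # No blockchain transaction yet: either cancel it with
        # pi.cancel_payment(p["identifier"]), or submit and complete it.
        txid = pi.submit_payment(p["identifier"], p)
        pi.complete_payment(p["identifier"], txid)
    else:
        # A transaction already exists, so just complete the payment.
        pi.complete_payment(p["identifier"], p["transaction"]["txid"])
```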
203 | -------------------------------------------------------------------------------- /pi_python.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | For more information visit https://github.com/pi-apps/pi-python 4 | """ 5 | 6 | import requests 7 | import json 8 | import stellar_sdk as s_sdk 9 | 10 | class PiNetwork: 11 | 12 | api_key = "" 13 | client = "" 14 | account = "" 15 | base_url = "" 16 | from_address = "" 17 | open_payments = {} 18 | network = "" 19 | server = "" 20 | keypair = "" 21 | fee = "" 22 | 23 | def initialize(self, api_key, wallet_private_key, network): 24 | try: 25 | if not self.validate_private_seed_format(wallet_private_key): 26 | print("No valid private seed!") 27 | self.api_key = api_key 28 | self.load_account(wallet_private_key, network) 29 | self.base_url = "https://api.minepi.com" 30 | self.open_payments = {} 31 | self.network = network 32 | self.fee = self.server.fetch_base_fee() 33 | #self.fee = fee 34 | except: 35 | return False 36 | 37 | def get_balance(self): 38 | try: 39 | balances = self.server.accounts().account_id(self.keypair.public_key).call()["balances"] 40 | balance_found = False 41 | for i in balances: 42 | if i["asset_type"] == "native": 43 | return float(i["balance"]) 44 | 45 | return 0 46 | except: 47 | return 0 48 | 49 | def get_payment(self, payment_id): 50 | url = self.base_url + "/v2/payments/" + payment_id 51 | re = requests.get(url,headers=self.get_http_headers()) 52 | self.handle_http_response(re) 53 | 54 | def create_payment(self, payment_data): 55 | try: 56 | 57 | if not self.validate_payment_data(payment_data): 58 | if __debug__: 59 | print("No valid payments found. Creating a new one...") 60 | 61 | balances = self.server.accounts().account_id(self.keypair.public_key).call()["balances"] 62 | balance_found = False 63 | for i in balances: 64 | if i["asset_type"] == "native": 65 | balance_found = True 66 | if (float(payment_data["amount"]) + (float(self.fee) / 10000000)) > float(i["balance"]): 67 | return "" 68 | break 69 | 70 | if balance_found == False: 71 | return "" 72 | 73 | obj = { 74 | 'payment': payment_data, 75 | } 76 | 77 | obj = json.dumps(obj) 78 | url = self.base_url + "/v2/payments" 79 | res = requests.post(url, data=obj, json=obj, headers=self.get_http_headers()) 80 | parsed_response = self.handle_http_response(res) 81 | 82 | identifier = "" 83 | identifier_data = {} 84 | 85 | if 'error' in parsed_response: 86 | identifier = parsed_response['payment']["identifier"] 87 | identifier_data = parsed_response['payment'] 88 | else: 89 | identifier = parsed_response["identifier"] 90 | identifier_data = parsed_response 91 | 92 | self.open_payments[identifier] = identifier_data 93 | 94 | return identifier 95 | except: 96 | return "" 97 | 98 | def submit_payment(self, payment_id, pending_payment): 99 | if payment_id not in self.open_payments: 100 | return False 101 | if pending_payment == False or payment_id in self.open_payments: 102 | payment = self.open_payments[payment_id] 103 | else: 104 | payment = pending_payment 105 | 106 | balances = self.server.accounts().account_id(self.keypair.public_key).call()["balances"] 107 | balance_found = False 108 | for i in balances: 109 | if i["asset_type"] == "native": 110 | balance_found = True 111 | if (float(payment["amount"]) + (float(self.fee)/10000000)) > float(i["balance"]): 112 | return "" 113 | break 114 | 115 | if balance_found == False: 116 | return "" 117 | 118 | if __debug__: 119 | print("Debug_Data: Payment 
information\n" + str(payment)) 120 | 121 | self.set_horizon_client(payment["network"]) 122 | from_address = payment["from_address"] 123 | 124 | transaction_data = { 125 | "amount": payment["amount"], 126 | "identifier": payment["identifier"], 127 | "recipient": payment["to_address"] 128 | } 129 | 130 | transaction = self.build_a2u_transaction(payment) 131 | txid = self.submit_transaction(transaction) 132 | if payment_id in self.open_payments: 133 | del self.open_payments[payment_id] 134 | 135 | return txid 136 | 137 | 138 | def complete_payment(self, identifier, txid): 139 | if not txid: 140 | obj = {} 141 | else: 142 | obj = {"txid": txid} 143 | 144 | obj = json.dumps(obj) 145 | url = self.base_url + "/v2/payments/" + identifier + "/complete" 146 | re = requests.post(url,data=obj,json=obj,headers=self.get_http_headers()) 147 | self.handle_http_response(re) 148 | 149 | def cancel_payment(self, identifier): 150 | obj = {} 151 | obj = json.dumps(obj) 152 | url = self.base_url + "/v2/payments/" + identifier + "/cancel" 153 | re = requests.post(url,data=obj,json=obj,headers=self.get_http_headers()) 154 | self.handle_http_response(re) 155 | 156 | def get_incomplete_server_payments(self): 157 | url = self.base_url + "/v2/payments/incomplete_server_payments" 158 | re = requests.get(url,headers=self.get_http_headers()) 159 | res = self.handle_http_response(re) 160 | if not res: 161 | res = {"incomplete_server_payments": []} 162 | return res["incomplete_server_payments"] 163 | 164 | def get_http_headers(self): 165 | return {'Authorization': "Key " + self.api_key, "Content-Type": "application/json"} 166 | 167 | def handle_http_response(self, re): 168 | try: 169 | 170 | result = re.json() 171 | 172 | result_dict = json.loads(str(json.dumps(result))) 173 | if __debug__: 174 | print("HTTP-Response: " + str(re)) 175 | print("HTTP-Response Data: " + str(result_dict)) 176 | return result_dict 177 | except: 178 | return False 179 | 180 | def set_horizon_client(self, network): 181 | self.client = self.server 182 | pass 183 | 184 | def load_account(self, private_seed, network): 185 | self.keypair = s_sdk.Keypair.from_secret(private_seed) 186 | if network == "Pi Network": 187 | host = "api.mainnet.minepi.com" 188 | horizon = "https://api.mainnet.minepi.com" 189 | else: 190 | host = "api.testnet.minepi.com" 191 | horizon = "https://api.testnet.minepi.com" 192 | 193 | self.server = s_sdk.Server(horizon) 194 | self.account = self.server.load_account(self.keypair.public_key) 195 | 196 | 197 | def build_a2u_transaction(self, transaction_data): 198 | if not self.validate_payment_data(transaction_data): 199 | print("No valid transaction!") 200 | 201 | amount = str(transaction_data["amount"]) 202 | 203 | # TODO: get this from horizon 204 | fee = self.fee # 100000 # 0.01π 205 | to_address = transaction_data["to_address"] 206 | memo = transaction_data["identifier"] 207 | 208 | if __debug__: 209 | print("MEMO " + str(memo)) 210 | 211 | from_address = transaction_data["from_address"] 212 | transaction = ( 213 | s_sdk.TransactionBuilder( 214 | source_account=self.account, 215 | network_passphrase=self.network, 216 | base_fee=fee, 217 | ) 218 | .add_text_memo(memo) 219 | .append_payment_op(to_address, s_sdk.Asset.native(), amount) 220 | .set_timeout(180) 221 | .build() 222 | ) 223 | 224 | return transaction 225 | 226 | def submit_transaction(self, transaction): 227 | transaction.sign(self.keypair) 228 | response = self.server.submit_transaction(transaction) 229 | txid = response["id"] 230 | return txid 231 | 232 | def 
validate_payment_data(self, data): 233 | if "amount" not in data: 234 | return False 235 | elif "memo" not in data: 236 | return False 237 | elif "metadata" not in data: 238 | return False 239 | elif "user_uid" not in data: 240 | return False 241 | elif "identifier" not in data: 242 | return False 243 | elif "to_address" not in data: 244 | return False 245 | return True 246 | 247 | def validate_private_seed_format(self, seed): 248 | if not seed.upper().startswith("S"): 249 | return False 250 | elif len(seed) != 56: 251 | return False 252 | return True 253 | -------------------------------------------------------------------------------- /pi_python_test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | For more information visit https://github.com/pi-apps/pi-python 4 | """ 5 | 6 | from pi_python import PiNetwork 7 | 8 | """ 9 | Your SECRET Data 10 | Visit the Pi Developer Portal to get these data 11 | 12 | DO NOT expose these values to public 13 | """ 14 | api_key = "Enter Here Your API Key" 15 | wallet_private_seed = "SecretWalletSeed" 16 | 17 | """ Initialization """ 18 | pi = PiNetwork() 19 | pi.initialize(api_key, wallet_private_seed, "Pi Testnet") 20 | 21 | """ 22 | Example Data 23 | Get the user_uid from the Frontend 24 | """ 25 | user_uid = "GET-THIS-SECRET-DATA-FROMFRONTEND" #unique for every user 26 | 27 | 28 | """ Build your payment """ 29 | payment_data = { 30 | "amount": 1, 31 | "memo": "Test - Greetings from MyApp", 32 | "metadata": {"internal_data": "My favorite ice creame"}, 33 | "uid": user_uid 34 | } 35 | 36 | """ Check for incomplete payments """ 37 | incomplete_payments = pi.get_incomplete_server_payments() 38 | 39 | if __debug__: 40 | if len(incomplete_payments) > 0: 41 | print("Found incomplete payments: ") 42 | print(str(incomplete_payments)) 43 | 44 | """ Handle incomplete payments first """ 45 | if len(incomplete_payments) > 0: 46 | for i in incomplete_payments: 47 | if i["transaction"] == None: 48 | txid = pi.submit_payment(i["identifier"], i) 49 | pi.complete_payment(i["identifier"], txid) 50 | else: 51 | pi.complete_payment(i["identifier"], i["transaction"]["txid"]) 52 | 53 | """ Create an payment """ 54 | payment_id = pi.create_payment(payment_data) 55 | 56 | """ 57 | Submit the payment and receive the txid 58 | 59 | Store the txid on your side! 60 | """ 61 | if payment_id and len(payment_id) > 0: 62 | txid = pi.submit_payment(payment_id, False) 63 | 64 | """ Complete the Payment """ 65 | if txid and len(txid) > 0: 66 | payment = pi.complete_payment(payment_id, txid) 67 | -------------------------------------------------------------------------------- /src/constants.py: -------------------------------------------------------------------------------- 1 | # src/constants.py 2 | 3 | """ 4 | Pi Coin Configuration Constants 5 | This module contains constants related to the Pi Coin cryptocurrency. 
6 | """ 7 | 8 | # Pi Coin Symbol 9 | PI_COIN_SYMBOL = "Pi" # Symbol for Pi Coin 10 | 11 | # Pi Coin Value 12 | PI_COIN_VALUE = 314159 # Fixed value of Pi Coin in USD 13 | 14 | # Pi Coin Supply 15 | PI_COIN_SUPPLY = 100_000_000_000 # Total supply of Pi Coin 16 | 17 | # Pi Coin Transaction Fee 18 | PI_COIN_TRANSACTION_FEE = 0.01 # Transaction fee in USD 19 | 20 | # Pi Coin Block Time 21 | PI_COIN_BLOCK_TIME = 10 # Average block time in seconds 22 | 23 | # Pi Coin Mining Difficulty 24 | PI_COIN_MINING_DIFFICULTY = 1000 # Difficulty level for mining Pi Coin 25 | 26 | # Pi Coin Reward for Mining 27 | PI_COIN_MINING_REWARD = 12.5 # Reward for mining a block 28 | 29 | # Pi Coin Network Protocol 30 | PI_COIN_NETWORK_PROTOCOL = "PoS" # Proof of Stake 31 | 32 | # Pi Coin Maximum Transaction Size 33 | PI_COIN_MAX_TRANSACTION_SIZE = 1_000_000 # Maximum transaction size in bytes 34 | 35 | # Pi Coin Decimals 36 | PI_COIN_DECIMALS = 18 # Number of decimal places for Pi Coin 37 | 38 | # Pi Coin Genesis Block Timestamp 39 | PI_COIN_GENESIS_BLOCK_TIMESTAMP = "2023-01-01T00:00:00Z" # Timestamp of the genesis block 40 | 41 | # Pi Coin Governance Model 42 | PI_COIN_GOVERNANCE_MODEL = "Decentralized" # Governance model for Pi Coin 43 | 44 | # Pi Coin Security Features 45 | PI_COIN_ENCRYPTION_ALGORITHM = "AES-256" # Encryption algorithm for securing transactions 46 | PI_COIN_HASHING_ALGORITHM = "SHA-256" # Hashing algorithm for block verification 47 | PI_COIN_SIGNATURE_SCHEME = "ECDSA" # Digital signature scheme for transaction signing 48 | 49 | # Pi Coin Network Parameters 50 | PI_COIN_MAX_PEERS = 100 # Maximum number of peers in the network 51 | PI_COIN_NODE_TIMEOUT = 30 # Timeout for node responses in seconds 52 | PI_COIN_CONNECTION_RETRY_INTERVAL = 5 # Retry interval for node connections in seconds 53 | 54 | # Pi Coin Staking Parameters 55 | PI_COIN_MIN_STAKE_AMOUNT = 100 # Minimum amount required to stake 56 | PI_COIN_STAKE_REWARD_RATE = 0.05 # Annual reward rate for staking 57 | 58 | # Pi Coin API Rate Limits 59 | PI_COIN_API_REQUEST_LIMIT = 1000 # Maximum API requests per hour 60 | PI_COIN_API_KEY_EXPIRATION = 3600 # API key expiration time in seconds 61 | 62 | # Pi Coin Regulatory Compliance 63 | PI_COIN_KYC_REQUIRED = True # Whether KYC is required for transactions 64 | PI_COIN_COMPLIANCE_JURISDICTIONS = ["US", "EU", "UK"] # Jurisdictions for compliance 65 | 66 | # Additional constants can be added here as needed 67 | --------------------------------------------------------------------------------