├── .deepsource.toml ├── .devcontainer └── devcontainer.json ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── custom.md │ └── feature_request.md └── workflows │ ├── alibabacloud.yml │ ├── aws.yml │ ├── azure-container-webapp.yml │ ├── azure-webapps-python.yml │ ├── codeql.yml │ ├── django.yml │ ├── docker-image.yml │ ├── docker-publish.yml │ ├── google.yml │ ├── greetings.yml │ ├── ibm.yml │ ├── jekyll-docker.yml │ ├── label.yml │ ├── manual.yml │ ├── openshift.yml │ ├── pylint.yml │ ├── python-app.yml │ ├── python-package-conda.yml │ ├── python-package.yml │ ├── python-publish.yml │ ├── stale.yml │ ├── static.yml │ └── tencent.yml ├── .gitignore ├── .whitesource ├── LICENSE ├── README.md ├── bin ├── app.py ├── init.sh ├── run.sh └── stop.sh ├── docs ├── Pi-Velocity.md └── pi-velocity.jpeg ├── functions ├── add_block_to_chain.py ├── broadcast_transaction.py ├── calculate_fee.py ├── get_account_balance.py ├── get_network_status.py ├── get_transaction_history.py ├── handle_incoming_connections.py ├── handle_outgoing_connections.py ├── monitor_network_activity.py ├── optimize_transaction_processing.py ├── process_block.py ├── process_transaction.py ├── secure_communication.py ├── start_transaction.py ├── sync_blockchain.py ├── update_node_peers.py ├── validate_block.py ├── validate_transaction.py ├── verify_block_signature.py └── verify_signature.py ├── lib ├── __init__.py ├── blockchain.py ├── network.py ├── util.py └── wallet.py ├── src ├── __init__.py ├── api.py ├── app.py ├── configure_app.py ├── models.py ├── serializers.py └── views.py ├── test ├── __init__.py ├── test_models.py └── test_views.py └── web ├── Dockerfile ├── app.py ├── requirements.txt ├── run.sh ├── static └── main.css └── templates └── index.html /.deepsource.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | 3 | [[analyzers]] 4 | name = "test-coverage" 5 | 6 | [[analyzers]] 7 | name = "terraform" 8 | 9 | [[analyzers]] 10 | name = "swift" 11 | 12 | [[analyzers]] 13 | name = "sql" 14 | 15 | [[analyzers]] 16 | name = "shell" 17 | 18 | [[analyzers]] 19 | name = "secrets" 20 | 21 | [[analyzers]] 22 | name = "scala" 23 | 24 | [[analyzers]] 25 | name = "rust" 26 | 27 | [analyzers.meta] 28 | msrv = "stable" 29 | 30 | [[analyzers]] 31 | name = "ruby" 32 | 33 | [[analyzers]] 34 | name = "python" 35 | 36 | [analyzers.meta] 37 | runtime_version = "3.x.x" 38 | 39 | [[analyzers]] 40 | name = "php" 41 | 42 | [analyzers.meta] 43 | bootstrap_files = ["PI_VELOCITY_PHP_BOOTSTRAP"] 44 | 45 | [[analyzers]] 46 | name = "kotlin" 47 | 48 | [analyzers.meta] 49 | runtime_version = "19" 50 | language_version = "1.9" 51 | 52 | [[analyzers]] 53 | name = "javascript" 54 | 55 | [analyzers.meta] 56 | plugins = [ 57 | "react", 58 | "ember", 59 | "angularjs", 60 | "vue", 61 | "meteor", 62 | "angular" 63 | ] 64 | environment = [ 65 | "nodejs", 66 | "browser", 67 | "jest", 68 | "jasmine", 69 | "mongo", 70 | "vitest", 71 | "mocha", 72 | "jquery", 73 | "cypress" 74 | ] 75 | 76 | [[analyzers]] 77 | name = "java" 78 | 79 | [analyzers.meta] 80 | runtime_version = "19" 81 | 82 | [[analyzers]] 83 | name = "go" 84 | 85 | [analyzers.meta] 86 | import_root = "github.com/KOSASIH/pi-velocity-core" 87 | 88 | [[analyzers]] 89 | name = "docker" 90 | 91 | [analyzers.meta] 92 | dockerfile_paths = ["PI_VELOCITY_CORE_DOCKERFILE_PATH"] 93 | 94 | [[analyzers]] 95 | name = "cxx" 96 | 97 | [[analyzers]] 98 | name = "csharp" 99 | 100 | [[analyzers]] 101 | name = "ansible" 102 | 103 | [[transformers]] 104 | name = 
"swift-format" 105 | 106 | [[transformers]] 107 | name = "scalafmt" 108 | 109 | [[transformers]] 110 | name = "rustfmt" 111 | 112 | [[transformers]] 113 | name = "rubocop" 114 | 115 | [[transformers]] 116 | name = "standardrb" 117 | 118 | [[transformers]] 119 | name = "autopep8" 120 | 121 | [[transformers]] 122 | name = "ruff" 123 | 124 | [[transformers]] 125 | name = "isort" 126 | 127 | [[transformers]] 128 | name = "yapf" 129 | 130 | [[transformers]] 131 | name = "black" 132 | 133 | [[transformers]] 134 | name = "php-cs-fixer" 135 | 136 | [[transformers]] 137 | name = "ktlint" 138 | 139 | [[transformers]] 140 | name = "prettier" 141 | 142 | [[transformers]] 143 | name = "standardjs" 144 | 145 | [[transformers]] 146 | name = "google-java-format" 147 | 148 | [[transformers]] 149 | name = "gofmt" 150 | 151 | [[transformers]] 152 | name = "gofumpt" 153 | 154 | [[transformers]] 155 | name = "clang-format" 156 | 157 | [[transformers]] 158 | name = "dotnet-format" -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "image": "mcr.microsoft.com/devcontainers/universal:2", 3 | "features": "ghcr.io/devcontainers-contrib/features/airplane-cli:1": {} 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/custom.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Custom issue template 3 | about: Describe this issue template's purpose here. 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 
15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/workflows/alibabacloud.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build and push a new container image to Alibaba Cloud Container Registry (ACR), 2 | # and then will deploy it to Alibaba Cloud Container Service for Kubernetes (ACK), when there is a push to the "main" branch. 3 | # 4 | # To use this workflow, you will need to complete the following set-up steps: 5 | # 6 | # 1. Create an ACR repository to store your container images. 7 | # You can use an ACR EE instance for more security and better performance. 8 | # For instructions see https://www.alibabacloud.com/help/doc-detail/142168.htm 9 | # 10 | # 2. Create an ACK cluster to run your containerized application. 11 | # You can use an ACK Pro cluster for more security and better performance. 12 | # For instructions see https://www.alibabacloud.com/help/doc-detail/95108.htm 13 | # 14 | # 3. Store your AccessKey pair in GitHub Actions secrets named `ACCESS_KEY_ID` and `ACCESS_KEY_SECRET` (a CLI sketch appears under step 1.2 below). 15 | # For instructions on setting up secrets see: https://developer.github.com/actions/managing-workflows/storing-secrets/ 16 | # 17 | # 4. Change the values for the REGION_ID, REGISTRY, NAMESPACE, IMAGE, ACK_CLUSTER_ID, and ACK_DEPLOYMENT_NAME. 18 | # 19 | 20 | name: Build and Deploy to ACK 21 | 22 | on: 23 | push: 24 | branches: [ "main" ] 25 | 26 | # Environment variables available to all jobs and steps in this workflow. 27 | env: 28 | REGION_ID: cn-hangzhou 29 | REGISTRY: registry.cn-hangzhou.aliyuncs.com 30 | NAMESPACE: namespace 31 | IMAGE: repo 32 | TAG: ${{ github.sha }} 33 | ACK_CLUSTER_ID: clusterID 34 | ACK_DEPLOYMENT_NAME: nginx-deployment 35 | 36 | ACR_EE_REGISTRY: myregistry.cn-hangzhou.cr.aliyuncs.com 37 | ACR_EE_INSTANCE_ID: instanceID 38 | ACR_EE_NAMESPACE: namespace 39 | ACR_EE_IMAGE: repo 40 | ACR_EE_TAG: ${{ github.sha }} 41 | 42 | permissions: 43 | contents: read 44 | 45 | jobs: 46 | build: 47 | runs-on: ubuntu-latest 48 | environment: production 49 | 50 | steps: 51 | - name: Checkout 52 | uses: actions/checkout@v3 53 | 54 | # 1.1 Login to ACR 55 | - name: Login to ACR with the AccessKey pair 56 | uses: aliyun/acr-login@v1 57 | with: 58 | region-id: "${{ env.REGION_ID }}" 59 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 60 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 61 | 62 | # 1.2 Build and push image to ACR 63 | - name: Build and push image to ACR 64 | run: | 65 | docker build --tag "$REGISTRY/$NAMESPACE/$IMAGE:$TAG" .
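# (Hedged sketch, not part of the original workflow.) Step 3 of the header stores the
# AccessKey pair as repository secrets; one way to do that from a terminal, assuming
# an authenticated GitHub CLI, is:
#   gh secret set ACCESS_KEY_ID --body "<your-access-key-id>"
#   gh secret set ACCESS_KEY_SECRET --body "<your-access-key-secret>"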
66 | docker push "$REGISTRY/$NAMESPACE/$IMAGE:$TAG" 67 | 68 | # 1.3 Scan image in ACR 69 | - name: Scan image in ACR 70 | uses: aliyun/acr-scan@v1 71 | with: 72 | region-id: "${{ env.REGION_ID }}" 73 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 74 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 75 | repository: "${{ env.NAMESPACE }}/${{ env.IMAGE }}" 76 | tag: "${{ env.TAG }}" 77 | 78 | # 2.1 (Optional) Login to ACR EE 79 | - uses: actions/checkout@v3 80 | - name: Login to ACR EE with the AccessKey pair 81 | uses: aliyun/acr-login@v1 82 | with: 83 | login-server: "https://${{ env.ACR_EE_REGISTRY }}" 84 | region-id: "${{ env.REGION_ID }}" 85 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 86 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 87 | instance-id: "${{ env.ACR_EE_INSTANCE_ID }}" 88 | 89 | # 2.2 (Optional) Build and push image ACR EE 90 | - name: Build and push image to ACR EE 91 | run: | 92 | docker build -t "$ACR_EE_REGISTRY/$ACR_EE_NAMESPACE/$ACR_EE_IMAGE:$TAG" . 93 | docker push "$ACR_EE_REGISTRY/$ACR_EE_NAMESPACE/$ACR_EE_IMAGE:$TAG" 94 | # 2.3 (Optional) Scan image in ACR EE 95 | - name: Scan image in ACR EE 96 | uses: aliyun/acr-scan@v1 97 | with: 98 | region-id: "${{ env.REGION_ID }}" 99 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 100 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 101 | instance-id: "${{ env.ACR_EE_INSTANCE_ID }}" 102 | repository: "${{ env.ACR_EE_NAMESPACE}}/${{ env.ACR_EE_IMAGE }}" 103 | tag: "${{ env.ACR_EE_TAG }}" 104 | 105 | # 3.1 Set ACK context 106 | - name: Set K8s context 107 | uses: aliyun/ack-set-context@v1 108 | with: 109 | access-key-id: "${{ secrets.ACCESS_KEY_ID }}" 110 | access-key-secret: "${{ secrets.ACCESS_KEY_SECRET }}" 111 | cluster-id: "${{ env.ACK_CLUSTER_ID }}" 112 | 113 | # 3.2 Deploy the image to the ACK cluster 114 | - name: Set up Kustomize 115 | run: |- 116 | curl -s "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" | bash /dev/stdin 3.8.6 117 | - name: Deploy 118 | run: |- 119 | ./kustomize edit set image REGISTRY/NAMESPACE/IMAGE:TAG=$REGISTRY/$NAMESPACE/$IMAGE:$TAG 120 | ./kustomize build . | kubectl apply -f - 121 | kubectl rollout status deployment/$ACK_DEPLOYMENT_NAME 122 | kubectl get services -o wide 123 | -------------------------------------------------------------------------------- /.github/workflows/aws.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build and push a new container image to Amazon ECR, 2 | # and then will deploy a new task definition to Amazon ECS, when there is a push to the "main" branch. 3 | # 4 | # To use this workflow, you will need to complete the following set-up steps: 5 | # 6 | # 1. Create an ECR repository to store your images. 7 | # For example: `aws ecr create-repository --repository-name my-ecr-repo --region us-east-2`. 8 | # Replace the value of the `ECR_REPOSITORY` environment variable in the workflow below with your repository's name. 9 | # Replace the value of the `AWS_REGION` environment variable in the workflow below with your repository's region. 10 | # 11 | # 2. Create an ECS task definition, an ECS cluster, and an ECS service. 12 | # For example, follow the Getting Started guide on the ECS console: 13 | # https://us-east-2.console.aws.amazon.com/ecs/home?region=us-east-2#/firstRun 14 | # Replace the value of the `ECS_SERVICE` environment variable in the workflow below with the name you set for the Amazon ECS service. 
15 | # Replace the value of the `ECS_CLUSTER` environment variable in the workflow below with the name you set for the cluster. 16 | # 17 | # 3. Store your ECS task definition as a JSON file in your repository. 18 | # The format should follow the output of `aws ecs register-task-definition --generate-cli-skeleton`. 19 | # Replace the value of the `ECS_TASK_DEFINITION` environment variable in the workflow below with the path to the JSON file. 20 | # Replace the value of the `CONTAINER_NAME` environment variable in the workflow below with the name of the container 21 | # in the `containerDefinitions` section of the task definition. 22 | # 23 | # 4. Store an IAM user access key in GitHub Actions secrets named `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`. 24 | # See the documentation for each action used below for the recommended IAM policies for this IAM user, 25 | # and best practices on handling the access key credentials. 26 | 27 | name: Deploy to Amazon ECS 28 | 29 | on: 30 | push: 31 | branches: [ "main" ] 32 | 33 | env: 34 | AWS_REGION: MY_AWS_REGION # set this to your preferred AWS region, e.g. us-west-1 35 | ECR_REPOSITORY: MY_ECR_REPOSITORY # set this to your Amazon ECR repository name 36 | ECS_SERVICE: MY_ECS_SERVICE # set this to your Amazon ECS service name 37 | ECS_CLUSTER: MY_ECS_CLUSTER # set this to your Amazon ECS cluster name 38 | ECS_TASK_DEFINITION: MY_ECS_TASK_DEFINITION # set this to the path to your Amazon ECS task definition 39 | # file, e.g. .aws/task-definition.json 40 | CONTAINER_NAME: MY_CONTAINER_NAME # set this to the name of the container in the 41 | # containerDefinitions section of your task definition 42 | 43 | permissions: 44 | contents: read 45 | 46 | jobs: 47 | deploy: 48 | name: Deploy 49 | runs-on: ubuntu-latest 50 | environment: production 51 | 52 | steps: 53 | - name: Checkout 54 | uses: actions/checkout@v3 55 | 56 | - name: Configure AWS credentials 57 | uses: aws-actions/configure-aws-credentials@v1 58 | with: 59 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 60 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 61 | aws-region: ${{ env.AWS_REGION }} 62 | 63 | - name: Login to Amazon ECR 64 | id: login-ecr 65 | uses: aws-actions/amazon-ecr-login@v1 66 | 67 | - name: Build, tag, and push image to Amazon ECR 68 | id: build-image 69 | env: 70 | ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} 71 | IMAGE_TAG: ${{ github.sha }} 72 | run: | 73 | # Build a docker container and 74 | # push it to ECR so that it can 75 | # be deployed to ECS. 76 | docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . 
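# (Hedged sketch, not part of the original workflow.) Step 3 of the header expects a
# task definition JSON file checked into the repository; a minimal skeleton might look
# like the following, where "my-container" and "my-task-family" are placeholders, the
# container name must match CONTAINER_NAME, and the image field is overwritten by the
# render step further below:
# {
#   "family": "my-task-family",
#   "containerDefinitions": [
#     {
#       "name": "my-container",
#       "image": "placeholder",
#       "essential": true,
#       "memory": 512,
#       "cpu": 256
#     }
#   ]
# }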
77 | docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG 78 | echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT 79 | 80 | - name: Fill in the new image ID in the Amazon ECS task definition 81 | id: task-def 82 | uses: aws-actions/amazon-ecs-render-task-definition@v1 83 | with: 84 | task-definition: ${{ env.ECS_TASK_DEFINITION }} 85 | container-name: ${{ env.CONTAINER_NAME }} 86 | image: ${{ steps.build-image.outputs.image }} 87 | 88 | - name: Deploy Amazon ECS task definition 89 | uses: aws-actions/amazon-ecs-deploy-task-definition@v1 90 | with: 91 | task-definition: ${{ steps.task-def.outputs.task-definition }} 92 | service: ${{ env.ECS_SERVICE }} 93 | cluster: ${{ env.ECS_CLUSTER }} 94 | wait-for-service-stability: true 95 | -------------------------------------------------------------------------------- /.github/workflows/azure-container-webapp.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build and push a Docker container to an Azure Web App when a commit is pushed to your default branch. 2 | # 3 | # This workflow assumes you have already created the target Azure App Service web app. 4 | # For instructions see https://docs.microsoft.com/en-us/azure/app-service/quickstart-custom-container?tabs=dotnet&pivots=container-linux 5 | # 6 | # To configure this workflow: 7 | # 8 | # 1. Download the Publish Profile for your Azure Web App. You can download this file from the Overview page of your Web App in the Azure Portal. 9 | # For more information: https://docs.microsoft.com/en-us/azure/app-service/deploy-github-actions?tabs=applevel#generate-deployment-credentials 10 | # 11 | # 2. Create a secret in your repository named AZURE_WEBAPP_PUBLISH_PROFILE and paste the publish profile contents as the value of the secret. 12 | # For instructions on obtaining the publish profile see: https://docs.microsoft.com/azure/app-service/deploy-github-actions#configure-the-github-secret 13 | # 14 | # 3. Create a GitHub personal access token (PAT) with "repo" and "read:packages" permissions. 15 | # 16 | # 4. Create three app settings on your Azure Web app: 17 | # DOCKER_REGISTRY_SERVER_URL: Set this to "https://ghcr.io" 18 | # DOCKER_REGISTRY_SERVER_USERNAME: Set this to the GitHub username or organization that owns the repository 19 | # DOCKER_REGISTRY_SERVER_PASSWORD: Set this to the value of your PAT from the previous step 20 | # 21 | # 5. Change the value for the AZURE_WEBAPP_NAME.
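# (Hedged sketch, not part of the original workflow.) The three app settings from
# step 4 can be created with the Azure CLI; the app and resource-group names here are
# placeholders:
#   az webapp config appsettings set --name <app-name> --resource-group <group> \
#     --settings DOCKER_REGISTRY_SERVER_URL=https://ghcr.io \
#                DOCKER_REGISTRY_SERVER_USERNAME=<owner> \
#                DOCKER_REGISTRY_SERVER_PASSWORD=<pat>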
22 | # 23 | # For more information on GitHub Actions for Azure: https://github.com/Azure/Actions 24 | # For more information on the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy 25 | # For more samples to get started with GitHub Action workflows to deploy to Azure: https://github.com/Azure/actions-workflow-samples 26 | 27 | name: Build and deploy a container to an Azure Web App 28 | 29 | env: 30 | AZURE_WEBAPP_NAME: your-app-name # set this to the name of your Azure Web App 31 | 32 | on: 33 | push: 34 | branches: [ "main" ] 35 | workflow_dispatch: 36 | 37 | permissions: 38 | contents: read 39 | 40 | jobs: 41 | build: 42 | runs-on: ubuntu-latest 43 | 44 | steps: 45 | - uses: actions/checkout@v3 46 | 47 | - name: Set up Docker Buildx 48 | uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0 49 | 50 | - name: Log in to GitHub container registry 51 | uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 52 | with: 53 | registry: ghcr.io 54 | username: ${{ github.actor }} 55 | password: ${{ github.token }} 56 | 57 | - name: Lowercase the repo name and username 58 | run: echo "REPO=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV} 59 | 60 | - name: Build and push container image to registry 61 | uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 62 | with: 63 | push: true 64 | tags: ghcr.io/${{ env.REPO }}:${{ github.sha }} 65 | file: ./Dockerfile 66 | 67 | deploy: 68 | permissions: 69 | contents: none 70 | runs-on: ubuntu-latest 71 | needs: build 72 | environment: 73 | name: 'Development' 74 | url: ${{ steps.deploy-to-webapp.outputs.webapp-url }} 75 | 76 | steps: 77 | - name: Lowercase the repo name and username 78 | run: echo "REPO=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV} 79 | 80 | - name: Deploy to Azure Web App 81 | id: deploy-to-webapp 82 | uses: azure/webapps-deploy@v2 83 | with: 84 | app-name: ${{ env.AZURE_WEBAPP_NAME }} 85 | publish-profile: ${{ secrets.AZURE_WEBAPP_PUBLISH_PROFILE }} 86 | images: 'ghcr.io/${{ env.REPO }}:${{ github.sha }}' 87 | -------------------------------------------------------------------------------- /.github/workflows/azure-webapps-python.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build and push a Python application to an Azure Web App when a commit is pushed to your default branch. 2 | # 3 | # This workflow assumes you have already created the target Azure App Service web app. 4 | # For instructions see https://docs.microsoft.com/en-us/azure/app-service/quickstart-python?tabs=bash&pivots=python-framework-flask 5 | # 6 | # To configure this workflow: 7 | # 8 | # 1. Download the Publish Profile for your Azure Web App. You can download this file from the Overview page of your Web App in the Azure Portal. 9 | # For more information: https://docs.microsoft.com/en-us/azure/app-service/deploy-github-actions?tabs=applevel#generate-deployment-credentials 10 | # 11 | # 2. Create a secret in your repository named AZURE_WEBAPP_PUBLISH_PROFILE and paste the publish profile contents as the value of the secret. 12 | # For instructions on obtaining the publish profile see: https://docs.microsoft.com/azure/app-service/deploy-github-actions#configure-the-github-secret 13 | # 14 | # 3. Change the value for the AZURE_WEBAPP_NAME. Optionally, change the PYTHON_VERSION environment variable below.
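# (Hedged sketch, not part of the original workflow.) The publish profile downloaded
# in step 1 can be stored as the secret from step 2 with the GitHub CLI; the file name
# app.PublishSettings is a placeholder:
#   gh secret set AZURE_WEBAPP_PUBLISH_PROFILE < app.PublishSettings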
15 | # 16 | # For more information on GitHub Actions for Azure: https://github.com/Azure/Actions 17 | # For more information on the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy 18 | # For more samples to get started with GitHub Action workflows to deploy to Azure: https://github.com/Azure/actions-workflow-samples 19 | 20 | name: Build and deploy Python app to Azure Web App 21 | 22 | env: 23 | AZURE_WEBAPP_NAME: your-app-name # set this to the name of your Azure Web App 24 | PYTHON_VERSION: '3.8' # set this to the Python version to use 25 | 26 | on: 27 | push: 28 | branches: [ "main" ] 29 | workflow_dispatch: 30 | 31 | permissions: 32 | contents: read 33 | 34 | jobs: 35 | build: 36 | runs-on: ubuntu-latest 37 | 38 | steps: 39 | - uses: actions/checkout@v3 40 | 41 | - name: Set up Python version 42 | uses: actions/setup-python@v3.0.0 43 | with: 44 | python-version: ${{ env.PYTHON_VERSION }} 45 | cache: 'pip' 46 | 47 | - name: Create and start virtual environment 48 | run: | 49 | python -m venv venv 50 | source venv/bin/activate 51 | 52 | - name: Install dependencies 53 | run: pip install -r requirements.txt 54 | 55 | # Optional: Add step to run tests here (PyTest, Django test suites, etc.) 56 | 57 | - name: Upload artifact for deployment jobs 58 | uses: actions/upload-artifact@v3 59 | with: 60 | name: python-app 61 | path: | 62 | . 63 | !venv/ 64 | 65 | deploy: 66 | permissions: 67 | contents: none 68 | runs-on: ubuntu-latest 69 | needs: build 70 | environment: 71 | name: 'Development' 72 | url: ${{ steps.deploy-to-webapp.outputs.webapp-url }} 73 | 74 | steps: 75 | - name: Download artifact from build job 76 | uses: actions/download-artifact@v3 77 | with: 78 | name: python-app 79 | path: . 80 | 81 | - name: 'Deploy to Azure Web App' 82 | id: deploy-to-webapp 83 | uses: azure/webapps-deploy@v2 84 | with: 85 | app-name: ${{ env.AZURE_WEBAPP_NAME }} 86 | publish-profile: ${{ secrets.AZURE_WEBAPP_PUBLISH_PROFILE }} 87 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ "main" ] 17 | pull_request: 18 | branches: [ "main" ] 19 | schedule: 20 | - cron: '34 5 * * 3' 21 | 22 | jobs: 23 | analyze: 24 | name: Analyze 25 | # Runner size impacts CodeQL analysis time. To learn more, please see: 26 | # - https://gh.io/recommended-hardware-resources-for-running-codeql 27 | # - https://gh.io/supported-runners-and-hardware-resources 28 | # - https://gh.io/using-larger-runners 29 | # Consider using larger runners for possible analysis time improvements. 
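# (Hedged sketch, not part of the original workflow.) To scan more of the languages
# present in this repository, the `language` matrix further below could be extended,
# for example:
#   language: [ 'python', 'javascript-typescript' ]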
30 | runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} 31 | timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} 32 | permissions: 33 | # required for all workflows 34 | security-events: write 35 | 36 | # only required for workflows in private repositories 37 | actions: read 38 | contents: read 39 | 40 | strategy: 41 | fail-fast: false 42 | matrix: 43 | language: [ 'python' ] 44 | # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ] 45 | # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both 46 | # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both 47 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 48 | 49 | steps: 50 | - name: Checkout repository 51 | uses: actions/checkout@v4 52 | 53 | # Initializes the CodeQL tools for scanning. 54 | - name: Initialize CodeQL 55 | uses: github/codeql-action/init@v3 56 | with: 57 | languages: ${{ matrix.language }} 58 | # If you wish to specify custom queries, you can do so here or in a config file. 59 | # By default, queries listed here will override any specified in a config file. 60 | # Prefix the list here with "+" to use these queries and those in the config file. 61 | 62 | # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 63 | # queries: security-extended,security-and-quality 64 | 65 | 66 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). 67 | # If this step fails, then you should remove it and run the build manually (see below) 68 | - name: Autobuild 69 | uses: github/codeql-action/autobuild@v3 70 | 71 | # ℹ️ Command-line programs to run using the OS shell. 72 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 73 | 74 | # If the Autobuild fails above, remove it and uncomment the following three lines. 75 | # Modify them (or add more) to build your code; if your project needs a custom build process, refer to the example below for guidance.
76 | 77 | # - run: | 78 | # echo "Run, Build Application using script" 79 | # ./location_of_script_within_repo/buildscript.sh 80 | 81 | - name: Perform CodeQL Analysis 82 | uses: github/codeql-action/analyze@v3 83 | with: 84 | category: "/language:${{matrix.language}}" 85 | -------------------------------------------------------------------------------- /.github/workflows/django.yml: -------------------------------------------------------------------------------- 1 | name: Django CI 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ubuntu-latest 13 | strategy: 14 | max-parallel: 4 15 | matrix: 16 | python-version: [3.7, 3.8, 3.9] 17 | 18 | steps: 19 | - uses: actions/checkout@v3 20 | - name: Set up Python ${{ matrix.python-version }} 21 | uses: actions/setup-python@v3 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | - name: Install Dependencies 25 | run: | 26 | python -m pip install --upgrade pip 27 | pip install -r requirements.txt 28 | - name: Run Tests 29 | run: | 30 | python manage.py test 31 | -------------------------------------------------------------------------------- /.github/workflows/docker-image.yml: -------------------------------------------------------------------------------- 1 | name: Docker Image CI 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | jobs: 10 | 11 | build: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v3 17 | - name: Build the Docker image 18 | run: docker build . --file Dockerfile --tag my-image-name:$(date +%s) 19 | -------------------------------------------------------------------------------- /.github/workflows/docker-publish.yml: -------------------------------------------------------------------------------- 1 | name: Docker 2 | 3 | # This workflow uses actions that are not certified by GitHub. 4 | # They are provided by a third-party and are governed by 5 | # separate terms of service, privacy policy, and support 6 | # documentation. 7 | 8 | on: 9 | schedule: 10 | - cron: '21 19 * * *' 11 | push: 12 | branches: [ "main" ] 13 | # Publish semver tags as releases. 14 | tags: [ 'v*.*.*' ] 15 | pull_request: 16 | branches: [ "main" ] 17 | 18 | env: 19 | # Use docker.io for Docker Hub if empty 20 | REGISTRY: ghcr.io 21 | # github.repository as / 22 | IMAGE_NAME: ${{ github.repository }} 23 | 24 | 25 | jobs: 26 | build: 27 | 28 | runs-on: ubuntu-latest 29 | permissions: 30 | contents: read 31 | packages: write 32 | # This is used to complete the identity challenge 33 | # with sigstore/fulcio when running outside of PRs. 
34 | id-token: write 35 | 36 | steps: 37 | - name: Checkout repository 38 | uses: actions/checkout@v3 39 | 40 | # Install the cosign tool except on PR 41 | # https://github.com/sigstore/cosign-installer 42 | - name: Install cosign 43 | if: github.event_name != 'pull_request' 44 | uses: sigstore/cosign-installer@6e04d228eb30da1757ee4e1dd75a0ec73a653e06 #v3.1.1 45 | with: 46 | cosign-release: 'v2.1.1' 47 | 48 | # Set up BuildKit Docker container builder to be able to build 49 | # multi-platform images and export cache 50 | # https://github.com/docker/setup-buildx-action 51 | - name: Set up Docker Buildx 52 | uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0 53 | 54 | # Login against a Docker registry except on PR 55 | # https://github.com/docker/login-action 56 | - name: Log into registry ${{ env.REGISTRY }} 57 | if: github.event_name != 'pull_request' 58 | uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 59 | with: 60 | registry: ${{ env.REGISTRY }} 61 | username: ${{ github.actor }} 62 | password: ${{ secrets.GITHUB_TOKEN }} 63 | 64 | # Extract metadata (tags, labels) for Docker 65 | # https://github.com/docker/metadata-action 66 | - name: Extract Docker metadata 67 | id: meta 68 | uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0 69 | with: 70 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 71 | 72 | # Build and push Docker image with Buildx (don't push on PR) 73 | # https://github.com/docker/build-push-action 74 | - name: Build and push Docker image 75 | id: build-and-push 76 | uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 77 | with: 78 | context: . 79 | push: ${{ github.event_name != 'pull_request' }} 80 | tags: ${{ steps.meta.outputs.tags }} 81 | labels: ${{ steps.meta.outputs.labels }} 82 | cache-from: type=gha 83 | cache-to: type=gha,mode=max 84 | 85 | # Sign the resulting Docker image digest except on PRs. 86 | # This will only write to the public Rekor transparency log when the Docker 87 | # repository is public to avoid leaking data. If you would like to publish 88 | # transparency data even for private images, pass --force to cosign below. 89 | # https://github.com/sigstore/cosign 90 | - name: Sign the published Docker image 91 | if: ${{ github.event_name != 'pull_request' }} 92 | env: 93 | # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable 94 | TAGS: ${{ steps.meta.outputs.tags }} 95 | DIGEST: ${{ steps.build-and-push.outputs.digest }} 96 | # This step uses the identity token to provision an ephemeral certificate 97 | # against the sigstore community Fulcio instance. 98 | run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST} 99 | -------------------------------------------------------------------------------- /.github/workflows/google.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a docker container, publish it to Google Container Registry, and deploy it to GKE when there is a push to the "main" branch. 2 | # 3 | # To configure this workflow: 4 | # 5 | # 1. Ensure that your repository contains the necessary configuration for your Google Kubernetes Engine cluster, including deployment.yml, kustomization.yml, service.yml, etc. 6 | # 7 | # 2. 
Create and configure a Workload Identity Provider for GitHub (https://github.com/google-github-actions/auth#setting-up-workload-identity-federation) 8 | # 9 | # 3. Change the values for the GAR_LOCATION, GKE_ZONE, GKE_CLUSTER, IMAGE, REPOSITORY and DEPLOYMENT_NAME environment variables (below). 10 | # 11 | # For more support on how to run the workflow, please visit https://github.com/google-github-actions/setup-gcloud/tree/master/example-workflows/gke-kustomize 12 | 13 | name: Build and Deploy to GKE 14 | 15 | on: 16 | push: 17 | branches: [ "main" ] 18 | 19 | env: 20 | PROJECT_ID: ${{ secrets.GKE_PROJECT }} 21 | GAR_LOCATION: us-central1 # TODO: update region of the Artifact Registry 22 | GKE_CLUSTER: cluster-1 # TODO: update to cluster name 23 | GKE_ZONE: us-central1-c # TODO: update to cluster zone 24 | DEPLOYMENT_NAME: gke-test # TODO: update to deployment name 25 | REPOSITORY: samples # TODO: update to Artifact Registry docker repository 26 | IMAGE: static-site 27 | 28 | jobs: 29 | setup-build-publish-deploy: 30 | name: Setup, Build, Publish, and Deploy 31 | runs-on: ubuntu-latest 32 | environment: production 33 | 34 | permissions: 35 | contents: 'read' 36 | id-token: 'write' 37 | 38 | steps: 39 | - name: Checkout 40 | uses: actions/checkout@v3 41 | 42 | # Configure Workload Identity Federation and generate an access token. 43 | - id: 'auth' 44 | name: 'Authenticate to Google Cloud' 45 | uses: 'google-github-actions/auth@v0' 46 | with: 47 | token_format: 'access_token' 48 | workload_identity_provider: 'projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider' 49 | service_account: 'my-service-account@my-project.iam.gserviceaccount.com' 50 | 51 | # Alternative option - authentication via credentials json 52 | # - id: 'auth' 53 | # uses: 'google-github-actions/auth@v0' 54 | # with: 55 | # credentials_json: '${{ secrets.GCP_CREDENTIALS }}' 56 | 57 | - name: Docker configuration 58 | run: |- 59 | echo ${{steps.auth.outputs.access_token}} | docker login -u oauth2accesstoken --password-stdin https://$GAR_LOCATION-docker.pkg.dev 60 | # Get the GKE credentials so we can deploy to the cluster 61 | - name: Set up GKE credentials 62 | uses: google-github-actions/get-gke-credentials@v0 63 | with: 64 | cluster_name: ${{ env.GKE_CLUSTER }} 65 | location: ${{ env.GKE_ZONE }} 66 | 67 | # Build the Docker image 68 | - name: Build 69 | run: |- 70 | docker build \ 71 | --tag "$GAR_LOCATION-docker.pkg.dev/$PROJECT_ID/$REPOSITORY/$IMAGE:$GITHUB_SHA" \ 72 | --build-arg GITHUB_SHA="$GITHUB_SHA" \ 73 | --build-arg GITHUB_REF="$GITHUB_REF" \ 74 | . 75 | # Push the Docker image to Google Artifact Registry 76 | - name: Publish 77 | run: |- 78 | docker push "$GAR_LOCATION-docker.pkg.dev/$PROJECT_ID/$REPOSITORY/$IMAGE:$GITHUB_SHA" 79 | # Set up kustomize 80 | - name: Set up Kustomize 81 | run: |- 82 | curl -sfLo kustomize https://github.com/kubernetes-sigs/kustomize/releases/download/v3.1.0/kustomize_3.1.0_linux_amd64 83 | chmod u+x ./kustomize 84 | # Deploy the Docker image to the GKE cluster 85 | - name: Deploy 86 | run: |- 87 | # replacing the image name in the k8s template 88 | ./kustomize edit set image LOCATION-docker.pkg.dev/PROJECT_ID/REPOSITORY/IMAGE:TAG=$GAR_LOCATION-docker.pkg.dev/$PROJECT_ID/$REPOSITORY/$IMAGE:$GITHUB_SHA 89 | ./kustomize build . 
| kubectl apply -f - 90 | kubectl rollout status deployment/$DEPLOYMENT_NAME 91 | kubectl get services -o wide 92 | -------------------------------------------------------------------------------- /.github/workflows/greetings.yml: -------------------------------------------------------------------------------- 1 | name: Greetings 2 | 3 | on: [pull_request_target, issues] 4 | 5 | jobs: 6 | greeting: 7 | runs-on: ubuntu-latest 8 | permissions: 9 | issues: write 10 | pull-requests: write 11 | steps: 12 | - uses: actions/first-interaction@v1 13 | with: 14 | repo-token: ${{ secrets.GITHUB_TOKEN }} 15 | issue-message: "Message that will be displayed on users' first issue" 16 | pr-message: "Message that will be displayed on users' first pull request" 17 | -------------------------------------------------------------------------------- /.github/workflows/ibm.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a docker container, publish it to IBM Container Registry, and deploy it to IKS when there is a push to the "main" branch. 2 | # 3 | # To configure this workflow: 4 | # 5 | # 1. Ensure that your repository contains a Dockerfile 6 | # 2. Set up secrets in your repository by going to settings: Create ICR_NAMESPACE and IBM_CLOUD_API_KEY 7 | # 3. Change the values for the IBM_CLOUD_REGION, REGISTRY_HOSTNAME, IMAGE_NAME, IKS_CLUSTER, DEPLOYMENT_NAME, and PORT 8 | 9 | name: Build and Deploy to IKS 10 | 11 | on: 12 | push: 13 | branches: [ "main" ] 14 | 15 | # Environment variables available to all jobs and steps in this workflow 16 | env: 17 | GITHUB_SHA: ${{ github.sha }} 18 | IBM_CLOUD_API_KEY: ${{ secrets.IBM_CLOUD_API_KEY }} 19 | IBM_CLOUD_REGION: us-south 20 | ICR_NAMESPACE: ${{ secrets.ICR_NAMESPACE }} 21 | REGISTRY_HOSTNAME: us.icr.io 22 | IMAGE_NAME: iks-test 23 | IKS_CLUSTER: example-iks-cluster-name-or-id 24 | DEPLOYMENT_NAME: iks-test 25 | PORT: 5001 26 | 27 | jobs: 28 | setup-build-publish-deploy: 29 | name: Setup, Build, Publish, and Deploy 30 | runs-on: ubuntu-latest 31 | environment: production 32 | steps: 33 | 34 | - name: Checkout 35 | uses: actions/checkout@v3 36 | 37 | # Download and Install IBM Cloud CLI 38 | - name: Install IBM Cloud CLI 39 | run: | 40 | curl -fsSL https://clis.cloud.ibm.com/install/linux | sh 41 | ibmcloud --version 42 | ibmcloud config --check-version=false 43 | ibmcloud plugin install -f kubernetes-service 44 | ibmcloud plugin install -f container-registry 45 | 46 | # Authenticate with IBM Cloud CLI 47 | - name: Authenticate with IBM Cloud CLI 48 | run: | 49 | ibmcloud login --apikey "${IBM_CLOUD_API_KEY}" -r "${IBM_CLOUD_REGION}" -g default 50 | ibmcloud cr region-set "${IBM_CLOUD_REGION}" 51 | ibmcloud cr login 52 | 53 | # Build the Docker image 54 | - name: Build with Docker 55 | run: | 56 | docker build -t "$REGISTRY_HOSTNAME"/"$ICR_NAMESPACE"/"$IMAGE_NAME":"$GITHUB_SHA" \ 57 | --build-arg GITHUB_SHA="$GITHUB_SHA" \ 58 | --build-arg GITHUB_REF="$GITHUB_REF" .
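# (Hedged sketch, not part of the original workflow.) The ICR namespace referenced by
# ICR_NAMESPACE must exist before the push below; it can be created once with a
# placeholder name:
#   ibmcloud cr namespace-add <my-namespace>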
59 | 60 | # Push the image to IBM Container Registry 61 | - name: Push the image to ICR 62 | run: | 63 | docker push $REGISTRY_HOSTNAME/$ICR_NAMESPACE/$IMAGE_NAME:$GITHUB_SHA 64 | 65 | # Deploy the Docker image to the IKS cluster 66 | - name: Deploy to IKS 67 | run: | 68 | ibmcloud ks cluster config --cluster $IKS_CLUSTER 69 | kubectl config current-context 70 | kubectl create deployment $DEPLOYMENT_NAME --image=$REGISTRY_HOSTNAME/$ICR_NAMESPACE/$IMAGE_NAME:$GITHUB_SHA --dry-run -o yaml > deployment.yaml 71 | kubectl apply -f deployment.yaml 72 | kubectl rollout status deployment/$DEPLOYMENT_NAME 73 | kubectl create service loadbalancer $DEPLOYMENT_NAME --tcp=80:$PORT --dry-run -o yaml > service.yaml 74 | kubectl apply -f service.yaml 75 | kubectl get services -o wide 76 | -------------------------------------------------------------------------------- /.github/workflows/jekyll-docker.yml: -------------------------------------------------------------------------------- 1 | name: Jekyll site CI 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - uses: actions/checkout@v3 16 | - name: Build the site in the jekyll/builder container 17 | run: | 18 | docker run \ 19 | -v ${{ github.workspace }}:/srv/jekyll -v ${{ github.workspace }}/_site:/srv/jekyll/_site \ 20 | jekyll/builder:latest /bin/bash -c "chmod -R 777 /srv/jekyll && jekyll build --future" 21 | -------------------------------------------------------------------------------- /.github/workflows/label.yml: -------------------------------------------------------------------------------- 1 | # This workflow will triage pull requests and apply a label based on the 2 | # paths that are modified in the pull request. 3 | # 4 | # To use this workflow, you will need to set up a .github/labeler.yml 5 | # file with configuration. For more information, see: 6 | # https://github.com/actions/labeler 7 | 8 | name: Labeler 9 | on: [pull_request_target] 10 | 11 | jobs: 12 | label: 13 | 14 | runs-on: ubuntu-latest 15 | permissions: 16 | contents: read 17 | pull-requests: write 18 | 19 | steps: 20 | - uses: actions/labeler@v4 21 | with: 22 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 23 | -------------------------------------------------------------------------------- /.github/workflows/manual.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow that is manually triggered 2 | 3 | name: Manual workflow 4 | 5 | # Controls when the action will run. Workflow runs when manually triggered using the UI 6 | # or API. 7 | on: 8 | workflow_dispatch: 9 | # Inputs the workflow accepts. 
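# (Hedged sketch, not part of the original workflow.) Once committed, this workflow
# can also be triggered from a terminal with the GitHub CLI, for example:
#   gh workflow run "Manual workflow" -f name=Octocat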
10 | inputs: 11 | name: 12 | # Friendly description to be shown in the UI instead of 'name' 13 | description: 'Person to greet' 14 | # Default value if no value is explicitly provided 15 | default: 'World' 16 | # Input has to be provided for the workflow to run 17 | required: true 18 | # The data type of the input 19 | type: string 20 | 21 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 22 | jobs: 23 | # This workflow contains a single job called "greet" 24 | greet: 25 | # The type of runner that the job will run on 26 | runs-on: ubuntu-latest 27 | 28 | # Steps represent a sequence of tasks that will be executed as part of the job 29 | steps: 30 | # Runs a single command using the runner's shell 31 | - name: Send greeting 32 | run: echo "Hello ${{ inputs.name }}" 33 | -------------------------------------------------------------------------------- /.github/workflows/openshift.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | 6 | # 💁 The OpenShift Starter workflow will: 7 | # - Check out your repository 8 | # - Perform a container image build 9 | # - Push the built image to the GitHub Container Registry (GHCR) 10 | # - Log in to your OpenShift cluster 11 | # - Create an OpenShift app from the image and expose it to the internet 12 | 13 | # ℹ️ Configure your repository and the workflow with the following steps: 14 | # 1. Have access to an OpenShift cluster. Refer to https://www.openshift.com/try 15 | # 2. Create the OPENSHIFT_SERVER and OPENSHIFT_TOKEN repository secrets. Refer to: 16 | #   - https://github.com/redhat-actions/oc-login#readme 17 | #   - https://docs.github.com/en/actions/reference/encrypted-secrets 18 | #   - https://cli.github.com/manual/gh_secret_set 19 | # 3. (Optional) Edit the top-level 'env' section as marked with '🖊️' if the defaults are not suitable for your project. 20 | # 4. (Optional) Edit the build-image step to build your project. 21 | # The default build uses a Dockerfile at the root of the repository, 22 | # but can be replaced with a different file, a source-to-image build, or a step-by-step buildah build. 23 | # 5. Commit and push the workflow file to your default branch to trigger a workflow run. 24 | 25 | # 👋 Visit our GitHub organization at https://github.com/redhat-actions/ to see our actions and provide feedback. 26 | 27 | name: OpenShift 28 | 29 | env: 30 | # 🖊️ EDIT your repository secrets to log into your OpenShift cluster and set up the context. 31 | # See https://github.com/redhat-actions/oc-login#readme for how to retrieve these values. 32 | # To get a permanent token, refer to https://github.com/redhat-actions/oc-login/wiki/Using-a-Service-Account-for-GitHub-Actions 33 | OPENSHIFT_SERVER: ${{ secrets.OPENSHIFT_SERVER }} 34 | OPENSHIFT_TOKEN: ${{ secrets.OPENSHIFT_TOKEN }} 35 | # 🖊️ EDIT to set the kube context's namespace after login. Leave blank to use your user's default namespace. 36 | OPENSHIFT_NAMESPACE: "" 37 | 38 | # 🖊️ EDIT to set a name for your OpenShift app, or a default one will be generated below. 39 | APP_NAME: "" 40 | 41 | # 🖊️ EDIT with the port your application should be accessible on. 42 | # If the container image exposes *exactly one* port, this can be left blank.
43 | # Refer to the 'port' input of https://github.com/redhat-actions/oc-new-app 44 | APP_PORT: "" 45 | 46 | # 🖊️ EDIT to change the image registry settings. 47 | # Registries such as GHCR, Quay.io, and Docker Hub are supported. 48 | IMAGE_REGISTRY: ghcr.io/${{ github.repository_owner }} 49 | IMAGE_REGISTRY_USER: ${{ github.actor }} 50 | IMAGE_REGISTRY_PASSWORD: ${{ github.token }} 51 | 52 | # 🖊️ EDIT to specify custom tags for the container image, or default tags will be generated below. 53 | IMAGE_TAGS: "" 54 | 55 | on: 56 | # https://docs.github.com/en/actions/reference/events-that-trigger-workflows 57 | workflow_dispatch: 58 | push: 59 | # Edit to the branch(es) you want to build and deploy on each push. 60 | branches: [ "main" ] 61 | 62 | jobs: 63 | # 🖊️ EDIT if you want to run vulnerability check on your project before deploying 64 | # the application. Please uncomment the below CRDA scan job and configure to run it in 65 | # your workflow. For details about CRDA action visit https://github.com/redhat-actions/crda/blob/main/README.md 66 | # 67 | # TODO: Make sure to add 'CRDA Scan' starter workflow from the 'Actions' tab. 68 | # For guide on adding new starter workflow visit https://docs.github.com/en/github-ae@latest/actions/using-workflows/using-starter-workflows 69 | 70 | crda-scan: 71 | uses: ./.github/workflows/crda.yml 72 | secrets: 73 | CRDA_KEY: ${{ secrets.CRDA_KEY }} 74 | # SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} # Either use SNYK_TOKEN or CRDA_KEY 75 | 76 | openshift-ci-cd: 77 | # 🖊️ Uncomment this if you are using CRDA scan step above 78 | # needs: crda-scan 79 | name: Build and deploy to OpenShift 80 | runs-on: ubuntu-20.04 81 | environment: production 82 | 83 | outputs: 84 | ROUTE: ${{ steps.deploy-and-expose.outputs.route }} 85 | SELECTOR: ${{ steps.deploy-and-expose.outputs.selector }} 86 | 87 | steps: 88 | - name: Check for required secrets 89 | uses: actions/github-script@v6 90 | with: 91 | script: | 92 | const secrets = { 93 | OPENSHIFT_SERVER: `${{ secrets.OPENSHIFT_SERVER }}`, 94 | OPENSHIFT_TOKEN: `${{ secrets.OPENSHIFT_TOKEN }}`, 95 | }; 96 | 97 | const GHCR = "ghcr.io"; 98 | if (`${{ env.IMAGE_REGISTRY }}`.startsWith(GHCR)) { 99 | core.info(`Image registry is ${GHCR} - no registry password required`); 100 | } 101 | else { 102 | core.info("A registry password is required"); 103 | secrets["IMAGE_REGISTRY_PASSWORD"] = `${{ secrets.IMAGE_REGISTRY_PASSWORD }}`; 104 | } 105 | 106 | const missingSecrets = Object.entries(secrets).filter(([ name, value ]) => { 107 | if (value.length === 0) { 108 | core.error(`Secret "${name}" is not set`); 109 | return true; 110 | } 111 | core.info(`✔️ Secret "${name}" is set`); 112 | return false; 113 | }); 114 | 115 | if (missingSecrets.length > 0) { 116 | core.setFailed(`❌ At least one required secret is not set in the repository. 
\n` + 117 | "You can add it using:\n" + 118 | "GitHub UI: https://docs.github.com/en/actions/reference/encrypted-secrets#creating-encrypted-secrets-for-a-repository \n" + 119 | "GitHub CLI: https://cli.github.com/manual/gh_secret_set \n" + 120 | "Also, refer to https://github.com/redhat-actions/oc-login#getting-started-with-the-action-or-see-example"); 121 | } 122 | else { 123 | core.info(`✅ All the required secrets are set`); 124 | } 125 | 126 | - name: Check out repository 127 | uses: actions/checkout@v3 128 | 129 | - name: Determine app name 130 | if: env.APP_NAME == '' 131 | run: | 132 | echo "APP_NAME=$(basename $PWD)" | tee -a $GITHUB_ENV 133 | 134 | - name: Determine image tags 135 | if: env.IMAGE_TAGS == '' 136 | run: | 137 | echo "IMAGE_TAGS=latest ${GITHUB_SHA::12}" | tee -a $GITHUB_ENV 138 | 139 | # https://github.com/redhat-actions/buildah-build#readme 140 | - name: Build from Dockerfile 141 | id: build-image 142 | uses: redhat-actions/buildah-build@v2 143 | with: 144 | image: ${{ env.APP_NAME }} 145 | tags: ${{ env.IMAGE_TAGS }} 146 | 147 | # If you don't have a Dockerfile/Containerfile, refer to https://github.com/redhat-actions/buildah-build#scratch-build-inputs 148 | # Or, perform a source-to-image build using https://github.com/redhat-actions/s2i-build 149 | # Otherwise, point this to your Dockerfile/Containerfile relative to the repository root. 150 | dockerfiles: | 151 | ./Dockerfile 152 | 153 | # https://github.com/redhat-actions/push-to-registry#readme 154 | - name: Push to registry 155 | id: push-image 156 | uses: redhat-actions/push-to-registry@v2 157 | with: 158 | image: ${{ steps.build-image.outputs.image }} 159 | tags: ${{ steps.build-image.outputs.tags }} 160 | registry: ${{ env.IMAGE_REGISTRY }} 161 | username: ${{ env.IMAGE_REGISTRY_USER }} 162 | password: ${{ env.IMAGE_REGISTRY_PASSWORD }} 163 | 164 | # The path the image was pushed to is now stored in ${{ steps.push-image.outputs.registry-path }} 165 | 166 | - name: Install oc 167 | uses: redhat-actions/openshift-tools-installer@v1 168 | with: 169 | oc: 4 170 | 171 | # https://github.com/redhat-actions/oc-login#readme 172 | - name: Log in to OpenShift 173 | uses: redhat-actions/oc-login@v1 174 | with: 175 | openshift_server_url: ${{ env.OPENSHIFT_SERVER }} 176 | openshift_token: ${{ env.OPENSHIFT_TOKEN }} 177 | insecure_skip_tls_verify: true 178 | namespace: ${{ env.OPENSHIFT_NAMESPACE }} 179 | 180 | # This step should create a deployment, service, and route to run your app and expose it to the internet. 
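# (Hedged sketch, not part of the original workflow.) The oc-new-app action used below
# is roughly equivalent to running, with placeholder values:
#   oc new-app <registry-path> --name <app-name>
#   oc expose service/<app-name>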
181 | # https://github.com/redhat-actions/oc-new-app#readme 182 | - name: Create and expose app 183 | id: deploy-and-expose 184 | uses: redhat-actions/oc-new-app@v1 185 | with: 186 | app_name: ${{ env.APP_NAME }} 187 | image: ${{ steps.push-image.outputs.registry-path }} 188 | namespace: ${{ env.OPENSHIFT_NAMESPACE }} 189 | port: ${{ env.APP_PORT }} 190 | 191 | - name: Print application URL 192 | env: 193 | ROUTE: ${{ steps.deploy-and-expose.outputs.route }} 194 | SELECTOR: ${{ steps.deploy-and-expose.outputs.selector }} 195 | run: | 196 | [[ -n ${{ env.ROUTE }} ]] || (echo "Determining application route failed in previous step"; exit 1) 197 | echo 198 | echo "======================== Your application is available at: ========================" 199 | echo ${{ env.ROUTE }} 200 | echo "===================================================================================" 201 | echo 202 | echo "Your app can be taken down with: \"oc delete all --selector='${{ env.SELECTOR }}'\"" 203 | -------------------------------------------------------------------------------- /.github/workflows/pylint.yml: -------------------------------------------------------------------------------- 1 | name: Pylint 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | python-version: ["3.8", "3.9", "3.10"] 11 | steps: 12 | - uses: actions/checkout@v3 13 | - name: Set up Python ${{ matrix.python-version }} 14 | uses: actions/setup-python@v3 15 | with: 16 | python-version: ${{ matrix.python-version }} 17 | - name: Install dependencies 18 | run: | 19 | python -m pip install --upgrade pip 20 | pip install pylint 21 | - name: Analysing the code with pylint 22 | run: | 23 | pylint $(git ls-files '*.py') 24 | -------------------------------------------------------------------------------- /.github/workflows/python-app.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python application 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | build: 17 | 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v3 22 | - name: Set up Python 3.10 23 | uses: actions/setup-python@v3 24 | with: 25 | python-version: "3.10" 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install flake8 pytest 30 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 31 | - name: Lint with flake8 32 | run: | 33 | # stop the build if there are Python syntax errors or undefined names 34 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 35 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 36 | flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 37 | - name: Test with pytest 38 | run: | 39 | pytest 40 | -------------------------------------------------------------------------------- /.github/workflows/python-package-conda.yml: -------------------------------------------------------------------------------- 1 | name: Python Package using Conda 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build-linux: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | max-parallel: 5 10 | 11 | steps: 12 | - uses: actions/checkout@v3 13 | - name: Set up Python 3.10 14 | uses: actions/setup-python@v3 15 | with: 16 | python-version: '3.10' 17 | - name: Add conda to system path 18 | run: | 19 | # $CONDA is an environment variable pointing to the root of the miniconda directory 20 | echo $CONDA/bin >> $GITHUB_PATH 21 | - name: Install dependencies 22 | run: | 23 | conda env update --file environment.yml --name base 24 | - name: Lint with flake8 25 | run: | 26 | conda install flake8 27 | # stop the build if there are Python syntax errors or undefined names 28 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 29 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 30 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 31 | - name: Test with pytest 32 | run: | 33 | conda install pytest 34 | pytest 35 | -------------------------------------------------------------------------------- /.github/workflows/python-package.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python package 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | python-version: ["3.9", "3.10", "3.11"] 20 | 21 | steps: 22 | - uses: actions/checkout@v3 23 | - name: Set up Python ${{ matrix.python-version }} 24 | uses: actions/setup-python@v3 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | - name: Install dependencies 28 | run: | 29 | python -m pip install --upgrade pip 30 | python -m pip install flake8 pytest 31 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 32 | - name: Lint with flake8 33 | run: | 34 | # stop the build if there are Python syntax errors or undefined names 35 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 36 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 37 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 38 | - name: Test with pytest 39 | run: | 40 | pytest 41 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 
5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Set up Python 26 | uses: actions/setup-python@v3 27 | with: 28 | python-version: '3.x' 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install build 33 | - name: Build package 34 | run: python -m build 35 | - name: Publish package 36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 37 | with: 38 | user: __token__ 39 | password: ${{ secrets.PYPI_API_TOKEN }} 40 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | # This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time. 2 | # 3 | # You can adjust the behavior by modifying this file. 4 | # For more information, see: 5 | # https://github.com/actions/stale 6 | name: Mark stale issues and pull requests 7 | 8 | on: 9 | schedule: 10 | - cron: '26 6 * * *' 11 | 12 | jobs: 13 | stale: 14 | 15 | runs-on: ubuntu-latest 16 | permissions: 17 | issues: write 18 | pull-requests: write 19 | 20 | steps: 21 | - uses: actions/stale@v5 22 | with: 23 | repo-token: ${{ secrets.GITHUB_TOKEN }} 24 | stale-issue-message: 'Stale issue message' 25 | stale-pr-message: 'Stale pull request message' 26 | stale-issue-label: 'no-issue-activity' 27 | stale-pr-label: 'no-pr-activity' 28 | -------------------------------------------------------------------------------- /.github/workflows/static.yml: -------------------------------------------------------------------------------- 1 | # Simple workflow for deploying static content to GitHub Pages 2 | name: Deploy static content to Pages 3 | 4 | on: 5 | # Runs on pushes targeting the default branch 6 | push: 7 | branches: ["main"] 8 | 9 | # Allows you to run this workflow manually from the Actions tab 10 | workflow_dispatch: 11 | 12 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages 13 | permissions: 14 | contents: read 15 | pages: write 16 | id-token: write 17 | 18 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. 19 | # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. 20 | concurrency: 21 | group: "pages" 22 | cancel-in-progress: false 23 | 24 | jobs: 25 | # Single deploy job since we're just deploying 26 | deploy: 27 | environment: 28 | name: github-pages 29 | url: ${{ steps.deployment.outputs.page_url }} 30 | runs-on: ubuntu-latest 31 | steps: 32 | - name: Checkout 33 | uses: actions/checkout@v4 34 | - name: Setup Pages 35 | uses: actions/configure-pages@v4 36 | - name: Upload artifact 37 | uses: actions/upload-pages-artifact@v3 38 | with: 39 | # Upload entire repository 40 | path: '.' 
41 | - name: Deploy to GitHub Pages 42 | id: deployment 43 | uses: actions/deploy-pages@v4 44 | -------------------------------------------------------------------------------- /.github/workflows/tencent.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a docker container, publish and deploy it to Tencent Kubernetes Engine (TKE) when there is a push to the "main" branch. 2 | # 3 | # To configure this workflow: 4 | # 5 | # 1. Ensure that your repository contains the necessary configuration for your Tencent Kubernetes Engine cluster, 6 | # including deployment.yml, kustomization.yml, service.yml, etc. 7 | # 8 | # 2. Set up secrets in your workspace: 9 | # - TENCENT_CLOUD_SECRET_ID with Tencent Cloud secret id 10 | # - TENCENT_CLOUD_SECRET_KEY with Tencent Cloud secret key 11 | # - TENCENT_CLOUD_ACCOUNT_ID with Tencent Cloud account id 12 | # - TKE_REGISTRY_PASSWORD with TKE registry password 13 | # 14 | # 3. Change the values for the TKE_IMAGE_URL, TKE_REGION, TKE_CLUSTER_ID and DEPLOYMENT_NAME environment variables (below). 15 | 16 | name: Tencent Kubernetes Engine 17 | 18 | on: 19 | push: 20 | branches: [ "main" ] 21 | 22 | # Environment variables available to all jobs and steps in this workflow 23 | env: 24 | TKE_IMAGE_URL: ccr.ccs.tencentyun.com/demo/mywebapp 25 | TKE_REGION: ap-guangzhou 26 | TKE_CLUSTER_ID: cls-mywebapp 27 | DEPLOYMENT_NAME: tke-test 28 | 29 | permissions: 30 | contents: read 31 | 32 | jobs: 33 | setup-build-publish-deploy: 34 | name: Setup, Build, Publish, and Deploy 35 | runs-on: ubuntu-latest 36 | environment: production 37 | steps: 38 | 39 | - name: Checkout 40 | uses: actions/checkout@v3 41 | 42 | # Build 43 | - name: Build Docker image 44 | run: | 45 | docker build -t ${TKE_IMAGE_URL}:${GITHUB_SHA} . 46 | 47 | - name: Login TKE Registry 48 | run: | 49 | docker login -u ${{ secrets.TENCENT_CLOUD_ACCOUNT_ID }} -p '${{ secrets.TKE_REGISTRY_PASSWORD }}' ${TKE_IMAGE_URL} 50 | 51 | # Push the Docker image to TKE Registry 52 | - name: Publish 53 | run: | 54 | docker push ${TKE_IMAGE_URL}:${GITHUB_SHA} 55 | 56 | - name: Set up Kustomize 57 | run: | 58 | curl -o kustomize --location https://github.com/kubernetes-sigs/kustomize/releases/download/v3.1.0/kustomize_3.1.0_linux_amd64 59 | chmod u+x ./kustomize 60 | 61 | - name: Set up ~/.kube/config for connecting TKE cluster 62 | uses: TencentCloud/tke-cluster-credential-action@v1 63 | with: 64 | secret_id: ${{ secrets.TENCENT_CLOUD_SECRET_ID }} 65 | secret_key: ${{ secrets.TENCENT_CLOUD_SECRET_KEY }} 66 | tke_region: ${{ env.TKE_REGION }} 67 | cluster_id: ${{ env.TKE_CLUSTER_ID }} 68 | 69 | - name: Switch to TKE context 70 | run: | 71 | kubectl config use-context ${TKE_CLUSTER_ID}-context-default 72 | 73 | # Deploy the Docker image to the TKE cluster 74 | - name: Deploy 75 | run: | 76 | ./kustomize edit set image ${TKE_IMAGE_URL}:${GITHUB_SHA} 77 | ./kustomize build . 
| kubectl apply -f - 78 | kubectl rollout status deployment/${DEPLOYMENT_NAME} 79 | kubectl get services -o wide 80 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 160 | #.idea/ 161 | -------------------------------------------------------------------------------- /.whitesource: -------------------------------------------------------------------------------- 1 | { 2 | "scanSettings": { 3 | "baseBranches": [] 4 | }, 5 | "checkRunSettings": { 6 | "vulnerableCheckRunConclusionLevel": "failure", 7 | "displayMode": "diff", 8 | "useMendCheckNames": true 9 | }, 10 | "issueSettings": { 11 | "minSeverityLevel": "LOW", 12 | "issueType": "DEPENDENCY" 13 | } 14 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 KOSASIH 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Static Badge](https://img.shields.io/badge/High-Tech-green) 2 | [![Jekyll site CI](https://github.com/KOSASIH/pi-velocity-core/actions/workflows/jekyll-docker.yml/badge.svg)](https://github.com/KOSASIH/pi-velocity-core/actions/workflows/jekyll-docker.yml) 3 | [![Deploy static content to Pages](https://github.com/KOSASIH/pi-velocity-core/actions/workflows/static.yml/badge.svg)](https://github.com/KOSASIH/pi-velocity-core/actions/workflows/static.yml) 4 | [![CodeQL](https://github.com/KOSASIH/pi-velocity-core/actions/workflows/codeql.yml/badge.svg)](https://github.com/KOSASIH/pi-velocity-core/actions/workflows/codeql.yml) 5 | [![Greetings](https://github.com/KOSASIH/pi-velocity-core/actions/workflows/greetings.yml/badge.svg)](https://github.com/KOSASIH/pi-velocity-core/actions/workflows/greetings.yml) 6 | [![DeepSource](https://app.deepsource.com/gh/KOSASIH/pi-velocity-core.svg/?label=active+issues&show_trend=true&token=eHJ0A4h1v6KAXfbnE25tC-8f)](https://app.deepsource.com/gh/KOSASIH/pi-velocity-core/) 7 | 8 |

pi-velocity-core by KOSASIH is licensed under Attribution 4.0 International

9 | 10 | # pi-velocity-core
11 |
12 | pi-velocity-core is a cutting-edge high-tech system for the Pi Network, focused on providing high-speed transaction processing to ensure fast and efficient transactions.
13 |
14 | Pi Velocity Core is a high-performance, lightweight, and feature-rich DNS caching library for Python. It intercepts DNS/protocol resolution calls and caches them, providing a significant performance boost to network-bound applications.
15 |
16 | Pi-Velocity-Core is a state-of-the-art, high-tech system designed specifically for the Pi Network, prioritizing high-speed transaction processing to facilitate seamless and efficient transactions. At the heart of this system lies advanced artificial intelligence (AI) technology, empowered by machine learning algorithms, natural language processing capabilities, and data analysis tools.
17 |
18 | # Key Features and Capabilities
19 |
20 | ## Machine Learning Algorithms
21 |
22 | Pi-Velocity-Core utilizes sophisticated machine learning algorithms to optimize transaction processing and ensure the system's performance is always at its peak. These algorithms facilitate the intelligent allocation of resources, enabling the system to adapt to changing network conditions and maintain optimal transaction speeds, even during high-traffic periods.
23 |
24 | ## Natural Language Processing (NLP)
25 |
26 | Leveraging the power of NLP, Pi-Velocity-Core effectively interprets transaction data and metadata, enhancing the overall accuracy and automation of transaction processing. This feature enables the system to detect and mitigate potential errors, ensuring that each transaction is processed quickly and accurately.
27 |
28 | ## Real-time Data Analysis Tools
29 |
30 | Integrated real-time data analysis tools monitor the entire system's performance, enabling administrators to detect any bottlenecks or inefficiencies that may impact transaction speed. These tools provide actionable insights, allowing for proactive adjustments to be made to maintain the system's optimal performance.
31 |
32 | ## Scalability and Adaptability
33 |
34 | Pi-Velocity-Core has been designed with scalability and adaptability in mind, ensuring the system can accommodate the evolving needs of the Pi Network. This flexibility allows for seamless integration of new technologies and features, enabling Pi-Velocity-Core to remain at the forefront of transaction processing innovation.
35 |
36 | ## Industry Trends and Research
37 |
38 | Pi-Velocity-Core stays up-to-date with the latest industry trends and research in artificial intelligence, high-speed transaction processing, and blockchain technology. By continually monitoring and integrating the most advanced techniques and methodologies, Pi-Velocity-Core guarantees its users receive the best possible service and experience.
39 |
40 | ## Conclusion
41 |
42 | In summary, Pi-Velocity-Core offers an unmatched high-tech solution for fast and efficient transaction processing in the Pi Network. With its powerful AI capabilities, advanced data analysis tools, and inherent adaptability, Pi-Velocity-Core is a cutting-edge innovation for the ever-growing digital ecosystem.
43 | 44 | # Features
45 |
46 | - In-memory caching of DNS responses
47 | - Automatic IPv6 to IPv4 address mapping
48 | - Manual caching of hostnames
49 | - Access to in-memory cache databases
50 | - Support for managing local cache
51 |
52 | # License
53 |
54 | Pi Velocity Core is released under the [MIT license](LICENSE)
55 |
56 | # About
57 | Pi Velocity Core is a product of the pi-velocity project.
58 |
59 | For any questions, feedback, or suggestions, please don't hesitate to reach out to [KOSASIH](https://www.linkedin.com/in/kosasih-81b46b5a) on LinkedIn.
60 |
61 | ![](https://img.shields.io/github/forks/KOSASIH/pi-velocity-core.svg?style=social&label=Fork&maxAge=2592000)
62 |
--------------------------------------------------------------------------------
/bin/app.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import logging
3 | import pi_velocity
4 |
5 | def main():
6 |     # Initialize the Pi-Velocity system
7 |     pi_velocity.init()
8 |
9 |     # Start the Pi-Velocity system
10 |     pi_velocity.start()
11 |
12 | if __name__ == '__main__':
13 |     # Set up logging
14 |     logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(message)s')
15 |
16 |     try:
17 |         main()
18 |     except Exception as e:
19 |         logging.error(f'Error in main: {e}')
20 |         sys.exit(1)
--------------------------------------------------------------------------------
/bin/init.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Set the working directory
4 | cd /path/to/project/directory
5 |
6 | # Create a virtual environment
7 | virtualenv venv
8 |
9 | # Activate the virtual environment
10 | source venv/bin/activate
11 |
12 | # Install the project dependencies
13 | pip install -r requirements.txt
14 |
15 | # Run the database migrations (if applicable)
16 | python manage.py migrate
17 |
18 | # Start the application
19 | python manage.py runserver
--------------------------------------------------------------------------------
/bin/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Start the Pi-Velocity application
4 | python3 bin/app.py &
5 |
6 | # Start the Pi-Velocity web interface
7 | cd web
8 | nginx -g "daemon off;"
--------------------------------------------------------------------------------
/bin/stop.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Stop the Pi-Velocity application
4 | # (note: this kills every python3 process on the host, not just Pi-Velocity)
5 | pkill -f python3
6 |
7 | # Stop the Pi-Velocity web interface
8 | cd web
9 | nginx -s stop
--------------------------------------------------------------------------------
/docs/Pi-Velocity.md:
--------------------------------------------------------------------------------
1 | # Pi-Velocity
2 |
3 | Pi-Velocity is a high-speed transaction processing system for Pi Network, designed to ensure fast and efficient transactions.
4 |
5 | ## Getting Started
6 |
7 | To get started with Pi-Velocity, follow these steps:
8 |
9 | 1. Clone the repository: `git clone https://github.com/KOSASIH/pi-velocity.git`
10 | 2. Create a virtual environment: `python3 -m venv venv`
11 | 3. Activate the virtual environment: `source venv/bin/activate`
12 | 4. Install the dependencies: `pip install -r requirements.txt`
13 | 5. Run the application: `python3 app.py`
14 |
15 | ## API Documentation
16 |
17 | Pi-Velocity uses FastAPI for its API documentation.
To access the documentation, navigate to `http://localhost:8000/docs` in your web browser.
18 |
19 | ### API Endpoints
20 |
21 | Pi-Velocity provides the following API endpoints:
22 |
23 | - `/`: The root endpoint, which returns a simple message.
24 | - `/transactions`: The transactions endpoint, which allows you to create and retrieve transactions.
25 |
26 | ### Transaction Endpoint
27 |
28 | The transactions endpoint allows you to create and retrieve transactions.
29 |
30 | #### Create a Transaction
31 |
32 | To create a transaction, send a `POST` request to `/transactions` with the following JSON payload:
33 |
34 | ```json
35 | {
36 |     "sender": "address1",
37 |     "receiver": "address2",
38 |     "amount": 100
39 | }
40 | ```
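As a concrete illustration, the same request can be issued from Python; a minimal sketch, assuming the server is running at `http://localhost:8000` as described above, with the placeholder addresses taken from the payload:

```python
import requests  # third-party HTTP client, already used elsewhere in this repository

# Create a transaction via the documented endpoint.
response = requests.post(
    "http://localhost:8000/transactions",
    json={"sender": "address1", "receiver": "address2", "amount": 100},
)
print(response.status_code, response.json())
```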
12 | """ 13 | 14 | # Check if network parameters are valid 15 | if network_params is None or 'fee_per_byte' not in network_params or network_params['fee_per_byte'] is None: 16 | return None 17 | 18 | # Calculate the transaction fee 19 | fee = transaction_size * network_params['fee_per_byte'] 20 | 21 | return fee 22 | -------------------------------------------------------------------------------- /functions/get_account_balance.py: -------------------------------------------------------------------------------- 1 | def get_account_balance(account_address): 2 | """ 3 | Retrieves the balance of the specified account address. 4 | 5 | :param str account_address: the account address to retrieve the balance for 6 | :return: int, the account balance 7 | """ 8 | 9 | # Look up the account in the blockchain 10 | account = get_account_from_blockchain(account_address) 11 | 12 | # Return the balance of the account 13 | return account["balance"] 14 | -------------------------------------------------------------------------------- /functions/get_network_status.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | HORIZON_URL = "http://localhost:8000" 4 | 5 | 6 | def get_network_status(): 7 | """ 8 | Retrieves the status of the Pi Network. 9 | """ 10 | 11 | # Send a request to the local Horizon instance 12 | response = requests.get(f"{HORIZON_URL}/status") 13 | 14 | # Check if the request was successful 15 | if response.status_code == 200: 16 | # Extract the status data from the response 17 | network_status = response.json()["status"] 18 | 19 | # Return the status data 20 | return network_status 21 | 22 | # Return None if the request was not successful 23 | return None 24 | -------------------------------------------------------------------------------- /functions/get_transaction_history.py: -------------------------------------------------------------------------------- 1 | def get_transaction_history(user_address, pi_network): 2 | """ 3 | Retrieve the transaction history of a user. 4 | 5 | :param user_address: The address of the user. 6 | :param pi_network: The Pi network object. 7 | :return: A list of transactions. 8 | """ 9 | 10 | # Get all transactions associated with the user's address 11 | user_transactions = pi_network.get_transactions_by_address(user_address) 12 | 13 | # Filter out the incoming and outgoing transactions 14 | transaction_history = [ 15 | t for t in user_transactions if user_address in (t["from"], t["to"]) 16 | ] 17 | 18 | return transaction_history 19 | -------------------------------------------------------------------------------- /functions/handle_incoming_connections.py: -------------------------------------------------------------------------------- 1 | def handle_incoming_connections(listener): 2 | """ 3 | Handles incoming connections from other nodes in the network. 4 | 5 | :param listener: A listener object for accepting incoming connections. 6 | 7 | """ 8 | 9 | # Loop forever to handle incoming connections 10 | while True: 11 | # Wait for a new connection to be established 12 | connection, address = listener.accept() 13 | 14 | # Log the incoming connection 15 | print(f"Accepted connection from {address}") 16 | 17 | # Start a new thread for handling the incoming connection 18 | thread = threading.Thread(target=handle_client_connection, args=(connection,)) 19 | thread.start() 20 | 21 | 22 | def handle_client_connection(connection): 23 | """ 24 | Handles a single incoming connection from a client. 
--------------------------------------------------------------------------------
/functions/handle_incoming_connections.py:
--------------------------------------------------------------------------------
1 | import json
2 | import threading
3 |
4 | # Receive buffer size for client sockets. The original module referenced an
5 | # undefined BUFF_SIZE constant; 4096 bytes is an assumed default.
6 | BUFF_SIZE = 4096
7 |
8 |
9 | def handle_incoming_connections(listener):
10 |     """
11 |     Handles incoming connections from other nodes in the network.
12 |
13 |     :param listener: A listener object for accepting incoming connections.
14 |
15 |     """
16 |
17 |     # Loop forever to handle incoming connections
18 |     while True:
19 |         # Wait for a new connection to be established
20 |         connection, address = listener.accept()
21 |
22 |         # Log the incoming connection
23 |         print(f"Accepted connection from {address}")
24 |
25 |         # Start a new thread for handling the incoming connection
26 |         thread = threading.Thread(target=handle_client_connection, args=(connection,))
27 |         thread.start()
28 |
29 |
30 | def handle_client_connection(connection):
31 |     """
32 |     Handles a single incoming connection from a client.
33 |
34 |     :param connection: A socket object for the incoming connection.
35 |
36 |     """
37 |
38 |     # Loop until the connection is closed
39 |     while True:
40 |         try:
41 |             # Receive data from the client
42 |             data = connection.recv(BUFF_SIZE)
43 |
44 |             # Check if the connection has been closed
45 |             if not data:
46 |                 break
47 |
48 |             # Parse the received data
49 |             message = json.loads(data.decode())
50 |
51 |             # Handle the received message
52 |             # (handle_message is expected to be provided elsewhere)
53 |             handle_message(message)
54 |
55 |         except Exception as e:
56 |             print(f"Error handling client connection: {e}")
57 |             break
58 |
59 |     # Close the connection once the loop exits
60 |     connection.close()
--------------------------------------------------------------------------------
/functions/handle_outgoing_connections.py:
--------------------------------------------------------------------------------
1 | import socket
2 | import threading
3 |
4 | # handle_client_connection is defined in the sibling incoming-connections module
5 | from functions.handle_incoming_connections import handle_client_connection
6 |
7 |
8 | def handle_outgoing_connections(peer_list):
9 |     """
10 |     Handles outgoing connections to other nodes in the network.
11 |
12 |     :param peer_list: A list of peer addresses to connect to.
13 |
14 |     """
15 |
16 |     # Loop through the list of peer addresses
17 |     for peer in peer_list:
18 |         # Create a new socket for the outgoing connection
19 |         connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
20 |
21 |         # Set a timeout for the connection attempt
22 |         connection.settimeout(5.0)
23 |
24 |         try:
25 |             # Connect to the peer
26 |             connection.connect(peer)
27 |
28 |             # Log the successful connection
29 |             print(f"Connected to peer {peer}")
30 |
31 |             # Start a new thread for handling the outgoing connection
32 |             thread = threading.Thread(
33 |                 target=handle_client_connection, args=(connection,)
34 |             )
35 |             thread.start()
36 |
37 |         except Exception as e:
38 |             print(f"Error connecting to peer {peer}: {e}")
39 |
40 |             # Close the connection
41 |             connection.close()
--------------------------------------------------------------------------------
/functions/monitor_network_activity.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 |
4 | class Network:
5 |     def __init__(self):
6 |         self.status = "idle"
7 |
8 |     def monitor_network_activity(self):
9 |         """
10 |         Monitors the activity in the network and updates the node's status accordingly.
11 |         """
12 |         while True:
13 |             time.sleep(5)  # check the network activity every 5 seconds
14 |             if self.is_network_active():
15 |                 self.status = "active"
16 |             else:
17 |                 self.status = "idle"
18 |
19 |     def is_network_active(self):
20 |         """
21 |         Checks if the network is active.
22 |         Returns:
23 |             bool: True if the network is active, False otherwise.
24 |         """
25 |         # Implement your own logic to check if the network is active.
26 |         # For example, you can check if there is any incoming or outgoing connections.
27 |         pass
--------------------------------------------------------------------------------
/functions/optimize_transaction_processing.py:
--------------------------------------------------------------------------------
1 | def optimize_transaction_processing(transaction_list):
2 |     """
3 |     Optimizes the transaction processing algorithm for faster processing.
4 |
5 |     Args:
6 |         transaction_list (list): A list of transactions to be processed.
7 |
8 |     Returns:
9 |         tuple: A tuple containing the optimized transaction list and a boolean value
10 |             indicating whether the optimization was successful.
11 | """ 12 | 13 | # Check if input list is empty or None 14 | if not transaction_list or len(transaction_list) == 0: 15 | return [], True 16 | 17 | # Check if there are any duplicate transactions 18 | seen = set() 19 | optimized_list = [] 20 | for tx in transaction_list: 21 | if tx["id"] in seen: 22 | continue 23 | seen.add(tx["id"]) 24 | optimized_list.append(tx) 25 | 26 | # Check if optimization was successful 27 | if len(optimized_list) == len(transaction_list): 28 | return optimized_list, True 29 | 30 | return optimized_list, False 31 | -------------------------------------------------------------------------------- /functions/process_block.py: -------------------------------------------------------------------------------- 1 | def process_block(block, network): 2 | """ 3 | Processes a block of transactions and updates the network. 4 | 5 | Args: 6 | block (dict): The block to be processed. 7 | network (Network): The network object. 8 | 9 | Returns: 10 | bool: True if the block is processed successfully, False otherwise. 11 | """ 12 | 13 | # Validate the block 14 | if not validate_block(block): 15 | return False 16 | 17 | # Process the transactions in the block 18 | for transaction in block["transactions"]: 19 | if not process_transaction(transaction, network): 20 | return False 21 | 22 | # Add the block to the blockchain 23 | if not network.add_block(block): 24 | return False 25 | 26 | return True 27 | -------------------------------------------------------------------------------- /functions/process_transaction.py: -------------------------------------------------------------------------------- 1 | def process_transaction(transaction, network): 2 | """ 3 | Processes a transaction and updates the network. 4 | :param transaction: The transaction object. 5 | :param network: The Pi Network object. 6 | :return: True if the transaction is processed successfully, False otherwise. 7 | """ 8 | # Validate the transaction 9 | if not validate_transaction(transaction): 10 | print("Error: The transaction is invalid.") 11 | return False 12 | 13 | # Check if the sender has enough balance 14 | sender = network.get_account(transaction["sender"]) 15 | if sender.balance < transaction["amount"] + transaction["fee"]: 16 | print("Error: The sender has insufficient balance.") 17 | return False 18 | 19 | # Process the transaction 20 | sender.balance -= transaction["amount"] + transaction["fee"] 21 | receiver = network.get_account(transaction["receiver"]) 22 | receiver.balance += transaction["amount"] 23 | network.add_transaction(transaction) 24 | 25 | # Add the transaction fee to the miner's reward 26 | miner = network.get_miner() 27 | miner.reward += transaction["fee"] 28 | 29 | # Broadcast the transaction to the network 30 | network.broadcast_transaction(transaction) 31 | 32 | return True 33 | -------------------------------------------------------------------------------- /functions/secure_communication.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import ssl 3 | 4 | 5 | def secure_communication(host, port, certfile, keyfile, timeout=None): 6 | """ 7 | Establish a secure communication channel with a remote server. 8 | 9 | Args: 10 | host (str): The remote host to connect to. 11 | port (int): The remote port to connect to. 12 | certfile (str): The path to the SSL certificate file. 13 | keyfile (str): The path to the SSL key file. 14 | timeout (float): The timeout value for the socket. 15 | 16 | Returns: 17 | A secure socket connection. 
18 | 19 |     Raises:
20 |         ssl.SSLCertVerificationError: If the SSL certificate cannot be verified.
21 |         TimeoutError: If the socket times out.
22 |     """
23 |
24 |     # Create a client-side context (Purpose.SERVER_AUTH authenticates the remote
25 |     # server; the original Purpose.CLIENT_AUTH would build a server-side context)
26 |     context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
27 |     context.load_cert_chain(certfile, keyfile)
28 |
29 |     # Connect to the remote host
30 |     sock = socket.create_connection((host, port))
31 |
32 |     # Create a socket wrapper with SSL
33 |     wrapped_sock = context.wrap_socket(sock, server_hostname=host)
34 |
35 |     # Set the timeout
36 |     if timeout is not None:
37 |         wrapped_sock.settimeout(timeout)
38 |
39 |     # Return the secure socket
40 |     return wrapped_sock
--------------------------------------------------------------------------------
/functions/start_transaction.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import uuid
3 |
4 |
5 | def start_transaction():
6 |     """
7 |     Initiates a new transaction.
8 |     """
9 |     # Create a new transaction object
10 |     transaction = {
11 |         "id": str(uuid.uuid4()),
12 |         "sender": None,
13 |         "receiver": None,
14 |         "amount": None,
15 |         "fee": None,
16 |         "timestamp": datetime.datetime.now(),
17 |     }
18 |     return transaction
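For illustration, the skeleton returned above can be filled in before being handed to the validation and processing helpers; the addresses and amounts below are placeholders:

```python
from functions.start_transaction import start_transaction

tx = start_transaction()
tx["sender"] = "sender-address"       # placeholder value
tx["receiver"] = "receiver-address"   # placeholder value
tx["amount"] = 100
tx["fee"] = 1
print(tx["id"], tx["timestamp"])      # generated UUID and creation time
```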
10 | """ 11 | 12 | # Check if the block headers are valid 13 | if not validate_block_headers(block): 14 | return False 15 | 16 | # Check if the block transactions are valid 17 | if not validate_block_transactions(block): 18 | return False 19 | 20 | return True 21 | -------------------------------------------------------------------------------- /functions/validate_transaction.py: -------------------------------------------------------------------------------- 1 | def validate_transaction(transaction, network): 2 | """ 3 | Validates the transaction inputs and outputs for pi-velocity-core network. 4 | :param transaction: The transaction object to validate. 5 | :param network: The pi-velocity-core network object. 6 | :return: True if the transaction is valid, False otherwise. 7 | """ 8 | 9 | # Check if transaction is null or empty 10 | if transaction is None or len(transaction) == 0: 11 | return False 12 | 13 | # Check if transaction fields are valid 14 | if ( 15 | "id" not in transaction 16 | or transaction["id"] is None 17 | or len(transaction["id"]) == 0 18 | ): 19 | return False 20 | if ( 21 | "sender" not in transaction 22 | or transaction["sender"] is None 23 | or len(transaction["sender"]) == 0 24 | ): 25 | return False 26 | if ( 27 | "receiver" not in transaction 28 | or transaction["receiver"] is None 29 | or len(transaction["receiver"]) == 0 30 | ): 31 | return False 32 | if "amount" not in transaction or transaction["amount"] is None: 33 | return False 34 | if "fee" not in transaction or transaction["fee"] is None: 35 | return False 36 | 37 | # Check if transaction inputs and outputs are valid 38 | sender = network.get_account(transaction["sender"]) 39 | if sender is None or sender.balance < transaction["amount"] + transaction["fee"]: 40 | return False 41 | 42 | receiver = network.get_account(transaction["receiver"]) 43 | if receiver is None: 44 | return False 45 | 46 | return True 47 | -------------------------------------------------------------------------------- /functions/verify_block_signature.py: -------------------------------------------------------------------------------- 1 | def verify_block_signature(block, signature): 2 | """ 3 | Verifies the block signature. 4 | 5 | Args: 6 | block (dict): The block to be verified. 7 | signature (str): The block signature. 8 | 9 | Returns: 10 | bool: True if the block signature is valid, False otherwise. 11 | """ 12 | 13 | # Serialize the block 14 | block_data = json.dumps(block) 15 | 16 | # Verify the block signature using the block data and public key 17 | public_key = get_block_author_public_key(block) 18 | return verify_signature(public_key, signature, block_data) 19 | -------------------------------------------------------------------------------- /functions/verify_signature.py: -------------------------------------------------------------------------------- 1 | def verify_signature(public_key, signature, transaction): 2 | """ 3 | Verifies the signature of a transaction using the given public key. 4 | 5 | :param public_key: The public key of the transaction signer. 6 | :param signature: The signature of the transaction. 7 | :param transaction: The transaction object to verify. 8 | :return: True if the signature is valid, False otherwise. 9 | """ 10 | # TODO: Implement the signature verification logic here 11 | # For example, you can use a cryptographic library such as 'cryptography' in Python 12 | # to verify the signature using the public key and transaction data. 
13 | pass 14 | -------------------------------------------------------------------------------- /lib/__init__.py: -------------------------------------------------------------------------------- 1 | from .util import * 2 | -------------------------------------------------------------------------------- /lib/blockchain.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import time 3 | 4 | from lib.util import to_binary 5 | 6 | class Block: 7 | def __init__(self, index, prev_hash, timestamp, data, hash, difficulty): 8 | self.index = index 9 | self.prev_hash = prev_hash 10 | self.timestamp = timestamp 11 | self.data = data 12 | self.hash = hash 13 | self.difficulty = difficulty 14 | 15 | class Blockchain: 16 | def __init__(self): 17 | self.chain = [self.create_genesis_block()] 18 | 19 | def create_genesis_block(self): 20 | return Block(0, "0" * 64, int(time.time()), "Genesis Block", "0" * 64, 4) 21 | 22 | def get_latest_block(self): 23 | return self.chain[-1] 24 | 25 | def add_new_block(self, data): 26 | prev_block = self.get_latest_block() 27 | new_block = Block(prev_block.index + 1, prev_block.hash, int(time.time()), data, None, prev_block.difficulty) 28 | new_block.hash = self.calculate_hash_for_block(new_block) 29 | self.chain.append(new_block) 30 | 31 | def is_chain_valid(self): 32 | for i in range(1, len(self.chain)): 33 | current_block = self.chain[i] 34 | prev_block = self.chain[i - 1] 35 | 36 | if current_block.hash != self.calculate_hash_for_block(current_block): 37 | return False 38 | 39 | if current_block.difficulty > prev_block.difficulty: 40 | return False 41 | 42 | if current_block.difficulty < self.target_difficulty: 43 | current_block.difficulty = self.target_difficulty 44 | 45 | return True 46 | 47 | def calculate_hash_for_block(self, block): 48 | block_string = to_binary(block.index) + to_binary(block.prev_hash) + to_binary(block.timestamp) + to_binary(block.data) + to_binary(block.difficulty) 49 | return hashlib.sha256(block_string.encode('utf-8')).hexdigest() 50 | 51 | @property 52 | def target_difficulty(self): 53 | return 2 ** (64 - 4) 54 | -------------------------------------------------------------------------------- /lib/network.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | class Node: 4 | def __init__(self, host, port): 5 | self.host = host 6 | self.port = port 7 | 8 | def connect_to_node(self): 9 | self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 10 | self.socket.connect((self.host, self.port)) 11 | 12 | def send_to_node(self, message): 13 | self.socket.sendall(message.encode('utf-8')) 14 | 15 | def recv_from_node(self): 16 | return self.socket.recv(1024).decode('utf-8') 17 | 18 | def close_connection(self): 19 | self.socket.close() 20 | 21 | class Network: 22 | def __init__(self): 23 | self.nodes = [] 24 | 25 | def add_node(self, node): 26 | self.nodes.append(node) 27 | 28 | def send_messages_to_all_nodes(self, message): 29 | for node in self.nodes: 30 | node.send_to_node(message) 31 | 32 | def recv_messages_from_all_nodes(self): 33 | messages = [] 34 | for node in self.nodes: 35 | message = node.recv_from_node() 36 | if message: 37 | messages.append(message) 38 | return messages 39 | 40 | def update_blockchain(self, new_chain): 41 | for node in self.nodes: 42 | node.send_to_node(new_chain) 43 | 44 | def connect_to_nodes(self): 45 | for node in self.nodes: 46 | node.connect_to_node() 47 | 48 | def close_connections(self): 49 | 
for node in self.nodes:
50 |             node.close_connection()
51 |
--------------------------------------------------------------------------------
/lib/util.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 |
3 | import base58
4 |
5 | def to_binary(value):
6 |     """
7 |     Returns the canonical string form of a value used when hashing block fields.
8 |
9 |     Note: lib/blockchain.py imports this helper, but the original module did
10 |     not define it; this minimal definition restores the import.
11 |     """
12 |     return str(value)
13 |
14 | def hash_block(block):
15 |     """
16 |     Hashes a block using SHA-256.
17 |
18 |     Args:
19 |         block (Block): The block to hash.
20 |
21 |     Returns:
22 |         str: The hashed block.
23 |     """
24 |     block_string = str(block.index) + str(block.prev_hash) + str(block.timestamp) + str(block.data) + str(block.nonce)
25 |     return hashlib.sha256(block_string.encode('utf-8')).hexdigest()
26 |
27 | def is_valid_proof(block, prev_block, difficulty):
28 |     """
29 |     Validates the proof of work for a block.
30 |
31 |     Args:
32 |         block (Block): The block to validate.
33 |         prev_block (Block): The previous block.
34 |         difficulty (int): The difficulty level.
35 |
36 |     Returns:
37 |         bool: True if the proof is valid, False otherwise.
38 |     """
39 |     prev_block_hash = prev_block.hash
40 |     current_block_hash = hash_block(block)
41 |     return current_block_hash.startswith('0' * difficulty) and prev_block_hash == block.prev_hash
42 |
43 | def get_transaction_value(transaction):
44 |     """
45 |     Returns the value of a transaction.
46 |
47 |     Args:
48 |         transaction (Transaction): The transaction to get the value of.
49 |
50 |     Returns:
51 |         int: The value of the transaction.
52 |     """
53 |     return transaction.amount
54 |
55 | def get_transaction_sender(transaction):
56 |     """
57 |     Returns the sender of a transaction.
58 |
59 |     Args:
60 |         transaction (Transaction): The transaction to get the sender of.
61 |
62 |     Returns:
63 |         str: The sender of the transaction.
64 |     """
65 |     return transaction.sender
66 |
67 | def get_transaction_receiver(transaction):
68 |     """
69 |     Returns the receiver of a transaction.
70 |
71 |     Args:
72 |         transaction (Transaction): The transaction to get the receiver of.
73 |
74 |     Returns:
75 |         str: The receiver of the transaction.
76 |     """
77 |     return transaction.receiver
78 |
79 | def encode_base58(bytes_string):
80 |     """
81 |     Encodes a byte string in base58 format.
82 |
83 |     Args:
84 |         bytes_string (bytes): The byte string to encode.
85 |
86 |     Returns:
87 |         str: The encoded base58 string.
88 |     """
89 |     # Delegate to the base58 package imported above; the previous hand-rolled
90 |     # loop did not implement base58 correctly.
91 |     return base58.b58encode(bytes_string).decode('utf-8')
92 |
93 | def decode_base58(base58_string):
94 |     """
95 |     Decodes a base58 string to a byte string.
96 |
97 |     Args:
98 |         base58_string (str): The base58 string to decode.
99 |
100 |     Returns:
101 |         bytes: The decoded byte string.
102 |     """
103 |     try:
104 |         return base58.b58decode(base58_string)
105 |     except ValueError as exc:
106 |         raise ValueError("Invalid base58 string.") from exc
107 |
108 | def validate_address(address):
109 |     if address is None:
110 |         return False
111 |
112 |     try:
113 |         decode_base58(address)
114 |     except ValueError:
115 |         return False
116 |
117 |     return True
--------------------------------------------------------------------------------
/lib/wallet.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 |
3 | import base58
4 | import rsa
5 |
6 | class Wallet:
7 |     def __init__(self):
8 |         self.private_key = None
9 |         self.public_key = None
10 |         self.address = None
11 |
12 |     def generate_keys(self):
13 |         # rsa.newkeys() returns (public_key, private_key); the original code
14 |         # unpacked them in the wrong order.
15 |         (self.public_key, self.private_key) = rsa.newkeys(2048)
16 |
17 |     def get_address(self):
18 |         if self.address is None:
19 |             public_key_bytes = self.public_key.save_pkcs1('PEM')
20 |             public_key_hash = hashlib.sha256(public_key_bytes).digest()
21 |             # RIPEMD-160 of the SHA-256 digest (requires OpenSSL ripemd160 support)
22 |             ripemd = hashlib.new('ripemd160')
23 |             ripemd.update(public_key_hash)
24 |             public_key_checksum_bytes = base58.b58encode(ripemd.digest())
25 |             self.address = 'P' + public_key_checksum_bytes.decode('utf-8')
26 |         return self.address
27 |
28 |     def sign_data(self, data):
29 |         # Sign the data with the private key; rsa.sign() hashes internally.
30 |         return rsa.sign(data.encode('utf-8'), self.private_key, 'SHA-256')
31 |
32 |     def verify_signature(self, data, signature):
33 |         try:
34 |             rsa.verify(data.encode('utf-8'), signature, self.public_key)
35 |             return True
36 |         except rsa.VerificationError:
37 |             return False
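A quick sanity sketch for the wallet above (2048-bit key generation is slow; the printed address depends on the generated key):

```python
from lib.wallet import Wallet

wallet = Wallet()
wallet.generate_keys()
print(wallet.get_address())                        # 'P' + base58(RIPEMD-160 digest)

signature = wallet.sign_data("hello")
print(wallet.verify_signature("hello", signature))     # True
print(wallet.verify_signature("tampered", signature))  # False
```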
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
1 | from flask_sqlalchemy import SQLAlchemy
2 |
3 | # import application modules
4 | from src.configure_app import create_app
5 |
6 | # initialize and configure the Flask app (the original module referenced
7 | # configure_app()/register_blueprints() helpers that were never defined)
8 | app = create_app()
9 |
10 | # database handle used by the models and views; Flask-SQLAlchemy and the
11 | # SQLite URI are assumed defaults here
12 | app.config.setdefault("SQLALCHEMY_DATABASE_URI", "sqlite:///blocks.db")
13 | db = SQLAlchemy(app)
14 |
15 | # run the Flask app
16 | if __name__ == "__main__":
17 |     app.run()
--------------------------------------------------------------------------------
/src/api.py:
--------------------------------------------------------------------------------
1 | from flask_restful import Api, Resource, reqparse
2 |
3 | from src.models import Block
4 |
5 | class BlockListAPI(Resource):
6 |     """
7 |     Handles CRUD operations on Block objects
8 |     """
9 |
10 |     def __init__(self):
11 |         self.reqparse = reqparse.RequestParser()
12 |
13 |         super().__init__()
14 |
15 |     def get(self):
16 |         """
17 |         Get all blocks in the Blockchain
18 |
19 |         Returns:
20 |             list: A list of Block objects
21 |         """
22 |
23 |         blocks = Block.query.all()
24 |
25 |         return blocks, 200
26 |
27 |     def post(self):
28 |         """
29 |         Create a new Block
30 |
31 |         Returns:
32 |             Block: The newly created Block
33 |         """
34 |
35 |         # parse the request data
36 |         self.reqparse.add_argument('data', type=str, help='No data in the request', required=True)
37 |
38 |         data = self.reqparse.parse_args()
39 |
40 |         # create a new Block
41 |         block = Block.create(data=data['data'])
42 |
43 |         return block, 201
44 |
45 | block_parser = reqparse.RequestParser()
46 |
47 | block_parser.add_argument('data', type=str, help='No data in the request', required=True)
48 |
49 | class BlockAPI(Resource):
50 |     """
51 |     Handles CRUD operations on individual Block objects
52 |     """
53 |
54 |     def get(self, block_id):
55 |         """
56 |         Get a specific Block
57 |
58 |         Args:
59 |             block_id (int): The ID of the Block
60 |
61 |         Returns:
62 |             Block: The Block object
63 |         """
64 |
65 |         block = Block.query.filter_by(id=block_id).first()
66 |
67 |         if block is None:
68 |             return {'message': 'Block not found'}, 404
69 |
70 |         return block, 200
71 |
72 |     def put(self, block_id):
73 |         """
74 |         Update a specific Block
75 |
76 |         Args:
77 |             block_id (int): The ID of the Block
78 |
79 |         Returns:
80 |             Block: The updated Block
81 |         """
82 |
83 |         block = Block.query.filter_by(id=block_id).first()
84 |
85 |         if block is None:
86 |             return {'message': 'Block not found'}, 404
87 |
88 |         # parse the request data with the module-level parser
89 |         # (BlockAPI defines no self.reqparse; the original code referenced one)
90 |         data = block_parser.parse_args()
91 |
92 |         block.data = data['data']
93 |
94 |         return block, 200
95 |
96 |     def delete(self, block_id):
97 |         """
98 |         Delete a specific Block
99 |
100 |         Args:
101 |             block_id (int): The ID of the Block
102 |
103 |         Returns:
104 |             dict: A success message
105 |         """
106 |
107 |         block = Block.query.filter_by(id=block_id).first()
108 |
109 |         if block is None:
110 |             return {'message': 'Block not found'}, 404
111 |
112 |         block.delete()
113 |
114 |         return {'message': 'Block deleted'}, 200
115 |
116 | api = Api()
117 |
118 | api.add_resource(BlockListAPI, '/blocks', endpoint='blocks')
119 | api.add_resource(BlockAPI, '/blocks/<int:block_id>', endpoint='block')
--------------------------------------------------------------------------------
/src/app.py:
--------------------------------------------------------------------------------
1 | # create_app lives in src/configure_app.py; the original import referenced a
2 | # nonexistent top-level `app` module
3 | from src.configure_app import create_app
4 |
5 | app = create_app()
6 |
7 | if __name__ == "__main__":
8 |     app.run()
--------------------------------------------------------------------------------
/src/configure_app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 |
3 | # `api` is the flask_restful Api instance assembled in src/api.py; the module
4 | # previously imported an ExampleResource that does not exist.
5 | from src.api import api
6 |
7 | def create_app():
8 |     app = Flask(__name__)
9 |
10 |     api.init_app(app)
11 |
12 |     return app
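A minimal sketch of how this factory wiring is typically consumed (no database assumptions; it only shows that the Api instance binds to the app at creation time, assuming the dependencies above are installed):

```python
from src import app  # importing the package builds the app via create_app()

# The flask_restful Api from src/api.py is bound during create_app(), so the
# /blocks routes appear in the app's URL map.
print(app.url_map)
```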
--------------------------------------------------------------------------------
/src/models.py:
--------------------------------------------------------------------------------
1 | import datetime
2 |
3 | from sqlalchemy import Column, Integer, String, DateTime
4 | from sqlalchemy.ext.declarative import declarative_base
5 |
6 | Base = declarative_base()
7 |
8 | class Block(Base):
9 |     """
10 |     A Block represents a block in the Blockchain
11 |
12 |     Attributes:
13 |         id (int): The ID of the block
14 |         data (str): The data stored in the block
15 |         timestamp (datetime): The timestamp of the block creation
16 |     """
17 |
18 |     __tablename__ = "blocks"
19 |
20 |     id = Column(Integer, primary_key=True)
21 |     data = Column(String(255), nullable=False)
22 |     timestamp = Column(DateTime, default=datetime.datetime.utcnow)
23 |
24 |     def __repr__(self):
25 |         """
26 |         Returns a string representation of the object
27 |
28 |         Returns:
29 |             str: A string representation of the object
30 |         """
31 |
32 |         return f"Block(id={self.id}, data={self.data}, timestamp={self.timestamp})"
33 |
34 |     def __init__(self, data: str, timestamp: datetime.datetime = None):
35 |         """
36 |         Initializes the Block object
37 |
38 |         Args:
39 |             data (str): The data to be stored in the block
40 |             timestamp (datetime, optional): An explicit creation time; the
41 |                 column default is used when omitted (the original __init__ did
42 |                 not accept the timestamp that from_dict() passes)
43 |         """
44 |
45 |         self.data = data
46 |         if timestamp is not None:
47 |             self.timestamp = timestamp
48 |
49 |     @staticmethod
50 |     def create(data: str):
51 |         """
52 |         Creates a new Block
53 |
54 |         Args:
55 |             data (str): The data to be stored in the block
56 |
57 |         Returns:
58 |             Block: The newly created Block
59 |         """
60 |
61 |         block = Block(data=data)
62 |
63 |         return block
64 |
65 |     def to_dict(self):
66 |         """
67 |         Converts the object to a dictionary
68 |
69 |         Returns:
70 |             dict: The object as a dictionary
71 |         """
72 |
73 |         return {
74 |             "id": self.id,
75 |             "data": self.data,
76 |             "timestamp": self.timestamp
77 |         }
78 |
79 |     @staticmethod
80 |     def from_dict(data: dict):
81 |         """
82 |         Creates a Block object from a dictionary
83 |
84 |         Args:
85 |             data (dict): A dictionary to create the Block object from
86 |
87 |         Returns:
88 |             Block: A Block object created from the dictionary
89 |         """
90 |
91 |         block = Block(
92 |             data=data.get("data"),
93 |             timestamp=data.get("timestamp")
94 |         )
95 |
96 |         block.id = data.get("id")
97 |
98 |         return block
99 |
100 |     def delete(self):
101 |         """
102 |         Deletes the object from the database
103 |         """
104 |
105 |         from src import db
106 |
107 |         db.session.delete(self)
108 |         db.session.commit()
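The model's dict round-trip can be sketched as follows (no database session involved; `id` and `timestamp` stay unset until the block is persisted):

```python
from src.models import Block

block = Block(data="genesis")
as_dict = block.to_dict()

restored = Block.from_dict(as_dict)
print(restored.data)  # 'genesis'
```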
--------------------------------------------------------------------------------
/src/serializers.py:
--------------------------------------------------------------------------------
1 | from marshmallow import EXCLUDE, Schema, fields
2 | 
3 | class BlockSchema(Schema):
4 |     """
5 |     Serializes and deserializes Block objects
6 | 
7 |     Attributes:
8 |         id (int): The ID of the Block
9 |         data (str): The data stored in the Block
10 |         timestamp (datetime): The timestamp of the Block
11 |     """
12 | 
13 |     id = fields.Int(dump_only=True)
14 |     data = fields.Str(required=True)
15 |     timestamp = fields.DateTime(dump_only=True)
16 | 
17 |     class Meta:
18 |         """
19 |         Configures the serializer
20 | 
21 |         Attributes:
22 |             unknown: Excludes any unknown fields during deserialization
23 |         """
24 | 
25 |         unknown = EXCLUDE
26 | 
27 | block_schema = BlockSchema()
28 | blocks_schema = BlockSchema(many=True)
29 | 
30 | def serialize(data):
31 |     """
32 |     Serializes a Block object or dictionary
33 | 
34 |     Args:
35 |         data: A Block object or dictionary to serialize
36 | 
37 |     Returns:
38 |         str: A JSON-encoded string of the serialized data
39 |     """
40 | 
41 |     return block_schema.dumps(data)
42 | 
43 | def deserialize(data: str):
44 |     """
45 |     Deserializes a JSON-encoded string
46 | 
47 |     Args:
48 |         data (str): A JSON-encoded string to deserialize
49 | 
50 |     Returns:
51 |         dict: The deserialized dictionary
52 |     """
53 | 
54 |     return block_schema.loads(data)
55 | 
56 | def serialize_list(data: list):
57 |     """
58 |     Serializes a list of Block objects or dictionaries
59 | 
60 |     Args:
61 |         data (list): A list of Block objects or dictionaries to serialize
62 | 
63 |     Returns:
64 |         str: A JSON-encoded string of the serialized list
65 |     """
66 | 
67 |     return blocks_schema.dumps(data)
68 | 
69 | def deserialize_list(data: str):
70 |     """
71 |     Deserializes a JSON-encoded string of a list of dictionaries
72 | 
73 |     Args:
74 |         data (str): A JSON-encoded string to deserialize
75 | 
76 |     Returns:
77 |         list: The deserialized list of dictionaries
78 |     """
79 | 
80 |     return blocks_schema.loads(data)
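
A short usage sketch for these helpers (illustrative; the payloads are made up). Note that id and timestamp are dump-only, so they appear in output but are never accepted on input, and unknown input keys are dropped by Meta.unknown = EXCLUDE:

    from src.serializers import deserialize, serialize

    json_str = serialize({"id": 1, "data": "genesis"})
    # -> '{"id": 1, "data": "genesis"}' (timestamp omitted because it is unset)

    fields_in = deserialize('{"data": "genesis", "junk": true}')
    # -> {"data": "genesis"}; "junk" is silently excluded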
--------------------------------------------------------------------------------
/src/views.py:
--------------------------------------------------------------------------------
1 | from flask import jsonify, request
2 | 
3 | from src import app, db
4 | from src.models import Block
5 | from src.serializers import serialize, serialize_list
6 | 
7 | 
8 | @app.route("/")
9 | def index():
10 |     """
11 |     Handles the root route
12 | 
13 |     Returns:
14 |         str: A greeting message
15 |     """
16 | 
17 |     return "Hello, World!"
18 | 
19 | 
20 | @app.route("/blocks", methods=["GET", "POST"])
21 | def blocks():
22 |     """
23 |     Lists all blocks (GET) or creates a new block (POST)
24 | 
25 |     Returns:
26 |         str: A JSON-encoded string
27 |     """
28 | 
29 |     if request.method == "GET":
30 |         blocks = Block.query.all()
31 | 
32 |         return serialize_list(blocks)
33 | 
34 |     if request.method == "POST":
35 |         data = request.json
36 | 
37 |         block = Block.create(data["data"])
38 | 
39 |         db.session.add(block)
40 |         db.session.commit()
41 | 
42 |         return serialize(block), 201
43 | 
44 | 
45 | @app.route("/blocks/<int:block_id>", methods=["GET", "PUT", "DELETE"])
46 | def block(block_id):
47 |     """
48 |     Fetches (GET), updates (PUT), or deletes (DELETE) a single block
49 | 
50 |     Args:
51 |         block_id (int): The ID of the block
52 | 
53 |     Returns:
54 |         str: A JSON-encoded string
55 |     """
56 | 
57 |     block = Block.query.get_or_404(block_id)
58 | 
59 |     if request.method == "GET":
60 |         return serialize(block)
61 | 
62 |     if request.method == "PUT":
63 |         data = request.json
64 | 
65 |         block.data = data["data"]
66 | 
67 |         db.session.commit()
68 | 
69 |         return serialize(block)
70 | 
71 |     if request.method == "DELETE":
72 |         db.session.delete(block)
73 |         db.session.commit()
74 | 
75 |         return jsonify({"result": "success"})
76 | 
77 | 
78 | if __name__ == "__main__":
79 |     app.run()
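
For context, a hypothetical client session against these routes (requests is not among the repo's dependencies, and the host and port assume a default local app.run()):

    import requests

    BASE = "http://127.0.0.1:5000"  # assumed local dev server

    created = requests.post(f"{BASE}/blocks", json={"data": "genesis"}).json()
    requests.put(f"{BASE}/blocks/{created['id']}", json={"data": "updated"})
    print(requests.get(f"{BASE}/blocks").json())  # every block, via serialize_list
    requests.delete(f"{BASE}/blocks/{created['id']}")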
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | 
4 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
--------------------------------------------------------------------------------
/test/test_models.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | 
3 | from src import models
4 | 
5 | def test_create_block():
6 |     """
7 |     Test creating a new block
8 |     """
9 | 
10 |     block = models.Block.create(data='Test Block')
11 | 
12 |     # The primary key is only assigned once the block is committed
13 |     assert block.id is None
14 |     assert block.data == 'Test Block'
15 |     assert block.timestamp is not None
16 | 
17 | def test_block_eq():
18 |     """
19 |     Test that blocks created from the same data carry equal payloads
20 |     """
21 | 
22 |     block1 = models.Block.create(data='Test Block 1')
23 |     block2 = models.Block.create(data='Test Block 1')
24 | 
25 |     assert block1.data == block2.data
26 | 
27 | def test_block_ne():
28 |     """
29 |     Test that blocks created from different data differ
30 |     """
31 | 
32 |     block1 = models.Block.create(data='Test Block 1')
33 |     block2 = models.Block.create(data='Test Block 2')
34 | 
35 |     assert block1.data != block2.data
36 | 
37 | def test_block_gte():
38 |     """
39 |     Test that a later block's timestamp is greater than or equal to an earlier one's
40 |     """
41 | 
42 |     block1 = models.Block(data='Test Block', timestamp=datetime.utcnow())
43 |     block2 = models.Block.create(data='Test Block')
44 | 
45 |     assert block2.timestamp >= block1.timestamp
46 | 
47 | def test_block_lte():
48 |     """
49 |     Test that an earlier block's timestamp is less than or equal to a later one's
50 |     """
51 | 
52 |     block1 = models.Block(data='Test Block', timestamp=datetime.utcnow())
53 |     block2 = models.Block.create(data='Test Block')
54 | 
55 |     assert block1.timestamp <= block2.timestamp
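
These tests only build objects in memory. Exercising the persistence paths would take an application context and a throwaway database, along the lines of this sketch (the in-memory SQLite URI and the create_all/drop_all calls assume the Flask-SQLAlchemy setup in src/__init__.py):

    import pytest

    from src import app, db

    @pytest.fixture
    def session():
        # Hypothetical harness: isolate each test in an in-memory database
        app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
        with app.app_context():
            db.create_all()
            yield db.session
            db.session.remove()
            db.drop_all()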
--------------------------------------------------------------------------------
/test/test_views.py:
--------------------------------------------------------------------------------
1 | import json
2 | import unittest
3 | 
4 | from src import app, db, models
5 | 
6 | 
7 | class TestViews(unittest.TestCase):
8 |     def setUp(self):
9 |         # Each test talks to the Flask app through its built-in test client
10 |         self.client = app.test_client()
11 | 
12 |     def test_index(self):
13 |         response = self.client.get("/")
14 | 
15 |         self.assertEqual(response.status_code, 200)
16 | 
17 |     def test_blocks(self):
18 |         block = models.Block.create(data="Test Block")
19 |         db.session.add(block)
20 |         db.session.commit()
21 | 
22 |         response = self.client.get("/blocks")
23 | 
24 |         self.assertEqual(response.status_code, 200)
25 | 
26 |         data = json.loads(response.data)
27 | 
28 |         self.assertGreaterEqual(len(data), 1)
29 | 
30 |     def test_block(self):
31 |         block = models.Block.create(data="Test Block")
32 |         db.session.add(block)
33 |         db.session.commit()
34 | 
35 |         response = self.client.get(f"/blocks/{block.id}")
36 | 
37 |         self.assertEqual(response.status_code, 200)
38 | 
39 |         data = json.loads(response.data)
40 | 
41 |         self.assertEqual(data["data"], "Test Block")
42 | 
43 |     def test_create_block(self):
44 |         data = {"data": "Test Block"}
45 | 
46 |         response = self.client.post("/blocks", json=data)
47 | 
48 |         self.assertEqual(response.status_code, 201)
49 | 
50 |         data = json.loads(response.data)
51 | 
52 |         self.assertEqual(data["data"], "Test Block")
53 | 
54 |     def test_update_block(self):
55 |         block = models.Block.create(data="Test Block")
56 |         db.session.add(block)
57 |         db.session.commit()
58 | 
59 |         data = {"data": "New Test Block"}
60 | 
61 |         response = self.client.put(f"/blocks/{block.id}", json=data)
62 | 
63 |         self.assertEqual(response.status_code, 200)
64 | 
65 |         data = json.loads(response.data)
66 | 
67 |         self.assertEqual(data["data"], "New Test Block")
68 | 
69 |     def test_delete_block(self):
70 |         block = models.Block.create(data="Test Block")
71 |         db.session.add(block)
72 |         db.session.commit()
73 | 
74 |         response = self.client.delete(f"/blocks/{block.id}")
75 | 
76 |         self.assertEqual(response.status_code, 200)
77 | 
78 |         response = self.client.get(f"/blocks/{block.id}")
79 | 
80 |         self.assertEqual(response.status_code, 404)
81 | 
82 | 
83 | if __name__ == "__main__":
84 |     unittest.main()
--------------------------------------------------------------------------------
/web/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9
2 | 
3 | WORKDIR /app
4 | 
5 | COPY requirements.txt .
6 | 
7 | RUN pip install --no-cache-dir -r requirements.txt
8 | 
9 | COPY . .
10 | 
11 | CMD ["python", "app.py"]
--------------------------------------------------------------------------------
/web/app.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | # Base directory for the pi-velocity-core git repo
4 | base_dir = "./pi-velocity-core"
5 | 
6 | # Main entry point for the app.py script
7 | def main():
8 |     # Print a welcome message and the current working directory
9 |     print("Welcome to the pi-velocity-core app.py script!")
10 |     print(f"Current working directory: {os.getcwd()}")
11 | 
12 |     # Change the current working directory to the base directory
13 |     os.chdir(base_dir)
14 | 
15 |     # TODO: Add functionality to the script as needed
16 | 
17 |     # Print a farewell message and the current working directory
18 |     print("Thanks for using the pi-velocity-core app.py script!")
19 |     print(f"Current working directory: {os.getcwd()}")
20 | 
21 | # Run main() only when the script is executed directly
22 | if __name__ == "__main__":
23 |     main()
--------------------------------------------------------------------------------
/web/requirements.txt:
--------------------------------------------------------------------------------
1 | # Core dependencies
2 | Flask==2.2.5
3 | 
4 | # Third-party NLP dependencies
5 | nltk==3.7
6 | spacy==3.3.0
7 | 
8 | # Optional development dependencies
9 | ipykernel==6.15.0
10 | jupyter==1.0.0
--------------------------------------------------------------------------------
/web/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | # Navigate to the web directory in the pi-velocity-core git repository
4 | cd /path/to/pi-velocity-core/web
5 | 
6 | # Start the Flask web application
7 | python app.py
--------------------------------------------------------------------------------
/web/static/main.css:
--------------------------------------------------------------------------------
1 | body {
2 |     font-family: Arial, sans-serif;
3 |     margin: 0;
4 |     padding: 0;
5 |     background-color: #f0f0f0;
6 | }
7 | 
8 | header {
9 |     background-color: #333;
10 |     color: #fff;
11 |     padding: 10px;
12 |     text-align: center;
13 | }
14 | 
15 | main {
16 |     padding: 20px;
17 | }
18 | 
19 | footer {
20 |     background-color: #333;
21 |     color: #fff;
22 |     padding: 10px;
23 |     text-align: center;
24 | }
--------------------------------------------------------------------------------
/web/templates/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html>
3 | <head>
4 |     <title>Welcome to Processing for Pi</title>
5 |     <link rel="stylesheet" href="/static/main.css">
6 | </head>
7 | <body>
8 | 
9 |     <header>Welcome to Processing for Pi</header>
10 | 
11 |     <main>
12 |         <p>Hello and welcome to our website! We're excited to share our passion for Processing on the Raspberry Pi with you.</p>
13 |         <p>Here's a quick overview of what you can find on our site:</p>
14 |         <ul>
19 |         </ul>
20 |     </main>
23 | </body>
24 | </html>
25 | 
--------------------------------------------------------------------------------