├── .dockerignore ├── .github ├── renovate.json └── workflows │ ├── backend_build_prod_ecr.yml │ ├── backend_docker_release.yml │ ├── backend_release.yml │ ├── backend_test.yml │ ├── cli_release.yml │ ├── cli_release_multiarch.yml │ ├── cli_test.yml │ ├── cli_test_e2e.yml │ ├── dgctl_release.yml │ ├── drift_deploy.yml │ ├── ee_backend_docker_release.yml │ ├── ee_backend_docker_release_fips.yml │ ├── ee_backend_test.yml │ ├── ee_cli_release.yml │ ├── ee_cli_release_fips.yml │ ├── ee_cli_release_multiarch.yml │ ├── ee_cli_test.yml │ ├── ee_tasks_release.yml │ ├── latest_tag.yml │ ├── libs_test.yml │ ├── misc_top_issues.yml │ ├── next_deploy.yml │ ├── next_test.yml │ ├── pro-deploy.yml │ ├── tasks_release.yml │ └── tasks_run_test.yml ├── .gitignore ├── CONTRIBUTING.md ├── Dockerfile_backend ├── Dockerfile_backend_ee ├── Dockerfile_drift ├── Dockerfile_next ├── Dockerfile_tasks ├── LICENSE ├── README.md ├── action.yml ├── backend ├── .dockerignore ├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── atlas.hcl ├── bootstrap │ └── main.go ├── ci_backends │ ├── ci_backends.go │ ├── github_actions.go │ ├── jenkins.go │ └── provider.go ├── config │ ├── config.go │ └── envgetters.go ├── controllers │ ├── activity.go │ ├── cache.go │ ├── connections.go │ ├── dashboard.go │ ├── github.go │ ├── github_api.go │ ├── github_test.go │ ├── helpers.go │ ├── internal_users.go │ ├── jobs.go │ ├── locking.go │ ├── orgs.go │ ├── policies.go │ ├── policies_api.go │ ├── projects.go │ ├── projects_test.go │ ├── repos.go │ └── runs.go ├── docker-compose.yml ├── go.mod ├── go.sum ├── hooks │ └── hooks.go ├── locking │ └── backend_locking.go ├── main.go ├── middleware │ ├── basic.go │ ├── headers.go │ ├── jwt.go │ ├── middleware.go │ ├── noop.go │ └── webhook.go ├── migrations │ ├── 20231227132525.sql │ ├── 20240115170600.sql │ ├── 20240116123649.sql │ ├── 20240125121106.sql │ ├── 20240125181812.sql │ ├── 20240301211741.sql │ ├── 20240328182453.sql │ ├── 20240329100957.sql │ ├── 20240329114422.sql │ ├── 20240402110915.sql │ ├── 20240403155357_drop_dup_idx.sql │ ├── 20240403155456.sql │ ├── 20240404160724.sql │ ├── 20240404161121.sql │ ├── 20240404161723.sql │ ├── 20240404165910.sql │ ├── 20240405150942.sql │ ├── 20240405160110.sql │ ├── 20240409161739.sql │ ├── 20240510162721.sql │ ├── 20240518182629.sql │ ├── 20240524110010.sql │ ├── 20240527112209.sql │ ├── 20240530074832.sql │ ├── 20240703121051.sql │ ├── 20240704192835.sql │ ├── 20240705144450.sql │ ├── 20240709165155.sql │ ├── 20240729155442.sql │ ├── 20240729155926.sql │ ├── 20240729160028.sql │ ├── 20241107162605.sql │ ├── 20241107163722.sql │ ├── 20241107172343.sql │ ├── 20241114202249.sql │ ├── 20241229112312.sql │ ├── 20250220084846.sql │ ├── 20250220172054.sql │ ├── 20250220172321.sql │ ├── 20250220173053.sql │ ├── 20250220173439.sql │ ├── 20250221044813.sql │ ├── 20250224152926.sql │ ├── 20250226185150.sql │ ├── 20250302190926.sql │ ├── 20250325115901.sql │ ├── 20250325134924.sql │ ├── 20250416152705.sql │ ├── 20250512172515.sql │ ├── 20250512213729.sql │ └── atlas.sum ├── models │ ├── artefact.go │ ├── cache.go │ ├── github.go │ ├── locking.go │ ├── orgs.go │ ├── policies.go │ ├── runs.go │ ├── scheduler.go │ ├── scheduler_test.go │ ├── setup.go │ ├── storage.go │ ├── storage_dashboard.go │ ├── storage_test.go │ └── user.go ├── queries │ └── queries.go ├── scripts │ ├── curl_bootstrap.sh │ └── entrypoint.sh ├── segment │ └── segment.go ├── services │ ├── auth.go │ ├── messages.go │ ├── scheduler.go │ └── spec.go ├── sql │ └── migration_25_08_2023.sql 
├── tasks │ ├── .gitignore │ ├── runs.go │ ├── runs_test.go │ └── tasks.go ├── templates │ ├── github_repos.tmpl │ ├── github_setup.tmpl │ ├── github_success.tmpl │ ├── home.tmpl │ ├── index.tmpl │ ├── static │ │ ├── css │ │ │ ├── bootstrap.min.css │ │ │ ├── main.css │ │ │ ├── prism.dev.css │ │ │ ├── prism.min.css │ │ │ └── tf.css │ │ ├── js │ │ │ ├── bootstrap.bundle.min.js │ │ │ ├── prism-live-javascript.js │ │ │ ├── prism-live.js │ │ │ ├── prism.dev.js │ │ │ └── prism.min.js │ │ └── prism-live.css │ └── top.tmpl ├── tools.go ├── utils │ ├── ai.go │ ├── allowlist.go │ ├── allowlist_test.go │ ├── batch_utils.go │ ├── bitbucket.go │ ├── crypt.go │ ├── github.go │ ├── github_test.go │ ├── gitlab.go │ ├── gitshell.go │ ├── graphs.go │ ├── graphs_test.go │ ├── log.go │ └── pr_comment.go └── version.txt ├── cli ├── .gitignore ├── cmd │ └── digger │ │ ├── default.go │ │ ├── main.go │ │ ├── main_test.go │ │ ├── root.go │ │ └── run_spec.go ├── dockerfiles │ ├── branch │ │ └── Dockerfile │ └── release │ │ └── Dockerfile ├── go.mod ├── go.sum └── pkg │ ├── core │ └── drift │ │ └── drift.go │ ├── digger │ ├── digger.go │ ├── digger_test.go │ ├── io.go │ └── isNonEmptyPlan.txt │ ├── drift │ ├── Provider.go │ ├── slack.go │ └── slack_test.go │ ├── github │ ├── github.go │ └── models │ │ └── models.go │ ├── integration │ └── integration_test.go │ ├── spec │ ├── manual.go │ └── spec.go │ ├── usage │ ├── usage.go │ └── usage_test.go │ └── utils │ ├── commands.go │ ├── io.go │ ├── strings.go │ └── version.go ├── cli_e2e ├── aws_dynamodb_test.go ├── go.mod ├── go.sum └── plan_storage_test.go ├── dgctl ├── .gitignore ├── cmd │ ├── exec.go │ ├── root.go │ └── validate.go ├── dgctl.json ├── go.mod ├── go.sum ├── main.go └── utils │ └── gitio.go ├── docs ├── ce │ ├── azure-specific │ │ ├── azure-devops-locking-connection-methods.mdx │ │ └── azure.mdx │ ├── cloud-providers │ │ ├── authenticating-with-oidc-on-aws.mdx │ │ ├── aws.mdx │ │ └── setting-up-separate-mgmt-account.mdx │ ├── features │ │ ├── commentops.mdx │ │ ├── concurrency.mdx │ │ ├── opa-policies.mdx │ │ ├── plan-persistence.mdx │ │ ├── plan-preview.mdx │ │ ├── pr-level-locks.mdx │ │ └── private-runners.mdx │ ├── gcp │ │ ├── federated-oidc-access.mdx │ │ ├── setting-up-gcp-+-gh-actions.mdx │ │ └── using-gcp-bucket-for-locks.mdx │ ├── getting-started │ │ ├── azure-devops.mdx │ │ ├── github-actions-+-aws.mdx │ │ ├── github-actions-and-gcp.mdx │ │ └── gitlab-pipelines-+-aws.mdx │ ├── howto │ │ ├── apply-on-merge.mdx │ │ ├── apply-requirements.mdx │ │ ├── auto-merge.mdx │ │ ├── backendless-mode.mdx │ │ ├── commenting-strategies.mdx │ │ ├── custom-commands.mdx │ │ ├── destroy-manual.mdx │ │ ├── disable-auto-checkout.mdx │ │ ├── disable-locking.mdx │ │ ├── disable-telemetry.mdx │ │ ├── draft-prs.mdx │ │ ├── generate-projects.mdx │ │ ├── group-plans-by-source.mdx │ │ ├── include-exclude-patterns.mdx │ │ ├── multiacc-aws.mdx │ │ ├── policy-overrides.mdx │ │ ├── project-level-roles.mdx │ │ ├── segregate-cloud-accounts.mdx │ │ ├── specify-terraform-version.mdx │ │ ├── store-plans-in-a-bucket.mdx │ │ ├── trigger-directly.mdx │ │ ├── using-checkov.mdx │ │ ├── using-infracost.mdx │ │ ├── using-opa-conftest.mdx │ │ ├── using-terragrunt.mdx │ │ ├── versioning.mdx │ │ └── workspaces.mdx │ ├── reference │ │ ├── action-inputs.mdx │ │ ├── api.mdx │ │ ├── digger.yml.mdx │ │ └── terraform.lock.mdx │ ├── securing-digger │ │ ├── external-provider.mdx │ │ └── spec-signing.mdx │ ├── self-host │ │ ├── auth-methods.mdx │ │ ├── deploy-binary.mdx │ │ ├── 
deploy-docker-compose.mdx │ │ ├── deploy-docker.mdx │ │ ├── deploy-helm.mdx │ │ └── self-host-on-azure.mdx │ └── troubleshooting │ │ ├── action-errors.mdx │ │ ├── comments.mdx │ │ └── importing-existing-resources.mdx ├── ee │ ├── ai-summaries.mdx │ ├── buildkite.mdx │ ├── dashboard.mdx │ ├── drift-detection.mdx │ ├── ee-setup.mdx │ ├── fips-140.mdx │ ├── gitlab-support.mdx │ ├── gitlab.mdx │ ├── multi-github.mdx │ ├── opa.mdx │ └── rbac.mdx ├── favicon.png ├── images │ ├── Screenshot2025-05-26at19.00.33.png │ ├── Screenshot2025-05-26at19.00.38.png │ ├── Screenshot2025-05-26at19.00.55.png │ ├── Screenshot2025-05-26at19.14.34.png │ ├── Screenshot2025-05-26at19.58.03.png │ ├── Screenshot2025-05-26at21.31.18.png │ ├── buildkite │ │ ├── buildkite.png │ │ └── github_comment.png │ ├── configuration │ │ ├── 1.png │ │ ├── 1.webp │ │ ├── 2.png │ │ ├── 2.webp │ │ ├── 3.webp │ │ ├── 4.png │ │ ├── 5.png │ │ ├── image.png │ │ └── infracost-example.png │ ├── custom-command-output-infracost.png │ ├── digger-dashboard-screenshot.png │ ├── digger-dashboard.png │ ├── digger-plan-preview.png │ ├── drift-issues.png │ ├── ee │ │ ├── ai-summaries.png │ │ ├── example-plan.png │ │ ├── gitlab-1.png │ │ ├── gitlab-2.png │ │ └── multi-tenant-github.png │ ├── gcp │ │ ├── 1.png │ │ ├── 2.png │ │ ├── 3.png │ │ ├── 4.png │ │ └── image.png │ ├── getting-started │ │ ├── 1.webp │ │ ├── 2.webp │ │ ├── 3.png │ │ ├── 4.png │ │ ├── 5.png │ │ ├── 6.png │ │ ├── 7.png │ │ ├── azure-devops-1.png │ │ ├── azure-devops-2.png │ │ ├── azure-devops-3.png │ │ ├── azure-devops-4.png │ │ ├── azure-devops-5.png │ │ ├── azure-devops-6.png │ │ ├── azure-devops-7.png │ │ ├── azure-devops-8.5.png │ │ ├── azure-devops-8.png │ │ └── image.png │ ├── gitlab │ │ ├── gitlab-apply.png │ │ └── gitlab-plan.png │ ├── infracost-diff-comment-digger.png │ └── readme │ │ └── 1.png ├── logo │ ├── dark.png │ └── light.png ├── mint.json ├── readme │ ├── faq.mdx │ ├── feedback.mdx │ ├── howitworks.mdx │ ├── introduction.mdx │ └── pricing.mdx ├── team │ ├── features │ │ └── variables.mdx │ └── getting-started │ │ └── gha-aws.mdx └── troubleshooting-errors.mdx ├── ee ├── LICENSE ├── backend │ ├── .dockerignore │ ├── .gitignore │ ├── atlas.hcl │ ├── ci_backends │ │ ├── bitbucket_pipeline.go │ │ ├── buildkite.go │ │ ├── gitlab_pipeline.go │ │ └── provider.go │ ├── controllers │ │ ├── artefacts.go │ │ ├── bitbucket.go │ │ ├── bitbucket_utils.go │ │ ├── github.go │ │ ├── gitlab.go │ │ ├── spec.go │ │ └── web.go │ ├── docker-compose.yml │ ├── go.mod │ ├── go.sum │ ├── hooks │ │ └── github.go │ ├── main.go │ ├── providers │ │ └── github │ │ │ └── providers.go │ └── templates │ │ ├── bottom.tmpl │ │ ├── github_connections.tmpl │ │ ├── github_repos.tmpl │ │ ├── github_setup.tmpl │ │ ├── github_success.tmpl │ │ ├── healthy.tmpl │ │ ├── index.tmpl │ │ ├── notifications.tmpl │ │ ├── policies.tmpl │ │ ├── policy_add.tmpl │ │ ├── policy_details.tmpl │ │ ├── project_add.tmpl │ │ ├── project_details.tmpl │ │ ├── projects.tmpl │ │ ├── repo_add.tmpl │ │ ├── repos.tmpl │ │ ├── run_details.tmpl │ │ ├── runs.tmpl │ │ ├── static │ │ ├── css │ │ │ ├── bootstrap.min.css │ │ │ ├── main.css │ │ │ ├── prism.dev.css │ │ │ ├── prism.min.css │ │ │ └── tf.css │ │ ├── js │ │ │ ├── bootstrap.bundle.min.js │ │ │ ├── prism-live-javascript.js │ │ │ ├── prism-live.js │ │ │ ├── prism.dev.js │ │ │ └── prism.min.js │ │ └── prism-live.css │ │ └── top.tmpl ├── cli │ ├── .gitignore │ ├── cmd │ │ └── digger │ │ │ ├── default.go │ │ │ ├── main.go │ │ │ ├── main_test.go │ │ │ ├── root.go │ │ │ └── run_spec.go 
│ ├── go.mod │ ├── go.sum │ └── pkg │ │ ├── comment_updater │ │ ├── provider.go │ │ └── updater.go │ │ ├── drift │ │ ├── github_issue.go │ │ └── provider.go │ │ ├── github │ │ └── providers.go │ │ ├── gitlab │ │ └── gitlab.go │ │ ├── policy │ │ ├── policy.go │ │ ├── policy_test.go │ │ └── providers.go │ │ ├── utils │ │ └── github.go │ │ └── vcs │ │ └── providers.go └── drift │ ├── .dockerignore │ ├── .gitignore │ ├── README.md │ ├── controllers │ ├── ci_jobs.go │ ├── controllers.go │ ├── drift.go │ ├── github.go │ ├── health.go │ └── notifications.go │ ├── dbgen │ ├── dbgen.go │ ├── go.mod │ └── go.sum │ ├── dbmodels │ ├── ci_jobs.go │ ├── github.go │ ├── projects.go │ ├── repos.go │ ├── setup.go │ ├── storage.go │ └── tokens.go │ ├── go.mod │ ├── go.sum │ ├── main.go │ ├── middleware │ ├── job_token.go │ ├── middleware.go │ └── webhooks.go │ ├── model │ ├── digger_ci_job_tokens.gen.go │ ├── digger_ci_jobs.gen.go │ ├── github_app_installation_links.gen.go │ ├── org_settings.gen.go │ ├── organisations.gen.go │ ├── projects.gen.go │ ├── repos.gen.go │ ├── user_settings.gen.go │ └── users.gen.go │ ├── models_generated │ ├── digger_ci_job_tokens.gen.go │ ├── digger_ci_jobs.gen.go │ ├── gen.go │ ├── github_app_installation_links.gen.go │ ├── org_settings.gen.go │ ├── organisations.gen.go │ ├── projects.gen.go │ ├── repos.gen.go │ ├── user_settings.gen.go │ └── users.gen.go │ ├── scripts │ ├── cron │ │ ├── notifications.sql │ │ └── scheduler.sql │ └── entrypoint.sh │ ├── services │ └── spec.go │ ├── tasks │ └── github.go │ └── utils │ └── github.go ├── fly-drift-igor-dev.toml ├── fly-drift.toml ├── fly-pro.toml ├── fly-staging.toml ├── fly.toml ├── go.mod ├── go.work ├── go.work.sum ├── libs ├── backendapi │ ├── backend.go │ ├── diggerapi.go │ └── mocks.go ├── ci │ ├── azure │ │ ├── azure.go │ │ └── azure_test.go │ ├── bitbucket │ │ ├── bitbucket.go │ │ └── bitbucket_service.go │ ├── ci.go │ ├── generic │ │ └── events.go │ ├── github │ │ ├── comment.go │ │ ├── errors.go │ │ ├── github.go │ │ ├── github_test.go │ │ ├── mocks.go │ │ └── models │ │ │ └── models.go │ ├── gitlab │ │ ├── gitlab.go │ │ ├── gitlab_test.go │ │ └── webhooks.go │ ├── mocks.go │ └── utils.go ├── comment_utils │ ├── reporting │ │ ├── core.go │ │ ├── mock.go │ │ ├── noop.go │ │ ├── reporting.go │ │ ├── reporting_test.go │ │ ├── source_grouping.go │ │ └── utils.go │ ├── summary │ │ ├── provider.go │ │ └── updater.go │ └── utils │ │ └── comments.go ├── crypto │ ├── decrypt.go │ └── decrypt_test.go ├── digger_config │ ├── .gitignore │ ├── config.go │ ├── converters.go │ ├── digger_config.go │ ├── digger_config_test.go │ ├── terragrunt │ │ └── atlantis │ │ │ ├── LICENSE │ │ │ ├── config.go │ │ │ ├── generate.go │ │ │ ├── parse_hcl.go │ │ │ ├── parse_locals.go │ │ │ ├── parse_tf.go │ │ │ └── readme.md │ ├── utils.go │ ├── utils_test.go │ ├── validators.go │ └── yaml.go ├── execution │ ├── execution.go │ ├── execution_test.go │ ├── opentofu.go │ ├── opentofu_test.go │ ├── pulumi.go │ ├── runners.go │ ├── terragrunt.go │ ├── test_utils.go │ ├── tf.go │ └── tf_test.go ├── go.mod ├── go.sum ├── iac_utils │ ├── iac_utils.go │ ├── pulumi.go │ ├── terraform.go │ └── terraform_test.go ├── license │ ├── license.go │ └── license_test.go ├── locking │ ├── aws │ │ ├── dynamo_locking.go │ │ ├── dynamo_locking_test.go │ │ └── envprovider │ │ │ ├── envprovider.go │ │ │ └── envprovider_test.go │ ├── azure │ │ ├── storage_account.go │ │ └── storage_account_test.go │ ├── core.go │ ├── gcp │ │ ├── gcp_lock.go │ │ └── gcp_lock_test.go │ ├── 
locking.go │ ├── locking_test.go │ ├── mock.go │ └── utils.go ├── orchestrator │ ├── .gitignore │ ├── locking.go │ └── mock.go ├── policy │ ├── core.go │ ├── mocks.go │ ├── policy.go │ ├── policy_test.go │ └── providers.go ├── scheduler │ ├── aws.go │ ├── aws_test.go │ ├── convert.go │ ├── jobs.go │ ├── json_models.go │ ├── json_models_test.go │ ├── models.go │ ├── serializers.go │ └── utils.go ├── spec │ ├── models.go │ ├── models_test.go │ ├── payloads.go │ ├── providers.go │ ├── variables_provider.go │ └── variables_provider_test.go └── storage │ ├── aws_plan_storage.go │ ├── aws_plan_storage_test.go │ ├── azure_plan_storage.go │ ├── gcp_plan_storage.go │ ├── io.go │ ├── io_test.go │ ├── mocks.go │ ├── plan_storage.go │ └── storage.go └── next ├── .gitignore ├── ci_backends ├── ci_backends.go ├── github_actions.go ├── jenkins.go └── provider.go ├── controllers ├── drift.go ├── github.go ├── github_after_merge.go ├── projects.go ├── runs.go └── static.go ├── dbgen ├── dbgen.go ├── go.mod └── go.sum ├── dbmodels ├── github.go ├── orgs.go ├── projects.go ├── runs.go ├── scheduler.go ├── setup.go ├── storage.go └── variables.go ├── go.mod ├── go.sum ├── main.go ├── middleware ├── job_token_auth.go ├── middleware.go ├── supabase_cookie_auth.go └── webhooks.go ├── model ├── account_delete_tokens.gen.go ├── billing_bypass_organizations.gen.go ├── chats.gen.go ├── customers.gen.go ├── digger_batches.gen.go ├── digger_job_parent_links.gen.go ├── digger_job_summaries.gen.go ├── digger_job_tokens.gen.go ├── digger_jobs.gen.go ├── digger_locks.gen.go ├── digger_run_queue_items.gen.go ├── digger_run_stages.gen.go ├── digger_runs.gen.go ├── env_vars.gen.go ├── github_app_installation_links.gen.go ├── github_app_installations.gen.go ├── github_apps.gen.go ├── internal_blog_author_posts.gen.go ├── internal_blog_author_profiles.gen.go ├── internal_blog_post_tags.gen.go ├── internal_blog_post_tags_relationship.gen.go ├── internal_blog_posts.gen.go ├── internal_changelog.gen.go ├── internal_feedback_comments.gen.go ├── internal_feedback_threads.gen.go ├── organization_credits.gen.go ├── organization_join_invitations.gen.go ├── organization_members.gen.go ├── organizations.gen.go ├── organizations_private_info.gen.go ├── prices.gen.go ├── products.gen.go ├── project_comments.gen.go ├── project_tfvars.gen.go ├── projects.gen.go ├── repos.gen.go ├── subscriptions.gen.go ├── team_members.gen.go ├── teams.gen.go ├── user_api_keys.gen.go ├── user_m2m_applications.gen.go ├── user_notifications.gen.go ├── user_onboarding.gen.go ├── user_private_info.gen.go ├── user_profiles.gen.go └── user_roles.gen.go ├── models_generated ├── account_delete_tokens.gen.go ├── billing_bypass_organizations.gen.go ├── chats.gen.go ├── customers.gen.go ├── digger_batches.gen.go ├── digger_job_parent_links.gen.go ├── digger_job_summaries.gen.go ├── digger_job_tokens.gen.go ├── digger_jobs.gen.go ├── digger_locks.gen.go ├── digger_run_queue_items.gen.go ├── digger_run_stages.gen.go ├── digger_runs.gen.go ├── env_vars.gen.go ├── gen.go ├── github_app_installation_links.gen.go ├── github_app_installations.gen.go ├── github_apps.gen.go ├── internal_blog_author_posts.gen.go ├── internal_blog_author_profiles.gen.go ├── internal_blog_post_tags.gen.go ├── internal_blog_post_tags_relationship.gen.go ├── internal_blog_posts.gen.go ├── internal_changelog.gen.go ├── internal_feedback_comments.gen.go ├── internal_feedback_threads.gen.go ├── organization_credits.gen.go ├── organization_join_invitations.gen.go ├── organization_members.gen.go ├── 
organizations.gen.go ├── organizations_private_info.gen.go ├── prices.gen.go ├── products.gen.go ├── project_comments.gen.go ├── project_tfvars.gen.go ├── projects.gen.go ├── repos.gen.go ├── subscriptions.gen.go ├── team_members.gen.go ├── teams.gen.go ├── user_api_keys.gen.go ├── user_m2m_applications.gen.go ├── user_notifications.gen.go ├── user_onboarding.gen.go ├── user_private_info.gen.go ├── user_profiles.gen.go └── user_roles.gen.go ├── scripts ├── cron │ ├── process_drift.query │ └── process_runs_queue.query └── entrypoint.sh ├── services ├── config.go ├── drift.go ├── runs.go ├── scheduler.go └── spec.go ├── supa └── supa.go ├── templates ├── github_repos.tmpl ├── github_setup.tmpl ├── github_success.tmpl ├── home.tmpl ├── index.tmpl ├── static │ ├── css │ │ ├── bootstrap.min.css │ │ ├── main.css │ │ ├── prism.dev.css │ │ ├── prism.min.css │ │ └── tf.css │ ├── js │ │ ├── bootstrap.bundle.min.js │ │ ├── prism-live-javascript.js │ │ ├── prism-live.js │ │ ├── prism.dev.js │ │ └── prism.min.js │ └── prism-live.css └── top.tmpl └── utils ├── crontab.go ├── crontab_test.go └── github.go /.github/renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "config:base" 5 | ], 6 | "labels": ["dependencies"], 7 | "vulnerabilityAlerts": { 8 | "labels": ["security"] 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /.github/workflows/backend_release.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Backend release 3 | 4 | "on": 5 | release: 6 | types: 7 | - 'released' 8 | 9 | jobs: 10 | binary: 11 | strategy: 12 | matrix: 13 | arch: [arm, arm64, amd64, "386"] 14 | os: [linux, darwin, freebsd, windows] 15 | exclude: 16 | - os: darwin 17 | arch: arm 18 | - os: darwin 19 | arch: "386" 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - name: Check out repository 25 | uses: actions/checkout@v4 26 | 27 | - name: Build and publish binary artifact to GitHub 28 | id: build-and-release-binary 29 | uses: wangyoucao577/go-release-action@8fa1e8368c8465264d64e0198208e10f71474c87 # v1.50 30 | with: 31 | github_token: ${{ secrets.GITHUB_TOKEN }} 32 | goos: ${{ matrix.os }} 33 | goarch: ${{ matrix.arch }} 34 | goversion: 1.24.0 35 | project_path: ./backend 36 | binary_name: digger-api 37 | pre_command: export CGO_ENABLED=0 38 | ldflags: ${{ matrix.ldflags }} 39 | sha256sum: true 40 | md5sum: false 41 | asset_name: "digger-api-${{matrix.os}}-${{matrix.arch}}" 42 | compress_assets: "OFF" 43 | -------------------------------------------------------------------------------- /.github/workflows/backend_test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Backend Go Tests 3 | 4 | "on": 5 | push: 6 | branches: ['develop'] 7 | pull_request: 8 | 9 | jobs: 10 | build: 11 | name: Build 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: Download Go 16 | uses: actions/setup-go@v5 17 | with: 18 | go-version: 1.24.0 19 | id: go 20 | 21 | - name: Check out code into the Go module directory 22 | uses: actions/checkout@v4 23 | 24 | - name: Test that the docker image still builds successfully 25 | run: | 26 | export COMMIT_SHA=$(git rev-parse --short HEAD) 27 | docker build -t testingbuild:latest --build-arg COMMIT_SHA=${COMMIT_SHA} . -f Dockerfile_backend 28 | 29 | - name: Deps 30 | run: go get -v ./... 
31 | working-directory: backend 32 | 33 | - name: Build 34 | run: go build 35 | working-directory: backend 36 | 37 | - name: Test 38 | run: | 39 | go test ./... 40 | env: 41 | GITHUB_PAT_TOKEN: ${{ secrets.TOKEN_GITHUB }} 42 | working-directory: backend 43 | -------------------------------------------------------------------------------- /.github/workflows/cli_release.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: release cli 3 | 4 | "on": 5 | release: 6 | branches: 7 | - 'go' 8 | types: 9 | - 'released' 10 | 11 | jobs: 12 | binary: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Check out repository 17 | uses: actions/checkout@v4 18 | 19 | - name: Publish linux-x64 exec to github 20 | id: build-and-release-binary 21 | uses: wangyoucao577/go-release-action@8fa1e8368c8465264d64e0198208e10f71474c87 # v1.50 22 | with: 23 | github_token: ${{ secrets.GITHUB_TOKEN }} 24 | goos: linux 25 | goarch: amd64 26 | goversion: 1.24.0 27 | project_path: ./cli/cmd/digger 28 | binary_name: digger 29 | pre_command: export CGO_ENABLED=0 30 | ldflags: ${{ matrix.ldflags }} 31 | sha256sum: true 32 | md5sum: false 33 | asset_name: "digger-cli-Linux-X64" 34 | compress_assets: "OFF" 35 | -------------------------------------------------------------------------------- /.github/workflows/cli_test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Cli tests 3 | 4 | "on": 5 | push: 6 | pull_request: 7 | 8 | jobs: 9 | build: 10 | name: Build 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Download Go 15 | uses: actions/setup-go@v5 16 | with: 17 | go-version: 1.24.0 18 | id: go 19 | 20 | - name: Setup Opentofu 21 | uses: opentofu/setup-opentofu@v1 22 | with: 23 | tofu_version: 1.8.5 24 | 25 | - name: Check out code into the Go module directory 26 | uses: actions/checkout@v4 27 | 28 | - name: Deps 29 | run: | 30 | go get -v ./... 31 | working-directory: cli 32 | 33 | - name: Build 34 | run: | 35 | go build -v ./cmd/digger 36 | working-directory: cli 37 | 38 | - name: Test 39 | shell: bash 40 | run: | 41 | go test ./... 
42 | working-directory: cli 43 | -------------------------------------------------------------------------------- /.github/workflows/drift_deploy.yml: -------------------------------------------------------------------------------- 1 | name: driftapp Deploy 2 | on: 3 | push: 4 | branches: 5 | - develop # change to main if needed 6 | - feat-drift-app 7 | jobs: 8 | deploy: 9 | name: Deploy app 10 | runs-on: ubuntu-latest 11 | concurrency: deploy-group # optional: ensure only one action runs at a time 12 | steps: 13 | - uses: actions/checkout@v4 14 | - uses: superfly/flyctl-actions/setup-flyctl@master 15 | - run: flyctl deploy --remote-only --config fly-drift.toml 16 | env: 17 | FLY_API_TOKEN: ${{ secrets.FLYIO_DRIFT_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/ee_backend_test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: EE Backend Go Tests 3 | 4 | "on": 5 | push: 6 | branches: ['develop'] 7 | pull_request: 8 | 9 | jobs: 10 | build: 11 | name: Build 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: Download Go 16 | uses: actions/setup-go@v5 17 | with: 18 | go-version: 1.24.0 19 | id: go 20 | 21 | - name: Check out code into the Go module directory 22 | uses: actions/checkout@v4 23 | 24 | - name: Test that the docker image still builds successfully 25 | run: | 26 | export COMMIT_SHA=$(git rev-parse --short HEAD) 27 | docker build -t testingbuild:latest --build-arg COMMIT_SHA=${COMMIT_SHA} . -f Dockerfile_backend 28 | 29 | - name: Deps 30 | run: go get -v ./... 31 | working-directory: ee/backend 32 | 33 | - name: Build 34 | run: go build 35 | working-directory: ee/backend 36 | 37 | - name: Test 38 | run: go test -v ./... 39 | env: 40 | GITHUB_PAT_TOKEN: ${{ secrets.TOKEN_GITHUB }} 41 | working-directory: ee/backend 42 | -------------------------------------------------------------------------------- /.github/workflows/ee_cli_release.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: release ee cli 3 | 4 | "on": 5 | release: 6 | branches: 7 | - 'go' 8 | types: 9 | - 'released' 10 | 11 | jobs: 12 | binary: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Download Go 17 | uses: actions/setup-go@v5 18 | with: 19 | go-version: 1.24.0 20 | id: go 21 | 22 | - name: Check out repository 23 | uses: actions/checkout@v4 24 | 25 | - name: Publish linux-x64 exec to github 26 | id: build-and-release-binary 27 | uses: wangyoucao577/go-release-action@8fa1e8368c8465264d64e0198208e10f71474c87 # v1.50 28 | with: 29 | github_token: ${{ secrets.GITHUB_TOKEN }} 30 | goos: linux 31 | goarch: amd64 32 | goversion: 1.24.0 33 | project_path: ./ee/cli/cmd/digger 34 | binary_name: digger 35 | pre_command: export CGO_ENABLED=0 36 | ldflags: ${{ matrix.ldflags }} 37 | sha256sum: true 38 | md5sum: false 39 | asset_name: "digger-ee-cli-Linux-X64" 40 | compress_assets: "OFF" 41 | 42 | -------------------------------------------------------------------------------- /.github/workflows/ee_cli_release_fips.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: release ee cli 3 | 4 | "on": 5 | release: 6 | branches: 7 | - 'go' 8 | types: 9 | - 'released' 10 | 11 | jobs: 12 | binary: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Download Go 17 | uses: actions/setup-go@v5 18 | with: 19 | go-version: 1.24.0 20 | id: go 21 | 22 | - name: Check out repository 23 | uses: 
actions/checkout@v4 24 | 25 | - name: Publish linux-x64 exec to github 26 | id: build-and-release-binary 27 | uses: wangyoucao577/go-release-action@8fa1e8368c8465264d64e0198208e10f71474c87 # v1.50 28 | with: 29 | github_token: ${{ secrets.GITHUB_TOKEN }} 30 | goos: linux 31 | goarch: amd64 32 | goversion: 1.24.0 33 | project_path: ./ee/cli/cmd/digger 34 | binary_name: digger 35 | pre_command: export CGO_ENABLED=0 36 | sha256sum: true 37 | md5sum: false 38 | asset_name: "digger-ee-cli-Linux-X64-fips" 39 | compress_assets: "OFF" 40 | env: 41 | GODEBUG: fips140=only 42 | GOFIPS140: v1.0.0 43 | 44 | -------------------------------------------------------------------------------- /.github/workflows/ee_cli_test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: EE Cli tests 3 | 4 | "on": 5 | push: 6 | pull_request: 7 | 8 | jobs: 9 | build: 10 | name: Build 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Download Go 15 | uses: actions/setup-go@v5 16 | with: 17 | go-version: 1.24.0 18 | id: go 19 | 20 | - name: Setup Opentofu 21 | uses: opentofu/setup-opentofu@v1 22 | with: 23 | tofu_version: 1.8.5 24 | 25 | - name: Check out code into the Go module directory 26 | uses: actions/checkout@v4 27 | 28 | - name: Deps 29 | run: | 30 | go get -v ./... 31 | working-directory: ee/cli 32 | 33 | - name: Build 34 | run: | 35 | go build -v ./cmd/digger 36 | working-directory: ee/cli 37 | 38 | - name: Test 39 | run: go test -v ./... 40 | working-directory: ee/cli 41 | -------------------------------------------------------------------------------- /.github/workflows/misc_top_issues.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Top issues updater 3 | 4 | "on": 5 | schedule: 6 | - cron: "0 * * * *" # every hour 7 | workflow_dispatch: 8 | 9 | jobs: 10 | get-top-issues: 11 | if: github.repository_owner == 'diggerhq' 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: update-top-issues 16 | uses: diggerhq/top-issues@main 17 | with: 18 | org_name: diggerhq 19 | repo_name: digger 20 | issue_number: 1352 21 | env: 22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 23 | -------------------------------------------------------------------------------- /.github/workflows/next_deploy.yml: -------------------------------------------------------------------------------- 1 | name: Next Deploy 2 | on: 3 | push: 4 | branches: 5 | - develop # change to main if needed 6 | - feat/regen-db 7 | jobs: 8 | deploy: 9 | name: Deploy app 10 | runs-on: ubuntu-latest 11 | concurrency: next-deploy-group # optional: ensure only one action runs at a time 12 | steps: 13 | - uses: actions/checkout@v4 14 | - uses: superfly/flyctl-actions/setup-flyctl@master 15 | - run: flyctl deploy --remote-only 16 | env: 17 | FLY_API_TOKEN: ${{ secrets.FLYIO_TOKEN }} 18 | -------------------------------------------------------------------------------- /.github/workflows/next_test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Next Go Tests 3 | 4 | "on": 5 | push: 6 | branches: ['develop'] 7 | pull_request: 8 | 9 | jobs: 10 | build: 11 | name: Build 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: Download Go 16 | uses: actions/setup-go@v5 17 | with: 18 | go-version: 1.24.0 19 | id: go 20 | 21 | - name: Check out code into the Go module directory 22 | uses: actions/checkout@v4 23 | 24 | - name: Test that the docker image still builds successfully 25 | run: | 26 | export 
COMMIT_SHA=$(git rev-parse --short HEAD) 27 | docker build -t testingbuild:latest --build-arg COMMIT_SHA=${COMMIT_SHA} . -f Dockerfile_next 28 | 29 | - name: Deps 30 | run: go get -v ./... 31 | working-directory: next 32 | 33 | - name: Build 34 | run: go build 35 | working-directory: next 36 | 37 | - name: Test 38 | run: go test -v ./... 39 | env: 40 | GITHUB_PAT_TOKEN: ${{ secrets.TOKEN_GITHUB }} 41 | working-directory: next 42 | -------------------------------------------------------------------------------- /.github/workflows/pro-deploy.yml: -------------------------------------------------------------------------------- 1 | # See https://fly.io/docs/app-guides/continuous-deployment-with-github-actions/ 2 | 3 | name: Deploy pro-backend 4 | on: 5 | push: 6 | branches: 7 | - develop 8 | - pro 9 | jobs: 10 | deploy: 11 | name: Deploy app 12 | runs-on: ubuntu-latest 13 | concurrency: deploy-group # optional: ensure only one action runs at a time 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: superfly/flyctl-actions/setup-flyctl@master 17 | - run: flyctl deploy --remote-only --config fly-pro.toml 18 | env: 19 | FLY_API_TOKEN: ${{ secrets.FLYIO_PRO_TOKEN }} 20 | -------------------------------------------------------------------------------- /.github/workflows/tasks_run_test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Tasks run tests 3 | 4 | "on": 5 | push: 6 | pull_request: 7 | 8 | jobs: 9 | build: 10 | name: Build 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Download Go 15 | uses: actions/setup-go@v5 16 | with: 17 | go-version: 1.24.0 18 | id: go 19 | 20 | - name: Check out code into the Go module directory 21 | uses: actions/checkout@v4 22 | 23 | - name: Deps 24 | run: | 25 | pwd 26 | go get -v ./... 27 | working-directory: backend/tasks 28 | 29 | - name: Test 30 | run: go test -v ./... 31 | working-directory: backend/tasks 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | **/.env 3 | **/.env* 4 | .DS_Store 5 | venv/ 6 | **/__pycache__/ 7 | __azurite* 8 | -------------------------------------------------------------------------------- /Dockerfile_tasks: -------------------------------------------------------------------------------- 1 | FROM golang:1.22 as builder 2 | ARG COMMIT_SHA 3 | RUN echo "commit sha: ${COMMIT_SHA}" 4 | 5 | # Set the working directory 6 | WORKDIR $GOPATH/src/github.com/diggerhq/digger 7 | 8 | # Copy all required source, blacklist files that are not required through `.dockerignore` 9 | COPY . . 10 | 11 | # Get the vendor library 12 | RUN go version 13 | 14 | # RUN vgo install 15 | 16 | # https://github.com/ethereum/go-ethereum/issues/2738 17 | # Build static binary "-getmode=vendor" does not work with go-ethereum 18 | 19 | RUN go build -ldflags="-X 'main.Version=${COMMIT_SHA}'" -o tasks_exe ./backend/tasks 20 | 21 | # Multi-stage build will just copy the binary to an Ubuntu image.
22 | FROM ubuntu:24.04 as runner 23 | ENV ATLAS_VERSION v0.28.0 24 | ARG COMMIT_SHA 25 | WORKDIR /app 26 | 27 | RUN apt-get update && apt-get install -y ca-certificates curl && apt-get install -y git && apt-get clean all 28 | RUN update-ca-certificates 29 | 30 | RUN echo "commit sha: ${COMMIT_SHA}" 31 | 32 | # install atlas 33 | RUN curl -sSf https://atlasgo.sh | sh 34 | 35 | 36 | # Copy the binary to the corresponding folder 37 | COPY --from=builder /go/src/github.com/diggerhq/digger/tasks_exe /app/tasks 38 | 39 | # Run the binary 40 | ENTRYPOINT ["/app/tasks"] 41 | -------------------------------------------------------------------------------- /backend/.dockerignore: -------------------------------------------------------------------------------- 1 | Dockerfile 2 | cloud -------------------------------------------------------------------------------- /backend/.gitignore: -------------------------------------------------------------------------------- 1 | backend 2 | main 3 | .idea/ 4 | .DS_Store 5 | venv/ 6 | **/__pycache__/ 7 | __azurite* 8 | ./digger 9 | cloud 10 | *.env 11 | *.env.* 12 | .docker-compose-env 13 | controllers/database_test.db 14 | -------------------------------------------------------------------------------- /backend/Makefile: -------------------------------------------------------------------------------- 1 | # Include only if exist 2 | -include .env 3 | export 4 | 5 | start: 6 | go run main.go -------------------------------------------------------------------------------- /backend/atlas.hcl: -------------------------------------------------------------------------------- 1 | data "external_schema" "gorm" { 2 | program = [ 3 | "go", 4 | "run", 5 | "-mod=mod", 6 | "ariga.io/atlas-provider-gorm", 7 | "load", 8 | "--path", "./models", 9 | "--dialect", "postgres", 10 | ] 11 | } 12 | 13 | env "gorm" { 14 | src = data.external_schema.gorm.url 15 | dev = "docker://postgres/16.1" 16 | migration { 17 | dir = "file://migrations" 18 | } 19 | format { 20 | migrate { 21 | diff = "{{ sql . 
\" \" }}" 22 | } 23 | } 24 | } -------------------------------------------------------------------------------- /backend/ci_backends/ci_backends.go: -------------------------------------------------------------------------------- 1 | package ci_backends 2 | 3 | import ( 4 | "github.com/diggerhq/digger/backend/utils" 5 | "github.com/diggerhq/digger/libs/spec" 6 | ) 7 | 8 | type CiBackend interface { 9 | TriggerWorkflow(spec spec.Spec, runName string, vcsToken string) error 10 | GetWorkflowUrl(spec spec.Spec) (string, error) 11 | } 12 | 13 | type JenkinsCi struct{} 14 | 15 | type CiBackendOptions struct { 16 | GithubClientProvider utils.GithubClientProvider 17 | GithubInstallationId int64 18 | GithubAppId int64 19 | GitlabProjectId int 20 | GitlabmergeRequestEventName string 21 | GitlabCIPipelineID string 22 | GitlabCIPipelineIID int 23 | GitlabCIMergeRequestID int 24 | GitlabCIMergeRequestIID int 25 | GitlabCIProjectName string 26 | GitlabciprojectNamespace string 27 | GitlabciprojectId int 28 | GitlabciprojectNamespaceId int 29 | GitlabDiscussionId string 30 | RepoFullName string 31 | RepoOwner string 32 | RepoName string 33 | } 34 | -------------------------------------------------------------------------------- /backend/ci_backends/jenkins.go: -------------------------------------------------------------------------------- 1 | package ci_backends 2 | -------------------------------------------------------------------------------- /backend/ci_backends/provider.go: -------------------------------------------------------------------------------- 1 | package ci_backends 2 | 3 | import ( 4 | "fmt" 5 | "log/slog" 6 | 7 | "github.com/diggerhq/digger/backend/utils" 8 | ) 9 | 10 | type CiBackendProvider interface { 11 | GetCiBackend(options CiBackendOptions) (CiBackend, error) 12 | } 13 | 14 | type DefaultBackendProvider struct{} 15 | 16 | func (d DefaultBackendProvider) GetCiBackend(options CiBackendOptions) (CiBackend, error) { 17 | client, _, err := utils.GetGithubClientFromAppId(options.GithubClientProvider, options.GithubInstallationId, options.GithubAppId, options.RepoFullName) 18 | if err != nil { 19 | slog.Error("GetCiBackend: could not get github client", "error", err) 20 | return nil, fmt.Errorf("could not get github client: %v", err) 21 | } 22 | backend := &GithubActionCi{ 23 | Client: client, 24 | } 25 | return backend, nil 26 | } 27 | -------------------------------------------------------------------------------- /backend/config/config.go: -------------------------------------------------------------------------------- 1 | package config 2 | 3 | import ( 4 | "github.com/spf13/cast" 5 | "os" 6 | "strings" 7 | "time" 8 | 9 | "github.com/spf13/viper" 10 | ) 11 | 12 | // Config represents an alias to viper config 13 | type Config = viper.Viper 14 | 15 | var DiggerConfig *Config 16 | 17 | // New returns a new pointer to the config 18 | func New() *Config { 19 | v := viper.New() 20 | v.SetEnvPrefix("DIGGER") 21 | v.SetEnvKeyReplacer(strings.NewReplacer("-", "_")) 22 | v.SetDefault("port", 3000) 23 | v.SetDefault("usersvc_on", true) 24 | v.SetDefault("build_date", "null") 25 | v.SetDefault("deployed_at", time.Now().UTC().Format(time.RFC3339)) 26 | v.SetDefault("max_concurrency_per_batch", "0") 27 | v.BindEnv() 28 | return v 29 | } 30 | 31 | func GetPort() int { 32 | port := cast.ToInt(os.Getenv("PORT")) 33 | if port == 0 { 34 | port = 3000 35 | } 36 | return port 37 | } 38 | 39 | func init() { 40 | cfg := New() 41 | cfg.AutomaticEnv() 42 | DiggerConfig = cfg 43 | } 44 | 
-------------------------------------------------------------------------------- /backend/config/envgetters.go: -------------------------------------------------------------------------------- 1 | package config 2 | 3 | import ( 4 | "os" 5 | ) 6 | 7 | func LimitByNumOfFilesChanged() bool { 8 | // if this flag is set then it will fail if there are more projects impacted than the 9 | // number of files changed 10 | return os.Getenv("DIGGER_LIMIT_MAX_PROJECTS_TO_FILES_CHANGED") == "1" 11 | } 12 | -------------------------------------------------------------------------------- /backend/controllers/helpers.go: -------------------------------------------------------------------------------- 1 | package controllers 2 | 3 | import ( 4 | "github.com/gin-gonic/gin" 5 | "net/http" 6 | ) 7 | 8 | func Home(c *gin.Context) { 9 | c.HTML(http.StatusOK, "home.tmpl", gin.H{}) 10 | } 11 | -------------------------------------------------------------------------------- /backend/controllers/locking.go: -------------------------------------------------------------------------------- 1 | package controllers 2 | -------------------------------------------------------------------------------- /backend/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | 3 | services: 4 | postgres: 5 | image: postgres:alpine 6 | ports: 7 | - "5432:5432" 8 | environment: 9 | - POSTGRES_PASSWORD=23q4RSDFSDFS 10 | healthcheck: 11 | test: [ "CMD-SHELL", "pg_isready -U postgres" ] 12 | interval: 5s 13 | timeout: 5s 14 | retries: 5 15 | 16 | web: 17 | links: 18 | - postgres 19 | depends_on: 20 | postgres: 21 | condition: service_healthy 22 | build: ./ 23 | env_file: 24 | - .env.docker-compose 25 | environment: 26 | - ALLOW_DIRTY=false 27 | ports: 28 | - "3100:3000" 29 | -------------------------------------------------------------------------------- /backend/hooks/hooks.go: -------------------------------------------------------------------------------- 1 | package hooks 2 | -------------------------------------------------------------------------------- /backend/locking/backend_locking.go: -------------------------------------------------------------------------------- 1 | package locking 2 | 3 | import ( 4 | "errors" 5 | "fmt" 6 | "github.com/diggerhq/digger/backend/models" 7 | "gorm.io/gorm" 8 | ) 9 | 10 | type BackendDBLock struct { 11 | OrgId uint 12 | } 13 | 14 | func (lock BackendDBLock) Lock(lockId int, resource string) (bool, error) { 15 | _, err := models.DB.CreateDiggerLock(resource, lockId, lock.OrgId) 16 | if err != nil { 17 | return false, fmt.Errorf("could not create lock record: %v", err) 18 | } 19 | return true, nil 20 | } 21 | 22 | func (lock BackendDBLock) Unlock(resource string) (bool, error) { 23 | // delete all locks that match this resource 24 | l := models.DiggerLock{} 25 | err := models.DB.GormDB.Where("resource=?", resource).Delete(&l).Error 26 | if err != nil { 27 | return false, fmt.Errorf("could not delete all locks: %v", err) 28 | } 29 | return true, nil 30 | } 31 | 32 | func (lock BackendDBLock) GetLock(resource string) (*int, error) { 33 | theLock, err := models.DB.GetDiggerLock(resource) 34 | if errors.Is(err, gorm.ErrRecordNotFound) { 35 | return nil, nil 36 | } 37 | if err != nil { 38 | return nil, fmt.Errorf("could not get lock record: %v", err) 39 | } 40 | return &theLock.LockId, nil 41 | } 42 | -------------------------------------------------------------------------------- /backend/main.go: 
-------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "embed" 5 | "fmt" 6 | "github.com/diggerhq/digger/backend/bootstrap" 7 | "github.com/diggerhq/digger/backend/ci_backends" 8 | "github.com/diggerhq/digger/backend/config" 9 | "github.com/diggerhq/digger/backend/controllers" 10 | "github.com/diggerhq/digger/backend/utils" 11 | ) 12 | 13 | //go:embed templates 14 | var templates embed.FS 15 | 16 | func main() { 17 | ghController := controllers.DiggerController{ 18 | CiBackendProvider: ci_backends.DefaultBackendProvider{}, 19 | GithubClientProvider: utils.DiggerGithubRealClientProvider{}, 20 | GithubWebhookPostIssueCommentHooks: make([]controllers.IssueCommentHook, 0), 21 | } 22 | r := bootstrap.Bootstrap(templates, ghController) 23 | r.GET("/", controllers.Home) 24 | port := config.GetPort() 25 | r.Run(fmt.Sprintf(":%d", port)) 26 | } 27 | -------------------------------------------------------------------------------- /backend/middleware/headers.go: -------------------------------------------------------------------------------- 1 | package middleware 2 | 3 | import ( 4 | "github.com/gin-gonic/gin" 5 | "net/http" 6 | ) 7 | 8 | func HeadersApiAuth() gin.HandlerFunc { 9 | return func(c *gin.Context) { 10 | orgId := c.Request.Header.Get("DIGGER_ORG_ID") 11 | orgSource := c.Request.Header.Get("DIGGER_ORG_SOURCE") 12 | userId := c.Request.Header.Get("DIGGER_USER_ID") 13 | 14 | if orgId == "" { 15 | c.String(http.StatusBadRequest, "Missing parameter: DIGGER_ORG_ID") 16 | c.Abort() 17 | return 18 | } 19 | 20 | if orgSource == "" { 21 | c.String(http.StatusBadRequest, "Missing parameter: DIGGER_ORG_SOURCE") 22 | c.Abort() 23 | return 24 | } 25 | 26 | if userId == "" { 27 | c.String(http.StatusBadRequest, "Missing parameter: DIGGER_USER_ID") 28 | c.Abort() 29 | return 30 | } 31 | 32 | c.Set(ORGANISATION_ID_KEY, orgId) 33 | c.Set(ORGANISATION_SOURCE_KEY, orgSource) 34 | c.Set(USER_ID_KEY, userId) 35 | 36 | c.Next() 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /backend/middleware/noop.go: -------------------------------------------------------------------------------- 1 | package middleware 2 | 3 | import ( 4 | "github.com/diggerhq/digger/backend/models" 5 | "github.com/gin-gonic/gin" 6 | ) 7 | 8 | func NoopWebAuth() gin.HandlerFunc { 9 | return func(c *gin.Context) { 10 | setDefaultOrganisationId(c) 11 | c.Set(ACCESS_LEVEL_KEY, models.AdminPolicyType) 12 | c.Next() 13 | } 14 | } 15 | 16 | func NoopApiAuth() gin.HandlerFunc { 17 | return func(c *gin.Context) { 18 | setDefaultOrganisationId(c) 19 | c.Set(ACCESS_LEVEL_KEY, models.AdminPolicyType) 20 | c.Next() 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /backend/middleware/webhook.go: -------------------------------------------------------------------------------- 1 | package middleware 2 | 3 | import ( 4 | "github.com/gin-gonic/gin" 5 | "net/http" 6 | "os" 7 | "strings" 8 | ) 9 | 10 | func InternalApiAuth() gin.HandlerFunc { 11 | return func(c *gin.Context) { 12 | webhookSecret := os.Getenv("DIGGER_INTERNAL_SECRET") 13 | authHeader := c.Request.Header.Get("Authorization") 14 | if authHeader == "" { 15 | c.String(http.StatusForbidden, "No Authorization header provided") 16 | c.Abort() 17 | return 18 | } 19 | token := strings.TrimPrefix(authHeader, "Bearer ") 20 | if token != webhookSecret { 21 | c.String(http.StatusForbidden, "invalid token") 22 | c.Abort() 23 | return 24 | } 25 
| // webhook auth optionally accepts organisation ID as a value 26 | orgIdHeader := c.GetHeader("X-Digger-Org-ID") 27 | if orgIdHeader != "" { 28 | c.Set(ORGANISATION_ID_KEY, orgIdHeader) 29 | } 30 | 31 | c.Next() 32 | return 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /backend/migrations/20240115170600.sql: -------------------------------------------------------------------------------- 1 | -- Create "digger_job_summaries" table 2 | CREATE TABLE "public"."digger_job_summaries" ( 3 | "id" bigserial NOT NULL, 4 | "created_at" timestamptz NULL, 5 | "updated_at" timestamptz NULL, 6 | "deleted_at" timestamptz NULL, 7 | "resources_created" bigint NULL, 8 | "resources_deleted" bigint NULL, 9 | "resources_updated" bigint NULL, 10 | PRIMARY KEY ("id") 11 | ); 12 | -- Create index "idx_digger_job_summaries_deleted_at" to table: "digger_job_summaries" 13 | CREATE INDEX "idx_digger_job_summaries_deleted_at" ON "public"."digger_job_summaries" ("deleted_at"); 14 | -- Modify "digger_jobs" table 15 | ALTER TABLE "public"."digger_jobs" ADD COLUMN "digger_job_summary_id" bigint NULL, ADD 16 | CONSTRAINT "fk_digger_jobs_digger_job_summary" FOREIGN KEY ("digger_job_summary_id") REFERENCES "public"."digger_job_summaries" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION; 17 | -------------------------------------------------------------------------------- /backend/migrations/20240116123649.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_batches" table 2 | ALTER TABLE "public"."digger_batches" ADD COLUMN "comment_id" bigint NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240125121106.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_jobs" table 2 | ALTER TABLE "public"."digger_jobs" DROP COLUMN "serialized_job", ADD COLUMN "serialized_job_spec" bytea NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240125181812.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_jobs" table 2 | ALTER TABLE "public"."digger_jobs" ADD COLUMN "workflow_run_url" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240301211741.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_jobs" table 2 | ALTER TABLE "public"."digger_jobs" ADD COLUMN "workflow_file" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240328182453.sql: -------------------------------------------------------------------------------- 1 | -- Modify "repos" table 2 | ALTER TABLE "public"."repos" ADD COLUMN "repo_full_name" text NULL, ADD COLUMN "repo_organisation" text NULL, ADD COLUMN "repo_name" text NULL, ADD COLUMN "repo_url" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240329114422.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_run_stages" table 2 | ALTER TABLE "public"."digger_run_stages" DROP COLUMN "digger_run_stage_id", DROP COLUMN "project_name", DROP COLUMN "status", DROP COLUMN "digger_job_summary_id", DROP COLUMN "serialized_job_spec", DROP COLUMN "workflow_file", DROP COLUMN 
"workflow_run_url", ADD COLUMN "batch_id" text NULL, ADD 3 | CONSTRAINT "fk_digger_run_stages_batch" FOREIGN KEY ("batch_id") REFERENCES "public"."digger_batches" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION; 4 | -------------------------------------------------------------------------------- /backend/migrations/20240403155357_drop_dup_idx.sql: -------------------------------------------------------------------------------- 1 | -- drop the duplicate index to fix the next migration of renaming 2 | DROP INDEX "public"."idx_digger_job_id"; 3 | DROP INDEX "idx_digger_run_queues_deleted_at"; 4 | DROP INDEX "idx_digger_run_queue_project_id"; 5 | DROP INDEX "idx_digger_run_queue_run_id"; 6 | -------------------------------------------------------------------------------- /backend/migrations/20240404160724.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_run_queue_items" table 2 | ALTER TABLE "public"."digger_run_queue_items" DROP COLUMN "project_id"; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240404161121.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_run_stages" table 2 | ALTER TABLE "public"."digger_run_stages" DROP COLUMN "run_id"; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240404161723.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_runs" table 2 | ALTER TABLE "public"."digger_runs" ADD COLUMN "plan_stage_id" bigint NULL, ADD COLUMN "apply_stage_id" bigint NULL, ADD 3 | CONSTRAINT "fk_digger_runs_apply_stage" FOREIGN KEY ("apply_stage_id") REFERENCES "public"."digger_run_stages" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, ADD 4 | CONSTRAINT "fk_digger_runs_plan_stage" FOREIGN KEY ("plan_stage_id") REFERENCES "public"."digger_run_stages" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION; 5 | -------------------------------------------------------------------------------- /backend/migrations/20240404165910.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_runs" table 2 | ALTER TABLE "public"."digger_runs" DROP COLUMN "project_id", ADD COLUMN "project_name" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240405150942.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_runs" table 2 | ALTER TABLE "public"."digger_runs" ADD COLUMN "is_approved" boolean NULL, ADD COLUMN "approval_author" text NULL, ADD COLUMN "approval_date" timestamptz NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240405160110.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_run_queue_items" table 2 | ALTER TABLE "public"."digger_run_queue_items" ADD COLUMN "project_id" bigint NULL, ADD 3 | CONSTRAINT "fk_digger_run_queue_items_project" FOREIGN KEY ("project_id") REFERENCES "public"."projects" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION; 4 | -------------------------------------------------------------------------------- /backend/migrations/20240409161739.sql: -------------------------------------------------------------------------------- 1 | -- Create "job_tokens" table 2 | CREATE TABLE "public"."job_tokens" ( 3 | 
"id" bigserial NOT NULL, 4 | "created_at" timestamptz NULL, 5 | "updated_at" timestamptz NULL, 6 | "deleted_at" timestamptz NULL, 7 | "value" text NULL, 8 | "expiry" timestamptz NULL, 9 | "organisation_id" bigint NULL, 10 | "type" text NULL, 11 | PRIMARY KEY ("id"), 12 | CONSTRAINT "fk_job_tokens_organisation" FOREIGN KEY ("organisation_id") REFERENCES "public"."organisations" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION 13 | ); 14 | -- Create index "idx_job_tokens_deleted_at" to table: "job_tokens" 15 | CREATE INDEX "idx_job_tokens_deleted_at" ON "public"."job_tokens" ("deleted_at"); 16 | -------------------------------------------------------------------------------- /backend/migrations/20240510162721.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_jobs" table 2 | ALTER TABLE "public"."digger_jobs" ADD COLUMN "plan_footprint" bytea NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240518182629.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_jobs" table 2 | ALTER TABLE "public"."digger_jobs" ADD COLUMN "pr_comment_url" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240524110010.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_jobs" table 2 | ALTER TABLE "public"."digger_jobs" ADD COLUMN "terraform_output" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240527112209.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_batches" table 2 | ALTER TABLE "public"."digger_batches" ADD COLUMN "source_details" bytea NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240530074832.sql: -------------------------------------------------------------------------------- 1 | -- Create "digger_locks" table 2 | CREATE TABLE "public"."digger_locks" ( 3 | "id" bigserial NOT NULL, 4 | "created_at" timestamptz NULL, 5 | "updated_at" timestamptz NULL, 6 | "deleted_at" timestamptz NULL, 7 | "resource" text NULL, 8 | "lock_id" bigint NULL, 9 | "organisation_id" bigint NULL, 10 | PRIMARY KEY ("id"), 11 | CONSTRAINT "fk_digger_locks_organisation" FOREIGN KEY ("organisation_id") REFERENCES "public"."organisations" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION 12 | ); 13 | -- Create index "idx_digger_locked_resource" to table: "digger_locks" 14 | CREATE INDEX "idx_digger_locked_resource" ON "public"."digger_locks" ("resource"); 15 | -- Create index "idx_digger_locks_deleted_at" to table: "digger_locks" 16 | CREATE INDEX "idx_digger_locks_deleted_at" ON "public"."digger_locks" ("deleted_at"); 17 | -------------------------------------------------------------------------------- /backend/migrations/20240703121051.sql: -------------------------------------------------------------------------------- 1 | -- Modify "projects" table 2 | ALTER TABLE "public"."projects" ADD COLUMN "is_generated" boolean NULL, ADD COLUMN "is_in_main_branch" boolean NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240704192835.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_batches" table 2 | ALTER TABLE "public"."digger_batches" 
ADD COLUMN "vcs" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240705144450.sql: -------------------------------------------------------------------------------- 1 | -- Modify "project_runs" table 2 | ALTER TABLE "public"."project_runs" ADD COLUMN "actor_username" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240709165155.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_batches" table 2 | ALTER TABLE "public"."digger_batches" ADD COLUMN "gitlab_project_id" bigint NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240729155442.sql: -------------------------------------------------------------------------------- 1 | -- Create "job_artefacts" table 2 | CREATE TABLE "public"."job_artefacts" ( 3 | "id" bigserial NOT NULL, 4 | "created_at" timestamptz NULL, 5 | "updated_at" timestamptz NULL, 6 | "deleted_at" timestamptz NULL, 7 | "job_token_id" bigint NULL, 8 | "contents" bytea NULL, 9 | PRIMARY KEY ("id"), 10 | CONSTRAINT "fk_job_artefacts_job_token" FOREIGN KEY ("job_token_id") REFERENCES "public"."job_tokens" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION 11 | ); 12 | -- Create index "idx_job_artefacts_deleted_at" to table: "job_artefacts" 13 | CREATE INDEX "idx_job_artefacts_deleted_at" ON "public"."job_artefacts" ("deleted_at"); 14 | -------------------------------------------------------------------------------- /backend/migrations/20240729155926.sql: -------------------------------------------------------------------------------- 1 | -- Modify "job_artefacts" table 2 | ALTER TABLE "public"."job_artefacts" ADD COLUMN "size" bigint NULL, ADD COLUMN "content_type" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20240729160028.sql: -------------------------------------------------------------------------------- 1 | -- Modify "job_artefacts" table 2 | ALTER TABLE "public"."job_artefacts" ADD COLUMN "filename" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20241107162605.sql: -------------------------------------------------------------------------------- 1 | -- Modify "github_apps" table 2 | ALTER TABLE "public"."github_apps" ADD COLUMN "client_id" text NULL, ADD COLUMN "client_secret_encrypted" text NULL, ADD COLUMN "webhook_secret_encrypted" text NULL, ADD COLUMN "private_key_encrypted" text NULL, ADD COLUMN "private_key_base64_encrypted" text NULL, ADD COLUMN "org" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20241107163722.sql: -------------------------------------------------------------------------------- 1 | -- Create "github_app_connections" table 2 | CREATE TABLE "public"."github_app_connections" ( 3 | "id" bigserial NOT NULL, 4 | "created_at" timestamptz NULL, 5 | "updated_at" timestamptz NULL, 6 | "deleted_at" timestamptz NULL, 7 | "github_id" bigint NULL, 8 | "client_id" text NULL, 9 | "client_secret_encrypted" text NULL, 10 | "webhook_secret_encrypted" text NULL, 11 | "private_key_encrypted" text NULL, 12 | "private_key_base64_encrypted" text NULL, 13 | "org" text NULL, 14 | "name" text NULL, 15 | "github_app_url" text NULL, 16 | PRIMARY KEY ("id") 17 | ); 18 | -- Create index "idx_github_app_connections_deleted_at" 
to table: "github_app_connections" 19 | CREATE INDEX "idx_github_app_connections_deleted_at" ON "public"."github_app_connections" ("deleted_at"); 20 | -- Drop "github_apps" table 21 | DROP TABLE "public"."github_apps"; 22 | -------------------------------------------------------------------------------- /backend/migrations/20241107172343.sql: -------------------------------------------------------------------------------- 1 | -- Modify "github_app_connections" table 2 | ALTER TABLE "public"."github_app_connections" ADD COLUMN "organisation_id" bigint NULL, ADD 3 | CONSTRAINT "fk_github_app_connections_organisation" FOREIGN KEY ("organisation_id") REFERENCES "public"."organisations" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION; 4 | -------------------------------------------------------------------------------- /backend/migrations/20241114202249.sql: -------------------------------------------------------------------------------- 1 | -- Create "repo_caches" table 2 | CREATE TABLE "public"."repo_caches" ( 3 | "id" bigserial NOT NULL, 4 | "created_at" timestamptz NULL, 5 | "updated_at" timestamptz NULL, 6 | "deleted_at" timestamptz NULL, 7 | "org_id" bigint NULL, 8 | "repo_full_name" text NULL, 9 | "digger_yml_str" text NULL, 10 | "digger_config" bytea NULL, 11 | PRIMARY KEY ("id") 12 | ); 13 | -- Create index "idx_repo_caches_deleted_at" to table: "repo_caches" 14 | CREATE INDEX "idx_repo_caches_deleted_at" ON "public"."repo_caches" ("deleted_at"); 15 | -------------------------------------------------------------------------------- /backend/migrations/20241229112312.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_batches" table 2 | ALTER TABLE "public"."digger_batches" ADD COLUMN "ai_summary_comment_id" text NULL, ADD COLUMN "report_terraform_outputs" boolean NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250220084846.sql: -------------------------------------------------------------------------------- 1 | -- Modify "users" table 2 | ALTER TABLE "public"."users" ADD COLUMN "email" text NULL, ADD COLUMN "external_id" text NULL, ADD COLUMN "org_id" bigint NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250220172054.sql: -------------------------------------------------------------------------------- 1 | -- Modify "users" table 2 | ALTER TABLE "public"."users" ADD COLUMN "external_source" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250220172321.sql: -------------------------------------------------------------------------------- 1 | -- Create index "idx_user_external_source" to table: "users" 2 | CREATE UNIQUE INDEX "idx_user_external_source" ON "public"."users" ("external_source", "external_id"); 3 | -------------------------------------------------------------------------------- /backend/migrations/20250220173053.sql: -------------------------------------------------------------------------------- 1 | -- Create index "idx_users_email" to table: "users" 2 | CREATE UNIQUE INDEX "idx_users_email" ON "public"."users" ("email"); 3 | -------------------------------------------------------------------------------- /backend/migrations/20250220173439.sql: -------------------------------------------------------------------------------- 1 | -- Rename a column from "org_id" to "organisation_id" 2 | ALTER TABLE "public"."users" RENAME COLUMN 
"org_id" TO "organisation_id"; 3 | -- Modify "users" table 4 | ALTER TABLE "public"."users" ADD CONSTRAINT "fk_users_organisation" FOREIGN KEY ("organisation_id") REFERENCES "public"."organisations" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION; 5 | -------------------------------------------------------------------------------- /backend/migrations/20250221044813.sql: -------------------------------------------------------------------------------- 1 | -- Drop index "idx_organisation" from table: "organisations" 2 | DROP INDEX "public"."idx_organisation"; 3 | -- Create index "idx_organisation" to table: "organisations" 4 | CREATE INDEX "idx_organisation" ON "public"."organisations" ("name"); 5 | -------------------------------------------------------------------------------- /backend/migrations/20250224152926.sql: -------------------------------------------------------------------------------- 1 | -- Modify "repos" table 2 | ALTER TABLE "public"."repos" ADD COLUMN "vcs" text NULL DEFAULT 'github'; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250226185150.sql: -------------------------------------------------------------------------------- 1 | -- Modify "github_app_connections" table 2 | ALTER TABLE "public"."github_app_connections" ADD COLUMN "bitbucket_access_token_encrypted" text NULL, ADD COLUMN "bitbucket_webhook_secret_encrypted" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250302190926.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_batches" table 2 | ALTER TABLE "public"."digger_batches" ADD COLUMN "vcs_connection_id" bigint NULL, ADD CONSTRAINT "fk_digger_batches_vcs_connection" FOREIGN KEY ("vcs_connection_id") REFERENCES "public"."github_app_connections" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250325115901.sql: -------------------------------------------------------------------------------- 1 | -- Modify "github_app_connections" table 2 | ALTER TABLE "public"."github_app_connections" ADD COLUMN "gitlab_access_token_encrypted" text NULL, ADD COLUMN "gitlab_webhook_secret_encrypted" text NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250325134924.sql: -------------------------------------------------------------------------------- 1 | -- Modify "github_app_connections" table 2 | ALTER TABLE "public"."github_app_connections" ADD COLUMN "vcs_type" text NULL DEFAULT 'bitbucket'; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250416152705.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_batches" table 2 | ALTER TABLE "public"."digger_batches" ADD COLUMN "cover_all_impacted_projects" boolean NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250512172515.sql: -------------------------------------------------------------------------------- 1 | -- Modify "digger_jobs" table 2 | ALTER TABLE "public"."digger_jobs" ADD COLUMN "pr_comment_id" bigint NULL; 3 | -------------------------------------------------------------------------------- /backend/migrations/20250512213729.sql: 
-------------------------------------------------------------------------------- 1 | -- Modify "digger_batches" table 2 | ALTER TABLE "public"."digger_batches" ADD COLUMN "created_at" timestamptz NULL, ADD COLUMN "updated_at" timestamptz NULL, ADD COLUMN "deleted_at" timestamptz NULL; 3 | -- Create index "idx_digger_batches_deleted_at" to table: "digger_batches" 4 | CREATE INDEX "idx_digger_batches_deleted_at" ON "public"."digger_batches" ("deleted_at"); 5 | -------------------------------------------------------------------------------- /backend/models/artefact.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | import ( 4 | "gorm.io/gorm" 5 | ) 6 | 7 | type JobArtefact struct { 8 | gorm.Model 9 | JobTokenID uint 10 | JobToken JobToken 11 | Filename string 12 | Contents []byte `gorm:"type:bytea"` 13 | Size int64 14 | ContentType string 15 | } 16 | -------------------------------------------------------------------------------- /backend/models/cache.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | import ( 4 | "gorm.io/gorm" 5 | ) 6 | 7 | // storing repo cache such as digger.yml configuration 8 | type RepoCache struct { 9 | gorm.Model 10 | OrgId uint 11 | RepoFullName string 12 | DiggerYmlStr string 13 | DiggerConfig []byte `gorm:"type:bytea"` 14 | } 15 | -------------------------------------------------------------------------------- /backend/models/locking.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | import "gorm.io/gorm" 4 | 5 | type DiggerLock struct { 6 | gorm.Model 7 | Resource string `gorm:"index:idx_digger_locked_resource"` 8 | LockId int 9 | Organisation *Organisation 10 | OrganisationID uint 11 | } 12 | -------------------------------------------------------------------------------- /backend/models/policies.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | import "gorm.io/gorm" 4 | 5 | const ( 6 | POLICY_TYPE_ACCESS = "access" 7 | POLICY_TYPE_PLAN = "plan" 8 | POLICY_TYPE_DRIFT = "drift" 9 | ) 10 | 11 | type Policy struct { 12 | gorm.Model 13 | Project *Project 14 | ProjectID *uint 15 | Policy string 16 | Type string 17 | CreatedBy *User 18 | CreatedByID *uint 19 | Organisation *Organisation 20 | OrganisationID uint 21 | Repo *Repo 22 | RepoID *uint 23 | } 24 | -------------------------------------------------------------------------------- /backend/models/setup.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | import ( 4 | "log/slog" 5 | "os" 6 | 7 | slogGorm "github.com/orandin/slog-gorm" 8 | "gorm.io/driver/postgres" 9 | _ "gorm.io/driver/postgres" 10 | "gorm.io/gorm" 11 | ) 12 | 13 | type Database struct { 14 | GormDB *gorm.DB 15 | } 16 | 17 | var DEFAULT_ORG_NAME = "digger" 18 | 19 | // var DB *gorm.DB 20 | var DB *Database 21 | 22 | func ConnectDatabase() { 23 | database, err := gorm.Open(postgres.Open(os.Getenv("DATABASE_URL")), &gorm.Config{ 24 | Logger: slogGorm.New(), 25 | }) 26 | if err != nil { 27 | slog.Error("Failed to connect to database", "error", err) 28 | panic("Failed to connect to database!") 29 | } 30 | 31 | DB = &Database{GormDB: database} 32 | 33 | // data and fixtures added 34 | orgNumberOne, err := DB.GetOrganisation(DEFAULT_ORG_NAME) 35 | if orgNumberOne == nil { 36 | slog.Info("No default organization found, creating default organisation", 
"name", DEFAULT_ORG_NAME) 37 | _, err := DB.CreateOrganisation("digger", "", DEFAULT_ORG_NAME) 38 | if err != nil { 39 | slog.Error("Failed to create default organization", "error", err) 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /backend/models/user.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | import "gorm.io/gorm" 4 | 5 | type User struct { 6 | gorm.Model 7 | Email string `gorm:"uniqueIndex"` 8 | ExternalSource string `gorm:"uniqueIndex:idx_user_external_source"` 9 | ExternalId string `gorm:"uniqueIndex:idx_user_external_source"` 10 | // the default org currently in use by this user 11 | OrganisationId *uint 12 | Organisation Organisation 13 | Username string `gorm:"uniqueIndex:idx_user"` 14 | } 15 | -------------------------------------------------------------------------------- /backend/queries/queries.go: -------------------------------------------------------------------------------- 1 | package queries 2 | 3 | import "time" 4 | 5 | type JobQueryResult struct { 6 | ID uint `gorm:"column:id"` 7 | CreatedAt time.Time `gorm:"column:created_at"` 8 | UpdatedAt time.Time `gorm:"column:updated_at"` 9 | DeletedAt *time.Time `gorm:"column:deleted_at"` 10 | DiggerJobID string `gorm:"column:digger_job_id"` 11 | Status string `gorm:"column:status"` 12 | WorkflowRunURL string `gorm:"column:workflow_run_url"` 13 | WorkflowFile string `gorm:"column:workflow_file"` 14 | TerraformOutput string `gorm:"column:terraform_output"` 15 | PRNumber int `gorm:"column:pr_number"` 16 | RepoFullName string `gorm:"column:repo_full_name"` 17 | BranchName string `gorm:"column:branch_name"` 18 | } 19 | -------------------------------------------------------------------------------- /backend/scripts/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | if [[ -z "${BASELINE_MIGRATION}" ]]; then 5 | cd /app 6 | if [[ "${ALLOW_DIRTY}" == "true" ]]; then 7 | atlas migrate apply --url $DATABASE_URL --allow-dirty 8 | else 9 | atlas migrate apply --url $DATABASE_URL 10 | fi 11 | ./backend 12 | else 13 | cd /app 14 | atlas migrate apply --url $DATABASE_URL --baseline $BASELINE_MIGRATION 15 | ./backend 16 | fi -------------------------------------------------------------------------------- /backend/sql/migration_25_08_2023.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO public.repos 2 | SELECT id, created_at, updated_at, deleted_at, name, organisation_id 3 | FROM public.namespaces; 4 | 5 | UPDATE public.projects 6 | SET repo_id = namespace_id; 7 | 8 | UPDATE public.policies 9 | SET repo_id = namespace_id; 10 | -------------------------------------------------------------------------------- /backend/tasks/.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | tasks 3 | -------------------------------------------------------------------------------- /backend/templates/github_success.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Digger github app installed 6 | 7 | 8 | 9 | 10 | 11 |
12 | 13 | App installation successful 14 | You can now close this tab. 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /backend/templates/index.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 19 |
20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /backend/templates/static/css/main.css: -------------------------------------------------------------------------------- 1 | 2 | 3 | .bg-gradient-primary { 4 | background-color: #001529; 5 | background-image: linear-gradient(180deg, #001529 10%, #001529 100%); 6 | background-size: cover; 7 | } -------------------------------------------------------------------------------- /backend/templates/static/js/prism-live-javascript.js: -------------------------------------------------------------------------------- 1 | Prism.Live.registerLanguage("clike", { 2 | comments: { 3 | singleline: "//", 4 | multiline: ["/*", "*/"] 5 | }, 6 | snippets: { 7 | if: `if ($1) { 8 | $2 9 | }` 10 | } 11 | }); 12 | 13 | Prism.Live.registerLanguage("javascript", { 14 | snippets: { 15 | log: "console.log($1)", 16 | } 17 | }, Prism.Live.languages.clike); 18 | -------------------------------------------------------------------------------- /backend/tools.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import _ "ariga.io/atlas-provider-gorm/gormschema" 4 | -------------------------------------------------------------------------------- /backend/utils/allowlist_test.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "github.com/stretchr/testify/assert" 5 | "os" 6 | "testing" 7 | ) 8 | 9 | func TestExtractRepoName(t *testing.T) { 10 | url := "http://gitlab.com/mike/dispora.git" 11 | repoName, _ := ExtractCleanRepoName(url) 12 | assert.Equal(t, "gitlab.com/mike/dispora", repoName) 13 | 14 | url = "http://git.mydomain.com/mike/dispora.git" 15 | repoName, _ = ExtractCleanRepoName(url) 16 | assert.Equal(t, "git.mydomain.com/mike/dispora", repoName) 17 | } 18 | 19 | func TestRepoAllowList(t *testing.T) { 20 | os.Setenv("DIGGER_REPO_ALLOW_LIST", "gitlab.com/diggerdev/digger-demo,gitlab.com/diggerdev/alsoallowed") 21 | url := "http://gitlab.com/mike/dispora.git" 22 | allowed := IsInRepoAllowList(url) 23 | assert.False(t, allowed) 24 | 25 | url = "http://gitlab.com/diggerdev/digger-demo2.git" 26 | allowed = IsInRepoAllowList(url) 27 | assert.False(t, allowed) 28 | 29 | url = "http://gitlab.com/diggerdev/digger-demo.git" 30 | allowed = IsInRepoAllowList(url) 31 | assert.True(t, allowed) 32 | 33 | } 34 | -------------------------------------------------------------------------------- /backend/utils/log.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "bufio" 5 | "bytes" 6 | "log/slog" 7 | "strings" 8 | ) 9 | 10 | // SentrySlogWriter adapts Sentry's log output to a structured logger. 11 | type SentrySlogWriter struct { 12 | logger *slog.Logger 13 | } 14 | 15 | // NewSentrySlogWriter creates a new adapter to redirect Sentry logs to slog. 16 | func NewSentrySlogWriter(logger *slog.Logger) *SentrySlogWriter { 17 | return &SentrySlogWriter{logger: logger} 18 | } 19 | 20 | // Write implements io.Writer to process Sentry's logs and send them to slog. 
21 | func (s *SentrySlogWriter) Write(p []byte) (n int, err error) { 22 | scanner := bufio.NewScanner(bytes.NewReader(p)) 23 | for scanner.Scan() { 24 | line := scanner.Text() 25 | if strings.HasPrefix(line, "[Sentry]") { 26 | parts := strings.SplitN(line, " ", 4) 27 | if len(parts) >= 4 { 28 | s.logger.Debug(parts[3]) // Extract message without prefix and timestamp 29 | } else { 30 | s.logger.Debug(line) 31 | } 32 | } else { 33 | s.logger.Debug(line) 34 | } 35 | } 36 | return len(p), nil 37 | } 38 | -------------------------------------------------------------------------------- /backend/version.txt: -------------------------------------------------------------------------------- 1 | DEFAULT -------------------------------------------------------------------------------- /cli/.gitignore: -------------------------------------------------------------------------------- 1 | **/digger 2 | !/pkg/digger 3 | !/cmd/digger 4 | -------------------------------------------------------------------------------- /cli/cmd/digger/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "github.com/diggerhq/digger/cli/pkg/usage" 6 | "os" 7 | ) 8 | 9 | /* 10 | Exit codes: 11 | 0 - No errors 12 | 1 - Failed to read digger digger_config 13 | 2 - Failed to create lock provider 14 | 3 - Failed to find auth token 15 | 4 - Failed to initialise CI context 16 | 5 - 17 | 6 - failed to process CI event 18 | 7 - failed to convert event to command 19 | 8 - failed to execute command 20 | 10 - No CI detected 21 | */ 22 | 23 | func main() { 24 | if len(os.Args) == 1 { 25 | os.Args = append([]string{os.Args[0]}, "default") 26 | } 27 | if err := rootCmd.Execute(); err != nil { 28 | usage.ReportErrorAndExit("", fmt.Sprintf("Error occurred during command exec: %v", err), 8) 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /cli/dockerfiles/branch/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM hashicorp/terraform as terraform_builder 2 | 3 | FROM golang:1.22.3-alpine as digger_builder 4 | 5 | ARG DIGGER_BRANCH=develop 6 | ARG OS=linux 7 | ARG ARCH=amd64 8 | 9 | RUN apk add --no-cache git 10 | RUN git clone https://github.com/diggerhq/digger.git --depth 1 --branch ${DIGGER_BRANCH} 11 | RUN cd digger && env GOOS=${OS} GOARCH=${ARCH} CGO_ENABLED=0 go build -o digger ./cli/cmd/digger && chmod +x digger && mv digger /usr/local/bin/digger 12 | 13 | FROM alpine:latest 14 | 15 | COPY --from=terraform_builder /bin/terraform /usr/local/bin/terraform 16 | COPY --from=digger_builder /usr/local/bin/digger /usr/local/bin/digger 17 | 18 | 19 | -------------------------------------------------------------------------------- /cli/dockerfiles/release/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM hashicorp/terraform as terraform_builder 2 | 3 | FROM alpine:latest 4 | 5 | ARG DIGGER_VERSION=latest 6 | ARG OS=Linux 7 | ARG ARCH=X64 8 | 9 | COPY --from=terraform_builder /bin/terraform /bin/terraform 10 | 11 | RUN apk add --no-cache curl 12 | RUN curl -sL https://github.com/diggerhq/digger/releases/download/${DIGGER_VERSION}/digger-${OS}-${ARCH} -o digger 13 | RUN chmod +x digger 14 | RUN mv digger /usr/local/bin/digger 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /cli/pkg/core/drift/drift.go: 
-------------------------------------------------------------------------------- 1 | package drift 2 | 3 | type Notification interface { 4 | Send(projectName string, plan string) error 5 | } 6 | -------------------------------------------------------------------------------- /cli/pkg/digger/io.go: -------------------------------------------------------------------------------- 1 | package digger 2 | 3 | import ( 4 | "fmt" 5 | "github.com/diggerhq/digger/libs/ci" 6 | "github.com/diggerhq/digger/libs/scheduler" 7 | "log" 8 | ) 9 | 10 | func UpdateAggregateStatus(batch *scheduler.SerializedBatch, prService ci.PullRequestService) error { 11 | // TODO: Introduce batch-level 12 | isPlan, err := batch.IsPlan() 13 | if err != nil { 14 | log.Printf("failed to get batch job plan/apply status: %v", err) 15 | return fmt.Errorf("failed to get batch job plan/apply status: %v", err) 16 | } 17 | 18 | if isPlan { 19 | prService.SetStatus(batch.PrNumber, batch.ToStatusCheck(), "digger/plan") 20 | prService.SetStatus(batch.PrNumber, "pending", "digger/apply") 21 | } else { 22 | prService.SetStatus(batch.PrNumber, "success", "digger/plan") 23 | prService.SetStatus(batch.PrNumber, batch.ToStatusCheck(), "digger/apply") 24 | } 25 | return nil 26 | } 27 | -------------------------------------------------------------------------------- /cli/pkg/digger/isNonEmptyPlan.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/cli/pkg/digger/isNonEmptyPlan.txt -------------------------------------------------------------------------------- /cli/pkg/drift/Provider.go: -------------------------------------------------------------------------------- 1 | package drift 2 | 3 | import ( 4 | "fmt" 5 | core_drift "github.com/diggerhq/digger/cli/pkg/core/drift" 6 | "github.com/diggerhq/digger/libs/ci" 7 | "os" 8 | ) 9 | 10 | type DriftNotificationProvider interface { 11 | Get(prService ci.PullRequestService) (core_drift.Notification, error) 12 | } 13 | 14 | type DriftNotificationProviderBasic struct{} 15 | 16 | func (d DriftNotificationProviderBasic) Get(prService ci.PullRequestService) (core_drift.Notification, error) { 17 | slackNotificationUrl := os.Getenv("INPUT_DRIFT_DETECTION_SLACK_NOTIFICATION_URL") 18 | var notification core_drift.Notification 19 | if slackNotificationUrl != "" { 20 | notification = SlackNotification{slackNotificationUrl} 21 | } else { 22 | return nil, fmt.Errorf("could not identify drift mode, please specify slack using env variable INPUT_DRIFT_DETECTION_SLACK_NOTIFICATION_URL") 23 | } 24 | return notification, nil 25 | } 26 | -------------------------------------------------------------------------------- /cli/pkg/usage/usage_test.go: -------------------------------------------------------------------------------- 1 | package usage 2 | 3 | import ( 4 | "github.com/stretchr/testify/assert" 5 | "os" 6 | "testing" 7 | ) 8 | 9 | // util function for testing of send usage record 10 | func TestSendingUsageRecord(t *testing.T) { 11 | if os.Getenv("MANUAL_TEST") == "" { 12 | t.Skip("Skipping manual test") 13 | } 14 | err := SendUsageRecord("repoOwner", "testEvent", "testing") 15 | assert.Nil(t, err) 16 | } 17 | -------------------------------------------------------------------------------- /cli/pkg/utils/commands.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "fmt" 5 | "log" 6 | ) 7 | 8 | type Command struct { 9 | 
Name string 10 | Description string 11 | } 12 | 13 | var availableCommands = []Command{ 14 | {"digger help", "Display help information"}, 15 | {"digger version", "Display version information"}, 16 | {"digger apply", "Apply the Terraform digger_config"}, 17 | {"digger plan", "Plan the Terraform digger_config"}, 18 | {"digger show-projects", "Show the impacted projects"}, 19 | {"digger lock", "Lock Terraform project"}, 20 | {"digger unlock", "Unlock the Terraform project"}, 21 | } 22 | 23 | func DisplayCommands() { 24 | log.Println("Use the following commands to get started:") 25 | for _, command := range availableCommands { 26 | log.Printf(" %s: %s\n", command.Name, command.Description) 27 | } 28 | } 29 | 30 | // display commands as string 31 | func GetCommands() string { 32 | var commands string 33 | for _, command := range availableCommands { 34 | commands += fmt.Sprintf(" %s: %s\n", command.Name, command.Description) 35 | } 36 | return commands 37 | } 38 | -------------------------------------------------------------------------------- /cli/pkg/utils/io.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "archive/zip" 5 | "fmt" 6 | "io" 7 | "os" 8 | "path/filepath" 9 | ) 10 | 11 | func ExtractZip(zipFilePath string, outDir string) error { 12 | 13 | // Open the zip file 14 | zipReader, err := zip.OpenReader(zipFilePath) 15 | if err != nil { 16 | return fmt.Errorf("failed to open zip: %w", err) 17 | } 18 | defer zipReader.Close() 19 | 20 | for _, file := range zipReader.File { 21 | path := filepath.Join(outDir, file.Name) 22 | 23 | if file.FileInfo().IsDir() { 24 | os.MkdirAll(path, os.ModePerm) 25 | continue 26 | } 27 | 28 | if err := os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil { 29 | return fmt.Errorf("failed to create directory: %w", err) 30 | } 31 | 32 | dstFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, file.Mode()) 33 | if err != nil { 34 | return fmt.Errorf("failed to create file: %w", err) 35 | } 36 | 37 | srcFile, err := file.Open() 38 | if err != nil { 39 | dstFile.Close() 40 | return fmt.Errorf("failed to open zip file: %w", err) 41 | } 42 | 43 | _, err = io.Copy(dstFile, srcFile) 44 | srcFile.Close() 45 | dstFile.Close() 46 | 47 | if err != nil { 48 | return fmt.Errorf("failed to extract file: %w", err) 49 | } 50 | } 51 | return nil 52 | } 53 | -------------------------------------------------------------------------------- /cli/pkg/utils/strings.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import "strings" 4 | 5 | func ParseRepoNamespace(namespace string) (string, string) { 6 | splits := strings.Split(namespace, "/") 7 | SCMOrganisation := splits[0] 8 | SCMrepository := splits[1] 9 | return SCMOrganisation, SCMrepository 10 | } 11 | -------------------------------------------------------------------------------- /cli/pkg/utils/version.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import "fmt" 4 | 5 | var version = "0.1.6" 6 | 7 | // GetVersion returns the current version of the package 8 | func GetVersion() string { 9 | verOutput := fmt.Sprintf("you are using digger version %s", version) 10 | return verOutput 11 | } 12 | -------------------------------------------------------------------------------- /cli_e2e/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/diggerhq/digger/cli_e2e 2 | 
3 | go 1.24.0 4 | 5 | require github.com/stretchr/testify v1.9.0 6 | 7 | require ( 8 | github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect 9 | github.com/kr/pretty v0.3.1 // indirect 10 | github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect 11 | github.com/rogpeppe/go-internal v1.12.0 // indirect 12 | gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect 13 | gopkg.in/yaml.v3 v3.0.1 // indirect 14 | ) 15 | -------------------------------------------------------------------------------- /cli_e2e/go.sum: -------------------------------------------------------------------------------- 1 | github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= 2 | github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= 3 | github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= 4 | github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= 5 | github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= 6 | github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= 7 | gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= 8 | gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= 9 | -------------------------------------------------------------------------------- /dgctl/.gitignore: -------------------------------------------------------------------------------- 1 | .archive 2 | generated/ 3 | dgctl.generated.json 4 | .dgctl 5 | dgctl 6 | -------------------------------------------------------------------------------- /dgctl/dgctl.json: -------------------------------------------------------------------------------- 1 | { 2 | "target": "diggerhq/tf-module-bundler@master", 3 | "aws_region": "us-east-1", 4 | "version": "0.0.32", 5 | "id": "a7a7739f-9d89-4447-bd01-672dc525ce44", 6 | "blocks": [ 7 | { 8 | "aws_app_identifier": "default_network-8e38d890", 9 | "name": "default_network", 10 | "type": "vpc" 11 | } 12 | ], 13 | "created": 1715462481906 14 | } -------------------------------------------------------------------------------- /dgctl/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/diggerhq/digger/dgctl 2 | 3 | go 1.24.0 4 | 5 | 6 | require ( 7 | github.com/inconshreveable/mousetrap v1.1.0 // indirect 8 | github.com/loov/hrtime v1.0.3 // indirect 9 | github.com/loov/watchrun v0.6.0 // indirect 10 | github.com/spf13/cobra v1.8.0 // indirect 11 | github.com/spf13/pflag v1.0.5 // indirect 12 | golang.org/x/sys v0.7.0 // indirect 13 | ) 14 | -------------------------------------------------------------------------------- /dgctl/main.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright © 2024 diggerhq 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
15 | */ 16 | package main 17 | 18 | import ( 19 | "github.com/diggerhq/digger/dgctl/cmd" 20 | "log" 21 | "os" 22 | ) 23 | 24 | func init() { 25 | log.SetOutput(os.Stdout) 26 | log.SetFlags(log.Ldate | log.Ltime) 27 | } 28 | 29 | func main() { 30 | cmd.Execute() 31 | } 32 | -------------------------------------------------------------------------------- /docs/ce/azure-specific/azure-devops-locking-connection-methods.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Azure devops locking connection methods" 3 | --- 4 | 5 | **How does it work?** 6 | 7 | There is one mandatory environment variable the user will have to set in order to use Azure-based locks: `DIGGER_AZURE_AUTH_METHOD`, which can take one of the four values below: 8 | 9 | * SHARED\_KEY 10 | 11 | * CONNECTION\_STRING 12 | 13 | * CLIENT\_SECRET 14 | 15 | * MANAGED\_IDENTITY 16 | 17 | Then, depending on the value of `DIGGER_AZURE_AUTH_METHOD`, the user will have to set other environment variables. 18 | 19 | 1. **SHARED\_KEY** 20 | * `DIGGER_AZURE_SA_NAME`: Storage account name 21 | * `DIGGER_AZURE_SHARED_KEY`: shared key of the storage account 22 | 23 | 2. **CONNECTION\_STRING** 24 | * `DIGGER_AZURE_CONNECTION_STRING`: connection string 25 | 26 | 3. **CLIENT\_SECRET** 27 | * `DIGGER_AZURE_TENANT_ID`: tenant id to use 28 | * `DIGGER_AZURE_CLIENT_ID`: client id of the service principal 29 | * `DIGGER_AZURE_CLIENT_SECRET`: secret of the service principal 30 | * `DIGGER_AZURE_SA_NAME`: storage account name 31 | 32 | 4. **MANAGED\_IDENTITY** 33 | * `DIGGER_AZURE_SA_NAME`: storage account name 34 | -------------------------------------------------------------------------------- /docs/ce/cloud-providers/setting-up-separate-mgmt-account.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Setting up separate mgmt account" 3 | --- 4 | 5 | You can use separate AWS accounts for Digger locks and target infrastructure. 6 | 7 | * If you only pass the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` env vars, the same account will be used for both 8 | 9 | * If in addition you also pass the `DIGGER_AWS_ACCESS_KEY_ID` and `DIGGER_AWS_SECRET_ACCESS_KEY` vars, then those will be used for Digger locks, and the first pair will be used as the target account -------------------------------------------------------------------------------- /docs/ce/features/commentops.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "CommentOps" 3 | --- 4 | 5 | #### Supported commands 6 | 7 | `digger apply` \- will check PR locks, run terraform apply on all projects affected in the current PR, unlock projects when necessary and possibly merge the PR depending on configuration. 8 | 9 | `digger plan` \- will lock and check PR locks, run terraform plan on all projects affected in the current PR 10 | 11 | `digger lock` \- will lock projects in the current PR 12 | 13 | `digger unlock` \- will unlock projects in the current PR. It's useful to circumvent any trouble related to locking of projects. 14 | 15 | #### Supported flags 16 | 17 | `digger apply/plan` 18 | 19 | * **\-p** enables the user to run the command for a particular project, e.g.
`digger plan -p staging` -------------------------------------------------------------------------------- /docs/ce/features/concurrency.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Concurrency" 3 | --- 4 | 5 | With Digger, plans / apply jobs that do not depend on each other run in parallel. This makes execution much faster, often by a large factor, for example when a change in a module affects multiple state files. 6 | 7 | Digger does not run its own compute; instead it's an orchestrator that starts jobs in your CI. So concurrency is natural with this architecture. -------------------------------------------------------------------------------- /docs/ce/features/plan-persistence.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Plan Persistence" 3 | --- 4 | 5 | By default digger will run an apply based on the pull request branch files (no artefacts stored). In order to configure plan artefacts you can configure the inputs for storing them as github artefacts or aws buckets or gcp buckets. The corresponding artefacts to be configured can be found in [storing plans in a bucket](/ce/howto/store-plans-in-a-bucket) 6 | -------------------------------------------------------------------------------- /docs/ce/features/plan-preview.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Plan preview" 3 | --- 4 | 5 | Digger runs `terraform plan` whenever a pull request is raised, and appends the output as a comment 6 | 7 | 8 | 9 | The Digger run report also contains other useful info, e.g. results of policy checks. 10 | 11 | You can also re-plan by commenting `digger plan` (see [CommentOps](/features/commentops)) 12 | 13 | 14 | * The default way of working with digger is creating a pull request, previewing the plan within the PR as a comment, approving and applying the change within the PR, and then merging the pull request to the default branch 15 | 16 | * This guarantees that any pull request merged to the default branch reflects the infrastructure (since the apply must succeed before merging the pull request) 17 | 18 | * By performing locks on the pull request we guarantee that the plan preview on the pull request is not stale, i.e.
the infrastructure was not touched by another subsequent change 19 | 20 | * Code in github: [https://github.com/diggerhq/digger/blob/5815775095d7380281c71c7c3aa63ca1b374365f/pkg/digger/digger.go#L228](https://github.com/diggerhq/digger/blob/5815775095d7380281c71c7c3aa63ca1b374365f/pkg/digger/digger.go#L228) 21 | -------------------------------------------------------------------------------- /docs/ce/features/pr-level-locks.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "PR-level locks" 3 | --- 4 | 5 | * For every pull request we perform a lock when the pull request is opened and unlock it when the pull request is merged; this is to avoid making a plan preview stale 6 | 7 | * For GCP, locking is performed using buckets that are strongly consistent: [https://github.com/diggerhq/digger/blob/80289922227f225d887feb74749b4daef8b441f8/pkg/gcp/gcp\_lock.go#L13](https://github.com/diggerhq/digger/blob/80289922227f225d887feb74749b4daef8b441f8/pkg/gcp/gcp%5Flock.go#L13) 8 | 9 | * These options are configured and the locking can be disabled entirely if it is not needed 10 | 11 | * The locking interface is very simple and is based on `Lock()` and `Unlock()` operations: [https://github.com/diggerhq/digger/blob/5815775095d7380281c71c7c3aa63ca1b374365f/pkg/locking/locking.go#L40](https://github.com/diggerhq/digger/blob/5815775095d7380281c71c7c3aa63ca1b374365f/pkg/locking/locking.go#L40) 12 | 13 | * A pull request acquires a lock for every project impacted by this PR and all dependent projects -------------------------------------------------------------------------------- /docs/ce/gcp/using-gcp-bucket-for-locks.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Using GCP bucket for locks" 3 | --- 4 | 5 | You can configure Digger to use a GCP bucket for locks. To do that, set the following setting in the workflow file: 6 | 7 | - `google-lock-bucket` action input pointing to your bucket 8 | -------------------------------------------------------------------------------- /docs/ce/howto/auto-merge.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Auto-merge" 3 | --- 4 | 5 | You can set the `auto_merge: true` option to automatically merge the PR when all checks pass (including successful apply). Note that it's a top-level setting, not project level: 6 | 7 | ``` 8 | projects: 9 | - name: development 10 | dir: dev 11 | - name: production 12 | dir: prod 13 | auto_merge: true 14 | auto_merge_strategy: "squash" # optional, only supported on github 15 | ``` 16 | 17 | If you are using github VCS you can also specify 18 | `auto_merge_strategy` to define the type of strategy. Possible values are: 19 | 20 | - squash: for squash merge 21 | - rebase: for rebase merge 22 | - merge: for merge commits -------------------------------------------------------------------------------- /docs/ce/howto/commenting-strategies.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Commenting strategies" 3 | --- 4 | 5 | * The way digger comments plan previews on PRs can be configured based on the required level of verbosity or summarisation.
This is configured using the `reporting-strategy` parameter: [https://github.com/diggerhq/digger/blob/develop/action.yml#L105](https://github.com/diggerhq/digger/blob/develop/action.yml#L105) 6 | 7 | * If you have many projects and would like to not pollute your pull requests with several plan comments for every project, you should choose the `comments per run` or `latest run` reporting strategies, which will shrink comments to a single comment per push 8 | 9 | * If you would like a more verbose output you should choose the `multiple comments` strategy 10 | 11 | * More details about the reporting strategy can be seen in the reporter interface: [https://github.com/diggerhq/digger/blob/5815775095d7380281c71c7c3aa63ca1b374365f/pkg/reporting/reporting.go#L12](https://github.com/diggerhq/digger/blob/5815775095d7380281c71c7c3aa63ca1b374365f/pkg/reporting/reporting.go#L12) 12 | -------------------------------------------------------------------------------- /docs/ce/howto/custom-commands.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Custom commands" 3 | --- 4 | 5 | You can specify custom steps using the `workflows` section in digger.yml. Handy for integration with other CLIs like infracost. 6 | 7 | ``` 8 | projects: 9 | - name: production 10 | dir: prod 11 | workflow: with-infracost 12 | 13 | workflows: 14 | with-infracost: 15 | plan: 16 | steps: 17 | - init 18 | - plan 19 | - run: infracost breakdown --path=. | tee -a $DIGGER_OUT 20 | ``` 21 | 22 | ## Environment variables 23 | 24 | Digger makes the following environment variables available to custom commands: 25 | - `$DEFAULT_BRANCH` 26 | - `$DIGGER_OUT` 27 | - `$PR_BRANCH` 28 | - `$PROJECT_NAME` 29 | 30 | These can be used to achieve workflows like [infracost diff](/ce/howto/using-infracost) 31 | 32 | ## $DIGGER_OUT 33 | 34 | If your custom command writes into a file path defined in the `$DIGGER_OUT` env variable, then its content will be appended to the comment as "additional output": 35 | 36 | ![](/images/custom-command-output-infracost.png) 37 | 38 | The value of `$DIGGER_OUT` defaults to `$RUNNER_TEMP/digger-out.log`; you can change that if needed by setting the env var explicitly. 39 | -------------------------------------------------------------------------------- /docs/ce/howto/disable-auto-checkout.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Disable auto-checkout" 3 | description: "By default Digger checks out the latest code from the branch prior to every run. So you don't need to configure checkout in your workflow file."
4 | --- 5 | 6 | If you prefer not to check out automatically (for example if you have your own checkout logic as a previous step), set the `configure-checkout` input to `false` in your workflow -------------------------------------------------------------------------------- /docs/ce/howto/disable-locking.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Disable locking" 3 | --- 4 | 5 | In order to disable locking repo-wide, you can add a top-level flag to your digger.yml: 6 | 7 | ``` 8 | pr_locks: false 9 | 10 | projects: 11 | - name: dev 12 | dir: dev/ 13 | ``` -------------------------------------------------------------------------------- /docs/ce/howto/disable-telemetry.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Disable telemetry" 3 | --- 4 | 5 | Digger collects anonymized telemetry. See [usage.go](https://github.com/diggerhq/digger/blob/develop/cli/pkg/usage/usage.go) for detail. You can disable telemetry collection either by setting `telemetry: false` in digger.yml, or by setting the `TELEMETRY` env variable to `false`. 6 | -------------------------------------------------------------------------------- /docs/ce/howto/draft-prs.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Draft PRs" 3 | --- 4 | 5 | By default, Digger ignores Draft pull requests. You can enable support of draft PRs by setting the `allow_draft_prs` flag in digger.yml: 6 | 7 | ``` 8 | allow_draft_prs: true 9 | ``` 10 | 11 | ## Unlocking on changing back to draft 12 | 13 | When a PR changes status from "ready to review" back to "draft": 14 | 15 | - if `allow_draft_prs` is `false`, digger will release the lock 16 | - if `allow_draft_prs` is `true`, digger will not do anything (because it's considered a PR like any other) 17 | -------------------------------------------------------------------------------- /docs/ce/howto/include-exclude-patterns.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Include / exclude patterns" 3 | --- 4 | 5 | You can specify wildcard and glob patterns in digger.yml to include multiple directories into a project. A common use case for this is if you have multiple environment folders and they import from a common `modules` directory: 6 | 7 | ```yml 8 | development/ 9 | main.tf 10 | production/ 11 | main.tf 12 | modules/ 13 | shared_moduleA/ 14 | dev_only_module/ 15 | ``` 16 | 17 | 18 | The path of include / exclude patterns is relative to the digger.yml location - NOT the project location 19 | 20 | 21 | If you wanted to trigger plans for the `modules/` folder in both dev and prod projects, you would include it in the `include_patterns` key. Similarly, you put anything which you want to ignore in the `exclude_patterns` key (exclude takes precedence over includes).
22 | 23 | ```yml 24 | projects: 25 | - name: dev 26 | dir: ./development 27 | include_patterns: ["./modules/**"] 28 | workflow: default_workflow 29 | - name: prod 30 | dir: ./production 31 | include_patterns: ["./modules/**"] 32 | exclude_patterns: ["./modules/dev_only_module/**"] 33 | ``` -------------------------------------------------------------------------------- /docs/ce/howto/policy-overrides.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Policy overrides" 3 | --- 4 | 5 | You can configure Access Policy (OPA) in Digger to override prior Plan Policy checks. The Access policy is run before every apply, and by default if there are any policy violations in the plan it'd fail. However, you can use the `approvers` list in your access policy to override this behavior, for example if the PR has been approved by specific users or teams. Or simply check if it's not empty and then pass, which would achieve the logic "only allow apply if approved". 6 | 7 | [See video](https://www.loom.com/share/4dec05636b2b4b0187faf91e5c18879a?sid=58811567-00b3-4246-b87f-bd8dc06016f1) -------------------------------------------------------------------------------- /docs/ce/howto/specify-terraform-version.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Specify terraform version" 3 | --- 4 | 5 | In order to specify which terraform version to run you can do so in the digger_workflow.yml file. 6 | 7 | This example shows how you can do it for terraform: 8 | 9 | ``` 10 | jobs: 11 | destroy: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: digger destroy 15 | uses: diggerhq/digger@vLatest 16 | with: 17 | setup-terraform: true 18 | terraform-version: v1.5.5 19 | env: 20 | GITHUB_CONTEXT: ${{ toJson(github) }} 21 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 22 | ``` 23 | 24 | And similarly for OpenTofu: 25 | 26 | ``` 27 | jobs: 28 | destroy: 29 | runs-on: ubuntu-latest 30 | steps: 31 | - name: digger destroy 32 | uses: diggerhq/digger@vLatest 33 | with: 34 | setup-opentofu: true 35 | opentofu-version: v1.6.1 36 | env: 37 | GITHUB_CONTEXT: ${{ toJson(github) }} 38 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 39 | ``` 40 | 41 | 42 | When you use OpenTofu, you also need to enable `opentofu: true` in the project settings in digger.yml. 43 | 44 | -------------------------------------------------------------------------------- /docs/ce/howto/using-checkov.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Using Checkov" 3 | description: "You can configure Digger to run Checkov policy-as-code as an additional step:" 4 | --- 5 | 6 | ``` 7 | projects: 8 | - name: project_a_d 9 | dir: ./project_a/development 10 | workflow: project_a 11 | 12 | workflows: 13 | project_a: 14 | plan: 15 | steps: 16 | - init 17 | - plan 18 | - run: checkov -d . --framework terraform 19 | ``` 20 | 21 | This docs page needs improvement. Please consider contributing to [docs](https://github.com/diggerhq/digger/tree/develop/docs). Here is the [relevant PR](https://github.com/diggerhq/digger/pull/267) implementing this feature -------------------------------------------------------------------------------- /docs/ce/howto/using-opa-conftest.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Inline policies (conftest)" 3 | --- 4 | 5 | 6 | This is the most basic way to use OPA policies with Digger. 
For more advanced 7 | use cases, check out [OPA policies](/ee/opa) 8 | 9 | 10 | You can configure the Digger CLI to run Conftest to check your Terraform plan output against Open Policy Agent policies. 11 | 12 | ## Pre-requisites 13 | 14 | - The Conftest binary needs to be installed into your CI pipeline (see [Conftest Docs](https://www.conftest.dev/install/)) 15 | 16 | - OPA policies (rego files) under the `/policies` directory in your repo 17 | 18 | ## Digger.yml configuration 19 | 20 | This example assumes the terraform is in the `prod` directory. 21 | 22 | Don't forget to update the json file name as well if your directory is named differently. 23 | 24 | ``` 25 | projects: 26 | - name: prod 27 | dir: prod 28 | workflow: my_custom_workflow 29 | workflows: 30 | my_custom_workflow: 31 | plan: 32 | steps: 33 | - init: 34 | - plan 35 | - run: "conftest test ./prod.json -p ../policies" 36 | workflow_configuration: 37 | on_pull_request_pushed: [digger plan] 38 | on_pull_request_closed: [digger unlock] 39 | on_commit_to_default: [digger apply] 40 | ``` 41 | -------------------------------------------------------------------------------- /docs/ce/howto/workspaces.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Workspaces" 3 | description: "You can specify a Workspace for a project by using the `workspace` option in `digger.yml`" 4 | --- 5 | 6 | 7 | This is about Terraform CLI Workspaces - not Terraform Cloud Workspaces. Those 8 | are different things for historic reasons. [Hashicorp 9 | article](https://developer.hashicorp.com/terraform/cloud-docs/workspaces#terraform-cloud-vs-terraform-cli-workspaces) 10 | 11 | 12 | So you can have 2 projects linked to the same directory but using different workspaces, like this: 13 | 14 | ``` 15 | projects: 16 | - name: dev 17 | dir: ./ 18 | workspace: dev 19 | - name: prod 20 | dir: ./ 21 | workspace: prod 22 | ``` 23 | 24 | Example repository: [https://github.com/diggerhq/digger_demo_workspaces/](https://github.com/diggerhq/digger%5Fdemo%5Fworkspaces/) 25 | -------------------------------------------------------------------------------- /docs/ce/securing-digger/external-provider.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "External providers code execution" 3 | --- 4 | 5 | Digger executes terraform in github actions within privileged environments. Since terraform has the ability 6 | to execute arbitrary code based on data blocks or external providers, this can allow a user with malicious 7 | intent to expose the environment variables within the CI environment, potentially leaking cloud secrets. 8 | 9 | How to avoid this? 10 | --- 11 | Currently we are exploring solutions to avoid this security threat. The first thing you should do is to 12 | not use long-lived credentials to connect to your cloud account. Instead, rely on OIDC for short-lived 13 | credentials to minimise the exposure from this threat. Secondly, it's important to ensure that only trusted 14 | individuals are allowed to update the terraform code. We are also working on additional solutions to secure 15 | against this threat.
For more details and to engage in the discussion, please take a look at this github issue: 16 | https://github.com/diggerhq/digger/issues/1530 17 | -------------------------------------------------------------------------------- /docs/ce/troubleshooting/comments.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "PR Comment Issues" 3 | --- 4 | 5 | ## Links to jobs not working 6 | 7 | Sometimes the link to job status leads to the PR instead of the Action job. E.g. if the job has failed, you click on "failed" in the summary comment, but it goes to the same PR. 8 | 9 | The likely reason is that the step that exposes the job id is missing from the workflow file. Add it right after checkout (most importantly, before Digger): 10 | 11 | ``` 12 | - name: ${{ fromJSON(github.event.inputs.spec).job_id }} 13 | run: echo "job id ${{ fromJSON(github.event.inputs.spec).job_id }}" 14 | ``` 15 | -------------------------------------------------------------------------------- /docs/ce/troubleshooting/importing-existing-resources.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Importing existing resources" 3 | --- 4 | 5 | In some cases, due to drift or failure to update terraform state, terraform will attempt to create an existing resource in your environment, and it will lead to a failure message such as the following: 6 | 7 | ``` 8 | Error: creating ECR Repository (xxx): 9 | RepositoryAlreadyExistsException: The repository with name 'xxx' already exists in the 10 | registry with id ... 11 | ``` 12 | 13 | In this case you need to import this resource manually using a command such as: 14 | 15 | ``` 16 | terraform import aws_ecr_repository.service test-service 17 | ``` 18 | 19 | To perform this in digger you need to clone your terraform and run the custom command locally. Run terraform init and make sure that it is configured to connect to your state backend. To ensure this is the case, try to run terraform plan and check that the plan preview is as expected. Finally you can run the terraform import command in order to update your state. 20 | 21 | **Coming soon:** Ability to run custom terraform commands directly in digger without the need to clone and run custom commands (all will be controlled via RBAC) -------------------------------------------------------------------------------- /docs/ee/ai-summaries.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "AI Summaries" 3 | --- 4 | 5 | AI Summaries allows you to generate a summary of multiple plans when you create a PR. This significantly reduces the cognitive load 6 | on a reviewer of that PR. Let's say someone updated a module that is relied on by multiple projects in a monorepo; this would lead to 20 different plans like in this screenshot: 7 | 8 | ![multiple plans](/images/ee/ai-summaries.png) 9 | 10 | 11 | With AI summaries enabled, a summary comment is posted summarising all of the plans below it. 12 | This means that the reviewer now only needs to read the summary and glance through all the plans, saving precious reviewer time! 13 | 14 | This is currently an invite-only feature so please [book a demo](https://digger.dev/) with us to request access!
-------------------------------------------------------------------------------- /docs/ee/dashboard.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Dashboard" 3 | --- 4 | 5 | Digger Dashboard is a UI for managing your projects, connected repositories, plan / apply runs, and OPA policies. 6 | 7 | It is currently EE only; we are planning to bring some of its features to CE at some point. 8 | 9 | [Book a demo](https://calendly.com/diggerdev/diggerdemo) 10 | 11 | 12 | -------------------------------------------------------------------------------- /docs/ee/fips-140.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "FIPS 140 standard" 3 | --- 4 | 5 | You can use the Digger binary with the FIPS 140 standard. FIPS 140 (Federal Information Processing Standard Publication 140) is a U.S. government standard that specifies security requirements for cryptographic modules protecting sensitive information. 6 | 7 | As of version v0.6.101, the Digger backend and CLI are both compiled separately with FIPS 140 enabled. In order to enable it for GitHub, follow these steps: 8 | 9 | - For the backend you need to ensure you use the right Docker image, `_backend_ee_fips`, during the pull 10 | - For the CLI you need to add the following argument in addition to `ee: true`: 11 | 12 | ``` 13 | - diggerhq/digger@vLatest 14 | with: 15 | ee: 'true' 16 | fips: 'true' 17 | ``` 18 | 19 | If you are using GitLab or another VCS, just ensure that you are downloading the FIPS-enabled binary, which is suffixed with '_fips' -------------------------------------------------------------------------------- /docs/ee/rbac.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "RBAC" 3 | --- 4 | 5 | Digger Enterprise supports granular Role-based Access Controls at 3 levels: 6 | 7 | - Organisation 8 | - Repository 9 | - Project 10 | 11 | You can set up RBAC in Digger by configuring an Access Policy at the appropriate level of your Management Repo. See [OPA Policies](/ee/opa) for more detail. 12 | 13 | Every Access Policy is passed the following details about the attempted operation from GitHub: 14 | 15 | - user ID of the user who initiated the operation 16 | - team 17 | - list of PR approvers 18 | 19 | This way you can implement advanced workflows such as [policy overrides](https://github.com/diggerhq/demo-policy-overrides/pull/9) based on roles and granular permissions.
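For illustration, a minimal access policy could look like the sketch below. The input field names (`input.user`, `input.approvals`) are assumptions made for this example — consult the [OPA Policies](/ee/opa) page for the exact input schema passed by your Digger version:

```
package digger

# Minimal sketch of an access policy; the input field names are illustrative assumptions.
default allow = false

# Allow a designated admin user unconditionally.
allow {
    input.user == "platform-admin"
}

# Otherwise require at least one PR approval.
allow {
    count(input.approvals) >= 1
}
```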
20 | 21 | [Book a demo](https://calendly.com/diggerdev/diggerdemo) 22 | -------------------------------------------------------------------------------- /docs/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/favicon.png -------------------------------------------------------------------------------- /docs/images/Screenshot2025-05-26at19.00.33.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/Screenshot2025-05-26at19.00.33.png -------------------------------------------------------------------------------- /docs/images/Screenshot2025-05-26at19.00.38.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/Screenshot2025-05-26at19.00.38.png -------------------------------------------------------------------------------- /docs/images/Screenshot2025-05-26at19.00.55.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/Screenshot2025-05-26at19.00.55.png -------------------------------------------------------------------------------- /docs/images/Screenshot2025-05-26at19.14.34.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/Screenshot2025-05-26at19.14.34.png -------------------------------------------------------------------------------- /docs/images/Screenshot2025-05-26at19.58.03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/Screenshot2025-05-26at19.58.03.png -------------------------------------------------------------------------------- /docs/images/Screenshot2025-05-26at21.31.18.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/Screenshot2025-05-26at21.31.18.png -------------------------------------------------------------------------------- /docs/images/buildkite/buildkite.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/buildkite/buildkite.png -------------------------------------------------------------------------------- /docs/images/buildkite/github_comment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/buildkite/github_comment.png -------------------------------------------------------------------------------- /docs/images/configuration/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/configuration/1.png -------------------------------------------------------------------------------- /docs/images/configuration/1.webp: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/configuration/1.webp -------------------------------------------------------------------------------- /docs/images/configuration/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/configuration/2.png -------------------------------------------------------------------------------- /docs/images/configuration/2.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/configuration/2.webp -------------------------------------------------------------------------------- /docs/images/configuration/3.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/configuration/3.webp -------------------------------------------------------------------------------- /docs/images/configuration/4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/configuration/4.png -------------------------------------------------------------------------------- /docs/images/configuration/5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/configuration/5.png -------------------------------------------------------------------------------- /docs/images/configuration/image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/configuration/image.png -------------------------------------------------------------------------------- /docs/images/configuration/infracost-example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/configuration/infracost-example.png -------------------------------------------------------------------------------- /docs/images/custom-command-output-infracost.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/custom-command-output-infracost.png -------------------------------------------------------------------------------- /docs/images/digger-dashboard-screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/digger-dashboard-screenshot.png -------------------------------------------------------------------------------- /docs/images/digger-dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/digger-dashboard.png 
-------------------------------------------------------------------------------- /docs/images/digger-plan-preview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/digger-plan-preview.png -------------------------------------------------------------------------------- /docs/images/drift-issues.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/drift-issues.png -------------------------------------------------------------------------------- /docs/images/ee/ai-summaries.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/ee/ai-summaries.png -------------------------------------------------------------------------------- /docs/images/ee/example-plan.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/ee/example-plan.png -------------------------------------------------------------------------------- /docs/images/ee/gitlab-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/ee/gitlab-1.png -------------------------------------------------------------------------------- /docs/images/ee/gitlab-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/ee/gitlab-2.png -------------------------------------------------------------------------------- /docs/images/ee/multi-tenant-github.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/ee/multi-tenant-github.png -------------------------------------------------------------------------------- /docs/images/gcp/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/gcp/1.png -------------------------------------------------------------------------------- /docs/images/gcp/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/gcp/2.png -------------------------------------------------------------------------------- /docs/images/gcp/3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/gcp/3.png -------------------------------------------------------------------------------- /docs/images/gcp/4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/gcp/4.png -------------------------------------------------------------------------------- /docs/images/gcp/image.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/gcp/image.png -------------------------------------------------------------------------------- /docs/images/getting-started/1.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/1.webp -------------------------------------------------------------------------------- /docs/images/getting-started/2.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/2.webp -------------------------------------------------------------------------------- /docs/images/getting-started/3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/3.png -------------------------------------------------------------------------------- /docs/images/getting-started/4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/4.png -------------------------------------------------------------------------------- /docs/images/getting-started/5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/5.png -------------------------------------------------------------------------------- /docs/images/getting-started/6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/6.png -------------------------------------------------------------------------------- /docs/images/getting-started/7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/7.png -------------------------------------------------------------------------------- /docs/images/getting-started/azure-devops-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/azure-devops-1.png -------------------------------------------------------------------------------- /docs/images/getting-started/azure-devops-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/azure-devops-2.png -------------------------------------------------------------------------------- /docs/images/getting-started/azure-devops-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/azure-devops-3.png 
-------------------------------------------------------------------------------- /docs/images/getting-started/azure-devops-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/azure-devops-4.png -------------------------------------------------------------------------------- /docs/images/getting-started/azure-devops-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/azure-devops-5.png -------------------------------------------------------------------------------- /docs/images/getting-started/azure-devops-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/azure-devops-6.png -------------------------------------------------------------------------------- /docs/images/getting-started/azure-devops-7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/azure-devops-7.png -------------------------------------------------------------------------------- /docs/images/getting-started/azure-devops-8.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/azure-devops-8.5.png -------------------------------------------------------------------------------- /docs/images/getting-started/azure-devops-8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/azure-devops-8.png -------------------------------------------------------------------------------- /docs/images/getting-started/image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/getting-started/image.png -------------------------------------------------------------------------------- /docs/images/gitlab/gitlab-apply.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/gitlab/gitlab-apply.png -------------------------------------------------------------------------------- /docs/images/gitlab/gitlab-plan.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/gitlab/gitlab-plan.png -------------------------------------------------------------------------------- /docs/images/infracost-diff-comment-digger.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/infracost-diff-comment-digger.png -------------------------------------------------------------------------------- /docs/images/readme/1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/images/readme/1.png -------------------------------------------------------------------------------- /docs/logo/dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/logo/dark.png -------------------------------------------------------------------------------- /docs/logo/light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diggerhq/digger/b9f645e9af499badc14c7f00bbca06026232a686/docs/logo/light.png -------------------------------------------------------------------------------- /docs/readme/faq.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: 'FAQ' 3 | description: 'Frequently asked questions' 4 | --- 5 | 6 | # Does Digger support Terraform 1.6 and 1.7? 7 | 8 | Digger does not officially support Terraform versions 1.6 and above (any version released after Hashicorp’s switch to BSL, [announced](https://www.hashicorp.com/blog/hashicorp-adopts-business-source-license) in August 2023). We recommend using [OpenTofu](https://opentofu.org/) instead, a community fork of Terraform in response to Hashicorp’s license change. OpenTofu is [governed](https://www.linuxfoundation.org/press/announcing-opentofu) by the Linux Foundation and is [pending](https://github.com/cncf/sandbox/issues/81) acceptance into the CNCF sandbox. -------------------------------------------------------------------------------- /docs/readme/feedback.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Feedback" 3 | --- 4 | 5 | We are always looking at feedback and suggestions from the community. If something is missing, if you have a feature request, or if something isn't working as it is supposed to, 6 | please tell us. We are always keen on thoughts/feedback. 7 | 8 | 9 | Head over to [GitHub](https://github.com/diggerhq/digger/blob/develop/docs/readme/feedback.mdx) and raise a PR to add your feedback below (optionally link to a GitHub issue if it is relevant to your feedback). 10 | 11 | ## To-do list (Issues, feedback and feature requests) 12 | 13 | 1. Terraform file for the initial configuration to set up an S3 bucket and DynamoDB table for [state management](https://github.com/diggerhq/digger/issues/206) (a minimal sketch of such a setup is shown after this list). 14 | 15 | 2. Decouple auth from [frontegg](https://github.com/diggerhq/digger/issues/1074).
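For reference, the kind of configuration that item 1 above asks for might look roughly like the sketch below — the bucket and table names are placeholders and this is an illustrative outline, not an official Digger module:

```
# Hypothetical S3 bucket + DynamoDB table for Terraform state management;
# all names and settings are placeholders.
resource "aws_s3_bucket" "tf_state" {
  bucket = "my-digger-terraform-state"
}

resource "aws_s3_bucket_versioning" "tf_state" {
  bucket = aws_s3_bucket.tf_state.id
  versioning_configuration {
    status = "Enabled"
  }
}

resource "aws_dynamodb_table" "tf_locks" {
  name         = "my-digger-terraform-locks"
  billing_mode = "PAY_PER_REQUEST"
  hash_key     = "LockID"

  attribute {
    name = "LockID"
    type = "S"
  }
}
```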
-------------------------------------------------------------------------------- /docs/troubleshooting-errors.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Troubleshooting Errors" 3 | --- 4 | 5 | -------------------------------------------------------------------------------- /ee/backend/.dockerignore: -------------------------------------------------------------------------------- 1 | Dockerfile 2 | cloud -------------------------------------------------------------------------------- /ee/backend/.gitignore: -------------------------------------------------------------------------------- 1 | main 2 | backend 3 | .idea/ 4 | .DS_Store 5 | venv/ 6 | **/__pycache__/ 7 | __azurite* 8 | ./digger 9 | cloud 10 | *.env 11 | *.env.* 12 | .docker-compose-env 13 | controllers/database_test.db 14 | -------------------------------------------------------------------------------- /ee/backend/atlas.hcl: -------------------------------------------------------------------------------- 1 | data "external_schema" "gorm" { 2 | program = [ 3 | "go", 4 | "run", 5 | "-mod=mod", 6 | "ariga.io/atlas-provider-gorm", 7 | "load", 8 | "--path", "./models", 9 | "--dialect", "postgres", 10 | ] 11 | } 12 | 13 | env "gorm" { 14 | src = data.external_schema.gorm.url 15 | dev = "docker://postgres/16.1" 16 | migration { 17 | dir = "file://migrations" 18 | } 19 | format { 20 | migrate { 21 | diff = "{{ sql . \" \" }}" 22 | } 23 | } 24 | } -------------------------------------------------------------------------------- /ee/backend/ci_backends/buildkite.go: -------------------------------------------------------------------------------- 1 | package ci_backends 2 | 3 | import ( 4 | "encoding/json" 5 | "github.com/buildkite/go-buildkite/v3/buildkite" 6 | "github.com/diggerhq/digger/libs/spec" 7 | ) 8 | 9 | type BuildkiteCi struct { 10 | Client buildkite.Client 11 | Org string 12 | Pipeline string 13 | } 14 | 15 | func (b BuildkiteCi) TriggerWorkflow(spec spec.Spec, runName string, vcsToken string) error { 16 | 17 | specBytes, err := json.Marshal(spec) 18 | client := b.Client 19 | _, _, err = client.Builds.Create(b.Org, b.Pipeline, &buildkite.CreateBuild{ 20 | Commit: spec.Job.Commit, 21 | Branch: spec.Job.Branch, 22 | Message: runName, 23 | Author: buildkite.Author{Username: spec.VCS.Actor}, 24 | Env: map[string]string{ 25 | "DIGGER_SPEC": string(specBytes), 26 | "GITHUB_TOKEN": vcsToken, 27 | }, 28 | PullRequestID: int64(*spec.Job.PullRequestNumber), 29 | }) 30 | 31 | return err 32 | 33 | } 34 | 35 | func (b BuildkiteCi) GetWorkflowUrl(spec spec.Spec) (string, error) { 36 | return "", nil 37 | } 38 | -------------------------------------------------------------------------------- /ee/backend/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | 3 | services: 4 | postgres: 5 | image: postgres:alpine 6 | ports: 7 | - "5432:5432" 8 | environment: 9 | - POSTGRES_PASSWORD=23q4RSDFSDFS 10 | healthcheck: 11 | test: [ "CMD-SHELL", "pg_isready -U postgres" ] 12 | interval: 5s 13 | timeout: 5s 14 | retries: 5 15 | 16 | web: 17 | links: 18 | - postgres 19 | depends_on: 20 | postgres: 21 | condition: service_healthy 22 | build: ./ 23 | env_file: 24 | - .env.docker-compose 25 | ports: 26 | - "3100:3000" 27 | -------------------------------------------------------------------------------- /ee/backend/templates/bottom.tmpl: -------------------------------------------------------------------------------- 1 | {{define 
"bottom"}} 2 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | {{ end }} -------------------------------------------------------------------------------- /ee/backend/templates/github_success.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Digger github app installed 6 | 7 | 8 | 9 | 10 | 11 |
12 |
13 |

App installation successful

14 |

You can now close this tab.

15 |
16 |
17 | 18 | -------------------------------------------------------------------------------- /ee/backend/templates/healthy.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 |

Digger is up and running

4 |

Congratulations!

5 | 6 | -------------------------------------------------------------------------------- /ee/backend/templates/index.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 |
8 | 19 |
20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /ee/backend/templates/notifications.tmpl: -------------------------------------------------------------------------------- 1 | {{define "notifications"}} 2 | {{ range $message := .Messages }} 3 | 7 | {{end}} 8 | {{ range $message := .Warnings }} 9 | 13 | {{end}} 14 | {{ range $message := .Errors }} 15 | 19 | {{end}} 20 | {{ end }} -------------------------------------------------------------------------------- /ee/backend/templates/policy_details.tmpl: -------------------------------------------------------------------------------- 1 | {{template "top" . }} 2 | 3 |
4 |
5 |
6 |
7 |

Policy Details

8 |
9 |
10 | {{template "notifications" . }} 11 |
12 |
13 |
14 |
15 | 16 | 17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 | {{template "bottom" . }} 27 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /ee/backend/templates/project_add.tmpl: -------------------------------------------------------------------------------- 1 | {{template "top" . }} 2 | 3 |
4 |
5 |
6 |
7 |

Project Details

8 |
9 |
10 | 11 | {{template "notifications" . }} 12 | 13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 | 23 |
24 |
25 |
26 |
27 | {{template "bottom" . }} 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /ee/backend/templates/project_details.tmpl: -------------------------------------------------------------------------------- 1 | {{template "top" . }} 2 | 3 |
4 |
5 |
6 |
7 |

Project Details

8 |
9 |
10 | {{template "notifications" . }} 11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 | {{template "bottom" . }} 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /ee/backend/templates/static/css/main.css: -------------------------------------------------------------------------------- 1 | 2 | 3 | .bg-gradient-primary { 4 | background-color: #001529; 5 | background-image: linear-gradient(180deg, #001529 10%, #001529 100%); 6 | background-size: cover; 7 | } -------------------------------------------------------------------------------- /ee/backend/templates/static/js/prism-live-javascript.js: -------------------------------------------------------------------------------- 1 | Prism.Live.registerLanguage("clike", { 2 | comments: { 3 | singleline: "//", 4 | multiline: ["/*", "*/"] 5 | }, 6 | snippets: { 7 | if: `if ($1) { 8 | $2 9 | }` 10 | } 11 | }); 12 | 13 | Prism.Live.registerLanguage("javascript", { 14 | snippets: { 15 | log: "console.log($1)", 16 | } 17 | }, Prism.Live.languages.clike); 18 | -------------------------------------------------------------------------------- /ee/cli/.gitignore: -------------------------------------------------------------------------------- 1 | **/digger 2 | !/pkg/digger 3 | !/cmd/digger 4 | -------------------------------------------------------------------------------- /ee/cli/cmd/digger/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "github.com/diggerhq/digger/cli/pkg/usage" 6 | "github.com/diggerhq/digger/libs/license" 7 | "log" 8 | "os" 9 | ) 10 | 11 | /* 12 | Exit codes: 13 | 0 - No errors 14 | 1 - Failed to read digger digger_config 15 | 2 - Failed to create lock provider 16 | 3 - Failed to find auth token 17 | 4 - Failed to initialise CI context 18 | 5 - 19 | 6 - failed to process CI event 20 | 7 - failed to convert event to command 21 | 8 - failed to execute command 22 | 10 - No CI detected 23 | */ 24 | 25 | func main() { 26 | err := license.LicenseKeyChecker{}.Check() 27 | if err != nil { 28 | log.Printf("error checking license %v", err) 29 | os.Exit(1) 30 | } 31 | if len(os.Args) == 1 { 32 | os.Args = append([]string{os.Args[0]}, "default") 33 | } 34 | if err := rootCmd.Execute(); err != nil { 35 | usage.ReportErrorAndExit("", fmt.Sprintf("Error occurred during command exec: %v", err), 8) 36 | } 37 | 38 | } 39 | 40 | func init() { 41 | log.SetOutput(os.Stdout) 42 | 43 | if os.Getenv("DEBUG") == "true" { 44 | log.SetFlags(log.Ltime | log.Lshortfile) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /ee/cli/pkg/comment_updater/provider.go: -------------------------------------------------------------------------------- 1 | package comment_updater 2 | 3 | import ( 4 | "fmt" 5 | comment_updater "github.com/diggerhq/digger/libs/comment_utils/summary" 6 | "github.com/diggerhq/digger/libs/digger_config" 7 | ) 8 | 9 | type CommentUpdaterProviderAdvanced struct { 10 | } 11 | 12 | func (c CommentUpdaterProviderAdvanced) Get(renderMode string) (comment_updater.CommentUpdater, error) { 13 | if renderMode == digger_config.CommentRenderModeBasic { 14 | return comment_updater.BasicCommentUpdater{}, nil 15 | } else if renderMode == digger_config.CommentRenderModeGroupByModule { 16 | commentUpdater := comment_updater.BasicCommentUpdater{} 17 | return commentUpdater, nil 18 | } else { 19 | return nil, fmt.Errorf("Unknown comment render mode found: %v", renderMode) 20 | } 21 | } 22 | 
-------------------------------------------------------------------------------- /ee/cli/pkg/drift/provider.go: -------------------------------------------------------------------------------- 1 | package drift 2 | 3 | import ( 4 | "fmt" 5 | core_drift "github.com/diggerhq/digger/cli/pkg/core/drift" 6 | "github.com/diggerhq/digger/cli/pkg/drift" 7 | "github.com/diggerhq/digger/libs/ci" 8 | "os" 9 | ) 10 | 11 | type DriftNotificationProviderAdvanced struct{} 12 | 13 | func (d DriftNotificationProviderAdvanced) Get(prService ci.PullRequestService) (core_drift.Notification, error) { 14 | slackNotificationUrl := os.Getenv("INPUT_DRIFT_DETECTION_SLACK_NOTIFICATION_URL") 15 | DriftAsGithubIssues := os.Getenv("INPUT_DRIFT_GITHUB_ISSUES") 16 | var notification core_drift.Notification 17 | if slackNotificationUrl != "" { 18 | notification = drift.SlackNotification{slackNotificationUrl} 19 | } else if DriftAsGithubIssues != "" { 20 | notification = GithubIssueNotification{GithubService: &prService} 21 | } else { 22 | return nil, fmt.Errorf("could not identify drift mode, please specify slack or github") 23 | } 24 | return notification, nil 25 | } 26 | -------------------------------------------------------------------------------- /ee/cli/pkg/policy/policy_test.go: -------------------------------------------------------------------------------- 1 | package policy 2 | 3 | import ( 4 | "github.com/stretchr/testify/assert" 5 | "log" 6 | "os" 7 | "testing" 8 | ) 9 | 10 | func init() { 11 | log.SetOutput(os.Stdout) 12 | log.SetFlags(log.Ldate | log.Ltime) 13 | } 14 | 15 | func TestGetPrefixesForPath(t *testing.T) { 16 | prefixes := GetPrefixesForPath("dev/vpc/subnets", "access.rego") 17 | assert.Equal(t, []string{"dev/vpc/subnets/access.rego", "dev/vpc/access.rego", "dev/access.rego"}, prefixes) 18 | log.Printf("%v", prefixes) 19 | } 20 | 21 | func TestGetPrefixesForPathAbsolute(t *testing.T) { 22 | prefixes := GetPrefixesForPath("/dev/vpc/subnets", "access.rego") 23 | assert.Equal(t, []string{"/dev/vpc/subnets/access.rego", "/dev/vpc/access.rego", "/dev/access.rego"}, prefixes) 24 | log.Printf("%v", prefixes) 25 | } 26 | -------------------------------------------------------------------------------- /ee/cli/pkg/utils/github.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "github.com/diggerhq/digger/backend/utils" 5 | "log" 6 | "os" 7 | ) 8 | 9 | func createTempDir() string { 10 | tempDir, err := os.MkdirTemp("", "repo") 11 | if err != nil { 12 | log.Fatal(err) 13 | } 14 | return tempDir 15 | } 16 | 17 | type action func(string) error 18 | 19 | func CloneGitRepoAndDoAction(repoUrl string, branch string, token string, tokenUsername string, action action) error { 20 | dir := createTempDir() 21 | git := utils.NewGitShellWithTokenAuth(dir, token, tokenUsername) 22 | err := git.Clone(repoUrl, branch) 23 | if err != nil { 24 | return err 25 | } 26 | defer os.RemoveAll(dir) 27 | err = action(dir) 28 | if err != nil { 29 | log.Printf("error performing action: %v", err) 30 | return err 31 | } 32 | 33 | return nil 34 | 35 | } 36 | -------------------------------------------------------------------------------- /ee/drift/.dockerignore: -------------------------------------------------------------------------------- 1 | # flyctl launch added from .gitignore 2 | **/.env 3 | fly.toml 4 | -------------------------------------------------------------------------------- /ee/drift/.gitignore: 
-------------------------------------------------------------------------------- 1 | .env 2 | main 3 | drift 4 | -------------------------------------------------------------------------------- /ee/drift/README.md: -------------------------------------------------------------------------------- 1 | Backend for DriftApp 2 | -------------------------------------------------------------------------------- /ee/drift/controllers/controllers.go: -------------------------------------------------------------------------------- 1 | package controllers 2 | 3 | import ( 4 | "github.com/diggerhq/digger/backend/ci_backends" 5 | "github.com/diggerhq/digger/backend/utils" 6 | ) 7 | 8 | type MainController struct { 9 | GithubClientProvider utils.GithubClientProvider 10 | CiBackendProvider ci_backends.CiBackendProvider 11 | } 12 | -------------------------------------------------------------------------------- /ee/drift/controllers/health.go: -------------------------------------------------------------------------------- 1 | package controllers 2 | 3 | import ( 4 | "github.com/gin-gonic/gin" 5 | ) 6 | 7 | func (mc MainController) Ping(c *gin.Context) { 8 | c.String(200, "pong") 9 | } 10 | -------------------------------------------------------------------------------- /ee/drift/dbgen/dbgen.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "os" 5 | 6 | "gorm.io/driver/postgres" 7 | "gorm.io/gen" 8 | "gorm.io/gorm" 9 | ) 10 | 11 | // Dynamic SQL 12 | type Querier interface { 13 | // SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}} 14 | FilterWithNameAndRole(name, role string) ([]gen.T, error) 15 | } 16 | 17 | func main() { 18 | g := gen.NewGenerator(gen.Config{ 19 | OutPath: "../models_generated", 20 | Mode: gen.WithoutContext | gen.WithDefaultQuery | gen.WithQueryInterface, // generate mode 21 | }) 22 | 23 | dburl := os.Getenv("DB_URL") 24 | if dburl == "" { 25 | dburl = "postgresql://postgres:postgres@127.0.0.1:54322/postgres" 26 | } 27 | gormdb, _ := gorm.Open(postgres.Open(dburl)) 28 | g.UseDB(gormdb) // reuse your gorm db 29 | 30 | g.ApplyBasic( 31 | // Generate structs from all tables of current database 32 | g.GenerateAllTable()..., 33 | ) 34 | 35 | // Generate the code 36 | g.Execute() 37 | } 38 | -------------------------------------------------------------------------------- /ee/drift/dbgen/go.mod: -------------------------------------------------------------------------------- 1 | module dbgen 2 | 3 | go 1.24.0 4 | 5 | require ( 6 | gorm.io/driver/postgres v1.5.9 7 | gorm.io/gen v0.3.26 8 | gorm.io/gorm v1.25.12 9 | ) 10 | 11 | require ( 12 | github.com/go-sql-driver/mysql v1.7.0 // indirect 13 | github.com/jackc/pgpassfile v1.0.0 // indirect 14 | github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect 15 | github.com/jackc/pgx/v5 v5.5.5 // indirect 16 | github.com/jackc/puddle/v2 v2.2.1 // indirect 17 | github.com/jinzhu/inflection v1.0.0 // indirect 18 | github.com/jinzhu/now v1.1.5 // indirect 19 | golang.org/x/crypto v0.17.0 // indirect 20 | golang.org/x/mod v0.17.0 // indirect 21 | golang.org/x/sync v0.8.0 // indirect 22 | golang.org/x/text v0.18.0 // indirect 23 | golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect 24 | gorm.io/datatypes v1.1.1-0.20230130040222-c43177d3cf8c // indirect 25 | gorm.io/driver/mysql v1.5.7 // indirect 26 | gorm.io/hints v1.1.0 // indirect 27 | gorm.io/plugin/dbresolver v1.5.3 // indirect 28 | ) 29 | 
-------------------------------------------------------------------------------- /ee/drift/dbmodels/setup.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "github.com/diggerhq/digger/ee/drift/models_generated" 5 | slogGorm "github.com/orandin/slog-gorm" 6 | "gorm.io/driver/postgres" 7 | _ "gorm.io/driver/postgres" 8 | "gorm.io/gorm" 9 | "log/slog" 10 | "os" 11 | ) 12 | 13 | type Database struct { 14 | GormDB *gorm.DB 15 | Query *models_generated.Query 16 | } 17 | 18 | // var DB *gorm.DB 19 | var DB *Database 20 | 21 | func ConnectDatabase() { 22 | logger := slog.New(slog.NewJSONHandler(os.Stdout, nil)).With("gorm", true) 23 | gormLogger := slogGorm.New( 24 | slogGorm.WithHandler(logger.Handler()), 25 | slogGorm.WithTraceAll(), 26 | slogGorm.SetLogLevel(slogGorm.DefaultLogType, slog.LevelInfo), 27 | slogGorm.WithContextValue("gorm", "true"), 28 | ) 29 | 30 | database, err := gorm.Open(postgres.Open(os.Getenv("DIGGER_DATABASE_URL")), &gorm.Config{ 31 | Logger: gormLogger, 32 | }) 33 | 34 | if err != nil { 35 | panic("Failed to connect to database!") 36 | } 37 | 38 | query := models_generated.Use(database) 39 | DB = &Database{ 40 | Query: query, 41 | GormDB: database, 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /ee/drift/middleware/middleware.go: -------------------------------------------------------------------------------- 1 | package middleware 2 | 3 | const ORGANISATION_ID_KEY = "organisation_ID" 4 | const ACCESS_LEVEL_KEY = "access_level" 5 | -------------------------------------------------------------------------------- /ee/drift/middleware/webhooks.go: -------------------------------------------------------------------------------- 1 | package middleware 2 | 3 | import ( 4 | "github.com/gin-gonic/gin" 5 | "net/http" 6 | "os" 7 | "strings" 8 | ) 9 | 10 | func WebhookAuth() gin.HandlerFunc { 11 | return func(c *gin.Context) { 12 | webhookSecret := os.Getenv("DIGGER_WEBHOOK_SECRET") 13 | authHeader := c.Request.Header.Get("Authorization") 14 | if authHeader == "" { 15 | c.String(http.StatusForbidden, "No Authorization header provided") 16 | c.Abort() 17 | return 18 | } 19 | token := strings.TrimPrefix(authHeader, "Bearer ") 20 | if token != webhookSecret { 21 | c.String(http.StatusForbidden, "invalid token") 22 | c.Abort() 23 | return 24 | } 25 | // webhook auth optionally accepts organisation ID as a value 26 | orgIdHeader := c.GetHeader("X-Digger-Org-ID") 27 | if orgIdHeader != "" { 28 | c.Set(ORGANISATION_ID_KEY, orgIdHeader) 29 | } 30 | 31 | c.Next() 32 | return 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /ee/drift/model/digger_ci_job_tokens.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | 10 | "gorm.io/gorm" 11 | ) 12 | 13 | const TableNameDiggerCiJobToken = "digger_ci_job_tokens" 14 | 15 | // DiggerCiJobToken mapped from table 16 | type DiggerCiJobToken struct { 17 | ID string `gorm:"column:id;primaryKey" json:"id"` 18 | CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` 19 | UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` 20 | DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` 21 | Value string `gorm:"column:value" json:"value"` 22 | Expiry time.Time `gorm:"column:expiry" json:"expiry"` 23 | OrganisationID string `gorm:"column:organisation_id" json:"organisation_id"` 24 | Type string `gorm:"column:type" json:"type"` 25 | } 26 | 27 | // TableName DiggerCiJobToken's table name 28 | func (*DiggerCiJobToken) TableName() string { 29 | return TableNameDiggerCiJobToken 30 | } 31 | -------------------------------------------------------------------------------- /ee/drift/model/org_settings.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameOrgSetting = "org_settings" 12 | 13 | // OrgSetting mapped from table 14 | type OrgSetting struct { 15 | CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` 16 | ScheduleType string `gorm:"column:schedule_type" json:"schedule_type"` 17 | Schedule string `gorm:"column:schedule" json:"schedule"` 18 | SlackNotificationURL string `gorm:"column:slack_notification_url" json:"slack_notification_url"` 19 | OrgID string `gorm:"column:org_id" json:"org_id"` 20 | ExternalOrgID string `gorm:"column:external_org_id" json:"external_org_id"` 21 | ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` 22 | } 23 | 24 | // TableName OrgSetting's table name 25 | func (*OrgSetting) TableName() string { 26 | return TableNameOrgSetting 27 | } 28 | -------------------------------------------------------------------------------- /ee/drift/model/organisations.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | 10 | "gorm.io/gorm" 11 | ) 12 | 13 | const TableNameOrganisation = "organisations" 14 | 15 | // Organisation mapped from table 16 | type Organisation struct { 17 | ID string `gorm:"column:id;primaryKey;default:uuid_generate_v4()" json:"id"` 18 | CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` 19 | UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` 20 | DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` 21 | Name string `gorm:"column:name" json:"name"` 22 | ExternalSource string `gorm:"column:external_source" json:"external_source"` 23 | ExternalID string `gorm:"column:external_id" json:"external_id"` 24 | } 25 | 26 | // TableName Organisation's table name 27 | func (*Organisation) TableName() string { 28 | return TableNameOrganisation 29 | } 30 | -------------------------------------------------------------------------------- /ee/drift/model/user_settings.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameUserSetting = "user_settings" 12 | 13 | // UserSetting mapped from table 14 | type UserSetting struct { 15 | ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 16 | CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` 17 | ScheduleType string `gorm:"column:schedule_type" json:"schedule_type"` 18 | Schedule string `gorm:"column:schedule" json:"schedule"` 19 | SlackNotificationURL string `gorm:"column:slack_notification_url" json:"slack_notification_url"` 20 | UserID string `gorm:"column:user_id;not null" json:"user_id"` 21 | } 22 | 23 | // TableName UserSetting's table name 24 | func (*UserSetting) TableName() string { 25 | return TableNameUserSetting 26 | } 27 | -------------------------------------------------------------------------------- /ee/drift/scripts/cron/notifications.sql: -------------------------------------------------------------------------------- 1 | select 2 | cron.schedule( 3 | 'invoke-notification-schedule-every-hour-15', 4 | '15 * * * *', 5 | $$ 6 | select 7 | net.http_post( 8 | url:='https://{DIGGER_HOSTNAME}/_internal/process_notifications', 9 | headers:=jsonb_build_object('Content-Type','application/json', 'Authorization', 'Bearer ' || {DIGGER_WEBHOOK_SECRET}), 10 | body:=jsonb_build_object('time', now() ), 11 | timeout_milliseconds:=5000 12 | ) as request_id; 13 | $$ 14 | ); 15 | -------------------------------------------------------------------------------- /ee/drift/scripts/cron/scheduler.sql: -------------------------------------------------------------------------------- 1 | select 2 | cron.schedule( 3 | 'invoke-drift-schedule-every-hour-00', 4 | '0 * * * *', 5 | $$ 6 | select 7 | net.http_post( 8 | url:='https://{DIGGER_HOSTNAME}/_internal/process_drift', 9 | headers:=jsonb_build_object('Content-Type','application/json', 'Authorization', 'Bearer ' || {DIGGER_WEBHOOK_SECRET}), 10 | body:=jsonb_build_object('time', now() ), 11 | timeout_milliseconds:=5000 12 | ) as request_id; 13 | $$ 14 | ); 15 | -------------------------------------------------------------------------------- /ee/drift/scripts/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | ./driftapp 5 | 
-------------------------------------------------------------------------------- /fly-drift-igor-dev.toml: -------------------------------------------------------------------------------- 1 | # fly.toml app configuration file generated for driftapp-backend-igor-dev-late-hill-6194 on 2024-09-25T17:22:20+01:00 2 | # 3 | # See https://fly.io/docs/reference/configuration/ for information about how to use this file. 4 | # 5 | 6 | app = 'driftapp-backend-igor-dev-late-hill-6194' 7 | primary_region = 'lhr' 8 | kill_signal = 'SIGINT' 9 | kill_timeout = '5s' 10 | 11 | [build] 12 | dockerfile = 'Dockerfile_drift' 13 | 14 | [env] 15 | DIGGER_HOSTNAME = 'https://driftapp-backend.digger.dev' 16 | 17 | [[services]] 18 | protocol = 'tcp' 19 | internal_port = 3000 20 | processes = ['app'] 21 | 22 | [[services.ports]] 23 | port = 80 24 | handlers = ['http'] 25 | force_https = true 26 | 27 | [[services.ports]] 28 | port = 443 29 | handlers = ['tls', 'http'] 30 | 31 | [services.concurrency] 32 | type = 'connections' 33 | hard_limit = 25 34 | soft_limit = 20 35 | 36 | [[vm]] 37 | memory = '1gb' 38 | cpu_kind = 'shared' 39 | cpus = 1 40 | -------------------------------------------------------------------------------- /fly-drift.toml: -------------------------------------------------------------------------------- 1 | # fly.toml app configuration file generated for driftapp-backend on 2024-09-23T18:02:57+01:00 2 | # 3 | # See https://fly.io/docs/reference/configuration/ for information about how to use this file. 4 | # 5 | 6 | app = 'driftapp-backend' 7 | primary_region = 'lhr' 8 | kill_signal = 'SIGINT' 9 | kill_timeout = '5s' 10 | 11 | [env] 12 | DIGGER_HOSTNAME = 'https://driftapp-backend.digger.dev' 13 | 14 | 15 | [build] 16 | dockerfile = 'Dockerfile_drift' 17 | 18 | [[services]] 19 | protocol = 'tcp' 20 | internal_port = 3000 21 | processes = ['app'] 22 | 23 | [[services.ports]] 24 | port = 80 25 | handlers = ['http'] 26 | force_https = true 27 | 28 | [[services.ports]] 29 | port = 443 30 | handlers = ['tls', 'http'] 31 | 32 | [services.concurrency] 33 | type = 'connections' 34 | hard_limit = 25 35 | soft_limit = 20 36 | 37 | [[vm]] 38 | memory = '1gb' 39 | cpu_kind = 'shared' 40 | cpus = 1 41 | -------------------------------------------------------------------------------- /fly-pro.toml: -------------------------------------------------------------------------------- 1 | # fly.toml app configuration file generated for digger-pro on 2025-02-18T09:50:24+03:00 2 | # 3 | # See https://fly.io/docs/reference/configuration/ for information about how to use this file. 
4 | # 5 | 6 | app = 'digger-pro' 7 | primary_region = 'lhr' 8 | 9 | [env] 10 | HOSTNAME = 'https://ui-backend.digger.dev' 11 | DIGGER_HOSTNAME = 'https://ui-backend.digger.dev' 12 | JWT_AUTH = 'true' 13 | 14 | [build] 15 | dockerfile = 'Dockerfile_backend_ee' 16 | 17 | [[services]] 18 | protocol = 'tcp' 19 | internal_port = 3000 20 | processes = ['app'] 21 | 22 | [[services.ports]] 23 | port = 80 24 | handlers = ['http'] 25 | force_https = true 26 | 27 | [[services.ports]] 28 | port = 443 29 | handlers = ['tls', 'http'] 30 | 31 | [services.concurrency] 32 | type = 'connections' 33 | hard_limit = 25 34 | soft_limit = 20 35 | 36 | [[vm]] 37 | memory = '1gb' 38 | cpu_kind = 'shared' 39 | cpus = 1 40 | -------------------------------------------------------------------------------- /fly-staging.toml: -------------------------------------------------------------------------------- 1 | # fly.toml app configuration file generated for next-backend on 2024-07-19T08:47:44+01:00 2 | # 3 | # See https://fly.io/docs/reference/configuration/ for information about how to use this file. 4 | # 5 | 6 | app = 'next-backend-staging' 7 | primary_region = 'lhr' 8 | kill_signal = 'SIGINT' 9 | kill_timeout = '5s' 10 | 11 | [env] 12 | DIGGER_HOSTNAME = 'https://next-backend-staging.digger.dev' 13 | 14 | [build] 15 | dockerfile = 'Dockerfile_next' 16 | 17 | [[services]] 18 | protocol = 'tcp' 19 | internal_port = 3000 20 | processes = ['app'] 21 | 22 | [[services.ports]] 23 | port = 80 24 | handlers = ['http'] 25 | force_https = true 26 | 27 | [[services.ports]] 28 | port = 443 29 | handlers = ['tls', 'http'] 30 | 31 | [services.concurrency] 32 | type = 'connections' 33 | hard_limit = 25 34 | soft_limit = 20 35 | 36 | [[vm]] 37 | memory = '1gb' 38 | cpu_kind = 'shared' 39 | cpus = 1 40 | -------------------------------------------------------------------------------- /fly.toml: -------------------------------------------------------------------------------- 1 | # fly.toml app configuration file generated for next-backend on 2024-07-19T08:47:44+01:00 2 | # 3 | # See https://fly.io/docs/reference/configuration/ for information about how to use this file. 
4 | # 5 | 6 | app = 'next-backend' 7 | primary_region = 'lhr' 8 | kill_signal = 'SIGINT' 9 | kill_timeout = '5s' 10 | 11 | [env] 12 | DIGGER_HOSTNAME = 'https://next-backend.digger.dev' 13 | 14 | [build] 15 | dockerfile = 'Dockerfile_next' 16 | 17 | [[services]] 18 | protocol = 'tcp' 19 | internal_port = 3000 20 | processes = ['app'] 21 | 22 | [[services.ports]] 23 | port = 80 24 | handlers = ['http'] 25 | force_https = true 26 | 27 | [[services.ports]] 28 | port = 443 29 | handlers = ['tls', 'http'] 30 | 31 | [services.concurrency] 32 | type = 'connections' 33 | hard_limit = 25 34 | soft_limit = 20 35 | 36 | [[vm]] 37 | memory = '1gb' 38 | cpu_kind = 'shared' 39 | cpus = 1 40 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/diggerhq/digger 2 | 3 | go 1.24.0 4 | -------------------------------------------------------------------------------- /go.work: -------------------------------------------------------------------------------- 1 | go 1.24.0 2 | 3 | use ( 4 | ./backend 5 | ./cli 6 | ./cli_e2e 7 | ./dgctl 8 | ./next 9 | ./next/dbgen 10 | 11 | ./ee/backend 12 | ./ee/cli 13 | ./ee/drift 14 | 15 | ./libs 16 | 17 | 18 | ) 19 | -------------------------------------------------------------------------------- /libs/backendapi/backend.go: -------------------------------------------------------------------------------- 1 | package backendapi 2 | 3 | import ( 4 | "github.com/diggerhq/digger/libs/iac_utils" 5 | "github.com/diggerhq/digger/libs/scheduler" 6 | "time" 7 | ) 8 | 9 | type Api interface { 10 | ReportProject(repo string, projectName string, configuration string) error 11 | ReportProjectRun(repo string, projectName string, startedAt time.Time, endedAt time.Time, status string, command string, output string) error 12 | ReportProjectJobStatus(repo string, projectName string, jobId string, status string, timestamp time.Time, summary *iac_utils.IacSummary, planJson string, PrCommentUrl string, PrCommentId string, terraformOutput string, iacUtils iac_utils.IacUtils) (*scheduler.SerializedBatch, error) 13 | UploadJobArtefact(zipLocation string) (*int, *string, error) 14 | DownloadJobArtefact(downloadTo string) (*string, error) 15 | } 16 | -------------------------------------------------------------------------------- /libs/backendapi/mocks.go: -------------------------------------------------------------------------------- 1 | package backendapi 2 | 3 | import ( 4 | "github.com/diggerhq/digger/libs/iac_utils" 5 | "github.com/diggerhq/digger/libs/scheduler" 6 | "time" 7 | ) 8 | 9 | type MockBackendApi struct { 10 | } 11 | 12 | func (t MockBackendApi) ReportProject(namespace string, projectName string, configuration string) error { 13 | return nil 14 | } 15 | 16 | func (t MockBackendApi) ReportProjectRun(repo string, projectName string, startedAt time.Time, endedAt time.Time, status string, command string, output string) error { 17 | return nil 18 | } 19 | 20 | func (t MockBackendApi) ReportProjectJobStatus(repo string, projectName string, jobId string, status string, timestamp time.Time, summary *iac_utils.IacSummary, planJson string, PrCommentUrl string, PrCommentId string, terraformOutput string, iacUtils iac_utils.IacUtils) (*scheduler.SerializedBatch, error) { 21 | return nil, nil 22 | } 23 | 24 | func (t MockBackendApi) UploadJobArtefact(zipLocation string) (*int, *string, error) { 25 | return nil, nil, nil 26 | } 27 | 28 | func (t MockBackendApi) 
DownloadJobArtefact(downloadTo string) (*string, error) { 29 | return nil, nil 30 | } 31 | -------------------------------------------------------------------------------- /libs/ci/github/comment.go: -------------------------------------------------------------------------------- 1 | package github 2 | -------------------------------------------------------------------------------- /libs/ci/github/errors.go: -------------------------------------------------------------------------------- 1 | package github 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | var UnhandledMergeGroupEventError = errors.New("ignoring event: merge_group") 8 | -------------------------------------------------------------------------------- /libs/ci/github/models/models.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | type EventPackage struct { 4 | Event interface{} 5 | EventName string 6 | Actor string 7 | Repository string 8 | } 9 | -------------------------------------------------------------------------------- /libs/ci/gitlab/gitlab_test.go: -------------------------------------------------------------------------------- 1 | package gitlab 2 | 3 | import ( 4 | "github.com/stretchr/testify/assert" 5 | "testing" 6 | ) 7 | 8 | func TestParseGitLabContext(t *testing.T) { 9 | t.Setenv("CI_PIPELINE_SOURCE", "push") 10 | t.Setenv("CI_PIPELINE_ID", "1") 11 | t.Setenv("CI_PIPELINE_IID", "2") 12 | 13 | context, err := ParseGitLabContext() 14 | assert.NoError(t, err) 15 | assert.NotNil(t, context) 16 | assert.Equal(t, PipelineSourceType("push"), context.PipelineSource) 17 | assert.Equal(t, 1, *context.PipelineId) 18 | assert.Equal(t, 2, *context.PipelineIId) 19 | assert.Nil(t, context.MergeRequestId) 20 | assert.Nil(t, context.MergeRequestIId) 21 | } 22 | 23 | func TestOpenMergeRequestEvent(t *testing.T) { 24 | t.Setenv("CI_PIPELINE_SOURCE", "push") 25 | t.Setenv("CI_PIPELINE_ID", "1") 26 | t.Setenv("CI_PIPELINE_IID", "2") 27 | 28 | context, err := ParseGitLabContext() 29 | assert.NoError(t, err) 30 | assert.NotNil(t, context) 31 | assert.Equal(t, PipelineSourceType("push"), context.PipelineSource) 32 | assert.Equal(t, 1, *context.PipelineId) 33 | assert.Equal(t, 2, *context.PipelineIId) 34 | assert.Nil(t, context.MergeRequestId) 35 | assert.Nil(t, context.MergeRequestIId) 36 | } 37 | -------------------------------------------------------------------------------- /libs/ci/utils.go: -------------------------------------------------------------------------------- 1 | package ci 2 | 3 | import ( 4 | "errors" 5 | "regexp" 6 | ) 7 | 8 | // TODO move func to lib-orchestrator library after gitlab and azure moves there 9 | func ParseProjectName(comment string) string { 10 | re := regexp.MustCompile(`-p ([0-9a-zA-Z\-_]+)`) 11 | match := re.FindStringSubmatch(comment) 12 | if len(match) > 1 { 13 | return match[1] 14 | } 15 | return "" 16 | } 17 | 18 | // TODO move func to lib-orchestrator library after gitlab and azure moves there 19 | func ParseWorkspace(comment string) (string, error) { 20 | re := regexp.MustCompile(`-w(?:\s+(\S+)|$)`) 21 | matches := re.FindAllStringSubmatch(comment, -1) 22 | 23 | if len(matches) == 0 { 24 | return "", nil 25 | } 26 | 27 | if len(matches) > 1 { 28 | return "", errors.New("more than one -w flag found") 29 | } 30 | 31 | if len(matches[0]) < 2 || matches[0][1] == "" { 32 | return "", errors.New("no value found after -w flag") 33 | } 34 | 35 | return matches[0][1], nil 36 | } 37 | 
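A minimal usage sketch for the two comment-parsing helpers above (not part of the repository): it assumes the package is importable as github.com/diggerhq/digger/libs/ci, and the comment text, project name, and workspace name are made up for illustration.

package main

import (
	"fmt"

	"github.com/diggerhq/digger/libs/ci"
)

func main() {
	// A PR comment that targets one project and one workspace (hypothetical values).
	comment := "digger plan -p staging-vpc -w staging"

	project := ci.ParseProjectName(comment)      // returns "" when no -p flag is present
	workspace, err := ci.ParseWorkspace(comment) // errors on duplicate or value-less -w flags
	if err != nil {
		fmt.Println("could not parse workspace:", err)
		return
	}

	fmt.Printf("project=%q workspace=%q\n", project, workspace) // project="staging-vpc" workspace="staging"
}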
-------------------------------------------------------------------------------- /libs/comment_utils/reporting/core.go: -------------------------------------------------------------------------------- 1 | package reporting 2 | 3 | type Reporter interface { 4 | Report(report string, reportFormatting func(report string) string) (commentId string, commentUrl string, error error) 5 | Flush() (string, string, error) 6 | Suppress() error 7 | SupportsMarkdown() bool 8 | } 9 | -------------------------------------------------------------------------------- /libs/comment_utils/reporting/mock.go: -------------------------------------------------------------------------------- 1 | package reporting 2 | 3 | type MockReporter struct { 4 | commands []string 5 | } 6 | 7 | func (mockReporter *MockReporter) Report(report string, reportFormatting func(report string) string) (string, string, error) { 8 | mockReporter.commands = append(mockReporter.commands, "Report") 9 | return "", "", nil 10 | } 11 | 12 | func (mockReporter *MockReporter) Flush() (string, string, error) { 13 | return "", "", nil 14 | } 15 | 16 | func (mockReporter *MockReporter) Suppress() error { 17 | return nil 18 | } 19 | 20 | func (mockReporter *MockReporter) SupportsMarkdown() bool { 21 | mockReporter.commands = append(mockReporter.commands, "SupportsMarkdown") 22 | return false 23 | } 24 | -------------------------------------------------------------------------------- /libs/comment_utils/reporting/noop.go: -------------------------------------------------------------------------------- 1 | package reporting 2 | 3 | type NoopReporter struct{} 4 | 5 | func (reporter NoopReporter) Report(report string, reportFormatting func(report string) string) (string, string, error) { 6 | return "", "", nil 7 | } 8 | 9 | func (reporter NoopReporter) Flush() (string, string, error) { 10 | return "", "", nil 11 | } 12 | 13 | func (reporter NoopReporter) SupportsMarkdown() bool { 14 | return false 15 | } 16 | 17 | func (reporter NoopReporter) Suppress() error { 18 | return nil 19 | } 20 | -------------------------------------------------------------------------------- /libs/comment_utils/summary/provider.go: -------------------------------------------------------------------------------- 1 | package comment_updater 2 | 3 | import ( 4 | "fmt" 5 | "github.com/diggerhq/digger/libs/digger_config" 6 | ) 7 | 8 | type CommentUpdaterProvider interface { 9 | Get(renderMode string) (CommentUpdater, error) 10 | } 11 | 12 | type CommentUpdaterProviderBasic struct{} 13 | 14 | func (c CommentUpdaterProviderBasic) Get(renderMode string) (CommentUpdater, error) { 15 | if renderMode == digger_config.CommentRenderModeBasic { 16 | return BasicCommentUpdater{}, nil 17 | } else if renderMode == digger_config.CommentRenderModeGroupByModule { 18 | commentUpdater := BasicCommentUpdater{} 19 | return commentUpdater, nil 20 | } else if renderMode == "noop" { 21 | return NoopCommentUpdater{}, nil 22 | } else { 23 | return nil, fmt.Errorf("Unknown comment render mode found: %v", renderMode) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /libs/comment_utils/utils/comments.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import "fmt" 4 | 5 | func GetTerraformOutputAsCollapsibleComment(summary string, open bool) func(string) string { 6 | var openTag string 7 | if open { 8 | openTag = "open=\"true\"" 9 | } else { 10 | openTag = "" 11 | } 12 | 13 | return func(comment 
string) string { 14 | return fmt.Sprintf(`
<details %v><summary>`+summary+`</summary> 15 | 16 | `+"```terraform"+` 17 | `+comment+` 18 | `+"```"+` 19 | </details>
`, openTag) 20 | } 21 | } 22 | 23 | func GetTerraformOutputAsComment(summary string) func(string) string { 24 | return func(comment string) string { 25 | return summary + "\n```terraform\n" + comment + "\n```" 26 | } 27 | } 28 | 29 | func AsCollapsibleComment(summary string, open bool) func(string) string { 30 | var openTag string 31 | if open { 32 | openTag = "open=\"true\"" 33 | } else { 34 | openTag = "" 35 | } 36 | return func(comment string) string { 37 | return fmt.Sprintf(`
<details %v><summary>`+summary+`</summary> 38 | `+comment+` 39 | </details>
`, openTag) 40 | } 41 | } 42 | 43 | func AsComment(summary string) func(string) string { 44 | return func(comment string) string { 45 | return summary + "\n" + comment 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /libs/digger_config/.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | -------------------------------------------------------------------------------- /libs/digger_config/terragrunt/atlantis/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright © 2020 transcend-io 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. -------------------------------------------------------------------------------- /libs/digger_config/terragrunt/atlantis/readme.md: -------------------------------------------------------------------------------- 1 | based on https://github.com/transcend-io/terragrunt-atlantis-config 2 | -------------------------------------------------------------------------------- /libs/digger_config/validators.go: -------------------------------------------------------------------------------- 1 | package digger_config 2 | 3 | import "fmt" 4 | 5 | func ValidateAutomergeStrategy(strategy string) error { 6 | switch strategy { 7 | case string(AutomergeStrategySquash), string(AutomergeStrategyMerge), string(AutomergeStrategyRebase): 8 | return nil 9 | default: 10 | return fmt.Errorf("invalid merge strategy: %v, valid values are: %v, %v, %v", strategy, AutomergeStrategySquash, AutomergeStrategyMerge, AutomergeStrategyRebase) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /libs/license/license_test.go: -------------------------------------------------------------------------------- 1 | // use this to ignore tests from external contributions 2 | //go:build !external 3 | 4 | package license 5 | 6 | import ( 7 | "github.com/stretchr/testify/assert" 8 | "testing" 9 | ) 10 | 11 | func TestLicenseKeyChecker(t *testing.T) { 12 | err := LicenseKeyChecker{}.Check() 13 | assert.NoError(t, err) 14 | } 15 | -------------------------------------------------------------------------------- /libs/locking/aws/envprovider/envprovider_test.go: -------------------------------------------------------------------------------- 1 | package envprovider 2 | 3 | import ( 4 | "context" 5 | "os" 6 | "testing" 7 | 8 | 
"github.com/aws/aws-sdk-go-v2/aws" 9 | "github.com/stretchr/testify/assert" 10 | ) 11 | 12 | func TestRetrieve(t *testing.T) { 13 | t.Parallel() 14 | tests := map[string]struct { 15 | key string 16 | secret string 17 | }{ 18 | "digger prefix": {key: "DIGGER_AWS_ACCESS_KEY_ID", secret: "DIGGER_AWS_SECRET_ACCESS_KEY"}, 19 | "no prefix": {key: "AWS_ACCESS_KEY_ID", secret: "AWS_SECRET_ACCESS_KEY"}, 20 | "other": {key: "AWS_ACCESS_KEY", secret: "AWS_SECRET_KEY"}, 21 | } 22 | 23 | for name, tc := range tests { 24 | t.Run(name, func(t *testing.T) { 25 | os.Setenv(tc.key, "key") 26 | os.Setenv(tc.secret, "secret") 27 | e := EnvProvider{} 28 | act, _ := e.Retrieve(context.TODO()) 29 | exp := aws.Credentials{AccessKeyID: "key", SecretAccessKey: "secret", SessionToken: ""} 30 | assert.Equal(t, exp, act) 31 | }) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /libs/locking/core.go: -------------------------------------------------------------------------------- 1 | package locking 2 | 3 | type Lock interface { 4 | Lock(transactionId int, resource string) (bool, error) 5 | Unlock(resource string) (bool, error) 6 | GetLock(resource string) (*int, error) 7 | } 8 | 9 | type ProjectLock interface { 10 | Lock() (bool, error) 11 | Unlock() (bool, error) 12 | ForceUnlock() error 13 | LockId() string 14 | } 15 | -------------------------------------------------------------------------------- /libs/locking/mock.go: -------------------------------------------------------------------------------- 1 | package locking 2 | 3 | type MockLock struct { 4 | MapLock map[string]int 5 | } 6 | 7 | func (lock *MockLock) Lock(transactionId int, resource string) (bool, error) { 8 | if lock.MapLock == nil { 9 | lock.MapLock = make(map[string]int) 10 | } 11 | lock.MapLock[resource] = transactionId 12 | return true, nil 13 | } 14 | 15 | func (lock *MockLock) Unlock(resource string) (bool, error) { 16 | delete(lock.MapLock, resource) 17 | return true, nil 18 | } 19 | 20 | func (lock *MockLock) GetLock(resource string) (*int, error) { 21 | result, ok := lock.MapLock[resource] 22 | if ok { 23 | return &result, nil 24 | } 25 | return nil, nil 26 | } 27 | -------------------------------------------------------------------------------- /libs/locking/utils.go: -------------------------------------------------------------------------------- 1 | package locking 2 | 3 | import ( 4 | "fmt" 5 | "github.com/diggerhq/digger/libs/scheduler" 6 | ) 7 | 8 | func PerformLockingActionFromCommand(prLock PullRequestLock, command scheduler.DiggerCommand) error { 9 | var err error 10 | switch command { 11 | case scheduler.DiggerCommandUnlock: 12 | _, err = prLock.Unlock() 13 | if err != nil { 14 | err = fmt.Errorf("failed to unlock project: %v", err) 15 | } 16 | case scheduler.DiggerCommandPlan: 17 | _, err = prLock.Lock() 18 | if err != nil { 19 | err = fmt.Errorf("failed to lock project: %v", err) 20 | } 21 | case scheduler.DiggerCommandApply: 22 | _, err = prLock.Lock() 23 | if err != nil { 24 | err = fmt.Errorf("failed to lock project: %v", err) 25 | } 26 | case scheduler.DiggerCommandLock: 27 | _, err = prLock.Lock() 28 | if err != nil { 29 | err = fmt.Errorf("failed to lock project: %v", err) 30 | } 31 | } 32 | return err 33 | } 34 | -------------------------------------------------------------------------------- /libs/orchestrator/.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | 
-------------------------------------------------------------------------------- /libs/orchestrator/locking.go: -------------------------------------------------------------------------------- 1 | package orchestrator 2 | -------------------------------------------------------------------------------- /libs/policy/mocks.go: -------------------------------------------------------------------------------- 1 | package policy 2 | 3 | import "github.com/diggerhq/digger/libs/ci" 4 | 5 | type MockPolicyChecker struct { 6 | } 7 | 8 | func (t MockPolicyChecker) CheckAccessPolicy(ciService ci.OrgService, prService *ci.PullRequestService, SCMOrganisation string, SCMrepository string, projectName string, projectDir string, command string, prNumber *int, requestedBy string, planPolicyViolations []string) (bool, error) { 9 | return false, nil 10 | } 11 | 12 | func (t MockPolicyChecker) CheckPlanPolicy(SCMrepository string, SCMOrganisation string, projectname string, projectDir string, planOutput string) (bool, []string, error) { 13 | return false, nil, nil 14 | } 15 | 16 | func (t MockPolicyChecker) CheckDriftPolicy(SCMOrganisation string, SCMrepository string, projectname string) (bool, error) { 17 | return true, nil 18 | } 19 | -------------------------------------------------------------------------------- /libs/policy/providers.go: -------------------------------------------------------------------------------- 1 | package policy 2 | 3 | import ( 4 | "log/slog" 5 | "net/http" 6 | "os" 7 | ) 8 | 9 | type PolicyCheckerProviderBasic struct{} 10 | 11 | func (p PolicyCheckerProviderBasic) Get(hostname string, organisationName string, authToken string) (Checker, error) { 12 | var policyChecker Checker 13 | if os.Getenv("NO_BACKEND") == "true" { 14 | slog.Warn("Running in 'backendless' mode. 
No policies will be supported.") 15 | policyChecker = NoOpPolicyChecker{} 16 | } else { 17 | slog.Info("Initializing policy checker", 18 | "hostname", hostname, 19 | "organisation", organisationName) 20 | 21 | policyChecker = DiggerPolicyChecker{ 22 | PolicyProvider: &DiggerHttpPolicyProvider{ 23 | DiggerHost: hostname, 24 | DiggerOrganisation: organisationName, 25 | AuthToken: authToken, 26 | HttpClient: http.DefaultClient, 27 | }} 28 | } 29 | return policyChecker, nil 30 | } 31 | -------------------------------------------------------------------------------- /libs/scheduler/serializers.go: -------------------------------------------------------------------------------- 1 | package scheduler 2 | 3 | import ( 4 | "encoding/json" 5 | "log/slog" 6 | ) 7 | 8 | func GetJobSpecs(jobs []SerializedJob) ([]JobJson, error) { 9 | jobSpecs := make([]JobJson, 0) 10 | for _, job := range jobs { 11 | var jobSpec JobJson 12 | err := json.Unmarshal(job.JobString, &jobSpec) 13 | if err != nil { 14 | slog.Error("Failed to unmarshal serialized job", 15 | "projectName", job.ProjectName, 16 | "jobId", job.DiggerJobId, 17 | "error", err) 18 | return nil, err 19 | } 20 | jobSpecs = append(jobSpecs, jobSpec) 21 | } 22 | slog.Debug("Successfully unmarshaled job specs", "count", len(jobSpecs)) 23 | return jobSpecs, nil 24 | } 25 | 26 | func JobsToProjectMap(jobs []SerializedJob) (map[string]SerializedJob, error) { 27 | res := make(map[string]SerializedJob) 28 | for _, job := range jobs { 29 | res[job.ProjectName] = job 30 | slog.Debug("Added job to project map", 31 | "projectName", job.ProjectName, 32 | "jobId", job.DiggerJobId) 33 | } 34 | slog.Debug("Created project map from jobs", "projectCount", len(res)) 35 | return res, nil 36 | } 37 | -------------------------------------------------------------------------------- /libs/spec/payloads.go: -------------------------------------------------------------------------------- 1 | package spec 2 | 3 | type GetSpecPayload struct { 4 | Command string `json:"command"` 5 | RepoFullName string `json:"repo_full_name"` 6 | Actor string `json:"actor"` 7 | //DefaultBranch string `json:"default_branch"` 8 | //PrBranch string `json:"pr_branch"` 9 | DiggerConfig string `json:"digger_config"` 10 | Project string `json:"project"` 11 | } 12 | 13 | func (p GetSpecPayload) ToMapStruct() map[string]interface{} { 14 | return map[string]interface{}{ 15 | "command": p.Command, 16 | "repo_full_name": p.RepoFullName, 17 | "actor": p.Actor, 18 | //"default_branch": p.DefaultBranch, 19 | //"pr_branch": p.PrBranch, 20 | "digger_config": p.DiggerConfig, 21 | "project": p.Project, 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /libs/spec/variables_provider.go: -------------------------------------------------------------------------------- 1 | package spec 2 | 3 | import ( 4 | "fmt" 5 | digger_crypto "github.com/diggerhq/digger/libs/crypto" 6 | "github.com/samber/lo" 7 | "os" 8 | ) 9 | 10 | type VariablesProvider struct{} 11 | 12 | func (p VariablesProvider) GetVariables(variables []VariableSpec) (map[string]string, error) { 13 | private_key := os.Getenv("DIGGER_PRIVATE_KEY") 14 | secrets := lo.Filter(variables, func(variable VariableSpec, i int) bool { 15 | return variable.IsSecret 16 | }) 17 | if len(secrets) > 0 && private_key == "" { 18 | return nil, fmt.Errorf("digger private key not supplied, unable to decrypt secrets") 19 | } 20 | 21 | res := make(map[string]string) 22 | 23 | for _, v := range variables { 24 | if v.IsSecret { 25 | 
value, err := digger_crypto.DecryptValueUsingPrivateKey(v.Value, private_key) 26 | if err != nil { 27 | return nil, fmt.Errorf("could not decrypt value using private key: %v", err) 28 | } 29 | res[v.Name] = string(value) 30 | } else if v.IsInterpolated { 31 | // if it is an interpolated value we get it form env variable of the variable 32 | res[v.Name] = os.Getenv(v.Value) 33 | } else { 34 | res[v.Name] = v.Value 35 | } 36 | } 37 | 38 | return res, nil 39 | } 40 | -------------------------------------------------------------------------------- /libs/storage/mocks.go: -------------------------------------------------------------------------------- 1 | package storage 2 | 3 | type MockPlanStorage struct { 4 | } 5 | 6 | func (t MockPlanStorage) StorePlanFile(fileContents []byte, artifactName string, fileName string) error { 7 | return nil 8 | } 9 | 10 | func (t MockPlanStorage) RetrievePlan(localPlanFilePath string, artifactName string, storedPlanFilePath string) (*string, error) { 11 | return nil, nil 12 | } 13 | 14 | func (t MockPlanStorage) DeleteStoredPlan(artifactName string, storedPlanFilePath string) error { 15 | return nil 16 | } 17 | 18 | func (t MockPlanStorage) PlanExists(artifactName string, storedPlanFilePath string) (bool, error) { 19 | return false, nil 20 | } 21 | -------------------------------------------------------------------------------- /libs/storage/storage.go: -------------------------------------------------------------------------------- 1 | package storage 2 | 3 | type PlanStorage interface { 4 | StorePlanFile(fileContents []byte, artifactName string, storedPlanFilePath string) error 5 | RetrievePlan(localPlanFilePath string, artifactName string, storedPlanFilePath string) (*string, error) 6 | DeleteStoredPlan(artifactName string, storedPlanFilePath string) error 7 | PlanExists(artifactName string, storedPlanFilePath string) (bool, error) 8 | } 9 | -------------------------------------------------------------------------------- /next/.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | -------------------------------------------------------------------------------- /next/ci_backends/ci_backends.go: -------------------------------------------------------------------------------- 1 | package ci_backends 2 | 3 | import ( 4 | "github.com/diggerhq/digger/backend/utils" 5 | "github.com/diggerhq/digger/libs/spec" 6 | ) 7 | 8 | type CiBackend interface { 9 | TriggerWorkflow(spec spec.Spec, runName string, vcsToken string) error 10 | GetWorkflowUrl(spec spec.Spec) (string, error) 11 | } 12 | 13 | type JenkinsCi struct{} 14 | 15 | type CiBackendOptions struct { 16 | GithubClientProvider utils.GithubClientProvider 17 | GithubInstallationId int64 18 | GitlabProjectId int 19 | GitlabmergeRequestEventName string 20 | GitlabCIPipelineID string 21 | GitlabCIPipelineIID int 22 | GitlabCIMergeRequestID int 23 | GitlabCIMergeRequestIID int 24 | GitlabCIProjectName string 25 | GitlabciprojectNamespace string 26 | GitlabciprojectId int 27 | GitlabciprojectNamespaceId int 28 | GitlabDiscussionId string 29 | RepoFullName string 30 | RepoOwner string 31 | RepoName string 32 | } 33 | -------------------------------------------------------------------------------- /next/ci_backends/github_actions.go: -------------------------------------------------------------------------------- 1 | package ci_backends 2 | 3 | import ( 4 | "context" 5 | "encoding/json" 6 | orchestrator_scheduler "github.com/diggerhq/digger/libs/scheduler" 7 | 
"github.com/diggerhq/digger/libs/spec" 8 | "github.com/google/go-github/v61/github" 9 | "log" 10 | ) 11 | 12 | type GithubActionCi struct { 13 | Client *github.Client 14 | } 15 | 16 | func (g GithubActionCi) TriggerWorkflow(spec spec.Spec, runName string, vcsToken string) error { 17 | log.Printf("TriggerGithubWorkflow: repoOwner: %v, repoName: %v, commentId: %v", spec.VCS.RepoOwner, spec.VCS.RepoName, spec.CommentId) 18 | client := g.Client 19 | specBytes, err := json.Marshal(spec) 20 | 21 | inputs := orchestrator_scheduler.WorkflowInput{ 22 | Spec: string(specBytes), 23 | RunName: runName, 24 | } 25 | 26 | _, err = client.Actions.CreateWorkflowDispatchEventByFileName(context.Background(), spec.VCS.RepoOwner, spec.VCS.RepoName, spec.VCS.WorkflowFile, github.CreateWorkflowDispatchEventRequest{ 27 | Ref: spec.Job.Branch, 28 | Inputs: inputs.ToMap(), 29 | }) 30 | 31 | return err 32 | } 33 | 34 | func (g GithubActionCi) GetWorkflowUrl(spec spec.Spec) (string, error) { 35 | return "", nil 36 | } 37 | -------------------------------------------------------------------------------- /next/ci_backends/jenkins.go: -------------------------------------------------------------------------------- 1 | package ci_backends 2 | -------------------------------------------------------------------------------- /next/ci_backends/provider.go: -------------------------------------------------------------------------------- 1 | package ci_backends 2 | 3 | import ( 4 | "fmt" 5 | "github.com/diggerhq/digger/next/utils" 6 | "log" 7 | ) 8 | 9 | type CiBackendProvider interface { 10 | GetCiBackend(options CiBackendOptions) (CiBackend, error) 11 | } 12 | 13 | type DefaultBackendProvider struct{} 14 | 15 | func (d DefaultBackendProvider) GetCiBackend(options CiBackendOptions) (CiBackend, error) { 16 | client, _, err := utils.GetGithubClient(options.GithubClientProvider, options.GithubInstallationId, options.RepoFullName) 17 | if err != nil { 18 | log.Printf("GetCiBackend: could not get github client: %v", err) 19 | return nil, fmt.Errorf("could not get github client: %v", err) 20 | } 21 | backend := &GithubActionCi{ 22 | Client: client, 23 | } 24 | return backend, nil 25 | } 26 | -------------------------------------------------------------------------------- /next/controllers/static.go: -------------------------------------------------------------------------------- 1 | package controllers 2 | 3 | import ( 4 | "github.com/gin-gonic/gin" 5 | "net/http" 6 | ) 7 | 8 | func Home(c *gin.Context) { 9 | c.HTML(http.StatusOK, "home.tmpl", gin.H{}) 10 | } 11 | -------------------------------------------------------------------------------- /next/dbgen/go.mod: -------------------------------------------------------------------------------- 1 | module dbgen 2 | 3 | go 1.24.0 4 | 5 | require ( 6 | github.com/Joker/jade v1.1.3 7 | gorm.io/gen v0.3.26 8 | ) 9 | 10 | require ( 11 | github.com/go-sql-driver/mysql v1.7.0 // indirect 12 | github.com/jinzhu/inflection v1.0.0 // indirect 13 | github.com/jinzhu/now v1.1.5 // indirect 14 | golang.org/x/mod v0.14.0 // indirect 15 | golang.org/x/tools v0.17.0 // indirect 16 | gorm.io/datatypes v1.1.1-0.20230130040222-c43177d3cf8c // indirect 17 | gorm.io/driver/mysql v1.4.4 // indirect 18 | gorm.io/gorm v1.25.9 // indirect 19 | gorm.io/hints v1.1.0 // indirect 20 | gorm.io/plugin/dbresolver v1.5.0 // indirect 21 | ) 22 | -------------------------------------------------------------------------------- /next/dbmodels/github.go: 
-------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | type GithubAppInstallStatus int 4 | 5 | const ( 6 | GithubAppInstallActive GithubAppInstallStatus = 1 7 | GithubAppInstallDeleted GithubAppInstallStatus = 2 8 | ) 9 | 10 | type GithubAppInstallationLinkStatus int8 11 | 12 | const ( 13 | GithubAppInstallationLinkActive GithubAppInstallationLinkStatus = 1 14 | GithubAppInstallationLinkInactive GithubAppInstallationLinkStatus = 2 15 | ) 16 | -------------------------------------------------------------------------------- /next/dbmodels/orgs.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | type ProjectStatus int 4 | 5 | const ( 6 | ProjectActive ProjectStatus = 1 7 | ProjectInactive ProjectStatus = 2 8 | ) 9 | 10 | const ( 11 | AccessPolicyType = "access" 12 | AdminPolicyType = "admin" 13 | CliJobAccessType = "cli_access" 14 | ) 15 | -------------------------------------------------------------------------------- /next/dbmodels/projects.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "strings" 5 | 6 | "github.com/diggerhq/digger/libs/digger_config" 7 | "github.com/diggerhq/digger/next/model" 8 | ) 9 | 10 | func ToDiggerProject(p *model.Project) digger_config.Project { 11 | return digger_config.Project{ 12 | Name: p.Name, 13 | Dir: p.TerraformWorkingDir, 14 | Workspace: func() string { 15 | if p.Workspace == "" { 16 | return "default" 17 | } 18 | return p.Workspace 19 | }(), 20 | Terragrunt: (p.IacType == "terragrunt"), 21 | OpenTofu: (p.IacType == "opentofu"), 22 | Workflow: p.Workflow, 23 | WorkflowFile: func() string { 24 | if p.WorkflowFile == "" { 25 | return "digger_workflow.yml" 26 | } 27 | return p.WorkflowFile 28 | }(), 29 | IncludePatterns: strings.Split(p.IncludePatterns, ","), 30 | ExcludePatterns: strings.Split(p.ExcludePatterns, ","), 31 | DependencyProjects: []string{}, 32 | DriftDetection: false, 33 | AwsRoleToAssume: nil, 34 | Generated: false, 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /next/dbmodels/runs.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | type DiggerRunStatus string 4 | 5 | const ( 6 | RunQueued DiggerRunStatus = "Queued" 7 | RunPendingPlan DiggerRunStatus = "Pending Plan" 8 | RunPlanning DiggerRunStatus = "Running Plan" 9 | RunPendingApproval DiggerRunStatus = "Pending Approval" 10 | RunApproved DiggerRunStatus = "Approved" 11 | RunPendingApply DiggerRunStatus = "Pending Apply" 12 | RunApplying DiggerRunStatus = "Running Apply" 13 | RunSucceeded DiggerRunStatus = "Succeeded" 14 | RunFailed DiggerRunStatus = "Failed" 15 | RunDiscarded DiggerRunStatus = "Discarded" 16 | ) 17 | 18 | type RunType string 19 | 20 | const ( 21 | PlanAndApply RunType = "Plan and Apply" 22 | PlanOnly RunType = "Plan Only" 23 | ) 24 | -------------------------------------------------------------------------------- /next/dbmodels/setup.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "github.com/diggerhq/digger/next/models_generated" 5 | slogGorm "github.com/orandin/slog-gorm" 6 | "gorm.io/driver/postgres" 7 | _ "gorm.io/driver/postgres" 8 | "gorm.io/gorm" 9 | "log/slog" 10 | "os" 11 | ) 12 | 13 | type Database struct { 14 | GormDB *gorm.DB 15 | Query *models_generated.Query 16 | } 17 | 18 | 
// var DB *gorm.DB 19 | var DB *Database 20 | 21 | func ConnectDatabase() { 22 | logger := slog.New(slog.NewJSONHandler(os.Stdout, nil)).With("gorm", true) 23 | gormLogger := slogGorm.New( 24 | slogGorm.WithHandler(logger.Handler()), 25 | slogGorm.WithTraceAll(), 26 | slogGorm.SetLogLevel(slogGorm.DefaultLogType, slog.LevelInfo), 27 | slogGorm.WithContextValue("gorm", "true"), 28 | ) 29 | 30 | database, err := gorm.Open(postgres.Open(os.Getenv("DIGGER_DATABASE_URL")), &gorm.Config{ 31 | Logger: gormLogger, 32 | }) 33 | 34 | if err != nil { 35 | panic("Failed to connect to database!") 36 | } 37 | 38 | query := models_generated.Use(database) 39 | DB = &Database{ 40 | Query: query, 41 | GormDB: database, 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /next/dbmodels/variables.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "github.com/diggerhq/digger/libs/spec" 5 | "github.com/diggerhq/digger/next/model" 6 | ) 7 | 8 | func ToVariableSpec(v model.EnvVar) spec.VariableSpec { 9 | return spec.VariableSpec{ 10 | Name: v.Name, 11 | Value: v.Value, 12 | IsSecret: v.IsSecret, 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /next/middleware/middleware.go: -------------------------------------------------------------------------------- 1 | package middleware 2 | 3 | const ORGANISATION_ID_KEY = "organisation_ID" 4 | const ACCESS_LEVEL_KEY = "access_level" 5 | -------------------------------------------------------------------------------- /next/middleware/webhooks.go: -------------------------------------------------------------------------------- 1 | package middleware 2 | 3 | import ( 4 | "github.com/gin-gonic/gin" 5 | "net/http" 6 | "os" 7 | "strings" 8 | ) 9 | 10 | func WebhookAuth() gin.HandlerFunc { 11 | return func(c *gin.Context) { 12 | webhookSecret := os.Getenv("DIGGER_WEBHOOK_SECRET") 13 | authHeader := c.Request.Header.Get("Authorization") 14 | if authHeader == "" { 15 | c.String(http.StatusForbidden, "No Authorization header provided") 16 | c.Abort() 17 | return 18 | } 19 | token := strings.TrimPrefix(authHeader, "Bearer ") 20 | if token != webhookSecret { 21 | c.String(http.StatusForbidden, "invalid token") 22 | c.Abort() 23 | return 24 | } 25 | // webhook auth optionally accepts organisation ID as a value 26 | orgIdHeader := c.GetHeader("X-Digger-Org-ID") 27 | if orgIdHeader != "" { 28 | c.Set(ORGANISATION_ID_KEY, orgIdHeader) 29 | } 30 | 31 | c.Next() 32 | return 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /next/model/account_delete_tokens.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | const TableNameAccountDeleteToken = "account_delete_tokens" 8 | 9 | // AccountDeleteToken mapped from table 10 | type AccountDeleteToken struct { 11 | Token string `gorm:"column:token;not null;default:uuid_generate_v4()" json:"token"` 12 | UserID string `gorm:"column:user_id;primaryKey" json:"user_id"` 13 | } 14 | 15 | // TableName AccountDeleteToken's table name 16 | func (*AccountDeleteToken) TableName() string { 17 | return TableNameAccountDeleteToken 18 | } 19 | -------------------------------------------------------------------------------- /next/model/billing_bypass_organizations.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameBillingBypassOrganization = "billing_bypass_organizations" 12 | 13 | // BillingBypassOrganization mapped from table 14 | type BillingBypassOrganization struct { 15 | ID string `gorm:"column:id;primaryKey" json:"id"` 16 | CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` 17 | } 18 | 19 | // TableName BillingBypassOrganization's table name 20 | func (*BillingBypassOrganization) TableName() string { 21 | return TableNameBillingBypassOrganization 22 | } 23 | -------------------------------------------------------------------------------- /next/model/chats.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameChat = "chats" 12 | 13 | // Chat mapped from table 14 | type Chat struct { 15 | ID string `gorm:"column:id;primaryKey" json:"id"` 16 | UserID string `gorm:"column:user_id" json:"user_id"` 17 | Payload string `gorm:"column:payload" json:"payload"` 18 | CreatedAt time.Time `gorm:"column:created_at;not null;default:timezone('utc" json:"created_at"` 19 | ProjectID string `gorm:"column:project_id;not null" json:"project_id"` 20 | } 21 | 22 | // TableName Chat's table name 23 | func (*Chat) TableName() string { 24 | return TableNameChat 25 | } 26 | -------------------------------------------------------------------------------- /next/model/customers.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | const TableNameCustomer = "customers" 8 | 9 | // Customer mapped from table 10 | type Customer struct { 11 | StripeCustomerID string `gorm:"column:stripe_customer_id;primaryKey" json:"stripe_customer_id"` 12 | OrganizationID string `gorm:"column:organization_id;primaryKey" json:"organization_id"` 13 | } 14 | 15 | // TableName Customer's table name 16 | func (*Customer) TableName() string { 17 | return TableNameCustomer 18 | } 19 | -------------------------------------------------------------------------------- /next/model/digger_job_parent_links.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 
3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | 10 | "gorm.io/gorm" 11 | ) 12 | 13 | const TableNameDiggerJobParentLink = "digger_job_parent_links" 14 | 15 | // DiggerJobParentLink mapped from table 16 | type DiggerJobParentLink struct { 17 | ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 18 | CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` 19 | UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` 20 | DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` 21 | DiggerJobID string `gorm:"column:digger_job_id" json:"digger_job_id"` 22 | ParentDiggerJobID string `gorm:"column:parent_digger_job_id" json:"parent_digger_job_id"` 23 | } 24 | 25 | // TableName DiggerJobParentLink's table name 26 | func (*DiggerJobParentLink) TableName() string { 27 | return TableNameDiggerJobParentLink 28 | } 29 | -------------------------------------------------------------------------------- /next/model/digger_job_tokens.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | 10 | "gorm.io/gorm" 11 | ) 12 | 13 | const TableNameDiggerJobToken = "digger_job_tokens" 14 | 15 | // DiggerJobToken mapped from table 16 | type DiggerJobToken struct { 17 | ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 18 | CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` 19 | UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` 20 | DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` 21 | Value string `gorm:"column:value" json:"value"` 22 | Expiry time.Time `gorm:"column:expiry" json:"expiry"` 23 | Type string `gorm:"column:type" json:"type"` 24 | OrganisationID string `gorm:"column:organisation_id" json:"organisation_id"` 25 | } 26 | 27 | // TableName DiggerJobToken's table name 28 | func (*DiggerJobToken) TableName() string { 29 | return TableNameDiggerJobToken 30 | } 31 | -------------------------------------------------------------------------------- /next/model/digger_locks.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | 10 | "gorm.io/gorm" 11 | ) 12 | 13 | const TableNameDiggerLock = "digger_locks" 14 | 15 | // DiggerLock mapped from table 16 | type DiggerLock struct { 17 | ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` 18 | CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` 19 | UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` 20 | DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` 21 | Resource string `gorm:"column:resource;not null" json:"resource"` 22 | LockID int64 `gorm:"column:lock_id;not null" json:"lock_id"` 23 | OrganizationID string `gorm:"column:organization_id;not null" json:"organization_id"` 24 | } 25 | 26 | // TableName DiggerLock's table name 27 | func (*DiggerLock) TableName() string { 28 | return TableNameDiggerLock 29 | } 30 | -------------------------------------------------------------------------------- /next/model/digger_run_queue_items.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | 10 | "gorm.io/gorm" 11 | ) 12 | 13 | const TableNameDiggerRunQueueItem = "digger_run_queue_items" 14 | 15 | // DiggerRunQueueItem mapped from table 16 | type DiggerRunQueueItem struct { 17 | ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 18 | CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` 19 | UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` 20 | DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` 21 | DiggerRunID string `gorm:"column:digger_run_id" json:"digger_run_id"` 22 | ProjectID string `gorm:"column:project_id" json:"project_id"` 23 | } 24 | 25 | // TableName DiggerRunQueueItem's table name 26 | func (*DiggerRunQueueItem) TableName() string { 27 | return TableNameDiggerRunQueueItem 28 | } 29 | -------------------------------------------------------------------------------- /next/model/digger_run_stages.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | 10 | "gorm.io/gorm" 11 | ) 12 | 13 | const TableNameDiggerRunStage = "digger_run_stages" 14 | 15 | // DiggerRunStage mapped from table 16 | type DiggerRunStage struct { 17 | ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` 18 | CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` 19 | UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` 20 | DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` 21 | BatchID string `gorm:"column:batch_id;not null" json:"batch_id"` 22 | } 23 | 24 | // TableName DiggerRunStage's table name 25 | func (*DiggerRunStage) TableName() string { 26 | return TableNameDiggerRunStage 27 | } 28 | -------------------------------------------------------------------------------- /next/model/env_vars.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 
2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameEnvVar = "env_vars" 12 | 13 | // EnvVar mapped from table 14 | type EnvVar struct { 15 | ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` 16 | ProjectID string `gorm:"column:project_id;not null" json:"project_id"` 17 | Name string `gorm:"column:name;not null" json:"name"` 18 | Value string `gorm:"column:value;not null" json:"value"` 19 | UpdatedAt time.Time `gorm:"column:updated_at;not null;default:now()" json:"updated_at"` 20 | IsSecret bool `gorm:"column:is_secret;not null" json:"is_secret"` 21 | } 22 | 23 | // TableName EnvVar's table name 24 | func (*EnvVar) TableName() string { 25 | return TableNameEnvVar 26 | } 27 | -------------------------------------------------------------------------------- /next/model/github_apps.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | 10 | "gorm.io/gorm" 11 | ) 12 | 13 | const TableNameGithubApp = "github_apps" 14 | 15 | // GithubApp mapped from table 16 | type GithubApp struct { 17 | ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` 18 | CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` 19 | UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` 20 | DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` 21 | GithubID int64 `gorm:"column:github_id;not null" json:"github_id"` 22 | Name string `gorm:"column:name;not null" json:"name"` 23 | GithubAppURL string `gorm:"column:github_app_url;not null" json:"github_app_url"` 24 | } 25 | 26 | // TableName GithubApp's table name 27 | func (*GithubApp) TableName() string { 28 | return TableNameGithubApp 29 | } 30 | -------------------------------------------------------------------------------- /next/model/internal_blog_author_posts.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | const TableNameInternalBlogAuthorPost = "internal_blog_author_posts" 8 | 9 | // InternalBlogAuthorPost mapped from table 10 | type InternalBlogAuthorPost struct { 11 | AuthorID string `gorm:"column:author_id;primaryKey" json:"author_id"` 12 | PostID string `gorm:"column:post_id;primaryKey" json:"post_id"` 13 | } 14 | 15 | // TableName InternalBlogAuthorPost's table name 16 | func (*InternalBlogAuthorPost) TableName() string { 17 | return TableNameInternalBlogAuthorPost 18 | } 19 | -------------------------------------------------------------------------------- /next/model/internal_blog_post_tags.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | const TableNameInternalBlogPostTag = "internal_blog_post_tags" 8 | 9 | // InternalBlogPostTag mapped from table 10 | type InternalBlogPostTag struct { 11 | ID int32 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 12 | Slug string `gorm:"column:slug;not null" json:"slug"` 13 | Name string `gorm:"column:name;not null" json:"name"` 14 | Description string `gorm:"column:description" json:"description"` 15 | } 16 | 17 | // TableName InternalBlogPostTag's table name 18 | func (*InternalBlogPostTag) TableName() string { 19 | return TableNameInternalBlogPostTag 20 | } 21 | -------------------------------------------------------------------------------- /next/model/internal_blog_post_tags_relationship.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | const TableNameInternalBlogPostTagsRelationship = "internal_blog_post_tags_relationship" 8 | 9 | // InternalBlogPostTagsRelationship mapped from table 10 | type InternalBlogPostTagsRelationship struct { 11 | BlogPostID string `gorm:"column:blog_post_id;primaryKey" json:"blog_post_id"` 12 | TagID int32 `gorm:"column:tag_id;primaryKey" json:"tag_id"` 13 | } 14 | 15 | // TableName InternalBlogPostTagsRelationship's table name 16 | func (*InternalBlogPostTagsRelationship) TableName() string { 17 | return TableNameInternalBlogPostTagsRelationship 18 | } 19 | -------------------------------------------------------------------------------- /next/model/internal_changelog.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameInternalChangelog = "internal_changelog" 12 | 13 | // InternalChangelog mapped from table 14 | type InternalChangelog struct { 15 | ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` 16 | Title string `gorm:"column:title;not null" json:"title"` 17 | Changes string `gorm:"column:changes;not null" json:"changes"` 18 | UserID string `gorm:"column:user_id" json:"user_id"` 19 | CreatedAt time.Time `gorm:"column:created_at;default:CURRENT_TIMESTAMP" json:"created_at"` 20 | UpdatedAt time.Time `gorm:"column:updated_at;default:CURRENT_TIMESTAMP" json:"updated_at"` 21 | CoverImage string `gorm:"column:cover_image" json:"cover_image"` 22 | } 23 | 24 | // TableName InternalChangelog's table name 25 | func (*InternalChangelog) TableName() string { 26 | return TableNameInternalChangelog 27 | } 28 | -------------------------------------------------------------------------------- /next/model/internal_feedback_comments.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameInternalFeedbackComment = "internal_feedback_comments" 12 | 13 | // InternalFeedbackComment mapped from table 14 | type InternalFeedbackComment struct { 15 | ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` 16 | UserID string `gorm:"column:user_id;not null" json:"user_id"` 17 | ThreadID string `gorm:"column:thread_id;not null" json:"thread_id"` 18 | Content string `gorm:"column:content;not null" json:"content"` 19 | CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` 20 | UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` 21 | } 22 | 23 | // TableName InternalFeedbackComment's table name 24 | func (*InternalFeedbackComment) TableName() string { 25 | return TableNameInternalFeedbackComment 26 | } 27 | -------------------------------------------------------------------------------- /next/model/organization_credits.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | const TableNameOrganizationCredit = "organization_credits" 8 | 9 | // OrganizationCredit mapped from table 10 | type OrganizationCredit struct { 11 | OrganizationID string `gorm:"column:organization_id;primaryKey" json:"organization_id"` 12 | Credits int64 `gorm:"column:credits;not null;default:12" json:"credits"` 13 | } 14 | 15 | // TableName OrganizationCredit's table name 16 | func (*OrganizationCredit) TableName() string { 17 | return TableNameOrganizationCredit 18 | } 19 | -------------------------------------------------------------------------------- /next/model/organization_members.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameOrganizationMember = "organization_members" 12 | 13 | // OrganizationMember mapped from table 14 | type OrganizationMember struct { 15 | ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 16 | CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` 17 | MemberID string `gorm:"column:member_id;not null" json:"member_id"` 18 | MemberRole string `gorm:"column:member_role;not null" json:"member_role"` 19 | OrganizationID string `gorm:"column:organization_id;not null" json:"organization_id"` 20 | } 21 | 22 | // TableName OrganizationMember's table name 23 | func (*OrganizationMember) TableName() string { 24 | return TableNameOrganizationMember 25 | } 26 | -------------------------------------------------------------------------------- /next/model/organizations.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameOrganization = "organizations" 12 | 13 | // Organization mapped from table 14 | type Organization struct { 15 | CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` 16 | ID string `gorm:"column:id;primaryKey;default:uuid_generate_v4()" json:"id"` 17 | Title string `gorm:"column:title;not null;default:Test Organization" json:"title"` 18 | Slug string `gorm:"column:slug;not null;default:(gen_random_uuid())" json:"slug"` 19 | PublicKey string `gorm:"column:public_key" json:"public_key"` 20 | } 21 | 22 | // TableName Organization's table name 23 | func (*Organization) TableName() string { 24 | return TableNameOrganization 25 | } 26 | -------------------------------------------------------------------------------- /next/model/organizations_private_info.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | const TableNameOrganizationsPrivateInfo = "organizations_private_info" 8 | 9 | // OrganizationsPrivateInfo mapped from table 10 | type OrganizationsPrivateInfo struct { 11 | ID string `gorm:"column:id;primaryKey" json:"id"` 12 | BillingAddress string `gorm:"column:billing_address" json:"billing_address"` 13 | PaymentMethod string `gorm:"column:payment_method" json:"payment_method"` 14 | } 15 | 16 | // TableName OrganizationsPrivateInfo's table name 17 | func (*OrganizationsPrivateInfo) TableName() string { 18 | return TableNameOrganizationsPrivateInfo 19 | } 20 | -------------------------------------------------------------------------------- /next/model/prices.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | const TableNamePrice = "prices" 8 | 9 | // Price mapped from table 10 | type Price struct { 11 | ID string `gorm:"column:id;primaryKey" json:"id"` 12 | ProductID string `gorm:"column:product_id" json:"product_id"` 13 | Active bool `gorm:"column:active" json:"active"` 14 | Description string `gorm:"column:description" json:"description"` 15 | UnitAmount int64 `gorm:"column:unit_amount" json:"unit_amount"` 16 | Currency string `gorm:"column:currency" json:"currency"` 17 | Type string `gorm:"column:type" json:"type"` 18 | Interval string `gorm:"column:interval" json:"interval"` 19 | IntervalCount int64 `gorm:"column:interval_count" json:"interval_count"` 20 | TrialPeriodDays int64 `gorm:"column:trial_period_days" json:"trial_period_days"` 21 | Metadata string `gorm:"column:metadata" json:"metadata"` 22 | } 23 | 24 | // TableName Price's table name 25 | func (*Price) TableName() string { 26 | return TableNamePrice 27 | } 28 | -------------------------------------------------------------------------------- /next/model/products.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | const TableNameProduct = "products" 8 | 9 | // Product mapped from table 10 | type Product struct { 11 | ID string `gorm:"column:id;primaryKey" json:"id"` 12 | Active bool `gorm:"column:active" json:"active"` 13 | Name string `gorm:"column:name" json:"name"` 14 | Description string `gorm:"column:description" json:"description"` 15 | Image string `gorm:"column:image" json:"image"` 16 | Metadata string `gorm:"column:metadata" json:"metadata"` 17 | IsVisibleInUI bool `gorm:"column:is_visible_in_ui;not null" json:"is_visible_in_ui"` 18 | } 19 | 20 | // TableName Product's table name 21 | func (*Product) TableName() string { 22 | return TableNameProduct 23 | } 24 | -------------------------------------------------------------------------------- /next/model/project_comments.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameProjectComment = "project_comments" 12 | 13 | // ProjectComment mapped from table 14 | type ProjectComment struct { 15 | ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 16 | CreatedAt time.Time `gorm:"column:created_at;default:now()" json:"created_at"` 17 | Text string `gorm:"column:text;not null" json:"text"` 18 | UserID string `gorm:"column:user_id;not null" json:"user_id"` 19 | InReplyTo int64 `gorm:"column:in_reply_to" json:"in_reply_to"` 20 | ProjectID string `gorm:"column:project_id;not null" json:"project_id"` 21 | } 22 | 23 | // TableName ProjectComment's table name 24 | func (*ProjectComment) TableName() string { 25 | return TableNameProjectComment 26 | } 27 | -------------------------------------------------------------------------------- /next/model/project_tfvars.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameProjectTfvar = "project_tfvars" 12 | 13 | // ProjectTfvar mapped from table 14 | type ProjectTfvar struct { 15 | ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` 16 | ProjectID string `gorm:"column:project_id;not null" json:"project_id"` 17 | Tfvars string `gorm:"column:tfvars;not null" json:"tfvars"` 18 | UpdatedAt time.Time `gorm:"column:updated_at;not null;default:now()" json:"updated_at"` 19 | } 20 | 21 | // TableName ProjectTfvar's table name 22 | func (*ProjectTfvar) TableName() string { 23 | return TableNameProjectTfvar 24 | } 25 | -------------------------------------------------------------------------------- /next/model/team_members.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameTeamMember = "team_members" 12 | 13 | // TeamMember mapped from table 14 | type TeamMember struct { 15 | ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 16 | CreatedAt time.Time `gorm:"column:created_at;default:now()" json:"created_at"` 17 | UserID string `gorm:"column:user_id;not null" json:"user_id"` 18 | Role string `gorm:"column:role;not null;default:member" json:"role"` 19 | TeamID int64 `gorm:"column:team_id;not null" json:"team_id"` 20 | } 21 | 22 | // TableName TeamMember's table name 23 | func (*TeamMember) TableName() string { 24 | return TableNameTeamMember 25 | } 26 | -------------------------------------------------------------------------------- /next/model/teams.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameTeam = "teams" 12 | 13 | // Team mapped from table 14 | type Team struct { 15 | ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 16 | CreatedAt time.Time `gorm:"column:created_at;default:now()" json:"created_at"` 17 | OrganizationID string `gorm:"column:organization_id;not null" json:"organization_id"` 18 | Name string `gorm:"column:name;not null" json:"name"` 19 | } 20 | 21 | // TableName Team's table name 22 | func (*Team) TableName() string { 23 | return TableNameTeam 24 | } 25 | -------------------------------------------------------------------------------- /next/model/user_api_keys.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameUserAPIKey = "user_api_keys" 12 | 13 | // UserAPIKey mapped from table 14 | type UserAPIKey struct { 15 | KeyID string `gorm:"column:key_id;primaryKey" json:"key_id"` 16 | MaskedKey string `gorm:"column:masked_key;not null" json:"masked_key"` 17 | CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` 18 | UserID string `gorm:"column:user_id;not null" json:"user_id"` 19 | ExpiresAt time.Time `gorm:"column:expires_at" json:"expires_at"` 20 | IsRevoked bool `gorm:"column:is_revoked;not null" json:"is_revoked"` 21 | } 22 | 23 | // TableName UserAPIKey's table name 24 | func (*UserAPIKey) TableName() string { 25 | return TableNameUserAPIKey 26 | } 27 | -------------------------------------------------------------------------------- /next/model/user_m2m_applications.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 
4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameUserM2mApplication = "user_m2m_applications" 12 | 13 | // UserM2mApplication mapped from table 14 | type UserM2mApplication struct { 15 | ID int32 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 16 | ClientID string `gorm:"column:clientId;not null" json:"clientId"` 17 | Name string `gorm:"column:name" json:"name"` 18 | Description string `gorm:"column:description" json:"description"` 19 | CreatedAt time.Time `gorm:"column:created_at;default:CURRENT_TIMESTAMP" json:"created_at"` 20 | UpdatedAt time.Time `gorm:"column:updated_at;default:CURRENT_TIMESTAMP" json:"updated_at"` 21 | Email string `gorm:"column:email;not null" json:"email"` 22 | Audience string `gorm:"column:audience;not null" json:"audience"` 23 | Issuer string `gorm:"column:issuer;not null" json:"issuer"` 24 | } 25 | 26 | // TableName UserM2mApplication's table name 27 | func (*UserM2mApplication) TableName() string { 28 | return TableNameUserM2mApplication 29 | } 30 | -------------------------------------------------------------------------------- /next/model/user_notifications.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameUserNotification = "user_notifications" 12 | 13 | // UserNotification mapped from table 14 | type UserNotification struct { 15 | ID string `gorm:"column:id;primaryKey;default:uuid_generate_v4()" json:"id"` 16 | UserID string `gorm:"column:user_id" json:"user_id"` 17 | IsRead bool `gorm:"column:is_read;not null" json:"is_read"` 18 | IsSeen bool `gorm:"column:is_seen;not null" json:"is_seen"` 19 | Payload string `gorm:"column:payload;not null;default:{}" json:"payload"` 20 | CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` 21 | UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` 22 | } 23 | 24 | // TableName UserNotification's table name 25 | func (*UserNotification) TableName() string { 26 | return TableNameUserNotification 27 | } 28 | -------------------------------------------------------------------------------- /next/model/user_onboarding.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameUserOnboarding = "user_onboarding" 12 | 13 | // UserOnboarding mapped from table 14 | type UserOnboarding struct { 15 | UserID string `gorm:"column:user_id;primaryKey" json:"user_id"` 16 | CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` 17 | AcceptedTerms bool `gorm:"column:accepted_terms;not null" json:"accepted_terms"` 18 | } 19 | 20 | // TableName UserOnboarding's table name 21 | func (*UserOnboarding) TableName() string { 22 | return TableNameUserOnboarding 23 | } 24 | -------------------------------------------------------------------------------- /next/model/user_private_info.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. 
DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | import ( 8 | "time" 9 | ) 10 | 11 | const TableNameUserPrivateInfo = "user_private_info" 12 | 13 | // UserPrivateInfo mapped from table 14 | type UserPrivateInfo struct { 15 | ID string `gorm:"column:id;primaryKey" json:"id"` 16 | CreatedAt time.Time `gorm:"column:created_at;default:now()" json:"created_at"` 17 | DefaultOrganization string `gorm:"column:default_organization" json:"default_organization"` 18 | } 19 | 20 | // TableName UserPrivateInfo's table name 21 | func (*UserPrivateInfo) TableName() string { 22 | return TableNameUserPrivateInfo 23 | } 24 | -------------------------------------------------------------------------------- /next/model/user_roles.gen.go: -------------------------------------------------------------------------------- 1 | // Code generated by gorm.io/gen. DO NOT EDIT. 2 | // Code generated by gorm.io/gen. DO NOT EDIT. 3 | // Code generated by gorm.io/gen. DO NOT EDIT. 4 | 5 | package model 6 | 7 | const TableNameUserRole = "user_roles" 8 | 9 | // UserRole mapped from table 10 | type UserRole struct { 11 | ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` 12 | UserID string `gorm:"column:user_id;not null" json:"user_id"` 13 | Role string `gorm:"column:role;not null" json:"role"` 14 | } 15 | 16 | // TableName UserRole's table name 17 | func (*UserRole) TableName() string { 18 | return TableNameUserRole 19 | } 20 | -------------------------------------------------------------------------------- /next/scripts/cron/process_drift.query: -------------------------------------------------------------------------------- 1 | select 2 | cron.schedule( 3 | 'invoke-function-every-half-minute', 4 | '30 seconds', 5 | $$ 6 | select 7 | net.http_post( 8 | url:='https://{DIGGER_HOSTNAME}/_internal/process_drift', 9 | headers:=jsonb_build_object('Content-Type','application/json', 'Authorization', 'Bearer ' || 'abc123'), 10 | body:=jsonb_build_object('time', now() ), 11 | timeout_milliseconds:=5000 12 | ) as request_id; 13 | $$ 14 | ); 15 | -------------------------------------------------------------------------------- /next/scripts/cron/process_runs_queue.query: -------------------------------------------------------------------------------- 1 | select 2 | cron.schedule( 3 | 'process-runs-queue', 4 | '30 seconds', 5 | $$ 6 | select 7 | net.http_post( 8 | url:='https://{DIGGER_HOSTNAME}/_internal/process_runs_queue', 9 | headers:='{"Content-Type": "application/json", "Authorization": "Bearer abc123"}'::jsonb, 10 | body:='{}'::jsonb 11 | ) as request_id; 12 | $$ 13 | ); 14 | 15 | 16 | -------------------------------------------------------------------------------- /next/scripts/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | ./next 5 | -------------------------------------------------------------------------------- /next/supa/supa.go: -------------------------------------------------------------------------------- 1 | package supa 2 | 3 | import ( 4 | "fmt" 5 | "github.com/supabase-community/supabase-go" 6 | "os" 7 | ) 8 | 9 | var client *supabase.Client = nil 10 | 11 | func GetClient() (*supabase.Client, error) { 12 | ApiUrl := os.Getenv("DIGGER_SUPABASE_API_URL") 13 | ApiKey := os.Getenv("DIGGER_SUPABASE_API_KEY") 14 | var err error 15 | client, err = supabase.NewClient(ApiUrl, ApiKey, nil) 16 | if err != nil { 17 | fmt.Println("cannot initialize supabase client", err) 18 | return nil, 
fmt.Errorf("could not create supabase client: %v", err) 19 | } 20 | return client, err 21 | } 22 | -------------------------------------------------------------------------------- /next/templates/github_success.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Digger github app installed 6 | 7 | 8 | 9 | 10 | 11 |
12 |     <div>
13 |         <div>App installation successful</div>
14 |         <div>You can now close this tab.</div>
15 |     </div>
16 | 
17 | </body>
18 | </html>
--------------------------------------------------------------------------------
/next/templates/index.tmpl:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | 
4 | 
5 | 
6 | 
7 | 
8 | 19 |
20 | 
21 | 
22 | 
23 | 
--------------------------------------------------------------------------------
/next/templates/static/css/main.css:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | .bg-gradient-primary {
4 |     background-color: #001529;
5 |     background-image: linear-gradient(180deg, #001529 10%, #001529 100%);
6 |     background-size: cover;
7 | }
--------------------------------------------------------------------------------
/next/templates/static/js/prism-live-javascript.js:
--------------------------------------------------------------------------------
1 | Prism.Live.registerLanguage("clike", {
2 |     comments: {
3 |         singleline: "//",
4 |         multiline: ["/*", "*/"]
5 |     },
6 |     snippets: {
7 |         if: `if ($1) {
8 |     $2
9 | }`
10 |     }
11 | });
12 | 
13 | Prism.Live.registerLanguage("javascript", {
14 |     snippets: {
15 |         log: "console.log($1)",
16 |     }
17 | }, Prism.Live.languages.clike);
18 | 
--------------------------------------------------------------------------------
/next/utils/crontab.go:
--------------------------------------------------------------------------------
1 | package utils
2 | 
3 | import (
4 |     "fmt"
5 |     "github.com/robfig/cron/v3"
6 |     "time"
7 | )
8 | 
9 | // MatchesCrontab reports whether the given timestamp, truncated to the minute,
10 | // falls on a tick of the standard five-field cron expression.
11 | func MatchesCrontab(cronString string, timestamp time.Time) (bool, error) {
12 |     // Parse the crontab string
13 |     schedule, err := cron.ParseStandard(cronString)
14 |     if err != nil {
15 |         return false, fmt.Errorf("failed to parse crontab string: %w", err)
16 |     }
17 | 
18 |     // Round down the timestamp to the nearest minute
19 |     roundedTime := timestamp.Truncate(time.Minute)
20 | 
21 |     // Check if the rounded time matches the schedule
22 |     nextTime := schedule.Next(roundedTime.Add(-time.Minute))
23 |     return nextTime.Equal(roundedTime), nil
24 | }
25 | 
--------------------------------------------------------------------------------
/next/utils/crontab_test.go:
--------------------------------------------------------------------------------
1 | package utils
2 | 
3 | import (
4 |     "testing"
5 |     "time"
6 | 
7 |     "github.com/stretchr/testify/assert"
8 | )
9 | 
10 | func TestCrontTabMatching(t *testing.T) {
11 |     cronString := "*/15 * * * *" // Every 15 minutes
12 |     timestamp := time.Date(2023, 5, 1, 12, 30, 30, 0, time.UTC)
13 | 
14 |     // 12:30 lies on a 15-minute boundary, so this should match.
15 |     matches, err := MatchesCrontab(cronString, timestamp)
16 |     assert.NoError(t, err)
17 |     assert.True(t, matches)
18 | 
19 |     cronString = "*/15 * * * *" // Every 15 minutes
20 |     timestamp = time.Date(2022, 5, 1, 12, 12, 30, 0, time.UTC)
21 | 
22 |     // 12:12 does not lie on a 15-minute boundary, so this should not match.
23 |     matches, err = MatchesCrontab(cronString, timestamp)
24 |     assert.NoError(t, err)
25 |     assert.False(t, matches)
26 | }
27 | 
--------------------------------------------------------------------------------