├── apps ├── __init__.py ├── management │ ├── __init__.py │ └── commands │ │ ├── __init__.py │ │ └── celery.py ├── migrations │ ├── __init__.py │ ├── 0004_alter_app_name.py │ ├── 0003_alter_tasklog_success.py │ ├── 0002_app_github_url.py │ └── 0001_initial.py ├── static │ └── favicon.ico ├── apps.py ├── forms.py ├── templates │ ├── setup_key.html │ ├── command_wait.html │ ├── login.html │ ├── list_apps.html │ ├── base.html │ └── app_info.html ├── models.py ├── urls.py └── views.py ├── .tool-versions ├── .dockerignore ├── .github ├── FUNDING.yml ├── renovate.json5 └── workflows │ └── ci.yml ├── Procfile ├── tests ├── conftest.py ├── settings.py ├── test_tasks.py ├── recording_cache.py ├── test_auth.py └── test_views.py ├── screenshots ├── app_index.png └── apps_list.png ├── .pyup.yml ├── dokku-boot.sh ├── uv.lock ├── Dockerfile ├── .gitignore ├── wharf ├── __init__.py ├── context_processors.py ├── wsgi.py ├── urls.py ├── celery.py ├── auth.py ├── settings.py └── tasks.py ├── requirements.in ├── Makefile ├── pyproject.toml ├── manage.py ├── Vagrantfile ├── .pre-commit-config.yaml ├── docker-compose.yml ├── test.sh ├── README.md ├── requirements.txt ├── wait-for-it.sh ├── check_boot.py └── LICENSE /apps/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | uv 0.6.14 2 | -------------------------------------------------------------------------------- /apps/management/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /apps/migrations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | repos/* 2 | .venv/* 3 | -------------------------------------------------------------------------------- /apps/management/commands/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | --- 2 | github: palfrey 3 | ko_fi: palfrey 4 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | web: bash dokku-boot.sh 2 | celery: python manage.py celery 3 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from tests.recording_cache import recording_cache # noqa: F401 2 | -------------------------------------------------------------------------------- /apps/static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/palfrey/wharf/HEAD/apps/static/favicon.ico -------------------------------------------------------------------------------- /screenshots/app_index.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/palfrey/wharf/HEAD/screenshots/app_index.png -------------------------------------------------------------------------------- /screenshots/apps_list.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/palfrey/wharf/HEAD/screenshots/apps_list.png -------------------------------------------------------------------------------- /.pyup.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # see https://pyup.io/docs/configuration/ for all available options 3 | 4 | update: insecure 5 | -------------------------------------------------------------------------------- /apps/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class AppsConfig(AppConfig): 5 | name = "apps" 6 | -------------------------------------------------------------------------------- /dokku-boot.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eux -o pipefail 3 | 4 | python manage.py migrate 5 | python manage.py runserver 0.0.0.0:${PORT:-5000} 6 | -------------------------------------------------------------------------------- /uv.lock: -------------------------------------------------------------------------------- 1 | version = 1 2 | revision = 1 3 | requires-python = ">=3.12" 4 | 5 | [[package]] 6 | name = "wharf" 7 | version = "0.1.0" 8 | source = { virtual = "." } 9 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12.6 2 | WORKDIR /app 3 | RUN apt-get update && apt-get install -y iproute2 4 | COPY requirements.txt /app 5 | RUN pip install -r requirements.txt 6 | COPY . /app 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | ENV/ 3 | db.sqlite3 4 | celerybeat-schedule 5 | repos/ 6 | /static/ 7 | .vagrant/ 8 | __pycache__/ 9 | *.log 10 | screenshot.png 11 | keys 12 | *key 13 | src/ 14 | -------------------------------------------------------------------------------- /wharf/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | 3 | # This will make sure the app is always imported when 4 | # Django starts so that shared_task will use this app. 
5 | from .celery import app as celery_app 6 | 7 | __all__ = ["celery_app"] 8 | -------------------------------------------------------------------------------- /wharf/context_processors.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from django.conf import settings 4 | 5 | 6 | def helpers(request: Any): 7 | return { 8 | "HAS_LOGIN_SET": settings.ADMIN_LOGIN != "admin" 9 | or settings.ADMIN_PASSWORD != "password" 10 | } 11 | -------------------------------------------------------------------------------- /tests/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | os.environ["REDIS_URL"] = "redis://redis:6379/1" 4 | 5 | from wharf.settings import * # noqa: F403 6 | 7 | CACHES = { 8 | # "default": { 9 | # "BACKEND": "tests.recording_cache.RecordingCache", 10 | # "LOCATION": "unique-snowflake", 11 | # } 12 | } 13 | -------------------------------------------------------------------------------- /requirements.in: -------------------------------------------------------------------------------- 1 | django 2 | django-jinja-bootstrap-form 3 | dj-database-url 4 | requests 5 | jinja2 6 | psycopg2-binary 7 | celery[redis] >= 5 8 | django-redis 9 | django-celery-results 10 | paramiko 11 | gitpython 12 | humanize 13 | timeout-decorator 14 | selenium 15 | packaging 16 | 17 | django-jinja 18 | 19 | pre-commit 20 | pytest 21 | pytest-django 22 | pytest-watcher 23 | model-bakery 24 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .venv/bin/python: 2 | uv venv 3 | 4 | sync: .venv/bin/python requirements.txt 5 | uv pip sync --strict requirements.txt 6 | 7 | requirements.txt: requirements.in .venv/bin/python 8 | uv pip compile requirements.in -o requirements.txt --python-version 3.12 --no-strip-extras 9 | 10 | watch-test: sync 11 | .venv/bin/ptw --now --runner .venv/bin/pytest . -vvv 12 | 13 | pre-commit: sync 14 | .venv/bin/pre-commit run -a 15 | -------------------------------------------------------------------------------- /wharf/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for wharf project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 
5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wharf.settings") 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /apps/migrations/0004_alter_app_name.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.2.1 on 2025-05-15 19:21 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("apps", "0003_alter_tasklog_success"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="app", 14 | name="name", 15 | field=models.CharField(max_length=256, unique=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | description = "Opinionated web frontend for Dokku" 3 | name = "wharf" 4 | requires-python = ">=3.12" 5 | version = "0.1.0" 6 | 7 | [tool.djlint] 8 | ignore = "H030,H031" # meta tags ignores 9 | 10 | [tool.pytest.ini_options] 11 | DJANGO_SETTINGS_MODULE = "tests.settings" 12 | filterwarnings = [ 13 | "ignore::django.core.cache.backends.base.CacheKeyWarning" 14 | ] 15 | 16 | [tool.ruff.lint] 17 | ignore = ["DJ008"] 18 | select = ["E4", "E7", "E9", "F", "I", "DJ"] 19 | -------------------------------------------------------------------------------- /apps/migrations/0003_alter_tasklog_success.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.2 on 2025-04-21 18:45 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("apps", "0002_app_github_url"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="tasklog", 14 | name="success", 15 | field=models.BooleanField(blank=True, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /apps/migrations/0002_app_github_url.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 2.0.2 on 2018-02-26 23:57 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("apps", "0001_initial"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="app", 14 | name="github_url", 15 | field=models.URLField(default=""), 16 | preserve_default=False, 17 | ), 18 | ] 19 | -------------------------------------------------------------------------------- /apps/forms.py: -------------------------------------------------------------------------------- 1 | from django import forms 2 | 3 | 4 | class ConfigForm(forms.Form): 5 | key = forms.CharField(label="key", max_length=100) 6 | value = forms.CharField(label="value", max_length=300) 7 | 8 | 9 | class CreateAppForm(forms.Form): 10 | name = forms.CharField(label="App name", max_length=100) 11 | 12 | 13 | class CreateDomainForm(forms.Form): 14 | name = forms.CharField(label="Domain name", max_length=100) 15 | 16 | 17 | class SetupLetsEncrypt(forms.Form): 18 | email = forms.EmailField(label="Email", max_length=100) 19 | 
-------------------------------------------------------------------------------- /wharf/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from django.conf.urls.static import static 3 | from django.contrib import admin 4 | from django.contrib.auth import views as auth_views 5 | from django.urls import include, path 6 | 7 | urlpatterns = [ 8 | path("", include("apps.urls")), 9 | path( 10 | "accounts/login/", 11 | auth_views.LoginView.as_view(template_name="login.html"), 12 | name="login", 13 | ), 14 | path("admin/", admin.site.urls), 15 | ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) 16 | -------------------------------------------------------------------------------- /manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | if __name__ == "__main__": 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wharf.settings") 7 | try: 8 | from django.core.management import execute_from_command_line 9 | except ImportError as exc: 10 | raise ImportError( 11 | "Couldn't import Django. Are you sure it's installed and " 12 | "available on your PYTHONPATH environment variable? Did you " 13 | "forget to activate a virtual environment?" 14 | ) from exc 15 | execute_from_command_line(sys.argv) 16 | -------------------------------------------------------------------------------- /apps/templates/setup_key.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block body %} 3 |
ssh-keys:add command from the Dokku instructions for setting up SSH keys
6 |
9 | {{ key }}
10 |
11 |
--------------------------------------------------------------------------------
/apps/templates/command_wait.html:
--------------------------------------------------------------------------------
18 | {{ log }}
19 |
20 | {% if running %}
21 |
26 | {% endif %}
27 | {% if app == "_" %}
28 | Return to apps index
29 | {% else %}
30 | Return to {{ app }} info
31 | {% endif %}
32 | {% endblock body %}
33 |
--------------------------------------------------------------------------------
/apps/templates/login.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block body %}
3 | {% if form.errors %} Your username and password didn't match. Please try again.
{% endif %} 9 | 24 | {% endblock body %} 25 | -------------------------------------------------------------------------------- /.github/renovate.json5: -------------------------------------------------------------------------------- 1 | { 2 | $schema: "https://docs.renovatebot.com/renovate-schema.json", 3 | extends: [ 4 | ":ignoreModulesAndTests", 5 | "group:monorepos", 6 | "group:recommended", 7 | "replacements:all", 8 | "workarounds:all", 9 | ], 10 | packageRules: [ 11 | { 12 | // Don't upgrade docker stuff, because it's things like docker-compose 13 | // that have to match the deployed ones 14 | matchDatasources: ["docker"], 15 | enabled: false, 16 | }, 17 | { 18 | // Don't really need to upgrade Python 19 | matchManagers: ["pyenv"], 20 | enabled: false, 21 | }, 22 | { 23 | // Issues with Kombu and Redis 6 24 | matchDepNames: ["redis"], 25 | matchUpdateTypes: ["major"], 26 | enabled: false, 27 | }, 28 | { 29 | matchUpdateTypes: ["patch", "minor"], 30 | enabled: false 31 | }, 32 | { 33 | matchPackageNames: ["*"], 34 | automerge: true 35 | }, 36 | ], 37 | "vulnerabilityAlerts": { 38 | "enabled": true 39 | }, 40 | "osvVulnerabilityAlerts": true 41 | } 42 | -------------------------------------------------------------------------------- /Vagrantfile: -------------------------------------------------------------------------------- 1 | Vagrant.configure("2") do |config| 2 | config.vm.box = "bento/ubuntu-24.04" 3 | config.vagrant.plugins = "vagrant-libvirt" 4 | 5 | config.vm.box_check_update = false 6 | config.vm.synced_folder ".", "/vagrant" 7 | 8 | config.vm.network "forwarded_port", guest: 80, host: 5000 9 | 10 | config.vm.provider :libvirt do |libvirt| 11 | libvirt.memory = "1024" 12 | libvirt.machine_type = 'pc-q35-3.1' 13 | end 14 | 15 | config.vm.provision "shell", privileged: false, inline: <<-SHELL 16 | set -eux -o pipefail 17 | sudo apt-get update 18 | sudo apt-get install --no-install-recommends -y build-essential python3 python3-pip git apt-transport-https curl redis-server firefox python3-setuptools python3-wheel python3-dev libssl-dev xdg-utils hostsed 19 | curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - 20 | echo "deb [arch=amd64] https://download.docker.com/linux/ubuntu noble stable" | sudo tee /etc/apt/sources.list.d/docker.list 21 | cd /vagrant 22 | pip3 install --break-system-packages -r requirements.txt 23 | ./test.sh 24 | SHELL 25 | end 26 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v6.0.0 5 | hooks: 6 | - id: trailing-whitespace 7 | exclude_types: [yaml, diff] 8 | - id: end-of-file-fixer 9 | exclude_types: [diff] 10 | - id: check-yaml 11 | - id: check-added-large-files 12 | 13 | - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt 14 | rev: 0.2.3 15 | hooks: 16 | - id: yamlfmt 17 | args: [--mapping, '2', --offset, '0', --sequence, '2'] 18 | exclude: pnpm-lock.yaml 19 | 20 | - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks 21 | rev: v2.14.0 22 | hooks: 23 | - id: pretty-format-toml 24 | args: [--autofix] 25 | 26 | - repo: https://github.com/astral-sh/ruff-pre-commit 27 | rev: v0.14.5 28 | hooks: 29 | - id: ruff-check 30 | args: [--fix] 31 | - id: ruff-format 32 | 33 | - repo: local 34 | hooks: 35 | - id: uv 36 | name: uv 37 | language: system 38 | entry: uv lock --check 39 | 
pass_filenames: false 40 | 41 | - repo: https://github.com/djlint/djLint 42 | rev: v1.36.4 43 | hooks: 44 | - id: djlint-reformat-django 45 | - id: djlint-django 46 | -------------------------------------------------------------------------------- /apps/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 2.0.2 on 2018-02-26 23:05 2 | 3 | import uuid 4 | 5 | import django.db.models.deletion 6 | from django.db import migrations, models 7 | 8 | 9 | class Migration(migrations.Migration): 10 | initial = True 11 | 12 | dependencies = [] 13 | 14 | operations = [ 15 | migrations.CreateModel( 16 | name="App", 17 | fields=[ 18 | ( 19 | "id", 20 | models.UUIDField( 21 | default=uuid.uuid4, primary_key=True, serialize=False 22 | ), 23 | ), 24 | ("name", models.CharField(max_length=256)), 25 | ], 26 | ), 27 | migrations.CreateModel( 28 | name="TaskLog", 29 | fields=[ 30 | ( 31 | "task_id", 32 | models.CharField(max_length=256, primary_key=True, serialize=False), 33 | ), 34 | ("when", models.DateTimeField()), 35 | ("success", models.BooleanField(null=True)), 36 | ("description", models.CharField(max_length=256)), 37 | ( 38 | "app", 39 | models.ForeignKey( 40 | on_delete=django.db.models.deletion.CASCADE, to="apps.App" 41 | ), 42 | ), 43 | ], 44 | ), 45 | ] 46 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | services: 3 | web: 4 | build: . 5 | command: ./wait-for-it.sh localhost:5432 --strict --timeout=0 -- ./wait-for-it.sh localhost:6379 --strict --timeout=0 -- bash -c "PORT=8000 bash dokku-boot.sh" 6 | volumes: 7 | - .:/app 8 | environment: 9 | - DATABASE_URL=postgres://postgres:example@localhost:5432/wharf 10 | - BROKER_URL=redis://localhost:6379/0 11 | - CACHE_URL=redis://localhost:6379/1 12 | - DOKKU_SSH_HOST=${DOKKU_SSH_HOST:-127.0.0.1} 13 | - DOKKU_SSH_PORT=${DOKKU_SSH_PORT:-22} 14 | - GITHUB_SECRET=${GITHUB_SECRET:-password} 15 | - ADMIN_PASSWORD=${ADMIN_PASSWORD:-password} 16 | depends_on: 17 | - postgres 18 | - redis 19 | network_mode: host 20 | 21 | celery: 22 | build: . 23 | command: ./wait-for-it.sh localhost:5432 --strict --timeout=0 -- ./wait-for-it.sh localhost:6379 --strict --timeout=0 -- bash -c "python manage.py celery" 24 | volumes: 25 | - .:/app 26 | environment: 27 | - DATABASE_URL=postgres://postgres:example@localhost:5432/wharf 28 | - BROKER_URL=redis://localhost:6379/0 29 | - CACHE_URL=redis://localhost:6379/1 30 | - DOKKU_SSH_HOST=${DOKKU_SSH_HOST:-127.0.0.1} 31 | - DOKKU_SSH_PORT=${DOKKU_SSH_PORT:-22} 32 | depends_on: 33 | - postgres 34 | - redis 35 | network_mode: host 36 | 37 | postgres: 38 | image: postgres:14-alpine 39 | environment: 40 | POSTGRES_DB: wharf 41 | POSTGRES_PASSWORD: example 42 | network_mode: host 43 | 44 | redis: 45 | image: redis:4-alpine 46 | network_mode: host 47 | -------------------------------------------------------------------------------- /apps/templates/list_apps.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block body %} 3 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
10 |
11 | Setup
12 | -----
13 | 1. [Install Dokku](https://dokku.com/docs/getting-started/installation)
14 | 2. Install the following plugins:
15 | * https://github.com/dokku/dokku-redis
16 | * https://github.com/dokku/dokku-postgres
17 | * https://github.com/dokku/dokku-letsencrypt
18 | 3. Setup the Let's Encrypt plugin to auto-renew (`dokku letsencrypt:cron-job --add`)
19 | 4. Create the app (`dokku apps:create wharf`)
20 | 5. Add SSH key storage:
21 | 1. `mkdir /var/lib/dokku/data/storage/wharf-ssh/`
22 | 2. `chown dokku:dokku /var/lib/dokku/data/storage/wharf-ssh/`
23 | 3. `dokku storage:mount wharf /var/lib/dokku/data/storage/wharf-ssh/:/root/.ssh`
24 | 6. Optionally, add dokku-daemon (see the sketch after this list). We still need SSH keys for pushing to Dokku, but this should speed up other Wharf commands:
25 | 1. Install as per instructions at https://github.com/palfrey/dokku-daemon-rs (note the original version should in theory work, but [its JSON support is buggy](https://github.com/dokku/dokku-daemon/issues/31))
26 | 2. `dokku storage:mount wharf /var/run/dokku-daemon/dokku-daemon.sock:/var/run/dokku-daemon/dokku-daemon.sock`
27 | 7. Add Redis (`dokku redis:create wharf && dokku redis:link wharf wharf`)
28 | 8. Add Postgres (`dokku postgres:create wharf && dokku postgres:link wharf wharf`)
29 | 9. Set `ADMIN_PASSWORD` to something secret (`dokku config:set wharf ADMIN_PASSWORD=somesecret`)
30 | 10. Deploy this Git repo [as per the standard Dokku instructions](https://dokku.com/docs/deployment/application-deployment/)
31 | 11. `dokku ps:scale wharf celery=1`
32 |
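A minimal sketch of that daemon exchange, mirroring what `wharf/tasks.py` does (send a command over the unix socket, read to EOF, parse the JSON reply); the function name here is illustrative:

```python
import json
import socket

SOCK = "/var/run/dokku-daemon/dokku-daemon.sock"

def dokku_daemon(command: str) -> str:
    client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    client.connect(SOCK)
    client.sendall(command.encode())
    output = b""
    while True:
        chunk = client.recv(1024)
        if not chunk:  # the daemon closes the connection after replying
            break
        output += chunk
    client.close()
    return json.loads(output.decode("utf-8", "replace"))["output"]
```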
33 | Helpful hints
34 | -------------
35 | * If you're running SSH on a non-standard port, set `DOKKU_SSH_PORT` e.g. `dokku config:set wharf DOKKU_SSH_PORT=2222`
36 | * If Dokku is running somewhere else than the local machine, set `DOKKU_SSH_HOST` e.g. `dokku config:set wharf DOKKU_SSH_HOST=foo.example.com`
37 | * If there's a Dockerfile in your repository, it'll [try to deploy using that by default](https://dokku.com/docs/deployment/methods/dockerfiles/). Set `BUILDPACK_URL` to override that
38 | * `BUILDPACK_URL` should be an HTTPS URL, not an SSH or `heroku/something` one
39 | * You should set up the global domain name when first creating Dokku, and add a `*.<your dokku domain>` entry to give new apps more usable names.
40 | * Set `GIT_BRANCH` in the variables to deploy from non-`master`
41 |
42 | Enabling Github auto-deploy webhooks
43 | ------------------------------------
44 | 1. Set `GITHUB_SECRET` config item to something secret
45 | 2. Go to [settings/webhooks](https://developer.github.com/webhooks/creating/#setting-up-a-webhook) in Github
46 | 3. Make a new webhook for <your Wharf instance>/webhook, with the Content type set to `application/json` and the Secret set to the value of `GITHUB_SECRET` (see the sketch below)
47 |
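GitHub signs each delivery with this secret. As an illustration (this is GitHub's standard `X-Hub-Signature-256` scheme, not necessarily the exact code in Wharf's webhook view), the check looks like:

```python
import hashlib
import hmac

def signature_is_valid(secret: str, body: bytes, signature_header: str) -> bool:
    # GitHub sends X-Hub-Signature-256: "sha256=" + HMAC-SHA256 hexdigest of the raw body
    expected = "sha256=" + hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
    return hmac.compare_digest(expected, signature_header)
```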
48 | Development
49 | -----------
50 | Easiest way to do dev is:
51 |
52 | 1. `vagrant up` which will boot the entire Dokku setup in a VM
53 | 2. `DOKKU_SSH_HOST=host.docker.internal DOKKU_SSH_PORT=2222 docker-compose up`
54 | * `host.docker.internal` works on Mac/Windows, but not on Linux (see https://github.com/docker/for-linux/issues/264). On Linux hosts, you should set `DOKKU_SSH_HOST` to whatever your IP is (not localhost, but a local IP is fine)
55 | 3. Load up `http://localhost:8000/`
56 |
--------------------------------------------------------------------------------
/apps/urls.py:
--------------------------------------------------------------------------------
1 | from django.contrib.staticfiles.storage import staticfiles_storage
2 | from django.urls import path
3 | from django.views.generic.base import RedirectView
4 |
5 | from . import views
6 |
7 | urlpatterns = [
8 | path("", views.index, name="index"),
9 | path("status", views.status, name="status"),
10 | path("refresh", views.refresh_all, name="refresh_all"),
11 | path("create_app", views.create_app, name="create_app"),
12 | path("global_config_set", views.global_config_set, name="global_config_set"),
13 | path(
14 | "global_config_check/
161 | {{ logs }}
162 |
163 | {% endblock body %}
164 |
--------------------------------------------------------------------------------
/wharf/settings.py:
--------------------------------------------------------------------------------
1 | """
2 | Django settings for wharf project.
3 |
4 | Generated by 'django-admin startproject' using Django 2.0.2.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/2.0/topics/settings/
8 |
9 | For the full list of settings and their values, see
10 | https://docs.djangoproject.com/en/2.0/ref/settings/
11 | """
12 |
13 | import os
14 | import re
15 | import subprocess
16 |
17 | import dj_database_url
18 |
19 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
20 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
21 |
22 |
23 | # Quick-start development settings - unsuitable for production
24 | # See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
25 |
26 | SECRET_KEY = os.environ.get(
27 | "SECRET_KEY", ")u-_udqved=rq9p3fc-6mv6xh7y%slo-5d=h1590(k19e+srxt"
28 | )
29 |
30 | DEBUG = True
31 |
32 | ALLOWED_HOSTS = ["*"]
33 | if "CSRF_TRUSTED_ORIGIN" in os.environ:
34 | CSRF_TRUSTED_ORIGINS = [os.environ["CSRF_TRUSTED_ORIGIN"]]
35 |
36 |
37 | # Application definition
38 |
39 | INSTALLED_APPS = [
40 | "django.contrib.admin",
41 | "django.contrib.auth",
42 | "django.contrib.contenttypes",
43 | "django.contrib.sessions",
44 | "django.contrib.messages",
45 | "django.contrib.staticfiles",
46 | "django_jinja",
47 | "bootstrapform_jinja",
48 | "django_celery_results",
49 | "apps",
50 | ]
51 |
52 | MIDDLEWARE = [
53 | "django.middleware.security.SecurityMiddleware",
54 | "django.contrib.sessions.middleware.SessionMiddleware",
55 | "django.middleware.common.CommonMiddleware",
56 | "django.middleware.csrf.CsrfViewMiddleware",
57 | "django.contrib.auth.middleware.AuthenticationMiddleware",
58 | "django.contrib.messages.middleware.MessageMiddleware",
59 | "django.middleware.clickjacking.XFrameOptionsMiddleware",
60 | "wharf.auth.LoginRequiredMiddleware",
61 | ]
62 |
63 | AUTHENTICATION_BACKENDS = [
64 | "wharf.auth.SettingsBackend",
65 | "django.contrib.auth.backends.ModelBackend",
66 | ]
67 |
68 | LOGIN_REDIRECT_URL = "/"
69 | LOGIN_EXEMPT_URLS = ["webhook", "favicon.ico", "status"]
70 |
71 | ROOT_URLCONF = "wharf.urls"
72 |
73 | TEMPLATES = [
74 | {
75 | "BACKEND": "django_jinja.jinja2.Jinja2",
76 | "DIRS": [],
77 | "APP_DIRS": True,
78 | "OPTIONS": {
79 | "context_processors": [
80 | "django.template.context_processors.debug",
81 | "django.template.context_processors.request",
82 | "django.contrib.auth.context_processors.auth",
83 | "django.contrib.messages.context_processors.messages",
84 | "wharf.context_processors.helpers",
85 | ],
86 | "match_extension": None,
87 | "app_dirname": "templates",
88 | },
89 | },
90 | {
91 | "BACKEND": "django.template.backends.django.DjangoTemplates",
92 | "DIRS": [],
93 | "OPTIONS": {
94 | "context_processors": [
95 | "django.contrib.auth.context_processors.auth",
96 | "django.contrib.messages.context_processors.messages",
97 | ],
98 | },
99 | },
100 | ]
101 |
102 | if "CACHE_URL" in os.environ:
103 | cache_url = os.environ["CACHE_URL"]
104 | elif "REDIS_URL" in os.environ:
105 | cache_url = "%s/1" % os.environ["REDIS_URL"]
106 | else:
107 | raise Exception("Neither CACHE_URL nor REDIS_URL set in environment")
108 |
109 | CACHES = {
110 | "default": {
111 | "BACKEND": "django_redis.cache.RedisCache",
112 | "LOCATION": cache_url,
113 | "OPTIONS": {
114 | "CLIENT_CLASS": "django_redis.client.DefaultClient",
115 | },
116 | "TIMEOUT": 60 * 60 * 24, # 1 day
117 | }
118 | }
119 |
120 | WSGI_APPLICATION = "wharf.wsgi.application"
121 |
122 | # Database
123 | # https://docs.djangoproject.com/en/2.0/ref/settings/#databases
124 |
125 | DATABASES = {
126 | "default": dj_database_url.config(
127 | default="sqlite:///" + os.path.join(BASE_DIR, "db.sqlite3")
128 | )
129 | }
130 |
131 | # Password validation
132 | # https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
133 |
134 | AUTH_PASSWORD_VALIDATORS = [
135 | {
136 | "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
137 | },
138 | {
139 | "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
140 | },
141 | {
142 | "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
143 | },
144 | {
145 | "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
146 | },
147 | ]
148 |
149 |
150 | # Internationalization
151 | # https://docs.djangoproject.com/en/2.0/topics/i18n/
152 |
153 | LANGUAGE_CODE = "en-us"
154 |
155 | TIME_ZONE = "UTC"
156 |
157 | USE_I18N = True
158 |
159 | USE_L10N = True
160 |
161 | USE_TZ = True
162 |
163 |
164 | # Static files (CSS, JavaScript, Images)
165 | # https://docs.djangoproject.com/en/2.0/howto/static-files/
166 |
167 | STATIC_URL = "/static/"
168 | STATIC_ROOT = os.path.join(BASE_DIR, "static")
169 |
170 | # Wharf settings
171 |
172 | DOKKU_HOST = os.environ.get("DOKKU_SSH_HOST", None)
173 | if DOKKU_HOST is None: # default, so need to detect host
174 | ip_paths = ["/sbin/ip", "/usr/sbin/ip"]
175 | ip_path: str | None = None
176 | for possible_path in ip_paths:
177 | if os.path.exists(possible_path):
178 | ip_path = possible_path
179 | break
180 | else:
181 | raise Exception("No 'ip' binary found in any of %s" % ip_paths)
182 | route = subprocess.check_output([ip_path, "route"], encoding="utf-8")
183 | ip = re.match(r"default via (\d+\.\d+\.\d+\.\d+)", route)
184 | assert ip is not None
185 | DOKKU_HOST = ip.groups()[0]
186 |
187 | DOKKU_SSH_PORT = int(os.environ.get("DOKKU_SSH_PORT", "22"))
188 | GITHUB_SECRET = os.environ.get("GITHUB_SECRET", "password")
189 | ADMIN_LOGIN = os.environ.get("ADMIN_LOGIN", "admin")
190 | ADMIN_PASSWORD = os.environ.get("ADMIN_PASSWORD", "password")
191 |
192 | # Celery settings
193 |
194 | if "BROKER_URL" in os.environ:
195 | broker_url = os.environ["BROKER_URL"]
196 | elif "REDIS_URL" in os.environ:
197 | broker_url = "%s/0" % os.environ["REDIS_URL"]
198 | else:
199 | raise Exception("Neither BROKER_URL nor REDIS_URL set in environment")
200 |
201 | CELERY_RESULT_BACKEND = "django-cache"
202 | CELERY_BROKER_URL = broker_url
203 | CELERY_TASK_TRACK_STARTED = True
204 | CELERY_TASK_SERIALIZER = "pickle" # To fix exception serialisation. See https://github.com/celery/celery/pull/3592
205 |
206 | LOGGING = {
207 | "version": 1,
208 | "disable_existing_loggers": False,
209 | "handlers": {
210 | "console": {
211 | "class": "logging.StreamHandler",
212 | },
213 | },
214 | "loggers": {
215 | "django": {
216 | "handlers": ["console"],
217 | "level": os.getenv("DJANGO_LOG_LEVEL", "INFO"),
218 | },
219 | },
220 | }
221 |
--------------------------------------------------------------------------------
/check_boot.py:
--------------------------------------------------------------------------------
1 | import os
2 | import subprocess
3 | import sys
4 | import time
5 | import uuid
6 | from pathlib import Path
7 | from subprocess import check_call, check_output
8 | from typing import Callable, Literal
9 |
10 | from selenium import webdriver
11 | from selenium.common.exceptions import TimeoutException
12 | from selenium.webdriver.common.by import By
13 | from selenium.webdriver.firefox.service import Service
14 | from selenium.webdriver.remote.webdriver import WebDriver
15 | from selenium.webdriver.remote.webelement import WebElement
16 | from selenium.webdriver.support.ui import WebDriverWait
17 |
18 |
19 | class Tester:
20 | def __init__(self):
21 | os.environ["MOZ_REMOTE_SETTINGS_DEVTOOLS"] = "1"
22 | firefox_options = webdriver.FirefoxOptions()
23 | firefox_options.add_argument("-headless")
24 | firefox_options.accept_insecure_certs = True
25 | geckodriver_path = Path("/snap/bin/geckodriver")
26 | assert geckodriver_path.exists(), geckodriver_path
27 | self.driver = webdriver.Firefox(
28 | options=firefox_options,
29 | service=Service(
30 | executable_path=geckodriver_path.as_posix(),
31 | log_output=subprocess.STDOUT,
32 | ),
33 | )
34 | self.driver.implicitly_wait(0)
35 | self.start = time.time()
36 |
37 | def log(self, message):
38 | print("%f: %s" % (time.time() - self.start, message))
39 |
40 | def find_one(self, elements: list[WebElement]):
41 | if len(elements) == 1:
42 | return elements[0]
43 | elif len(elements) == 0:
44 | return None
45 | else:
46 | raise Exception(elements)
47 |
48 | def find_element(self, strat: str, id: str | None, allow_none: bool = False):
49 | self.log("Looking for %s: '%s'" % (strat, id))
50 | ret = self.find_one(self.driver.find_elements(strat, id))
51 | if ret is None and not allow_none:
52 | self.failure()
53 | raise Exception("No such element with %s and %s" % (strat, id))
54 | return ret
55 |
56 | def wait_for_one(self, locators):
57 | for locator in locators:
58 | element = self.find_element(*locator, allow_none=True)
59 | if element is not None:
60 | return element
61 | return False
62 |
63 | def url(self) -> str:
64 | return self.driver.current_url
65 |
66 | def failure(self):
67 | self.driver.get_screenshot_as_file("screenshot.png")
68 | print(self.url())
69 | print(self.page_source())
70 | os.system("sudo docker logs wharf.web.1")
71 | os.system("sudo docker logs wharf.celery.1")
72 | os.system("dokku nginx:show-config wharf")
73 |
74 | def get(self, url):
75 | self.log("Went to %s" % url)
76 | return self.driver.get(url)
77 |
78 | def send_keys(self, strat, id, text):
79 | self.log("Send keys '%s' to %s: '%s'" % (text, strat, id))
80 | return self.find_element(strat, id).send_keys(text)
81 |
82 | def click(self, strat, id):
83 | self.log("Click on %s: '%s'" % (strat, id))
84 | return self.find_element(strat, id).click()
85 |
86 | def wait_for_lambda(
87 | self,
88 | func: Callable[[WebDriver], Literal[False] | WebElement],
89 | timeout: int = 10,
90 | ) -> WebElement:
91 | try:
92 | return WebDriverWait(self.driver, timeout).until(func)
93 | except TimeoutException:
94 | self.failure()
95 | raise
96 |
97 | def wait_for_list(self, items, timeout: int = 10):
98 | return self.wait_for_lambda(lambda driver: self.wait_for_one(items), timeout)
99 |
100 | def get_main_id(self):
101 | res = self.wait_for_list(
102 | [(By.ID, "initial-setup-header"), (By.ID, "list_apps")]
103 | )
104 | return res.get_attribute("id")
105 |
106 | def page_source(self):
107 | return self.driver.page_source
108 |
109 |
110 | tester = Tester()
111 | try:
112 | tester.get(sys.argv[1])
113 | tester.send_keys(By.NAME, "username", "admin")
114 | tester.send_keys(By.NAME, "password", "password")
115 | tester.click(By.NAME, "submit")
116 | id = tester.get_main_id()
117 | if id == "list_apps":
118 | tester.log("Checking SSH status")
119 | tester.click(By.ID, "refresh_info")
120 | id = tester.get_main_id() # because keys might not work any more
121 | if id == "initial-setup-header":
122 | tester.log("Adding new keys")
123 | keys = check_output("sudo dokku ssh-keys:list".split(" ")).decode("utf-8")
124 | if "check_boot" in keys:
125 | check_call("sudo dokku ssh-keys:remove check_boot".split(" "))
126 | element = tester.find_element(By.ID, "ssh-key")
127 | assert element is not None
128 | cmd = "echo " + element.text + " | sudo dokku ssh-keys:add check_boot"
129 | tester.log(cmd)
130 | ret = os.system(cmd)
131 | assert ret == 0
132 | tester.get(sys.argv[1])
133 | elif id == "list_apps":
134 | pass
135 | else:
136 | raise Exception(id)
137 | app_name = uuid.uuid4().hex
138 | tester.log("Making new app %s" % app_name)
139 | tester.send_keys(By.ID, "id_name", app_name)
140 | tester.click(By.ID, "create_app")
141 | tester.wait_for_list([(By.ID, "app_page")])
142 | assert tester.page_source().find(app_name) != -1
143 |
144 | tester.get(sys.argv[1])
145 | tester.click(By.XPATH, f'//a[text()="{app_name}"]')
146 | tester.wait_for_list([(By.ID, "app_page")])
147 | assert tester.page_source().find(f"Wharf: {app_name}") != -1
148 |
149 | github_text = "Can't deploy due to missing GITHUB_URL"
150 | if tester.page_source().find(github_text) != -1:
151 | tester.send_keys(By.ID, "id_key", "GITHUB_URL")
152 | tester.send_keys(
153 | By.ID, "id_value", "https://github.com/palfrey/python-getting-started.git"
154 | )
155 | tester.click(By.ID, "config_add")
156 |
157 | def wait_for_no_github_text(driver: WebDriver) -> WebElement | Literal[False]:
158 | if tester.page_source().find(github_text) != -1:
159 | return False
160 | else:
161 | return tester.wait_for_list([(By.ID, "app_page")], timeout=900)
162 |
163 | tester.wait_for_lambda(wait_for_no_github_text, timeout=900)
164 | if tester.page_source().find("github_text") != -1:
165 | tester.failure()
166 | raise Exception
167 |
168 | tester.click(By.ID, "deploy_app")
169 | for x in range(30):
170 | try:
171 | tester.log("Attempt %d %s" % (x, tester.url()))
172 | if tester.url().startswith("https:"):
173 | tester.log("going to http page")
174 | tester.get(tester.url().replace("https", "http"))
175 | tester.wait_for_list([(By.ID, "app_page")], timeout=30)
176 | break
177 | except TimeoutException:
178 | continue
179 | if tester.page_source().find(f"Wharf: {app_name}") == -1:
180 | tester.failure()
181 | raise Exception
182 |
183 | finally:
184 | tester.driver.quit()
185 |
--------------------------------------------------------------------------------
/wharf/tasks.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os.path
3 | import socket
4 | import subprocess
5 | import time
6 | from datetime import UTC, datetime
7 | from fcntl import F_GETFL, F_SETFL, fcntl
8 | from os import O_NONBLOCK, read
9 | from pathlib import Path
10 | from typing import cast
11 |
12 | from celery import Task
13 | from django.conf import settings
14 | from git import Repo
15 | from paramiko import RSAKey
16 | from paramiko.client import AutoAddPolicy, SSHClient
17 | from redis import StrictRedis
18 |
19 | import apps.models as models
20 |
21 | from .celery import app
22 |
23 | redis = StrictRedis.from_url(settings.CELERY_BROKER_URL)
24 |
25 |
26 | SSH_WORKS_KEY = "ssh-check"
27 |
28 |
29 | def handle_data(key, raw_data: bytes):
30 | data = raw_data.decode("utf-8", "replace")
31 | redis.append(key, data)
32 | print(data)
33 |
34 |
35 | def task_key(task_id: object) -> str:
36 | return "task:%s" % task_id
37 |
38 |
39 | keyfile = os.path.expanduser("~/.ssh/id_rsa")
40 |
41 |
42 | def generate_key():
43 | if not os.path.exists(keyfile):
44 | keydir = os.path.dirname(keyfile)
45 | if not os.path.exists(keydir):
46 | os.mkdir(keydir)
47 | prv = RSAKey.generate(bits=1024)
48 | prv.write_private_key_file(keyfile)
49 | pub = RSAKey(filename=keyfile)
50 | with open("%s.pub" % keyfile, "w") as f:
51 | f.write("%s %s" % (pub.get_name(), pub.get_base64()))
52 | print("Made new Wharf SSH key")
53 |
54 |
55 | generate_key()
56 |
57 |
58 | @app.task
59 | def get_public_key():
60 | return open("%s.pub" % keyfile).read()
61 |
62 |
63 | daemon_socket = "/var/run/dokku-daemon/dokku-daemon.sock"
64 |
65 |
66 | def has_daemon():
67 | return os.path.exists(daemon_socket) and os.access(daemon_socket, os.W_OK)
68 |
69 |
70 | # From https://github.com/dokku/dokku-daemon?tab=readme-ov-file#usage-within-a-dokku-app
71 | def run_with_daemon(key: str, command: str, timeout=60) -> bool:
72 | client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
73 | client.connect(daemon_socket)
74 | client.sendall(command.encode())
75 | output = b""
76 | while True:
77 | new_output = client.recv(1024)
78 | output += new_output
79 | if len(new_output) == 0:
80 | break
81 | json_data = output.decode("utf-8", "replace")
82 | client.close()
83 | print(f"json_data: '{json_data}'")
84 | data = json.loads(json_data)["output"]
85 | redis.append(key, data)
86 | print(data)
87 | return True
88 |
89 |
90 | @app.task(bind=True)
91 | def run_ssh_command(self: Task, command: str | list[str]):
92 | print("Running command", command)
93 | key = task_key(self.request.id)
94 | redis.set(key, "")
95 | if not has_daemon():
96 | client = SSHClient()
97 | client.set_missing_host_key_policy(AutoAddPolicy)
98 | known_hosts = Path("~/.ssh/known_hosts").expanduser()
99 | known_hosts_folder = known_hosts.parent
100 | if not known_hosts_folder.exists():
101 | known_hosts_folder.mkdir()
102 |
103 | if known_hosts.exists():
104 | client.load_host_keys(
105 | known_hosts.as_posix()
106 | ) # So that we also save back the new host
107 | else:
108 | with known_hosts.open("w") as f:
109 | f.write("") # so connect doesn't barf when trying to save
110 | else:
111 | client = None
112 |
113 | if isinstance(command, list):
114 | commands = command
115 | else:
116 | commands = [command]
117 | for c in commands:
118 | if client is None:
119 | run_with_daemon(key, c)
120 | else:
121 | if os.path.exists(keyfile):
122 | pkey = RSAKey.from_private_key_file(keyfile)
123 | else:
124 | pkey = None
125 | client.connect(
126 | settings.DOKKU_HOST,
127 | port=settings.DOKKU_SSH_PORT,
128 | username="dokku",
129 | pkey=pkey,
130 | allow_agent=False,
131 | look_for_keys=False,
132 | )
133 | transport = client.get_transport()
134 | assert transport is not None
135 | channel = transport.open_session()
136 | channel.exec_command(c)
137 | while True:
138 | anything = False
139 | while channel.recv_ready():
140 | data = channel.recv(1024)
141 | handle_data(key, data)
142 | anything = True
143 | while channel.recv_stderr_ready():
144 | data = channel.recv_stderr(1024)
145 | handle_data(key, data)
146 | anything = True
147 | if not anything:
148 | if channel.exit_status_ready():
149 | break
150 | time.sleep(0.1)
151 | return cast(bytes, redis.get(key)).decode("utf-8")
152 |
153 |
154 | def set_nb(pipe):
155 | flags = fcntl(pipe, F_GETFL)
156 | fcntl(pipe, F_SETFL, flags | O_NONBLOCK)
157 |
158 |
159 | class FailedCommand(Exception):
160 | pass
161 |
162 |
163 | def run_process(key, cmd, cwd=None):
164 | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
165 | set_nb(p.stdout)
166 | set_nb(p.stderr)
167 | while True:
168 | try:
169 | assert p.stdout is not None
170 | out = read(p.stdout.fileno(), 1024)
171 | except BlockingIOError:
172 | out = b""
173 | try:
174 | assert p.stderr is not None
175 | err = read(p.stderr.fileno(), 1024)
176 | except BlockingIOError:
177 | err = b""
178 | handle_data(key, out)
179 | handle_data(key, err)
180 | if out == b"" and err == b"":
181 | if p.poll() is not None:
182 | break
183 | time.sleep(0.1)
184 | if p.poll() != 0:
185 | raise FailedCommand
186 |
187 |
188 | def trust_dokku_host():
189 | ssh_config_path = os.path.expanduser("~/.ssh/config")
190 | if not os.path.exists(ssh_config_path):
191 | ssh_config_dir = os.path.dirname(ssh_config_path)
192 | if not os.path.exists(ssh_config_dir):
193 | os.mkdir(ssh_config_dir)
194 | with open(ssh_config_path, "w") as f:
195 | f.write(f""""Host {settings.DOKKU_HOST}
196 | StrictHostKeyChecking no
197 | UserKnownHostsFile=/dev/null""")
198 |
199 |
200 | @app.task(bind=True)
201 | def deploy(self: Task, app_name: str, git_url: str, git_branch: str):
202 | models.TaskLog(
203 | task_id=self.request.id,
204 | when=datetime.now(tz=UTC),
205 | app=models.App.objects.get(name=app_name),
206 | description="Deploying %s" % app_name,
207 | ).save()
208 | key = task_key(self.request.id)
209 | app_repo_path = os.path.abspath(os.path.join("repos", app_name))
210 | if not os.path.exists(app_repo_path):
211 | redis.append(key, "== Cloning ==\n")
212 | run_process(key, ["git", "clone", git_url, app_repo_path])
213 | repo = Repo(app_repo_path)
214 | try:
215 | repo.remotes["dokku"]
216 | except IndexError:
217 | repo.create_remote(
218 | "dokku",
219 | "ssh://dokku@%s:%s/%s"
220 | % (settings.DOKKU_HOST, settings.DOKKU_SSH_PORT, app_name),
221 | )
222 | trust_dokku_host()
223 | redis.append(key, "== Pulling ==\n")
224 | run_process(key, ["git", "pull"], cwd=app_repo_path)
225 | redis.append(key, "== Pushing to Dokku ==\n")
226 | run_process(key, ["git", "push", "-f", "dokku", git_branch], cwd=app_repo_path)
227 |
228 |
229 | @app.task(bind=True)
230 | def check_ssh(self: Task) -> bool:
231 | trust_dokku_host()
232 | try:
233 | redis.set(SSH_WORKS_KEY, "")
234 | run_process(
235 | SSH_WORKS_KEY,
236 | [
237 | "ssh",
238 | "-p",
239 | str(settings.DOKKU_SSH_PORT),
240 | "-o",
241 | "PasswordAuthentication=no",
242 | f"dokku@{settings.DOKKU_HOST}",
243 | "version",
244 | ],
245 | )
246 | return True
247 | except FailedCommand:
248 | return False
249 |
--------------------------------------------------------------------------------
/apps/views.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import hmac
3 | import json
4 | import re
5 | from datetime import UTC, datetime
6 | from logging import getLogger
7 | from typing import Any, Sequence, cast
8 |
9 | import timeout_decorator
10 | from celery.result import AsyncResult
11 | from celery.states import FAILURE, PENDING, STARTED, SUCCESS, state
12 | from django.conf import settings
13 | from django.contrib import messages
14 | from django.core.cache import cache
15 | from django.http import (
16 | HttpRequest,
17 | HttpResponse,
18 | HttpResponseBadRequest,
19 | HttpResponseServerError,
20 | )
21 | from django.shortcuts import redirect, render
22 | from django.urls import reverse
23 | from django.views.decorators.csrf import csrf_exempt
24 | from packaging.version import Version
25 | from redis import StrictRedis
26 |
27 | import wharf.tasks as tasks
28 |
29 | from . import forms, models
30 |
31 | logger = getLogger(__name__)
32 |
33 | redis = StrictRedis.from_url(settings.CELERY_BROKER_URL)
34 | ansi_escape = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]")
35 |
36 |
37 | def run_cmd(cmd):
38 | res = tasks.run_ssh_command.delay(cmd)
39 | return res.get().strip()
40 |
41 |
42 | def cmd_key(cmd: str):
43 | return "cmd:%s" % cmd
44 |
45 |
46 | def run_cmd_with_cache(cmd: str):
47 | key = cmd_key(cmd)
48 | existing = cache.get(key)
49 | if existing:
50 | return existing
51 | res = run_cmd(cmd)
52 | cache.set(key, res, None)
53 | return res
54 |
55 |
56 | def clear_cache(cmd: str):
57 | key = cmd_key(cmd)
58 | cache.delete(key)
59 |
60 |
61 | def plugin_versions() -> dict[str, Version]:
62 | plugin_raw_list = run_cmd_with_cache("plugin:list")
63 | plugin_pattern = re.compile(r"([a-z\-_0-9]+?)\s+([\d\.]+)")
64 | plugin_groups = plugin_pattern.findall(plugin_raw_list)
65 | return dict([(k, Version(v)) for (k, v) in plugin_groups])
66 |
67 |
68 | def redirect_reverse(
69 | view_name: str,
70 | kwargs: dict[str, Any] | None = None,
71 | args: Sequence[Any] | None = None,
72 | ):
73 | new_url = reverse(view_name, kwargs=kwargs, args=args)
74 | logger.warning(f"New url is {new_url}")
75 | return redirect(new_url)
76 |
77 |
78 | def run_cmd_with_log(app_name, description, cmd, after):
79 | res = tasks.run_ssh_command.delay(cmd)
80 | if app_name is None: # global
81 | app_name = "_"
82 | else:
83 | models.TaskLog(
84 | task_id=res.id,
85 | when=datetime.now(tz=UTC),
86 | app=models.App.objects.get(name=app_name),
87 | description=description,
88 | ).save()
89 | return redirect_reverse(
90 | "wait_for_command",
91 | kwargs={"app_name": app_name, "task_id": res.id, "after": after},
92 | )
93 |
94 |
95 | def get_log(res: AsyncResult):
96 | if res.state > state(PENDING):
97 | key = tasks.task_key(res.id)
98 | raw = cast(bytes | None, redis.get(key))
99 | if raw is None:
100 | return ""
101 | return raw.decode("utf-8")
102 | else:
103 | return ""
104 |
105 |
106 | def wait_for_command(request: HttpRequest, app_name, task_id, after):
107 | res = AsyncResult(task_id)
108 | if app_name != "_":
109 | app = models.App.objects.get(name=app_name)
110 | task, created = models.TaskLog.objects.get_or_create(
111 | task_id=task_id, defaults={"app": app, "when": datetime.now(tz=UTC)}
112 | )
113 | description = task.description
114 | else:
115 | description = ""
116 | if res.state == state(SUCCESS):
117 | return redirect_reverse(
118 | after, kwargs={"app_name": app_name, "task_id": task_id}
119 | )
120 | log = ansi_escape.sub("", get_log(res))
121 | if res.state == state(FAILURE):
122 | log += str(res.traceback)
123 | return render(
124 | request,
125 | "command_wait.html",
126 | {
127 | "app": app_name,
128 | "task_id": task_id,
129 | "log": log,
130 | "state": res.state,
131 | "running": res.state in [state(PENDING), state(STARTED)],
132 | "description": description,
133 | },
134 | )
135 |
136 |
137 | def show_log(request: HttpRequest, task_id: str):
138 | res = AsyncResult(task_id)
139 | task = models.TaskLog.objects.get(task_id=task_id)
140 | log = ansi_escape.sub("", get_log(res))
141 | if res.state == state(FAILURE):
142 | log += str(res.traceback)
143 | return render(
144 | request,
145 | "command_wait.html",
146 | {
147 | "app": task.app.name,
148 | "task_id": task_id,
149 | "log": log,
150 | "state": res.state,
151 | "running": False,
152 | "description": task.description,
153 | },
154 | )
155 |
156 |
157 | def app_list():
158 | data = run_cmd_with_cache("apps:list")
159 | lines = data.split("\n")
160 | if lines[0] != "=====> My Apps":
161 | raise Exception(data)
162 | return lines[1:]
163 |
164 |
165 | public_key = ""
166 |
167 |
168 | def index(request: HttpRequest):
169 | global public_key
170 | if public_key == "":
171 | public_key = tasks.get_public_key.delay().get()
172 |
173 | if redis.get(tasks.SSH_WORKS_KEY) is None:
174 | ssh_works = tasks.check_ssh.delay().get()
175 | if not ssh_works:
176 | return render(request, "setup_key.html", {"key": public_key})
177 |
178 | try:
179 | apps = app_list()
180 | except Exception as e:
181 | if e.__class__.__name__ in [
182 | "AuthenticationException"
183 | ]: # Can't use class directly as Celery mangles things
184 | return render(request, "setup_key.html", {"key": public_key})
185 | else:
186 | raise
187 | if request.method == "POST":
188 | app_form = forms.CreateAppForm(request.POST)
189 | if app_form.is_valid():
190 | return create_app(app_form.cleaned_data["name"])
191 | else:
192 | app_form = forms.CreateAppForm()
193 | config_form = forms.ConfigForm()
194 | config = global_config()
195 | return render(
196 | request,
197 | "list_apps.html",
198 | {
199 | "apps": apps,
200 | "app_form": app_form,
201 | "config_form": config_form,
202 | "config": sorted(config.items()),
203 | },
204 | )
205 |
206 |
207 | def refresh_all(request: HttpRequest):
208 | cache.clear()
209 | return redirect_reverse("index")
210 |
211 |
212 | def refresh(request: HttpRequest, app_name: str):
213 | key_patterns = [
214 | "config:show %s",
215 | "postgres:info %s",
216 | "redis:info %s",
217 | "ps:report %s",
218 | "domains:report %s",
219 | ]
220 | keys = [cmd_key(k % app_name) for k in key_patterns]
221 | lc = letsencrypt_command()
222 | if lc is not None:
223 | keys.append(cmd_key(lc))
224 | cache.delete_many(keys)
225 | return redirect_reverse("app_info", args=[app_name])
226 |
227 |
228 | def generic_config(app_name: str, data: str) -> dict[str, Any]:
229 | if "does not exist" in data:
230 | return {}
231 | lines = data.split("\n")
232 | if lines[0] != "=====> %s env vars" % app_name:
233 | raise Exception(data)
234 | config = {}
235 | for line in lines[1:]:
236 | (name, value) = line.split(":", 1)
237 | config[name] = value.lstrip()
238 | return config
239 |
240 |
241 | def app_config(app_name):
242 | data = run_cmd_with_cache("config:show %s" % app_name)
243 | return generic_config(app_name, data)
244 |
245 |
246 | def global_config():
247 | data = run_cmd_with_cache("config:show --global")
248 | return generic_config("global", data)
249 |
250 |
251 | def app_config_set(app, key, value):
252 | return run_cmd_with_log(
253 | app,
254 | "Setting %s" % key,
255 | "config:set %s %s=%s" % (app, key, value),
256 | "check_app_config_set",
257 | )
258 |
259 |
260 | def check_config_set(request: HttpRequest, task_id: str):
261 | res = AsyncResult(task_id)
262 | data = get_log(res)
263 | lines = data.split("\n")
264 | if lines[0] != "-----> Setting config vars":
265 | raise Exception(data)
266 | messages.success(request, "Config updated")
267 |
268 |
269 | def check_app_config_set(request: HttpRequest, app_name, task_id: str):
270 | check_config_set(request, task_id)
271 | clear_cache("config:show %s" % app_name)
272 | return redirect_reverse("app_info", args=[app_name])
273 |
274 |
275 | def app_config_delete(request, app):
276 | key = request.POST["key"]
277 | return run_cmd_with_log(
278 | app,
279 | "Removing %s" % key,
280 | "config:unset %s %s" % (app, key),
281 | "check_app_config_delete",
282 | )
283 |
284 |
285 | def check_app_config_delete(request: HttpRequest, app_name, task_id: str):
286 | res = AsyncResult(task_id)
287 | data = get_log(res)
288 | lines = data.split("\n")
289 | if "Unsetting" not in lines[0]:
290 | raise Exception(data)
291 | messages.success(request, "Config updated")
292 | clear_cache("config:show %s" % app_name)
293 | return redirect_reverse("app_info", args=[app_name])
294 |
295 |
296 | def global_config_set(request):
297 | form = forms.ConfigForm(request.POST)
298 | if form.is_valid():
299 | return run_cmd_with_log(
300 | None,
301 | "Setting %s" % form.cleaned_data["key"],
302 | "config:set --global %s=%s"
303 | % (form.cleaned_data["key"], form.cleaned_data["value"]),
304 | "check_global_config_set",
305 | )
306 | else:
307 | raise Exception
308 |
309 |
310 | def check_global_config_set(request: HttpRequest, task_id: str):
311 | check_config_set(request, task_id)
312 | clear_cache("config --global")
313 | return redirect_reverse("index")
314 |
315 |
316 | def generic_list(app_name, data, name_field: str, field_names: list[str]):
317 | lines = data.split("\n")
318 | if lines[0].find("is not a dokku command") != -1:
319 | raise Exception("Neeed plugin!")
320 | if lines[0].find("does") != -1:
321 | return None
322 | fields = dict([(x, {}) for x in field_names])
323 | last_field: str | None = None
324 | for f in fields.keys():
325 | index = lines[0].find(f)
326 | if index == -1:
327 | raise Exception("Can't find '%s' in '%s'" % (f, lines[0].strip()))
328 | if f == name_field:
329 | index = 0
330 | fields[f]["start"] = index
331 | if last_field is not None:
332 | fields[last_field]["end"] = index
333 | last_field = f
334 | assert last_field is not None
335 | fields[last_field]["end"] = None
336 | results = []
337 | for line in lines[1:]:
338 | info = {}
339 | for f in fields.keys():
340 | if fields[f]["end"] is None:
341 | info[f] = line[fields[f]["start"] :].strip()
342 | else:
343 | info[f] = line[fields[f]["start"] : fields[f]["end"]].strip()
344 | results.append(info)
345 | results = dict([[x[name_field], x] for x in results])
346 | if app_name in results:
347 | return results[app_name]
348 | else:
349 | return None
350 |
351 |
352 | def generic_info(data: str):
353 | lines = data.split("\n")
354 | if lines[0].find("is not a dokku command") != -1:
355 | raise Exception("Neeed plugin!")
356 | if lines[0].find("does not exist") != -1:
357 | return None
358 | results = {}
359 | for line in lines[1:]:
360 | key, value = line.split(":", 1)
361 | key = key.strip()
362 | value = value.strip()
363 | results[key] = value
364 | return results
365 |
366 |
367 | def db_info(cache_key: str):
368 | data = run_cmd_with_cache(cache_key)
369 | try:
370 | return generic_info(data)
371 | except Exception:
372 | clear_cache(cache_key)
373 | raise
374 |
375 |
376 | def postgres_info(app_name: str):
377 | cache_key = "postgres:info %s" % app_name
378 | return db_info(cache_key)
379 |
380 |
381 | def redis_info(app_name: str):
382 | cache_key = "redis:info %s" % app_name
383 | return db_info(cache_key)
384 |
385 |
386 | def letsencrypt_command():
387 | version = plugin_versions().get("letsencrypt")
388 | if version is None:
389 | return None
390 | if version <= Version("0.9.4"):
391 | return "letsencrypt:ls"
392 | else:
393 | return "letsencrypt:list"
394 |
395 |
396 | def letsencrypt(app_name: str):
397 | cmd = letsencrypt_command()
398 | assert cmd is not None
399 | data = run_cmd_with_cache(cmd)
400 | return generic_list(
401 | app_name,
402 | data,
403 | "App name",
404 | ["App name", "Certificate Expiry", "Time before expiry", "Time before renewal"],
405 | )
406 |
407 |
408 | def process_info(app_name):
409 | data = run_cmd_with_cache("ps:report %s" % app_name)
410 | if "does not exist" in data:
411 | return {}
412 | lines = data.split("\n")
413 | if lines[0].find("exit status") != -1:
414 | lines = lines[1:]
415 | if lines[0].find("No such object") != -1:
416 | lines = lines[1:]
417 | if (
418 | lines[0].find("%s process information" % app_name) == -1
419 | and lines[0].find("%s ps information" % app_name) == -1
420 | ): # Different versions
421 | raise Exception(lines)
422 | results = {}
423 | processes = {}
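    |     # matches status lines such as "Status web 1: running (CID: d536b673b49)"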
424 | process_re = re.compile(r"Status\s+(\S+ \d+):\s+(\S+) \(CID: [a-z0-9]+\)")
425 | for line in lines[1:]:
426 | if line.strip().startswith("Status "):
427 | matches = process_re.search(line)
428 | if matches is None:
429 | raise Exception(line)
430 | matches = matches.groups()
431 | processes[matches[0]] = matches[1]
432 | else:
433 | (name, rest) = line.split(":", 1)
434 | results[name.strip()] = rest.strip()
435 | results["processes"] = processes
436 | return results
437 |
438 |
439 | def domains_list(app_name: str) -> list[str]:
440 | data = run_cmd_with_cache("domains:report %s" % app_name)
441 | if "does not exist" in data:
442 | return []
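    |     # the report contains a line like "Domains app vhosts: test_app.vagrant"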
443 | vhosts = re.search("Domains app vhosts: (.*)", data)
444 | assert vhosts is not None
445 | return [x.strip() for x in vhosts.groups()[0].split(" ") if x != ""]
446 |
447 |
448 | def add_domain(request: HttpRequest, app_name: str):
449 | form = forms.CreateDomainForm(request.POST)
450 | if form.is_valid():
451 | commands = ["domains:add %s %s" % (app_name, form.cleaned_data["name"])]
452 | if letsencrypt(app_name) is not None:
453 | commands.append("letsencrypt:enable %s" % app_name)
454 | return run_cmd_with_log(
455 | app_name,
456 | "Add domain %s" % form.cleaned_data["name"],
457 | commands,
458 | "check_domain",
459 | )
460 | else:
461 |         raise Exception(form.errors)
462 |
463 |
464 | def check_domain(request: HttpRequest, app_name, task_id: str):
465 | res = AsyncResult(task_id)
466 | data = get_log(res)
467 | if data.find("Reloading nginx") != -1:
468 | clear_cache("domains:report %s" % app_name)
469 | messages.success(request, "Added domain name to %s" % app_name)
470 | return redirect_reverse("app_info", args=[app_name])
471 | else:
472 | raise Exception(data)
473 |
474 |
475 | def remove_domain(request: HttpRequest, app_name):
476 | name = request.POST["name"]
477 | commands = ["domains:remove %s %s" % (app_name, name)]
478 | if letsencrypt(app_name) is not None:
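    |         # presumably the bare "letsencrypt <app>" form (the old-style enable
    |         # command) refreshes the certificate for the remaining domains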
479 | commands.append("letsencrypt %s" % app_name)
480 | return run_cmd_with_log(
481 | app_name, "Remove domain %s" % name, commands, "check_domain"
482 | )
483 |
484 |
485 | def app_info(request: HttpRequest, app_name):
486 | app, _ = models.App.objects.get_or_create(name=app_name)
487 | config = app_config(app_name)
488 | if "GITHUB_URL" in config:
489 | app.github_url = config["GITHUB_URL"]
490 | app.save()
491 | if request.method == "POST":
492 | form = forms.ConfigForm(request.POST)
493 | if form.is_valid():
494 | return app_config_set(
495 | app_name, form.cleaned_data["key"], form.cleaned_data["value"]
496 | )
497 | else:
498 | form = forms.ConfigForm()
499 | return render(
500 | request,
501 | "app_info.html",
502 | {
503 | "postgres": postgres_info(app_name),
504 | "redis": redis_info(app_name),
505 | "letsencrypt": letsencrypt(app_name),
506 | "process": process_info(app_name),
507 | "logs": ansi_escape.sub("", run_cmd("logs %s --num 100" % app_name)),
508 | "domains": domains_list(app_name),
509 | "domain_form": forms.CreateDomainForm(),
510 | "letsencrypt_form": forms.SetupLetsEncrypt(),
511 | "form": form,
512 | "app": app_name,
513 | "git_url": config.get("GITHUB_URL", None),
514 | "config": sorted(config.items()),
515 | "task_logs": models.TaskLog.objects.filter(app=app).order_by("-when")[0:10],
516 | },
517 | )
518 |
519 |
520 | def deploy(request: HttpRequest, app_name):
521 | if request.POST["action"] == "deploy":
522 | config = app_config(app_name)
523 | res = tasks.deploy.delay(
524 | app_name, request.POST["url"], config.get("GIT_BRANCH", "master")
525 | )
526 | clear_cache("config:show %s" % app_name)
527 | clear_cache("domains:report %s" % app_name)
528 | clear_cache("ps:report %s" % app_name)
529 | return redirect_reverse(
530 | "wait_for_command",
531 | kwargs={"app_name": app_name, "task_id": res.id, "after": "check_deploy"},
532 | )
533 | elif request.POST["action"] == "rebuild":
534 | return run_cmd_with_log(
535 | app_name, "Rebuilding", "ps:rebuild %s" % app_name, "check_rebuild"
536 | )
537 | else:
538 | raise Exception(request.POST["action"])
539 |
540 |
541 | def create_postgres(request: HttpRequest, app_name):
542 | return run_cmd_with_log(
543 | app_name,
544 | "Add Postgres",
545 | ["postgres:create %s" % app_name, "postgres:link %s %s" % (app_name, app_name)],
546 | "check_postgres",
547 | )
548 |
549 |
550 | def remove_postgres(request: HttpRequest, app_name):
551 | return run_cmd_with_log(
552 | app_name,
553 | "Remove Postgres",
554 | [
555 | "postgres:unlink %s %s" % (app_name, app_name),
556 | "postgres:destroy %s --force" % app_name,
557 | ],
558 | "check_remove_postgres",
559 | )
560 |
561 |
562 | def create_redis(request: HttpRequest, app_name):
563 | return run_cmd_with_log(
564 | app_name,
565 | "Add Redis",
566 | ["redis:create %s" % app_name, "redis:link %s %s" % (app_name, app_name)],
567 | "check_redis",
568 | )
569 |
570 |
571 | def remove_redis(request: HttpRequest, app_name):
572 | return run_cmd_with_log(
573 | app_name,
574 | "Remove Redis",
575 | [
576 | "redis:unlink %s %s" % (app_name, app_name),
577 | "redis:destroy %s --force" % app_name,
578 | ],
579 | "check_remove_redis",
580 | )
581 |
582 |
583 | def check_deploy(request: HttpRequest, app_name, task_id: str):
584 | clear_cache("config:show %s" % app_name)
585 | messages.success(request, "%s redeployed" % app_name)
586 | return redirect_reverse("app_info", args=[app_name])
587 |
588 |
589 | def check_rebuild(request: HttpRequest, app_name, task_id: str):
590 | res = AsyncResult(task_id)
591 | data = get_log(res)
592 | if data.find("Application deployed:") == -1:
593 | raise Exception(data)
594 | messages.success(request, "%s rebuilt" % app_name)
595 | clear_cache("config:show %s" % app_name)
596 | return redirect_reverse("app_info", args=[app_name])
597 |
598 |
599 | def check_postgres(request: HttpRequest, app_name, task_id: str):
600 | res = AsyncResult(task_id)
601 | data = get_log(res)
602 | if data.find("Postgres container created") == -1:
603 | raise Exception(data)
604 | messages.success(request, "Postgres added to %s" % app_name)
605 | clear_cache("postgres:info %s" % app_name)
606 | clear_cache("config:show %s" % app_name)
607 | return redirect_reverse("app_info", args=[app_name])
608 |
609 |
610 | def check_remove_postgres(request: HttpRequest, app_name, task_id: str):
611 | res = AsyncResult(task_id)
612 | data = get_log(res)
613 | if data.find("Postgres container deleted: %s" % app_name) == -1:
614 | raise Exception(data)
615 | messages.success(request, "Postgres removed from %s" % app_name)
616 | clear_cache("postgres:info %s" % app_name)
617 | clear_cache("config:show %s" % app_name)
618 | return redirect_reverse("app_info", args=[app_name])
619 |
620 |
621 | def check_redis(request: HttpRequest, app_name, task_id: str):
622 | res = AsyncResult(task_id)
623 | data = get_log(res)
624 | if data.find("Redis container created") == -1:
625 | raise Exception(data)
626 | messages.success(request, "Redis added to %s" % app_name)
627 | clear_cache("redis:info %s" % app_name)
628 | clear_cache("config:show %s" % app_name)
629 | return redirect_reverse("app_info", args=[app_name])
630 |
631 |
632 | def check_remove_redis(request: HttpRequest, app_name, task_id: str):
633 | res = AsyncResult(task_id)
634 | data = get_log(res)
635 | if data.find("Redis container deleted: %s" % app_name) == -1:
636 | raise Exception(data)
637 | messages.success(request, "Redis removed from %s" % app_name)
638 | clear_cache("redis:info %s" % app_name)
639 | clear_cache("config:show %s" % app_name)
640 | return redirect_reverse("app_info", args=[app_name])
641 |
642 |
643 | def create_app(app_name: str):
644 | if models.App.objects.filter(name=app_name).exists():
645 | return HttpResponseBadRequest(f"You already have an app called '{app_name}'")
646 | models.App(name=app_name).save()
647 | return run_cmd_with_log(
648 | app_name, "Add app %s" % app_name, "apps:create %s" % app_name, "check_app"
649 | )
650 |
651 |
652 | def check_app(request: HttpRequest, app_name: str, task_id: str):
653 | res = AsyncResult(task_id)
654 | data = get_log(res)
655 | if data.find("Creating %s..." % app_name) == -1:
656 | raise Exception(data)
657 | messages.success(request, "Created %s" % app_name)
658 | clear_cache("apps:list")
659 | return redirect_reverse("app_info", args=[app_name])
660 |
661 |
662 | def setup_letsencrypt(request: HttpRequest, app_name: str):
663 | form = forms.SetupLetsEncrypt(request.POST)
664 | if form.is_valid():
665 | commands = [
666 | f"letsencrypt:set {app_name} email %s" % form.cleaned_data["email"],
667 | f"letsencrypt:enable {app_name}",
668 | ]
669 | return run_cmd_with_log(
670 | app_name,
671 | "Enable Let's Encrypt",
672 | commands,
673 | "check_letsencrypt",
674 | )
675 | else:
676 | raise Exception(form.errors)
677 |
678 |
679 | def remove_letsencrypt(request: HttpRequest, app_name):
680 | return run_cmd_with_log(
681 | app_name,
682 | "Remove Letsencrypt",
683 | [
684 | f"letsencrypt:disable {app_name} --force",
685 | ],
686 | "check_remove_letsencrypt",
687 | )
688 |
689 |
690 | def check_letsencrypt(request: HttpRequest, app_name: str, task_id: str):
691 | res = AsyncResult(task_id)
692 | app = models.App.objects.get(name=app_name)
693 | task, _created = models.TaskLog.objects.get_or_create(
694 | task_id=task_id, defaults={"app": app, "when": datetime.now(tz=UTC)}
695 | )
696 | log = get_log(res)
697 | if log.find("Certificate retrieved successfully") != -1:
698 | cmd = letsencrypt_command()
699 | assert cmd is not None
700 | clear_cache(cmd)
701 | return redirect_reverse("app_info", args=[app_name])
702 | else:
703 | return render(
704 | request,
705 | "command_wait.html",
706 | {
707 | "app": app_name,
708 | "task_id": task_id,
709 | "log": log,
710 | "state": res.state,
711 | "running": res.state in [state(PENDING), state(STARTED)],
712 | "description": task.description,
713 | },
714 | )
715 |
716 |
717 | def check_remove_letsencrypt(request: HttpRequest, app_name: str, task_id: str):
718 | res = AsyncResult(task_id)
719 | app = models.App.objects.get(name=app_name)
720 | task, _created = models.TaskLog.objects.get_or_create(
721 | task_id=task_id, defaults={"app": app, "when": datetime.now(tz=UTC)}
722 | )
723 | log = get_log(res)
724 | if log.find(f"Removing letsencrypt files for {app_name}") != -1:
725 | cmd = letsencrypt_command()
726 | assert cmd is not None
727 | clear_cache(cmd)
728 | return redirect_reverse("app_info", args=[app_name])
729 | else:
730 | return render(
731 | request,
732 | "command_wait.html",
733 | {
734 | "app": app_name,
735 | "task_id": task_id,
736 | "log": log,
737 | "state": res.state,
738 | "running": res.state in [state(PENDING), state(STARTED)],
739 | "description": task.description,
740 | },
741 | )
742 |
743 |
744 | @csrf_exempt
745 | def github_webhook(request: HttpRequest):
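    |     # Verify the X-Hub-Signature HMAC-SHA1 header against GITHUB_SECRET,
    |     # answer GitHub's initial ping event, and kick off a deploy for pushes
    |     # to the repository's default branch.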
746 | secret = settings.GITHUB_SECRET.encode("utf-8")
747 | hash = "sha1=%s" % hmac.new(secret, request.body, hashlib.sha1).hexdigest()
748 | if "HTTP_X_HUB_SIGNATURE" not in request.META:
749 | return HttpResponseBadRequest("No X-Hub-Signature header")
750 | header = request.META["HTTP_X_HUB_SIGNATURE"]
751 |     if not hmac.compare_digest(header, digest):
752 |         return HttpResponseBadRequest("%s doesn't equal %s" % (digest, header))
753 | data = json.loads(request.read())
754 | if "hook_id" in data: # assume Ping
755 | if "push" not in data["hook"]["events"]:
756 | return HttpResponseBadRequest("No Push event set!")
757 | return HttpResponse("All good")
758 | default_ref = "refs/heads/%s" % data["repository"]["default_branch"]
759 | if data["ref"] != default_ref:
760 | return HttpResponse(
761 | "Push to non-default branch (saw %s, expected %s)"
762 | % (data["ref"], default_ref)
763 | )
764 | clone_url = data["repository"]["clone_url"]
765 | apps = models.App.objects.filter(github_url=clone_url)
766 | if not apps.exists():
767 | return HttpResponseBadRequest(
768 | "Can't find an entry for clone URL %s" % clone_url
769 | )
770 | app = apps.first()
771 | assert app is not None
772 | config = app_config(app.name)
773 | res = tasks.deploy.delay(app.name, clone_url, config.get("GIT_BRANCH", "master"))
774 | clear_cache("config:show %s" % app.name)
775 | return HttpResponse(
776 | "Running deploy. Deploy log is at %s"
777 | % request.build_absolute_uri(reverse("show_log", kwargs={"task_id": res.id}))
778 | )
779 |
780 |
781 | @timeout_decorator.timeout(5, use_signals=False)
782 | def check_status():
783 | # Clearing the cache and then trying a command makes sure that
784 | # - The cache is up
785 | # - Celery is up
786 | # - We can run dokku commands
787 | clear_cache("config --global")
788 | run_cmd_with_cache("config --global")
789 |
790 |
791 | def status(request: HttpRequest):
792 | try:
793 | check_status()
794 | return HttpResponse("All good")
795 | except timeout_decorator.TimeoutError:
796 | return HttpResponseServerError("Timeout trying to get status")
797 |
--------------------------------------------------------------------------------
/tests/test_views.py:
--------------------------------------------------------------------------------
1 | import re
2 | import uuid
3 | from typing import Any, Callable, cast
4 | from unittest.mock import MagicMock, Mock, patch
5 |
6 | import pytest
7 | from celery.result import AsyncResult
8 | from celery.states import SUCCESS, state
9 | from django.conf import LazySettings
10 | from django.core.cache import cache
11 | from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
12 | from django.test import Client
13 | from model_bakery import baker
14 | from redis import StrictRedis
15 |
16 | from apps import models
17 | from apps.views import (
18 | app_config_delete,
19 | app_info,
20 | app_list,
21 | check_app,
22 | check_app_config_delete,
23 | check_letsencrypt,
24 | check_postgres,
25 | check_redis,
26 | check_remove_letsencrypt,
27 | check_remove_postgres,
28 | check_remove_redis,
29 | create_app,
30 | create_postgres,
31 | create_redis,
32 | global_config,
33 | index,
34 | letsencrypt,
35 | process_info,
36 | refresh,
37 | refresh_all,
38 | remove_letsencrypt,
39 | remove_postgres,
40 | remove_redis,
41 | setup_letsencrypt,
42 | )
43 | from tests.recording_cache import RecordingCache
44 |
45 |
46 | class MockCelery:
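    |     # Minimal stand-in for the result handle returned by Celery's delay():
    |     # exposes an .id and a .get() that hands back the canned output.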
47 | def __init__(self, res: object):
48 | self.res = res
49 | self.id = uuid.uuid4()
50 |
51 | def get(self):
52 | return self.res
53 |
54 |
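    | # Canned dokku output, keyed by the args tuple passed to run_ssh_command.delay.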
55 | commands = {
56 | ("apps:list",): """=====> My Apps
57 | wharf""",
58 | ("config:show test_app",): """=====> test_app env vars
59 | DOKKU_APP_RESTORE: 1
60 | DOKKU_APP_TYPE: dockerfile
61 | DOKKU_PROXY_PORT: 80""",
62 | ("config:show missing",): " ! App missing does not exist",
63 | ("postgres:list",): """=====> Postgres services
64 | wharf""",
65 | ("postgres:info test_app",): """=====> test_app postgres service information
66 | Config dir: /var/lib/dokku/services/postgres/test_app/data
67 | Config options:
68 | Data dir: /var/lib/dokku/services/postgres/test_app/data
69 | Dsn: postgres://postgres:aa23a509ff7443011ebfa49e3c3a582a@dokku-postgres-test_app:5432/test_app
70 | Exposed ports: -
71 | Id: 3a07c995d32e13766d3ebc44d040391f434e234d3d9c6021410eff4a130af656
72 | Internal ip: 172.17.0.3
73 | Initial network:
74 | Links: wharf
75 | Post create network:
76 | Post start network:
77 | Service root: /var/lib/dokku/services/postgres/test_app
78 | Status: running
79 | Version: postgres:17.4""",
80 | ("redis:info test_app",): """=====> test_app redis service information
81 | Config dir: /var/lib/dokku/services/redis/test_app/config
82 | Config options:
83 | Data dir: /var/lib/dokku/services/redis/test_app/data
84 | Dsn: redis://:6654f1fd4527260516b99ea515f5d283e9ab887822f7e3c9d5d37ac4815b73d2@dokku-redis-wharf:6379
85 | Exposed ports: -
86 | Id: 12d11c44ecb0f75175ab4c15a853d9b2801d1349f5ff16610dadf154184256d7
87 | Internal ip: 172.17.0.2
88 | Initial network:
89 | Links: test_app
90 | Post create network:
91 | Post start network:
92 | Service root: /var/lib/dokku/services/redis/test_app
93 | Status: running
94 | Version: redis:7.4.2""",
95 | ("ps:report test_app",): """=====> test_app ps information
96 | Deployed: true
97 | Processes: 2
98 | Ps can scale: true
99 | Ps computed procfile path: Procfile
100 | Ps global procfile path: Procfile
101 | Ps procfile path:
102 | Ps restart policy: on-failure:10
103 | Restore: true
104 | Running: true
105 | Status celery 1: running (CID: 68b2897a761)
106 | Status web 1: running (CID: d536b673b49)""",
107 | (
108 | "logs test_app --num 100",
109 | ): """2025-04-25T23:07:53.894820268Z app[celery.1]: System check identified some issues:
110 | 2025-04-25T23:07:53.895026545Z app[celery.1]:
111 | 2025-04-25T23:07:53.895030874Z app[celery.1]: WARNINGS:""",
112 | ("domains:report test_app",): """"=====> test_app domains information
113 | Domains app enabled: true
114 | Domains app vhosts: test_app.vagrant
115 | Domains global enabled: true
116 | Domains global vhosts: vagrant""",
117 | ("postgres:info missing",): " ! Postgres service missing does not exist",
118 | ("redis:info missing",): " ! Redis service missing does not exist",
119 | (
120 | "letsencrypt:ls",
121 | ): "-----> App name Certificate Expiry Time before expiry Time before renewal",
122 | (
123 | "letsencrypt:list",
124 | ): "-----> App name Certificate Expiry Time before expiry Time before renewal",
125 | ("ps:report missing",): " ! App missing does not exist",
126 | ("logs missing --num 100",): " ! App missing does not exist",
127 | ("domains:report missing",): " ! App missing does not exist",
128 | (
129 | "plugin:list",
130 | ): """ letsencrypt 0.9.4 enabled Automated installation of let's encrypt TLS certificates
131 | logs 0.35.18 enabled dokku core logs plugin
132 | network 0.35.18 enabled dokku core network plugin""",
133 | ("apps:create foo",): "",
134 | ("config:show --global",): """=====> global env vars
135 | CURL_CONNECT_TIMEOUT: 90
136 | CURL_TIMEOUT: 600""",
137 | ("postgres:create foo",): """Waiting for container to be ready
138 | Creating container database
139 | Securing connection to database
140 | =====> Postgres container created: foo""",
141 | ("postgres:link foo foo",): """-----> Setting config vars
142 | DATABASE_URL: postgres://postgres:2a871f1589b4519719428602980939bb@dokku-postgres-foo:5432/foo""",
143 | ("postgres:unlink foo foo",): "-----> Unsetting DATABASE_URL",
144 | ("postgres:destroy foo --force",): """=====> Pausing container
145 | Container paused
146 | Removing container
147 | Removing data
148 | =====> Postgres container deleted: foo""",
149 | (
150 | "ps:report non-running-app",
151 | ): """No such object: cdbd631f11431826fb7ccfd257921e6b0ac1e6fc7986948e44e0d49609e11123
152 | =====> non-running-app ps information
153 | Deployed: true
154 | Processes: 1
155 | Ps can scale: true
156 | Ps computed procfile path: Procfile
157 | Ps global procfile path: Procfile
158 | Ps procfile path:
159 | Ps restart policy: on-failure:10
160 | Restore: true
161 | Running: false
162 | Status web 1: missing (CID: cdbd631f114)""",
163 | ("redis:create foo",): """ Waiting for container to be ready
164 | =====> Redis container created: foo
165 | =====> foo redis service information
166 | Config dir: /var/lib/dokku/services/redis/foo/config
167 | Config options:
168 | Data dir: /var/lib/dokku/services/redis/foo/data
169 | Dsn: redis://:584fb7aa7ca03acda2bc8c81c056ac81e0ec59d10efec8137cfcf893854f5570@dokku-redis-foo:6379
170 | Exposed ports: -
171 | Id: 37f96e39797e4a731d750a19ae0e3255fb84fede13d1e704ae61d18ac037e4ad
172 | Internal ip: 172.17.0.4
173 | Initial network:
174 | Links: -
175 | Post create network:
176 | Post start network:
177 | Service root: /var/lib/dokku/services/redis/foo
178 | Status: running
179 | Version: redis:8.4.0""",
180 | ("redis:link foo foo",): """----> Setting config vars
181 | REDIS_URL: redis://:584fb7aa7ca03acda2bc8c81c056ac81e0ec59d10efec8137cfcf893854f5570@dokku-redis-foo:6379""",
182 | ("redis:unlink foo foo",): """-----> Unsetting REDIS_URL""",
183 | ("redis:destroy foo --force",): """=====> Deleting foo
184 | =====> Pausing container
185 | Container paused
186 | Removing container
187 | Removing data
188 | =====> Redis container deleted: foo""",
189 | (
190 | "letsencrypt:set test_app email foo@bar.com",
191 | ): "=====> Setting email to foo@bar.com",
192 | ("letsencrypt:enable test_app",): """=====> Enabling letsencrypt for test_app
193 | -----> Enabling ACME proxy for test_app...
194 | -----> Getting letsencrypt certificate for test_app via HTTP-01
195 | - Domain 'test_app.vagrant'
196 | 2025/12/20 21:38:20 No key found for account foo@bar.com. Generating a P256 key.
197 | 2025/12/20 21:38:20 Saved key to /certs/accounts/acme-v02.api.letsencrypt.org/foo@bar.com/keys/foo@bar.com.key
198 | 2025/12/20 21:38:20 [INFO] acme: Registering account for foo@bar.com
199 | 2025/12/20 21:38:20 [INFO] [test_app.vagrant] acme: Obtaining bundled SAN certificate
200 | !!!! HEADS UP !!!!
201 |
202 | Your account credentials have been saved in your Let's Encrypt
203 | configuration directory at "/certs/accounts".
204 |
205 | You should make a secure backup of this folder now. This
206 | configuration directory will also contain certificates and
207 | private keys obtained from Let's Encrypt so making regular
208 | backups of this folder is ideal.
209 | 2025/12/20 21:38:21 [INFO] [test_app.vagrant] AuthURL: https://acme-v02.api.letsencrypt.org/acme/authz/12345/6789
210 | 2025/12/20 21:38:21 [INFO] [test_app.vagrant] acme: Could not find solver for: tls-alpn-01
211 | 2025/12/20 21:38:21 [INFO] [test_app.vagrant] acme: use http-01 solver
212 | 2025/12/20 21:38:21 [INFO] [test_app.vagrant] acme: Trying to solve HTTP-01
213 | 2025/12/20 21:38:29 [INFO] [test_app.vagrant] The server validated our request
214 | 2025/12/20 21:38:29 [INFO] [test_app.vagrant] acme: Validations succeeded; requesting certificates
215 | 2025/12/20 21:38:29 [INFO] [test_app.vagrant] Server responded with a certificate.
216 | -----> Certificate retrieved successfully.
217 | -----> Installing let's encrypt certificates
218 | -----> Unsetting DOKKU_PROXY_PORT
219 | -----> Setting config vars
220 | DOKKU_PROXY_PORT_MAP: http:80:5000
221 | -----> Setting config vars
222 | DOKKU_PROXY_PORT_MAP: http:80:5000 https:443:5000
223 | -----> Configuring test_app.vagrant...(using built-in template)
224 | -----> Creating https nginx.conf
225 | Enabling HSTS
226 | Reloading nginx
227 | -----> Ensuring network configuration is in sync for test_app
228 | -----> Configuring test_app.vagrant...(using built-in template)
229 | -----> Creating https nginx.conf
230 | Enabling HSTS
231 | Reloading nginx
232 | -----> Disabling ACME proxy for test_app...
233 | -----> Done""",
234 | ("letsencrypt:disable test_app --force",): """-----> Disabling letsencrypt for app
235 | Removing letsencrypt files for test_app
236 | Removing SSL endpoint from test_app
237 | -----> Unsetting DOKKU_PROXY_SSL_PORT
238 | -----> Setting config vars
239 | DOKKU_PROXY_PORT_MAP: http:80:5000
240 | -----> Configuring test_app.vagrant...(using built-in template)
241 | -----> Creating http nginx.conf
242 | Reloading nginx
243 | -----> Done""",
244 | ("config:unset test_app FOO_KEY",): """-----> Unsetting FOO_KEY""",
245 | }
246 |
247 |
248 | def custom_mock_commands(override_commands: dict[Any, str]) -> Callable:
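    |     # Build a side_effect for the patched delay(): per-test overrides win
    |     # over the shared table above, and a list argument runs each command in
    |     # turn and joins the logs, mirroring multi-command tasks.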
249 | def _internal(*args):
250 | if type(args[0]) is list:
251 | all_celerys = [_internal(x) for x in args[0]]
252 | return MockCelery("\n".join([c.res for c in all_celerys]))
253 | if args in override_commands:
254 | return MockCelery(override_commands[args])
255 | if args in commands:
256 | return MockCelery(commands[args])
257 | print(args)
258 | raise Exception(args)
259 |
260 | return _internal
261 |
262 |
263 | mock_commands = custom_mock_commands({})
264 |
265 |
266 | @pytest.fixture
267 | def mock_request() -> HttpRequest:
268 | mr = MagicMock(spec=HttpRequest)
269 | mr.META = MagicMock()
270 | mr._messages = MagicMock()
271 | mr.method = MagicMock()
272 | return mr
273 |
274 |
275 | @pytest.fixture(autouse=True)
276 | def disable_cache(
277 | monkeypatch: pytest.MonkeyPatch,
278 | recording_cache: RecordingCache,
279 | ):
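    |     # make cache.set a no-op so every view call goes through the mocked delay()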
280 | monkeypatch.setattr(cache, "set", lambda _key, _value, _timeout: None)
281 |
282 |
283 | @pytest.fixture(autouse=True)
284 | def patch_csrf_token(monkeypatch: pytest.MonkeyPatch):
285 | monkeypatch.setattr(
286 | "django.middleware.csrf.get_token", Mock(return_value="predictabletoken")
287 | )
288 | yield
289 |
290 |
291 | @patch("wharf.tasks.run_ssh_command.delay")
292 | def test_app_list(patched_delay: MagicMock):
293 | patched_delay.side_effect = mock_commands
294 | assert app_list() == ["wharf"]
295 |
296 |
297 | def finished_log(monkeypatch: pytest.MonkeyPatch, contents: str):
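    |     # Simulate a finished Celery task: force the state to SUCCESS and have
    |     # Redis return the given contents as the task's log.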
298 | monkeypatch.setattr(AsyncResult, "state", state(SUCCESS))
299 | monkeypatch.setattr(
300 | StrictRedis,
301 | "get",
302 | lambda _self, _key: contents.encode("utf-8"),
303 | )
304 |
305 |
306 | @patch("wharf.tasks.run_ssh_command.delay")
307 | def test_check_app(
308 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch
309 | ):
310 | finished_log(
311 | monkeypatch,
312 | """Creating test_app...
313 | -----> Creating new app virtual host file...""",
314 | )
315 |
316 | patched_delay.side_effect = mock_commands
317 | resp = check_app(mock_request, "test_app", "1234")
318 | assert resp.status_code == 302, resp
319 | assert resp.url == "/apps/test_app"
320 |
321 |
322 | @pytest.mark.django_db
323 | @patch("wharf.tasks.run_ssh_command.delay")
324 | def test_app_info(patched_delay: MagicMock, mock_request: HttpRequest):
325 | patched_delay.side_effect = mock_commands
326 | resp = app_info(mock_request, "test_app")
327 | assert resp.status_code == 200, resp
328 | content = resp.content.decode("utf-8")
329 |
330 | expected_contents = [
331 | """\n2025-04-25T23:07:53.894820268Z app[celery.1]: System check identified some issues:\n2025-04-25T23:07:53.895026545Z app[celery.1]:\n2025-04-25T23:07:53.895030874Z app[celery.1]: WARNINGS:\n""", 334 | ] 335 | for expected_content in expected_contents: 336 | assert expected_content in content 337 | 338 | 339 | @pytest.mark.django_db 340 | @patch("wharf.tasks.run_ssh_command.delay") 341 | def test_missing_app_info(patched_delay: MagicMock, mock_request: HttpRequest): 342 | patched_delay.side_effect = mock_commands 343 | resp = app_info(mock_request, "missing") 344 | assert resp.status_code == 200, resp 345 | content = resp.content.decode("utf-8") 346 | expected_contents = [ 347 | """
\n! App missing does not exist\n""", 351 | ] 352 | for expected_content in expected_contents: 353 | assert expected_content in content 354 | 355 | 356 | @pytest.mark.django_db 357 | @patch("wharf.tasks.run_ssh_command.delay") 358 | def test_newer_letsencrypt(patched_delay: MagicMock): 359 | patched_delay.side_effect = custom_mock_commands( 360 | { 361 | ( 362 | "plugin:list", 363 | ): " letsencrypt 0.22.0 enabled Automated installation of let's encrypt TLS certificates" 364 | } 365 | ) 366 | assert letsencrypt("wharf") is None 367 | 368 | 369 | @pytest.mark.django_db 370 | @patch("wharf.tasks.run_ssh_command.delay") 371 | def test_create_app(patched_delay: MagicMock): 372 | patched_delay.side_effect = mock_commands 373 | res = create_app("foo") 374 | assert res.status_code == 302, res 375 | assert isinstance(res, HttpResponseRedirect) 376 | assert res.url.startswith("/apps/foo/wait/"), res 377 | 378 | 379 | @pytest.mark.django_db 380 | def test_create_duplicate_app(): 381 | models.App.objects.create(name="foo") 382 | res = create_app("foo") 383 | assert res.status_code == 400, res 384 | assert res.content == b"You already have an app called 'foo'", res 385 | 386 | 387 | def test_login_change(client: Client): 388 | response = client.get("/", follow=True) 389 | assert "Initial login is admin/password" in response.text 390 | 391 | 392 | def test_login_no_change(client: Client, settings: LazySettings): 393 | settings.ADMIN_PASSWORD = "testpassword" 394 | response = client.get("/", follow=True) 395 | assert "Initial login is admin/password" not in response.text 396 | 397 | 398 | @patch("wharf.tasks.run_ssh_command.delay") 399 | def test_global_config(patched_delay: MagicMock): 400 | patched_delay.side_effect = mock_commands 401 | assert global_config() == {"CURL_CONNECT_TIMEOUT": "90", "CURL_TIMEOUT": "600"} 402 | 403 | 404 | @pytest.mark.django_db 405 | def test_refresh_all(mock_request: HttpRequest, recording_cache: RecordingCache): 406 | resp = refresh_all(mock_request) 407 | assert resp.status_code == 302, resp 408 | assert resp.url == "/", resp 409 | assert recording_cache.actions == ["clear"] 410 | 411 | 412 | @pytest.mark.django_db 413 | @patch("wharf.tasks.run_ssh_command.delay") 414 | def test_refresh_one( 415 | patched_delay: MagicMock, mock_request: HttpRequest, recording_cache: RecordingCache 416 | ): 417 | patched_delay.side_effect = mock_commands 418 | resp = refresh(mock_request, "foo") 419 | assert resp.status_code == 302, resp 420 | assert resp.url == "/apps/foo", resp 421 | assert recording_cache.actions == [ 422 | ("get", ("cmd:plugin:list",)), 423 | ( 424 | "delete_many", 425 | ( 426 | [ 427 | "cmd:config:show foo", 428 | "cmd:postgres:info foo", 429 | "cmd:redis:info foo", 430 | "cmd:ps:report foo", 431 | "cmd:domains:report foo", 432 | "cmd:letsencrypt:ls", 433 | ], 434 | ), 435 | ), 436 | ] 437 | 438 | 439 | @pytest.mark.django_db 440 | @patch("wharf.tasks.run_ssh_command.delay") 441 | def test_task_logs_limit(patched_delay: MagicMock, mock_request: HttpRequest): 442 | patched_delay.side_effect = mock_commands 443 | test_app = baker.make(models.App, name="test_app") 444 | baker.make(models.TaskLog, app=test_app, _quantity=20) 445 | resp = app_info(mock_request, "test_app") 446 | 447 | assert isinstance(resp, HttpResponse) 448 | log_count = re.findall(r"/logs/[^\"]+", resp.text) 449 | assert len(log_count) == 10, resp.text 450 | 451 | 452 | @pytest.mark.django_db 453 | @patch("wharf.tasks.run_ssh_command.delay") 454 | def test_create_postgres( 455 | patched_delay: 
MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 456 | ): 457 | patched_delay.side_effect = mock_commands 458 | models.App.objects.get_or_create(name="foo") 459 | res = create_postgres(mock_request, "foo") 460 | assert res.status_code == 302, res 461 | assert res.url.startswith("/apps/foo/wait/"), res 462 | 463 | finished_log(monkeypatch, commands[("postgres:create foo",)]) 464 | check_postgres(mock_request, "foo", "1234") 465 | 466 | 467 | @pytest.mark.django_db 468 | @patch("wharf.tasks.run_ssh_command.delay") 469 | def test_remove_postgres( 470 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 471 | ): 472 | patched_delay.side_effect = mock_commands 473 | models.App.objects.get_or_create(name="foo") 474 | res = remove_postgres(mock_request, "foo") 475 | assert res.status_code == 302, res 476 | assert res.url.startswith("/apps/foo/wait/"), res 477 | 478 | finished_log(monkeypatch, commands[("postgres:destroy foo --force",)]) 479 | 480 | check_remove_postgres(mock_request, "foo", "1234") 481 | 482 | 483 | @pytest.mark.django_db 484 | @patch("wharf.tasks.run_ssh_command.delay") 485 | def test_create_redis( 486 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 487 | ): 488 | patched_delay.side_effect = mock_commands 489 | models.App.objects.get_or_create(name="foo") 490 | res = create_redis(mock_request, "foo") 491 | assert res.status_code == 302, res 492 | assert res.url.startswith("/apps/foo/wait/"), res 493 | 494 | finished_log(monkeypatch, commands[("redis:create foo",)]) 495 | check_redis(mock_request, "foo", "1234") 496 | 497 | 498 | @pytest.mark.django_db 499 | @patch("wharf.tasks.run_ssh_command.delay") 500 | def test_remove_redis( 501 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 502 | ): 503 | patched_delay.side_effect = mock_commands 504 | models.App.objects.get_or_create(name="foo") 505 | res = remove_redis(mock_request, "foo") 506 | assert res.status_code == 302, res 507 | assert res.url.startswith("/apps/foo/wait/"), res 508 | 509 | finished_log(monkeypatch, commands[("redis:destroy foo --force",)]) 510 | 511 | check_remove_redis(mock_request, "foo", "1234") 512 | 513 | 514 | @pytest.mark.django_db 515 | @patch("wharf.tasks.run_ssh_command.delay") 516 | def test_non_running_app(patched_delay: MagicMock): 517 | patched_delay.side_effect = mock_commands 518 | res = process_info("non-running-app") 519 | assert res == { 520 | "Deployed": "true", 521 | "Processes": "1", 522 | "Ps can scale": "true", 523 | "Ps computed procfile path": "Procfile", 524 | "Ps global procfile path": "Procfile", 525 | "Ps procfile path": "", 526 | "Ps restart policy": "on-failure:10", 527 | "Restore": "true", 528 | "Running": "false", 529 | "processes": {"web 1": "missing"}, 530 | } 531 | 532 | 533 | @pytest.mark.django_db 534 | @patch("wharf.tasks.run_ssh_command.delay") 535 | @patch("wharf.tasks.get_public_key.delay") 536 | def test_index( 537 | patched_public_key: MagicMock, 538 | patched_delay: MagicMock, 539 | mock_request: HttpRequest, 540 | monkeypatch: pytest.MonkeyPatch, 541 | ): 542 | patched_delay.side_effect = mock_commands 543 | patched_public_key.return_value = MockCelery("demo-key") 544 | 545 | def redis_keys(self, key): 546 | if key == "ssh-check": 547 | return "ok version" 548 | raise Exception(key) 549 | 550 | monkeypatch.setattr( 551 | StrictRedis, 552 | "get", 553 | redis_keys, 554 | ) 555 | resp = index(mock_request) 556 | assert resp.status_code == 
200, resp 557 | content = resp.content.decode("utf-8") 558 | assert content.find('
\n demo-key\n ') != -1, (
593 | content
594 | )
595 |
596 |
597 | @pytest.mark.django_db
598 | @patch("wharf.tasks.run_ssh_command.delay")
599 | def test_setup_letsencrypt(
600 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch
601 | ):
602 | models.App.objects.create(name="test_app")
603 | cast(MagicMock, mock_request).POST = {"email": "foo@bar.com"}
604 | patched_delay.side_effect = mock_commands
605 | res = setup_letsencrypt(mock_request, "test_app")
606 | assert res.status_code == 302, res
607 | assert isinstance(res, HttpResponseRedirect)
608 | assert res.url.startswith("/apps/test_app/wait/"), res
609 |
610 | finished_log(
611 | monkeypatch,
612 | commands[("letsencrypt:set test_app email foo@bar.com",)]
613 | + commands[("letsencrypt:enable test_app",)],
614 | )
615 |
616 | check_res = check_letsencrypt(mock_request, "test_app", "1234")
617 | assert check_res.status_code == 302, check_res
618 | assert isinstance(check_res, HttpResponseRedirect)
619 | assert check_res.url == "/apps/test_app", check_res
620 |
621 |
622 | @pytest.mark.django_db
623 | @patch("wharf.tasks.run_ssh_command.delay")
624 | def test_remove_letsencrypt(
625 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch
626 | ):
627 | models.App.objects.create(name="test_app")
628 | cast(MagicMock, mock_request).POST = {"email": "foo@bar.com"}
629 | patched_delay.side_effect = mock_commands
630 | res = remove_letsencrypt(mock_request, "test_app")
631 | assert res.status_code == 302, res
632 | assert isinstance(res, HttpResponseRedirect)
633 | assert res.url.startswith("/apps/test_app/wait/"), res
634 |
635 | finished_log(monkeypatch, commands[("letsencrypt:disable test_app --force",)])
636 |
637 | check_res = check_remove_letsencrypt(mock_request, "test_app", "1234")
638 | assert check_res.status_code == 302, check_res
639 | assert isinstance(check_res, HttpResponseRedirect)
640 | assert check_res.url == "/apps/test_app", check_res
641 |
642 |
643 | @pytest.mark.django_db
644 | @patch("wharf.tasks.run_ssh_command.delay")
645 | def test_app_config_delete(
646 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch
647 | ):
648 | models.App.objects.create(name="test_app")
649 | cast(MagicMock, mock_request).POST = {"key": "FOO_KEY"}
650 | patched_delay.side_effect = mock_commands
651 | res = app_config_delete(mock_request, "test_app")
652 | assert res.status_code == 302, res
653 | assert isinstance(res, HttpResponseRedirect)
654 | assert res.url.startswith("/apps/test_app/wait/"), res
655 |
656 | finished_log(monkeypatch, commands[("config:unset test_app FOO_KEY",)])
657 |
658 | check_res = check_app_config_delete(mock_request, "test_app", "1234")
659 | assert check_res.status_code == 302, check_res
660 | assert isinstance(check_res, HttpResponseRedirect)
661 | assert check_res.url == "/apps/test_app", check_res
662 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.