├── apps ├── __init__.py ├── management │ ├── __init__.py │ └── commands │ │ ├── __init__.py │ │ └── celery.py ├── migrations │ ├── __init__.py │ ├── 0004_alter_app_name.py │ ├── 0003_alter_tasklog_success.py │ ├── 0002_app_github_url.py │ └── 0001_initial.py ├── static │ └── favicon.ico ├── apps.py ├── forms.py ├── templates │ ├── setup_key.html │ ├── command_wait.html │ ├── login.html │ ├── list_apps.html │ ├── base.html │ └── app_info.html ├── models.py ├── urls.py └── views.py ├── .tool-versions ├── .dockerignore ├── .github ├── FUNDING.yml ├── renovate.json5 └── workflows │ └── ci.yml ├── Procfile ├── tests ├── conftest.py ├── settings.py ├── test_tasks.py ├── recording_cache.py ├── test_auth.py └── test_views.py ├── screenshots ├── app_index.png └── apps_list.png ├── .pyup.yml ├── dokku-boot.sh ├── uv.lock ├── Dockerfile ├── .gitignore ├── wharf ├── __init__.py ├── context_processors.py ├── wsgi.py ├── urls.py ├── celery.py ├── auth.py ├── settings.py └── tasks.py ├── requirements.in ├── Makefile ├── pyproject.toml ├── manage.py ├── Vagrantfile ├── .pre-commit-config.yaml ├── docker-compose.yml ├── test.sh ├── README.md ├── requirements.txt ├── wait-for-it.sh ├── check_boot.py └── LICENSE /apps/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | uv 0.6.14 2 | -------------------------------------------------------------------------------- /apps/management/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /apps/migrations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | repos/* 2 | .venv/* 3 | -------------------------------------------------------------------------------- /apps/management/commands/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | --- 2 | github: palfrey 3 | ko_fi: palfrey 4 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | web: bash dokku-boot.sh 2 | celery: python manage.py celery 3 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from tests.recording_cache import recording_cache # noqa: F401 2 | -------------------------------------------------------------------------------- /apps/static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/palfrey/wharf/HEAD/apps/static/favicon.ico -------------------------------------------------------------------------------- /screenshots/app_index.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/palfrey/wharf/HEAD/screenshots/app_index.png -------------------------------------------------------------------------------- /screenshots/apps_list.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/palfrey/wharf/HEAD/screenshots/apps_list.png -------------------------------------------------------------------------------- /.pyup.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # see https://pyup.io/docs/configuration/ for all available options 3 | 4 | update: insecure 5 | -------------------------------------------------------------------------------- /apps/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class AppsConfig(AppConfig): 5 | name = "apps" 6 | -------------------------------------------------------------------------------- /dokku-boot.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eux -o pipefail 3 | 4 | python manage.py migrate 5 | python manage.py runserver 0.0.0.0:${PORT:-5000} 6 | -------------------------------------------------------------------------------- /uv.lock: -------------------------------------------------------------------------------- 1 | version = 1 2 | revision = 1 3 | requires-python = ">=3.12" 4 | 5 | [[package]] 6 | name = "wharf" 7 | version = "0.1.0" 8 | source = { virtual = "." } 9 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12.6 2 | WORKDIR /app 3 | RUN apt-get update && apt-get install -y iproute2 4 | COPY requirements.txt /app 5 | RUN pip install -r requirements.txt 6 | COPY . /app 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | ENV/ 3 | db.sqlite3 4 | celerybeat-schedule 5 | repos/ 6 | /static/ 7 | .vagrant/ 8 | __pycache__/ 9 | *.log 10 | screenshot.png 11 | keys 12 | *key 13 | src/ 14 | -------------------------------------------------------------------------------- /wharf/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | 3 | # This will make sure the app is always imported when 4 | # Django starts so that shared_task will use this app. 
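# (The Celery app object itself is created in wharf/celery.py; importing it here is what loads that module at Django startup.)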
5 | from .celery import app as celery_app 6 | 7 | __all__ = ["celery_app"] 8 | -------------------------------------------------------------------------------- /wharf/context_processors.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from django.conf import settings 4 | 5 | 6 | def helpers(request: Any): 7 | return { 8 | "HAS_LOGIN_SET": settings.ADMIN_LOGIN != "admin" 9 | or settings.ADMIN_PASSWORD != "password" 10 | } 11 | -------------------------------------------------------------------------------- /tests/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | os.environ["REDIS_URL"] = "redis://redis:6379/1" 4 | 5 | from wharf.settings import * # noqa: F403 6 | 7 | CACHES = { 8 | # "default": { 9 | # "BACKEND": "tests.recording_cache.RecordingCache", 10 | # "LOCATION": "unique-snowflake", 11 | # } 12 | } 13 | -------------------------------------------------------------------------------- /requirements.in: -------------------------------------------------------------------------------- 1 | django 2 | django-jinja-bootstrap-form 3 | dj-database-url 4 | requests 5 | jinja2 6 | psycopg2-binary 7 | celery[redis] >= 5 8 | django-redis 9 | django-celery-results 10 | paramiko 11 | gitpython 12 | humanize 13 | timeout-decorator 14 | selenium 15 | packaging 16 | 17 | django-jinja 18 | 19 | pre-commit 20 | pytest 21 | pytest-django 22 | pytest-watcher 23 | model-bakery 24 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .venv/bin/python: 2 | uv venv 3 | 4 | sync: .venv/bin/python requirements.txt 5 | uv pip sync --strict requirements.txt 6 | 7 | requirements.txt: requirements.in .venv/bin/python 8 | uv pip compile requirements.in -o requirements.txt --python-version 3.12 --no-strip-extras 9 | 10 | watch-test: sync 11 | .venv/bin/ptw --now --runner .venv/bin/pytest . -vvv 12 | 13 | pre-commit: sync 14 | .venv/bin/pre-commit run -a 15 | -------------------------------------------------------------------------------- /wharf/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for wharf project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 
5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wharf.settings") 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /apps/migrations/0004_alter_app_name.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.2.1 on 2025-05-15 19:21 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("apps", "0003_alter_tasklog_success"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="app", 14 | name="name", 15 | field=models.CharField(max_length=256, unique=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | description = "Opinionated web frontend for Dokku" 3 | name = "wharf" 4 | requires-python = ">=3.12" 5 | version = "0.1.0" 6 | 7 | [tool.djlint] 8 | ignore = "H030,H031" # meta tags ignores 9 | 10 | [tool.pytest.ini_options] 11 | DJANGO_SETTINGS_MODULE = "tests.settings" 12 | filterwarnings = [ 13 | "ignore::django.core.cache.backends.base.CacheKeyWarning" 14 | ] 15 | 16 | [tool.ruff.lint] 17 | ignore = ["DJ008"] 18 | select = ["E4", "E7", "E9", "F", "I", "DJ"] 19 | -------------------------------------------------------------------------------- /apps/migrations/0003_alter_tasklog_success.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.2 on 2025-04-21 18:45 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("apps", "0002_app_github_url"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="tasklog", 14 | name="success", 15 | field=models.BooleanField(blank=True, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /apps/migrations/0002_app_github_url.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 2.0.2 on 2018-02-26 23:57 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("apps", "0001_initial"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="app", 14 | name="github_url", 15 | field=models.URLField(default=""), 16 | preserve_default=False, 17 | ), 18 | ] 19 | -------------------------------------------------------------------------------- /apps/forms.py: -------------------------------------------------------------------------------- 1 | from django import forms 2 | 3 | 4 | class ConfigForm(forms.Form): 5 | key = forms.CharField(label="key", max_length=100) 6 | value = forms.CharField(label="value", max_length=300) 7 | 8 | 9 | class CreateAppForm(forms.Form): 10 | name = forms.CharField(label="App name", max_length=100) 11 | 12 | 13 | class CreateDomainForm(forms.Form): 14 | name = forms.CharField(label="Domain name", max_length=100) 15 | 16 | 17 | class SetupLetsEncrypt(forms.Form): 18 | email = forms.EmailField(label="Email", max_length=100) 19 | 
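# A minimal usage sketch (illustrative only, assuming a standard Django view; not code from this repo):
#
#   form = ConfigForm(request.POST)
#   if form.is_valid():
#       key = form.cleaned_data["key"]
#       value = form.cleaned_data["value"]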
-------------------------------------------------------------------------------- /wharf/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from django.conf.urls.static import static 3 | from django.contrib import admin 4 | from django.contrib.auth import views as auth_views 5 | from django.urls import include, path 6 | 7 | urlpatterns = [ 8 | path("", include("apps.urls")), 9 | path( 10 | "accounts/login/", 11 | auth_views.LoginView.as_view(template_name="login.html"), 12 | name="login", 13 | ), 14 | path("admin/", admin.site.urls), 15 | ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) 16 | -------------------------------------------------------------------------------- /manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | if __name__ == "__main__": 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wharf.settings") 7 | try: 8 | from django.core.management import execute_from_command_line 9 | except ImportError as exc: 10 | raise ImportError( 11 | "Couldn't import Django. Are you sure it's installed and " 12 | "available on your PYTHONPATH environment variable? Did you " 13 | "forget to activate a virtual environment?" 14 | ) from exc 15 | execute_from_command_line(sys.argv) 16 | -------------------------------------------------------------------------------- /apps/templates/setup_key.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block body %} 3 |

Wharf: Initial setup

4 | Save the following key (as one line) to a file on your server, and run the 5 | ssh-keys:add command from the Dokku instructions for setting up SSH keys 6 |
7 |
8 | 9 | {{ key }} 10 | 11 |
12 |
13 | When you've done this, click here to reload 14 | {% endblock body %} 15 | -------------------------------------------------------------------------------- /apps/management/commands/celery.py: -------------------------------------------------------------------------------- 1 | # From http://avilpage.com/2017/05/how-to-auto-reload-celery-workers-in-development.html 2 | import shlex 3 | import subprocess 4 | 5 | from django.core.management.base import BaseCommand 6 | from django.utils import autoreload 7 | 8 | 9 | def restart_celery(): 10 | cmd = "pkill -9 celery" 11 | subprocess.call(shlex.split(cmd)) 12 | cmd = "celery -A wharf worker -l info -B" 13 | subprocess.call(shlex.split(cmd)) 14 | 15 | 16 | class Command(BaseCommand): 17 | def handle(self, *args, **options): 18 | print("Starting celery worker with autoreload...") 19 | autoreload.run_with_reloader(restart_celery) 20 | -------------------------------------------------------------------------------- /wharf/celery.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | 3 | import os 4 | 5 | from celery import Celery 6 | 7 | # set the default Django settings module for the 'celery' program. 8 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wharf.settings") 9 | 10 | app = Celery("wharf", include=["wharf.tasks"]) 11 | 12 | # Using a string here means the worker doesn't have to serialize 13 | # the configuration object to child processes. 14 | # - namespace='CELERY' means all celery-related configuration keys 15 | # should have a `CELERY_` prefix. 16 | app.config_from_object("django.conf:settings", namespace="CELERY") 17 | 18 | # Load task modules from all registered Django app configs. 19 | app.autodiscover_tasks() 20 | -------------------------------------------------------------------------------- /apps/models.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import uuid 3 | 4 | import humanize 5 | from django.db import models 6 | 7 | 8 | class App(models.Model): 9 | id = models.UUIDField(primary_key=True, default=uuid.uuid4) 10 | name = models.CharField(max_length=256, unique=True) 11 | github_url = models.URLField() 12 | 13 | 14 | class TaskLog(models.Model): 15 | task_id = models.CharField(max_length=256, primary_key=True) 16 | when = models.DateTimeField() 17 | success = models.BooleanField(null=True, blank=True) 18 | app = models.ForeignKey(App, on_delete=models.CASCADE) 19 | description = models.CharField(max_length=256) 20 | 21 | def nice_when(self): 22 | return humanize.naturaltime( 23 | datetime.datetime.now(datetime.timezone.utc) - self.when 24 | ) 25 | -------------------------------------------------------------------------------- /apps/templates/command_wait.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block body %} 3 |

4 | Wharf: 5 | {% if running %} 6 | running task 7 | {% else %} 8 | task log 9 | {% endif %} 10 | {% if app != "_" %}for {{ app }}{% endif %} 11 |

12 |

{{ description }}

13 | Task id: {{ task_id }} 14 |
15 | Task state: {{ state }} 16 |
17 |
18 | {{ log }}
19 | 
20 | {% if running %} 21 | 26 | {% endif %} 27 | {% if app == "_" %} 28 | Return to apps index 29 | {% else %} 30 | Return to {{ app }} info 31 | {% endif %} 32 | {% endblock body %} 33 | -------------------------------------------------------------------------------- /apps/templates/login.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block body %} 3 |

Wharf: Login

4 | {% if not HAS_LOGIN_SET %} 5 | Initial login is admin/password, but this can be changed by setting ADMIN_LOGIN 6 | and ADMIN_PASSWORD in the environment variables. 7 | {% endif %} 8 | {% if form.errors %}

Your username and password didn't match. Please try again.

{% endif %} 9 |
10 | {% csrf_token %} 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 |
ADMIN_LOGIN{{ form.username }}
ADMIN_PASSWORD{{ form.password }}
21 | 22 | 23 |
24 | {% endblock body %} 25 | -------------------------------------------------------------------------------- /.github/renovate.json5: -------------------------------------------------------------------------------- 1 | { 2 | $schema: "https://docs.renovatebot.com/renovate-schema.json", 3 | extends: [ 4 | ":ignoreModulesAndTests", 5 | "group:monorepos", 6 | "group:recommended", 7 | "replacements:all", 8 | "workarounds:all", 9 | ], 10 | packageRules: [ 11 | { 12 | // Don't upgrade docker stuff, because it's things like docker-compose 13 | // that have to match the deployed ones 14 | matchDatasources: ["docker"], 15 | enabled: false, 16 | }, 17 | { 18 | // Don't really need to upgrade Python 19 | matchManagers: ["pyenv"], 20 | enabled: false, 21 | }, 22 | { 23 | // Issues with Kombu and Redis 6 24 | matchDepNames: ["redis"], 25 | matchUpdateTypes: ["major"], 26 | enabled: false, 27 | }, 28 | { 29 | matchUpdateTypes: ["patch", "minor"], 30 | enabled: false 31 | }, 32 | { 33 | matchPackageNames: ["*"], 34 | automerge: true 35 | }, 36 | ], 37 | "vulnerabilityAlerts": { 38 | "enabled": true 39 | }, 40 | "osvVulnerabilityAlerts": true 41 | } 42 | -------------------------------------------------------------------------------- /Vagrantfile: -------------------------------------------------------------------------------- 1 | Vagrant.configure("2") do |config| 2 | config.vm.box = "bento/ubuntu-24.04" 3 | config.vagrant.plugins = "vagrant-libvirt" 4 | 5 | config.vm.box_check_update = false 6 | config.vm.synced_folder ".", "/vagrant" 7 | 8 | config.vm.network "forwarded_port", guest: 80, host: 5000 9 | 10 | config.vm.provider :libvirt do |libvirt| 11 | libvirt.memory = "1024" 12 | libvirt.machine_type = 'pc-q35-3.1' 13 | end 14 | 15 | config.vm.provision "shell", privileged: false, inline: <<-SHELL 16 | set -eux -o pipefail 17 | sudo apt-get update 18 | sudo apt-get install --no-install-recommends -y build-essential python3 python3-pip git apt-transport-https curl redis-server firefox python3-setuptools python3-wheel python3-dev libssl-dev xdg-utils hostsed 19 | curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - 20 | echo "deb [arch=amd64] https://download.docker.com/linux/ubuntu noble stable" | sudo tee /etc/apt/sources.list.d/docker.list 21 | cd /vagrant 22 | pip3 install --break-system-packages -r requirements.txt 23 | ./test.sh 24 | SHELL 25 | end 26 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v6.0.0 5 | hooks: 6 | - id: trailing-whitespace 7 | exclude_types: [yaml, diff] 8 | - id: end-of-file-fixer 9 | exclude_types: [diff] 10 | - id: check-yaml 11 | - id: check-added-large-files 12 | 13 | - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt 14 | rev: 0.2.3 15 | hooks: 16 | - id: yamlfmt 17 | args: [--mapping, '2', --offset, '0', --sequence, '2'] 18 | exclude: pnpm-lock.yaml 19 | 20 | - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks 21 | rev: v2.14.0 22 | hooks: 23 | - id: pretty-format-toml 24 | args: [--autofix] 25 | 26 | - repo: https://github.com/astral-sh/ruff-pre-commit 27 | rev: v0.14.5 28 | hooks: 29 | - id: ruff-check 30 | args: [--fix] 31 | - id: ruff-format 32 | 33 | - repo: local 34 | hooks: 35 | - id: uv 36 | name: uv 37 | language: system 38 | entry: uv lock --check 39 | pass_filenames: 
false 40 | 41 | - repo: https://github.com/djlint/djLint 42 | rev: v1.36.4 43 | hooks: 44 | - id: djlint-reformat-django 45 | - id: djlint-django 46 | -------------------------------------------------------------------------------- /apps/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 2.0.2 on 2018-02-26 23:05 2 | 3 | import uuid 4 | 5 | import django.db.models.deletion 6 | from django.db import migrations, models 7 | 8 | 9 | class Migration(migrations.Migration): 10 | initial = True 11 | 12 | dependencies = [] 13 | 14 | operations = [ 15 | migrations.CreateModel( 16 | name="App", 17 | fields=[ 18 | ( 19 | "id", 20 | models.UUIDField( 21 | default=uuid.uuid4, primary_key=True, serialize=False 22 | ), 23 | ), 24 | ("name", models.CharField(max_length=256)), 25 | ], 26 | ), 27 | migrations.CreateModel( 28 | name="TaskLog", 29 | fields=[ 30 | ( 31 | "task_id", 32 | models.CharField(max_length=256, primary_key=True, serialize=False), 33 | ), 34 | ("when", models.DateTimeField()), 35 | ("success", models.BooleanField(null=True)), 36 | ("description", models.CharField(max_length=256)), 37 | ( 38 | "app", 39 | models.ForeignKey( 40 | on_delete=django.db.models.deletion.CASCADE, to="apps.App" 41 | ), 42 | ), 43 | ], 44 | ), 45 | ] 46 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | services: 3 | web: 4 | build: . 5 | command: ./wait-for-it.sh localhost:5432 --strict --timeout=0 -- ./wait-for-it.sh localhost:6379 --strict --timeout=0 -- bash -c "PORT=8000 bash dokku-boot.sh" 6 | volumes: 7 | - .:/app 8 | environment: 9 | - DATABASE_URL=postgres://postgres:example@localhost:5432/wharf 10 | - BROKER_URL=redis://localhost:6379/0 11 | - CACHE_URL=redis://localhost:6379/1 12 | - DOKKU_SSH_HOST=${DOKKU_SSH_HOST:-127.0.0.1} 13 | - DOKKU_SSH_PORT=${DOKKU_SSH_PORT:-22} 14 | - GITHUB_SECRET=${GITHUB_SECRET:-password} 15 | - ADMIN_PASSWORD=${ADMIN_PASSWORD:-password} 16 | depends_on: 17 | - postgres 18 | - redis 19 | network_mode: host 20 | 21 | celery: 22 | build: . 23 | command: ./wait-for-it.sh localhost:5432 --strict --timeout=0 -- ./wait-for-it.sh localhost:6379 --strict --timeout=0 -- bash -c "python manage.py celery" 24 | volumes: 25 | - .:/app 26 | environment: 27 | - DATABASE_URL=postgres://postgres:example@localhost:5432/wharf 28 | - BROKER_URL=redis://localhost:6379/0 29 | - CACHE_URL=redis://localhost:6379/1 30 | - DOKKU_SSH_HOST=${DOKKU_SSH_HOST:-127.0.0.1} 31 | - DOKKU_SSH_PORT=${DOKKU_SSH_PORT:-22} 32 | depends_on: 33 | - postgres 34 | - redis 35 | network_mode: host 36 | 37 | postgres: 38 | image: postgres:14-alpine 39 | environment: 40 | POSTGRES_DB: wharf 41 | POSTGRES_PASSWORD: example 42 | network_mode: host 43 | 44 | redis: 45 | image: redis:4-alpine 46 | network_mode: host 47 | -------------------------------------------------------------------------------- /apps/templates/list_apps.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block body %} 3 |

Wharf

4 |
5 | {% csrf_token %} 6 | 7 |
8 |

Apps

9 | 22 |

New app

23 |
24 | {% csrf_token %} 25 | 26 | {{ app_form | bootstrap }} 27 | 28 |
29 |
30 |

Global Config

31 | 34 |

New item

35 |
36 | {% csrf_token %} 37 | 38 | {{ config_form | bootstrap }} 39 | 40 |
41 | {% endblock body %} 42 | -------------------------------------------------------------------------------- /apps/templates/base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 7 | Wharf 8 | 9 | 13 | 14 | 15 |
16 | {% if messages %} 17 | {% for message in messages %} 18 |
{{ message }}
19 | {% endfor %} 20 | {% endif %} 21 | {% block body %} 22 | {% endblock body %} 23 | 26 | 29 | 32 |
33 | 34 | 35 | -------------------------------------------------------------------------------- /tests/test_tasks.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | from pathlib import Path 3 | from typing import Callable 4 | from unittest.mock import MagicMock, patch 5 | 6 | import pytest 7 | from redis import StrictRedis 8 | 9 | import wharf.tasks as tasks 10 | from apps import models 11 | 12 | root_path = Path(__file__).parent.parent 13 | test_app_path = root_path.joinpath("repos", "test_app") 14 | 15 | processes = [ 16 | ["git", "clone", "git://foo", test_app_path.as_posix()], 17 | ["git", "pull"], 18 | ["git", "push", "-f", "dokku", "master"], 19 | ["git", "push", "-f", "dokku", "main"], 20 | ] 21 | 22 | 23 | def custom_mock_processes(override_commands: list[list[str]]) -> Callable: 24 | def _internal(task_name, args, **kwargs): 25 | if args in override_commands: 26 | raise Exception("override") 27 | if args in processes: 28 | return 29 | print(args) 30 | raise Exception(args) 31 | 32 | return _internal 33 | 34 | 35 | mock_processes = custom_mock_processes([]) 36 | 37 | 38 | @pytest.mark.django_db 39 | @patch("wharf.tasks.run_process") 40 | @pytest.mark.parametrize("branch", ["main", "master"]) 41 | def test_deploy( 42 | patched_runprocess: MagicMock, branch: str, monkeypatch: pytest.MonkeyPatch 43 | ): 44 | patched_runprocess.side_effect = mock_processes 45 | monkeypatch.setattr(StrictRedis, "append", lambda _self, _key, _value: b"") 46 | models.App.objects.create(name="test_app") 47 | if test_app_path.exists(): 48 | subprocess.check_call(["rm", "-Rf", test_app_path.as_posix()]) 49 | test_app_path.mkdir() 50 | subprocess.check_call(["git", "init"], cwd=test_app_path) 51 | 52 | tasks.deploy("test_app", "git://foo", branch) # pyright: ignore[reportCallIssue] 53 | 54 | 55 | def test_handle_data_non_utf8(monkeypatch: pytest.MonkeyPatch): 56 | redis_keys = [] 57 | 58 | def store_value(_self, key, value): 59 | nonlocal redis_keys 60 | redis_keys.append((key, value)) 61 | 62 | monkeypatch.setattr(StrictRedis, "append", store_value) 63 | tasks.handle_data("abc", "æ".encode("cp1252")) 64 | 65 | assert redis_keys == [("abc", "\ufffd")] 66 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CI 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | schedule: 11 | - cron: 0 0 1 * * 12 | jobs: 13 | Build: 14 | runs-on: ubuntu-24.04 15 | steps: 16 | - run: | 17 | sudo systemctl stop apache2 18 | sudo apt-get remove -y apache2 19 | - run: curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - 20 | - run: sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" 21 | - run: apt-cache policy docker-ce 22 | - run: sudo apt-get install -y docker-ce nginx firefox hostsed 23 | - name: Set up Python 24 | uses: actions/setup-python@v6 25 | with: 26 | python-version: 3.12 27 | - name: Set up Docker Buildx 28 | id: buildx 29 | uses: docker/setup-buildx-action@v3 30 | - uses: actions/checkout@v6 31 | with: 32 | fetch-depth: 0 # All of history 33 | - name: Cache pip 34 | uses: actions/cache@v5 35 | with: 36 | path: ~/.cache/pip 37 | key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }} 38 | restore-keys: | 39 | ${{ runner.os }}-pip- 40 | - uses: asdf-vm/actions/install@v4 41 | - 
name: Install dependencies 42 | run: make sync 43 | - run: docker buildx build . 44 | - run: firefox --version 45 | - run: which firefox 46 | - run: | 47 | source .venv/bin/activate 48 | ./test.sh 49 | 50 | pre-commit: 51 | runs-on: ubuntu-24.04 52 | steps: 53 | - name: Set up Python 54 | uses: actions/setup-python@v6 55 | with: 56 | python-version: 3.12 57 | - uses: actions/checkout@v6 58 | with: 59 | fetch-depth: 0 # All of history 60 | - name: Cache pip 61 | uses: actions/cache@v5 62 | with: 63 | path: ~/.cache/pip 64 | key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }} 65 | restore-keys: | 66 | ${{ runner.os }}-pip- 67 | - uses: asdf-vm/actions/install@v4 68 | - name: Install dependencies 69 | run: make sync 70 | - run: make pre-commit 71 | -------------------------------------------------------------------------------- /tests/recording_cache.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, Iterator, cast 2 | 3 | import pytest 4 | from django.conf import LazySettings 5 | from django.core.cache import cache 6 | from django.core.cache.backends.locmem import LocMemCache 7 | 8 | MULTIPLE_COMMANDS = ["delete_many"] 9 | 10 | 11 | class RecordingCache(LocMemCache): 12 | actions = [] 13 | # Some commands e.g. delete_many are implemented via other commands 14 | # Don't record the internal details 15 | _pause_internal = False 16 | 17 | def _make_internal(self, entry): 18 | def internal(*args, **kwargs): 19 | if not self._pause_internal: 20 | item = [entry] 21 | if args != (): 22 | item.append(args) 23 | if kwargs != {}: 24 | item.append(kwargs) 25 | if len(item) == 1: 26 | self.actions.append(item[0]) 27 | else: 28 | self.actions.append(tuple(item)) 29 | if entry in MULTIPLE_COMMANDS: 30 | self._pause_internal = True 31 | ret = self._originals[entry](*args, **kwargs) 32 | if entry in MULTIPLE_COMMANDS: 33 | self._pause_internal = False 34 | return ret 35 | 36 | return internal 37 | 38 | def __init__(self, *args, **kwargs): 39 | LocMemCache.__init__(self, *args, **kwargs) 40 | self._originals = {} 41 | for entry in dir(self): 42 | if entry.startswith("_") or entry in [ 43 | "actions", 44 | "key_func", 45 | "make_key", 46 | "make_and_validate_key", 47 | "validate_key", 48 | ]: 49 | continue 50 | self._originals[entry] = getattr(self, entry) 51 | if not isinstance(self._originals[entry], Callable): 52 | continue 53 | 54 | setattr(self, entry, self._make_internal(entry)) 55 | 56 | 57 | @pytest.fixture 58 | def recording_cache(settings: LazySettings) -> Iterator[RecordingCache]: 59 | settings.CACHES["default"] = { 60 | "BACKEND": "tests.recording_cache.RecordingCache", 61 | "LOCATION": "unique-snowflake", 62 | } 63 | recording_cache = cast(RecordingCache, cache) 64 | yield recording_cache 65 | recording_cache.actions = [] 66 | -------------------------------------------------------------------------------- /tests/test_auth.py: -------------------------------------------------------------------------------- 1 | from typing import List, cast 2 | from unittest.mock import MagicMock, _Call 3 | 4 | from django.contrib.messages.storage.base import Message 5 | from django.core.handlers.wsgi import WSGIRequest 6 | from django.http import HttpResponseRedirect 7 | from django.test import RequestFactory 8 | 9 | import wharf.auth 10 | 11 | 12 | class RequestWithMessages(WSGIRequest): 13 | _messages: MagicMock 14 | 15 | 16 | def test_login_required_middleware_no_auth(rf: RequestFactory) -> None: 17 | mw = 
wharf.auth.LoginRequiredMiddleware(None) 18 | request = rf.get("/test") 19 | request.user = MagicMock() 20 | request.user.is_authenticated = False 21 | res = mw(request) 22 | assert isinstance(res, HttpResponseRedirect) 23 | assert res.url == "/accounts/login/?next=/test" 24 | 25 | 26 | def authed_request_with_messages( 27 | rf: RequestFactory, messages: List[Message] 28 | ) -> tuple[RequestWithMessages, MagicMock]: 29 | request = cast(RequestWithMessages, rf.get("/test")) 30 | request.user = MagicMock() 31 | request.user.is_authenticated = True 32 | mock_messages = MagicMock() 33 | request._messages = MagicMock( 34 | add=mock_messages, __iter__=lambda _self: iter(messages) 35 | ) 36 | return (request, mock_messages) 37 | 38 | 39 | def test_login_required_middleware_authed(rf: RequestFactory) -> None: 40 | mw = wharf.auth.LoginRequiredMiddleware(lambda r: None) 41 | (request, mock_messages) = authed_request_with_messages(rf, []) 42 | mw(request) 43 | mock_messages.assert_called_once() 44 | call = cast(_Call, mock_messages.call_args) 45 | assert len(call.args) == 3, call 46 | assert call.args[0] == 30, call 47 | assert call.args[1].startswith( 48 | "ADMIN_PASSWORD is in plain text. Set it to pbkdf2_sha256$1200000$" 49 | ), call 50 | assert call.args[2] == "", call 51 | 52 | 53 | def test_login_required_middleware_existing_message(rf: RequestFactory) -> None: 54 | mw = wharf.auth.LoginRequiredMiddleware(lambda r: None) 55 | (request, mock_messages) = authed_request_with_messages( 56 | rf, 57 | [ 58 | Message( 59 | 30, 60 | "ADMIN_PASSWORD is in plain text. Set it to pbkdf2_sha256$1200000$", 61 | "", 62 | ) 63 | ], 64 | ) 65 | mw(request) 66 | mock_messages.assert_not_called() 67 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux -o pipefail 4 | 5 | export PYTHONUNBUFFERED=1 6 | 7 | if [ -f /etc/nginx/sites-enabled/default ]; then 8 | sudo rm /etc/nginx/sites-enabled/default 9 | sudo systemctl restart nginx 10 | fi 11 | 12 | REDIS_URL=redis://dummy python3 manage.py test 13 | wget -nv -O - https://packagecloud.io/dokku/dokku/gpgkey | sudo apt-key add - 14 | if [ ! -f /etc/apt/sources.list.d/dokku.list ]; then 15 | echo "deb https://packagecloud.io/dokku/dokku/ubuntu/ noble main" | sudo tee /etc/apt/sources.list.d/dokku.list 16 | sudo apt-get update 17 | fi 18 | echo dokku dokku/skip_key_file boolean true | sudo debconf-set-selections 19 | if [ ! -f /usr/bin/dokku ]; then 20 | sudo DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y dokku 21 | fi 22 | sudo dokku plugin:install-dependencies --core 23 | 24 | if [ ! -f /usr/bin/dokku-daemon ]; then 25 | if [ ! 
-d dokku-daemon-rs ]; then 26 | git clone https://github.com/palfrey/dokku-daemon-rs 27 | fi 28 | curl -OL https://github.com/palfrey/dokku-daemon-rs/releases/download/v0.1.0/dokku-daemon-rs-linux-amd64 29 | chmod +x dokku-daemon-rs-linux-amd64 30 | sudo mv dokku-daemon-rs-linux-amd64 /usr/bin/dokku-daemon 31 | (cd dokku-daemon-rs && 32 | sudo cp -f conf/dokku-daemon.service /etc/systemd/system/dokku-daemon.service && 33 | sudo mkdir -p /var/run/dokku-daemon && 34 | sudo systemctl daemon-reload && 35 | sudo systemctl start dokku-daemon 36 | ) 37 | fi 38 | (dokku plugin:list | grep redis) || sudo dokku plugin:install https://github.com/dokku/dokku-redis.git --committish 1.41.0 redis 39 | (dokku plugin:list | grep postgres) || sudo dokku plugin:install https://github.com/dokku/dokku-postgres.git --committish 1.43.0 postgres 40 | (dokku plugin:list | grep letsencrypt) || sudo dokku plugin:install https://github.com/dokku/dokku-letsencrypt.git --committish 0.22.0 letsencrypt 41 | dokku plugin:list 42 | dokku letsencrypt:cron-job --add 43 | (dokku apps:list | grep wharf) || dokku apps:create wharf 44 | (dokku redis:list | grep wharf) || (dokku redis:create wharf && dokku redis:link wharf wharf) 45 | (dokku postgres:list | grep wharf) || (dokku postgres:create wharf && dokku postgres:link wharf wharf) 46 | (git remote | grep dokku) || git remote add dokku ssh://dokku@localhost/wharf 47 | if [ ! -f ~/.ssh/id_rsa ]; then 48 | yes y | ssh-keygen -t rsa -N '' -f ~/.ssh/id_rsa 49 | fi 50 | (dokku ssh-keys:list | grep travis) || sudo dokku ssh-keys:add travis ~/.ssh/id_rsa.pub 51 | dokku ssh-keys:list 52 | sudo chmod 600 /home/dokku/.ssh/authorized_keys 53 | sudo chmod 700 /home/dokku/.ssh 54 | KEY_DIR=`pwd`/keys 55 | if [ ! -d $KEY_DIR ]; then 56 | mkdir -p $KEY_DIR 57 | fi 58 | sudo chown dokku:dokku $KEY_DIR 59 | (dokku storage:list wharf | grep dokku-daemon) || dokku storage:mount wharf /var/run/dokku-daemon/dokku-daemon.sock:/var/run/dokku-daemon/dokku-daemon.sock 60 | (dokku storage:list wharf | grep ssh) || dokku storage:mount wharf $KEY_DIR:/root/.ssh 61 | GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -v" git push dokku HEAD:refs/heads/main 62 | export WHARF_HOSTNAME=wharf.$(hostname --long) 63 | sudo hostsed add 127.0.0.1 $WHARF_HOSTNAME 64 | dokku ps:scale wharf celery=1 65 | sudo docker ps 66 | sudo apt-get install -y net-tools 67 | sudo netstat -nlp 68 | dokku domains:report 69 | dokku proxy:report 70 | dokku ports:report 71 | python3 check_boot.py http://$WHARF_HOSTNAME 72 | if [ ! -f $KEY_DIR/id_rsa ]; then 73 | echo "Can't find keys in key dir" 74 | ls $KEY_DIR 75 | exit 1 76 | fi 77 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Wharf 2 | ===== 3 | [![CI](https://github.com/palfrey/wharf/actions/workflows/ci.yml/badge.svg)](https://github.com/palfrey/wharf/actions) 4 | 5 | Wharf is an opinionated web frontend for [Dokku](https://dokku.com/docs/). You can also use the Dokku command line directly, but most features you'll need day-to-day are in the web UI. 6 | 7 | Screenshots 8 | ----------- 9 | 10 | 11 | Setup 12 | ----- 13 | 1. [Install Dokku](https://dokku.com/docs/getting-started/installation) 14 | 2. Install the following plugins: 15 | * https://github.com/dokku/dokku-redis 16 | * https://github.com/dokku/dokku-postgres 17 | * https://github.com/dokku/dokku-letsencrypt 18 | 3. 
Set up the Let's Encrypt plugin to auto-renew (`dokku letsencrypt:cron-job --add`) 19 | 4. Create the app (`dokku apps:create wharf`) 20 | 5. Add SSH key storage: 21 | 1. `mkdir /var/lib/dokku/data/storage/wharf-ssh/` 22 | 2. `chown dokku:dokku /var/lib/dokku/data/storage/wharf-ssh/` 23 | 3. `dokku storage:mount wharf /var/lib/dokku/data/storage/wharf-ssh/:/root/.ssh` 24 | 6. Optionally, add dokku-daemon. We still need SSH keys for pushing to Dokku, but this should speed up other Wharf commands: 25 | 1. Install as per the instructions at https://github.com/palfrey/dokku-daemon-rs (note that the original version should in theory work, but [its JSON support is buggy](https://github.com/dokku/dokku-daemon/issues/31)) 26 | 2. `dokku storage:mount wharf /var/run/dokku-daemon/dokku-daemon.sock:/var/run/dokku-daemon/dokku-daemon.sock` 27 | 7. Add Redis (`dokku redis:create wharf && dokku redis:link wharf wharf`) 28 | 8. Add Postgres (`dokku postgres:create wharf && dokku postgres:link wharf wharf`) 29 | 9. Set `ADMIN_PASSWORD` to something secret (`dokku config:set wharf ADMIN_PASSWORD=somesecret`) 30 | 10. Deploy this Git repo [as per the standard Dokku instructions](https://dokku.com/docs/deployment/application-deployment/) 31 | 11. `dokku ps:scale wharf celery=1` 32 | 33 | Helpful hints 34 | ------------- 35 | * If you're running SSH on a non-standard port, set `DOKKU_SSH_PORT`, e.g. `dokku config:set wharf DOKKU_SSH_PORT=2222` 36 | * If Dokku is running somewhere other than the local machine, set `DOKKU_SSH_HOST`, e.g. `dokku config:set wharf DOKKU_SSH_HOST=foo.example.com` 37 | * If there's a Dockerfile in your repository, Dokku will [try to deploy using that by default](https://dokku.com/docs/deployment/methods/dockerfiles/). Set BUILDPACK_URL to override this 38 | * BUILDPACK_URL should be an HTTPS URL, not an SSH or heroku/something one 39 | * You should set up the global domain name when first creating Dokku, and add a *.<your dokku domain> DNS entry, to give new apps more usable names. 40 | * Set `GIT_BRANCH` in the config variables to deploy from a branch other than `master` 41 | 42 | Enabling GitHub auto-deploy webhooks 43 | ------------------------------------ 44 | 1. Set the `GITHUB_SECRET` config item to something secret 45 | 2. Go to [settings/webhooks](https://developer.github.com/webhooks/creating/#setting-up-a-webhook) in GitHub 46 | 3. Make a new webhook for <your Wharf instance>/webhook, with the content type set to `application/json` and the secret set to the value of `GITHUB_SECRET` 47 | 48 | Development 49 | ----------- 50 | The easiest way to do development is: 51 | 52 | 1. `vagrant up`, which will boot the entire Dokku setup in a VM 53 | 2. `DOKKU_SSH_HOST=host.docker.internal DOKKU_SSH_PORT=2222 docker-compose up` 54 | * `host.docker.internal` works on Mac/Windows, but not on Linux (see https://github.com/docker/for-linux/issues/264). On Linux hosts, you should set `DOKKU_SSH_HOST` to your machine's IP (not localhost, but a local network IP is fine) 55 | 3. Load up `http://localhost:8000/` 56 | -------------------------------------------------------------------------------- /apps/urls.py: -------------------------------------------------------------------------------- 1 | from django.contrib.staticfiles.storage import staticfiles_storage 2 | from django.urls import path 3 | from django.views.generic.base import RedirectView 4 | 5 | from . 
import views 6 | 7 | urlpatterns = [ 8 | path("", views.index, name="index"), 9 | path("status", views.status, name="status"), 10 | path("refresh", views.refresh_all, name="refresh_all"), 11 | path("create_app", views.create_app, name="create_app"), 12 | path("global_config_set", views.global_config_set, name="global_config_set"), 13 | path( 14 | "global_config_check/", 15 | views.check_global_config_set, 16 | name="check_global_config_set", 17 | ), 18 | path("apps//check_app/", views.check_app, name="check_app"), 19 | path("apps/", views.app_info, name="app_info"), 20 | path( 21 | "apps//wait//", 22 | views.wait_for_command, 23 | name="wait_for_command", 24 | ), 25 | path( 26 | "apps//check_app_config_set/", 27 | views.check_app_config_set, 28 | name="check_app_config_set", 29 | ), 30 | path( 31 | "apps//app_config_delete", 32 | views.app_config_delete, 33 | name="app_config_delete", 34 | ), 35 | path( 36 | "apps//check_app_config_delete/", 37 | views.check_app_config_delete, 38 | name="check_app_config_delete", 39 | ), 40 | path("apps//deploy", views.deploy, name="deploy"), 41 | path( 42 | "apps//check_deploy/", 43 | views.check_deploy, 44 | name="check_deploy", 45 | ), 46 | path( 47 | "apps//check_rebuild/", 48 | views.check_rebuild, 49 | name="check_rebuild", 50 | ), 51 | path( 52 | "apps//create_postgres", views.create_postgres, name="create_postgres" 53 | ), 54 | path( 55 | "apps//check_postgres/", 56 | views.check_postgres, 57 | name="check_postgres", 58 | ), 59 | path( 60 | "apps//remove_postgres", views.remove_postgres, name="remove_postgres" 61 | ), 62 | path( 63 | "apps//check_remove_postgres/", 64 | views.check_remove_postgres, 65 | name="check_remove_postgres", 66 | ), 67 | path("apps//create_redis", views.create_redis, name="create_redis"), 68 | path( 69 | "apps//check_redis/", views.check_redis, name="check_redis" 70 | ), 71 | path("apps//remove_redis", views.remove_redis, name="remove_redis"), 72 | path( 73 | "apps//check_remove_redis/", 74 | views.check_remove_redis, 75 | name="check_remove_redis", 76 | ), 77 | path( 78 | "apps//setup_letsencrypt", 79 | views.setup_letsencrypt, 80 | name="setup_letsencrypt", 81 | ), 82 | path( 83 | "apps//check_letsencrypt/", 84 | views.check_letsencrypt, 85 | name="check_letsencrypt", 86 | ), 87 | path( 88 | "apps//remove_letsencrypt", 89 | views.remove_letsencrypt, 90 | name="remove_letsencrypt", 91 | ), 92 | path( 93 | "apps//check_remove_letsencrypt/", 94 | views.check_remove_letsencrypt, 95 | name="check_remove_letsencrypt", 96 | ), 97 | path("apps//add_domain", views.add_domain, name="add_domain"), 98 | path( 99 | "apps//check_domain/", 100 | views.check_domain, 101 | name="check_domain", 102 | ), 103 | path("apps//remove_domain", views.remove_domain, name="remove_domain"), 104 | path("apps//refresh", views.refresh, name="refresh"), 105 | path("logs/", views.show_log, name="show_log"), 106 | path("webhook", views.github_webhook), 107 | path( 108 | "favicon.ico", 109 | RedirectView.as_view( 110 | url=staticfiles_storage.url("favicon.ico"), permanent=False 111 | ), 112 | name="favicon", 113 | ), 114 | ] 115 | -------------------------------------------------------------------------------- /wharf/auth.py: -------------------------------------------------------------------------------- 1 | from re import compile 2 | from typing import cast 3 | 4 | from django.conf import settings 5 | from django.contrib import messages 6 | from django.contrib.auth.hashers import check_password, make_password 7 | from django.contrib.auth.models import 
User 8 | from django.contrib.messages.storage.base import Message 9 | from django.http import HttpRequest, HttpResponseRedirect 10 | from django.utils.http import url_has_allowed_host_and_scheme 11 | 12 | 13 | class SettingsBackend: 14 | """ 15 | Authenticate against the settings ADMIN_LOGIN and ADMIN_PASSWORD. 16 | 17 | Use the login name and a hash of the password. For example: 18 | 19 | ADMIN_LOGIN = 'admin' 20 | ADMIN_PASSWORD = 'pbkdf2_sha256$30000$Vo0VlMnkR4Bk$qEvtdyZRWTcOsCnI/oQ7fVOu1XAURIZYoOZ3iq8Dr4M=' 21 | 22 | If ADMIN_PASSWORD is unhashed, then this returns a message warning you about that 23 | """ 24 | 25 | def authenticate(self, request, username=None, password=None): 26 | login_valid = settings.ADMIN_LOGIN == username 27 | if settings.ADMIN_PASSWORD.startswith("pbkdf2_sha256"): 28 | pwd_valid = check_password(password, settings.ADMIN_PASSWORD) 29 | else: 30 | pwd_valid = password == settings.ADMIN_PASSWORD 31 | if login_valid and pwd_valid: 32 | try: 33 | user = User.objects.get(username=username) 34 | except User.DoesNotExist: 35 | # Create a new user. There's no need to set a password 36 | # because only the password from settings.py is checked. 37 | user = User(username=username) 38 | user.is_staff = True 39 | user.is_superuser = True 40 | user.save() 41 | return user 42 | return None 43 | 44 | def get_user(self, user_id): 45 | try: 46 | return User.objects.get(pk=user_id) 47 | except User.DoesNotExist: 48 | return None 49 | 50 | 51 | # Based on https://gist.github.com/agusmakmun/b71ac536124e0535a8b076989f8cfcd3 52 | EXEMPT_URLS = [compile(settings.LOGIN_URL.lstrip("/"))] 53 | if hasattr(settings, "LOGIN_EXEMPT_URLS"): 54 | EXEMPT_URLS += [compile(expr) for expr in settings.LOGIN_EXEMPT_URLS] 55 | 56 | 57 | class LoginRequiredMiddleware: 58 | """ 59 | Middleware that requires a user to be authenticated to view any page other 60 | than LOGIN_URL. Exemptions to this requirement can optionally be specified 61 | in settings via a list of regular expressions in LOGIN_EXEMPT_URLS (which 62 | you can copy from your urls.py). 63 | Requires authentication middleware and template context processors to be 64 | loaded. You'll get an error if they aren't. 65 | 66 | Based on https://djangosnippets.org/snippets/1179/ 67 | My modification adds 'next' GET parameter to enable redirection after 68 | successful login. 69 | """ 70 | 71 | def __init__(self, get_response): 72 | self.get_response = get_response 73 | # One-time configuration and initialization 74 | 75 | def __call__(self, request: HttpRequest): 76 | if not request.user.is_authenticated: 77 | path = request.path_info.lstrip("/") 78 | if not any(m.match(path) for m in EXEMPT_URLS): 79 | redirect_to = settings.LOGIN_URL 80 | # Add 'next' GET variable to support redirection after login 81 | if len(path) > 0 and url_has_allowed_host_and_scheme( 82 | url=request.path_info, allowed_hosts=None 83 | ): 84 | redirect_to = "%s?next=%s" % (settings.LOGIN_URL, request.path_info) 85 | return HttpResponseRedirect(redirect_to) 86 | elif not settings.ADMIN_PASSWORD.startswith("pbkdf2_sha256"): 87 | for message in messages.get_messages(request): 88 | if cast(Message, message).message.startswith( 89 | "ADMIN_PASSWORD is in plain text" 90 | ): 91 | break 92 | else: 93 | better_password = make_password(settings.ADMIN_PASSWORD) 94 | messages.warning( 95 | request, 96 | "ADMIN_PASSWORD is in plain text. 
Set it to %s instead" 97 | % better_password, 98 | ) 99 | return self.get_response(request) 100 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by uv via the following command: 2 | # uv pip compile requirements.in -o requirements.txt --python-version 3.12 --no-strip-extras 3 | amqp==5.3.1 4 | # via kombu 5 | asgiref==3.11.0 6 | # via django 7 | attrs==25.3.0 8 | # via 9 | # outcome 10 | # trio 11 | bcrypt==5.0.0 12 | # via paramiko 13 | billiard==4.2.1 14 | # via celery 15 | celery[redis]==5.5.1 16 | # via 17 | # -r requirements.in 18 | # django-celery-results 19 | certifi==2025.1.31 20 | # via 21 | # requests 22 | # selenium 23 | cffi==2.0.0 24 | # via 25 | # cryptography 26 | # pynacl 27 | cfgv==3.4.0 28 | # via pre-commit 29 | charset-normalizer==3.4.1 30 | # via requests 31 | click==8.1.8 32 | # via 33 | # celery 34 | # click-didyoumean 35 | # click-plugins 36 | # click-repl 37 | click-didyoumean==0.3.1 38 | # via celery 39 | click-plugins==1.1.1 40 | # via celery 41 | click-repl==0.3.0 42 | # via celery 43 | cryptography==46.0.1 44 | # via paramiko 45 | distlib==0.4.0 46 | # via virtualenv 47 | dj-database-url==3.0.0 48 | # via -r requirements.in 49 | django==6.0 50 | # via 51 | # -r requirements.in 52 | # dj-database-url 53 | # django-celery-results 54 | # django-jinja 55 | # django-jinja-bootstrap-form 56 | # django-redis 57 | # model-bakery 58 | django-celery-results==2.6.0 59 | # via -r requirements.in 60 | django-jinja==2.11.0 61 | # via 62 | # -r requirements.in 63 | # django-jinja-bootstrap-form 64 | django-jinja-bootstrap-form==4.5.0 65 | # via -r requirements.in 66 | django-redis==6.0.0 67 | # via -r requirements.in 68 | filelock==3.20.1 69 | # via virtualenv 70 | gitdb==4.0.12 71 | # via gitpython 72 | gitpython==3.1.44 73 | # via -r requirements.in 74 | h11==0.16.0 75 | # via wsproto 76 | humanize==4.12.2 77 | # via -r requirements.in 78 | identify==2.6.15 79 | # via pre-commit 80 | idna==3.10 81 | # via 82 | # requests 83 | # trio 84 | iniconfig==2.1.0 85 | # via pytest 86 | invoke==2.2.1 87 | # via paramiko 88 | jinja2==3.1.6 89 | # via 90 | # -r requirements.in 91 | # django-jinja 92 | kombu==5.5.3 93 | # via celery 94 | markupsafe==3.0.2 95 | # via jinja2 96 | model-bakery==1.20.5 97 | # via -r requirements.in 98 | nodeenv==1.9.1 99 | # via pre-commit 100 | outcome==1.3.0.post0 101 | # via 102 | # trio 103 | # trio-websocket 104 | packaging==25.0 105 | # via 106 | # -r requirements.in 107 | # pytest 108 | paramiko==4.0.0 109 | # via -r requirements.in 110 | platformdirs==4.5.0 111 | # via virtualenv 112 | pluggy==1.5.0 113 | # via pytest 114 | pre-commit==4.4.0 115 | # via -r requirements.in 116 | prompt-toolkit==3.0.51 117 | # via click-repl 118 | psycopg2-binary==2.9.10 119 | # via -r requirements.in 120 | pycparser==2.22 121 | # via cffi 122 | pygments==2.19.2 123 | # via pytest 124 | pynacl==1.5.0 125 | # via paramiko 126 | pysocks==1.7.1 127 | # via urllib3 128 | pytest==9.0.0 129 | # via 130 | # -r requirements.in 131 | # pytest-django 132 | pytest-django==4.11.1 133 | # via -r requirements.in 134 | pytest-watcher==0.4.3 135 | # via -r requirements.in 136 | python-dateutil==2.9.0.post0 137 | # via celery 138 | pyyaml==6.0.3 139 | # via pre-commit 140 | redis==5.2.1 141 | # via 142 | # celery 143 | # django-redis 144 | requests==2.32.4 145 | # via -r requirements.in 146 | selenium==4.31.0 147 | # 
via -r requirements.in 148 | six==1.17.0 149 | # via python-dateutil 150 | smmap==5.0.2 151 | # via gitdb 152 | sniffio==1.3.1 153 | # via trio 154 | sortedcontainers==2.4.0 155 | # via trio 156 | sqlparse==0.5.3 157 | # via django 158 | timeout-decorator==0.5.0 159 | # via -r requirements.in 160 | trio==0.29.0 161 | # via 162 | # selenium 163 | # trio-websocket 164 | trio-websocket==0.12.2 165 | # via selenium 166 | typing-extensions==4.13.2 167 | # via 168 | # dj-database-url 169 | # selenium 170 | tzdata==2025.2 171 | # via kombu 172 | urllib3[socks]==2.6.0 173 | # via 174 | # requests 175 | # selenium 176 | vine==5.1.0 177 | # via 178 | # amqp 179 | # celery 180 | # kombu 181 | virtualenv==20.35.4 182 | # via pre-commit 183 | watchdog==6.0.0 184 | # via pytest-watcher 185 | wcwidth==0.2.13 186 | # via prompt-toolkit 187 | websocket-client==1.8.0 188 | # via selenium 189 | wsproto==1.2.0 190 | # via trio-websocket 191 | -------------------------------------------------------------------------------- /wait-for-it.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # From https://github.com/vishnubob/wait-for-it 3 | # Use this script to test if a given TCP host/port are available 4 | 5 | cmdname=$(basename $0) 6 | 7 | echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 8 | 9 | usage() 10 | { 11 | cat << USAGE >&2 12 | Usage: 13 | $cmdname host:port [-s] [-t timeout] [-- command args] 14 | -h HOST | --host=HOST Host or IP under test 15 | -p PORT | --port=PORT TCP port under test 16 | Alternatively, you specify the host and port as host:port 17 | -s | --strict Only execute subcommand if the test succeeds 18 | -q | --quiet Don't output any status messages 19 | -t TIMEOUT | --timeout=TIMEOUT 20 | Timeout in seconds, zero for no timeout 21 | -- COMMAND ARGS Execute command with args after the test finishes 22 | USAGE 23 | exit 1 24 | } 25 | 26 | wait_for() 27 | { 28 | if [[ $TIMEOUT -gt 0 ]]; then 29 | echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" 30 | else 31 | echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" 32 | fi 33 | start_ts=$(date +%s) 34 | while : 35 | do 36 | if [[ $ISBUSY -eq 1 ]]; then 37 | nc -z $HOST $PORT 38 | result=$? 39 | else 40 | (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 41 | result=$? 42 | fi 43 | if [[ $result -eq 0 ]]; then 44 | end_ts=$(date +%s) 45 | echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" 46 | break 47 | fi 48 | sleep 1 49 | done 50 | return $result 51 | } 52 | 53 | wait_for_wrapper() 54 | { 55 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 56 | if [[ $QUIET -eq 1 ]]; then 57 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 58 | else 59 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 60 | fi 61 | PID=$! 62 | trap "kill -INT -$PID" INT 63 | wait $PID 64 | RESULT=$? 
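# (RESULT is the exit status of the backgrounded child invocation above: 0 once the port opened, non-zero if `timeout` killed it first.)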
65 | if [[ $RESULT -ne 0 ]]; then 66 | echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" 67 | fi 68 | return $RESULT 69 | } 70 | 71 | # process arguments 72 | while [[ $# -gt 0 ]] 73 | do 74 | case "$1" in 75 | *:* ) 76 | hostport=(${1//:/ }) 77 | HOST=${hostport[0]} 78 | PORT=${hostport[1]} 79 | shift 1 80 | ;; 81 | --child) 82 | CHILD=1 83 | shift 1 84 | ;; 85 | -q | --quiet) 86 | QUIET=1 87 | shift 1 88 | ;; 89 | -s | --strict) 90 | STRICT=1 91 | shift 1 92 | ;; 93 | -h) 94 | HOST="$2" 95 | if [[ $HOST == "" ]]; then break; fi 96 | shift 2 97 | ;; 98 | --host=*) 99 | HOST="${1#*=}" 100 | shift 1 101 | ;; 102 | -p) 103 | PORT="$2" 104 | if [[ $PORT == "" ]]; then break; fi 105 | shift 2 106 | ;; 107 | --port=*) 108 | PORT="${1#*=}" 109 | shift 1 110 | ;; 111 | -t) 112 | TIMEOUT="$2" 113 | if [[ $TIMEOUT == "" ]]; then break; fi 114 | shift 2 115 | ;; 116 | --timeout=*) 117 | TIMEOUT="${1#*=}" 118 | shift 1 119 | ;; 120 | --) 121 | shift 122 | CLI=("$@") 123 | break 124 | ;; 125 | --help) 126 | usage 127 | ;; 128 | *) 129 | echoerr "Unknown argument: $1" 130 | usage 131 | ;; 132 | esac 133 | done 134 | 135 | if [[ "$HOST" == "" || "$PORT" == "" ]]; then 136 | echoerr "Error: you need to provide a host and port to test." 137 | usage 138 | fi 139 | 140 | TIMEOUT=${TIMEOUT:-15} 141 | STRICT=${STRICT:-0} 142 | CHILD=${CHILD:-0} 143 | QUIET=${QUIET:-0} 144 | 145 | # check to see if timeout is from busybox? 147 | TIMEOUT_PATH=$(realpath $(which timeout)) 148 | if [[ $TIMEOUT_PATH =~ "busybox" ]]; then 149 | ISBUSY=1 150 | BUSYTIMEFLAG="-t" 151 | else 152 | ISBUSY=0 153 | BUSYTIMEFLAG="" 154 | fi 155 | 156 | if [[ $CHILD -gt 0 ]]; then 157 | wait_for 158 | RESULT=$? 159 | exit $RESULT 160 | else 161 | if [[ $TIMEOUT -gt 0 ]]; then 162 | wait_for_wrapper 163 | RESULT=$? 164 | else 165 | wait_for 166 | RESULT=$? 167 | fi 168 | fi 169 | 170 | if [[ $CLI != "" ]]; then 171 | if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then 172 | echoerr "$cmdname: strict mode, refusing to execute subprocess" 173 | exit $RESULT 174 | fi 175 | exec "${CLI[@]}" 176 | else 177 | exit $RESULT 178 | fi 179 | -------------------------------------------------------------------------------- /apps/templates/app_info.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block body %} 3 |

Wharf: {{ app }}

4 | Return to apps index 5 |
6 |

Actions

7 |
8 |
9 | {% csrf_token %} 10 | 15 |
16 | {% if git_url == None %} 17 | Can't deploy due to missing GITHUB_URL in config (which should be set to the "Clone with HTTPS" URL from GitHub) 18 | {% else %} 19 |
20 | {% csrf_token %} 21 | 22 |    23 | 28 |    29 | 34 |
35 | {% endif %} 36 |
37 |
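Both buttons in this section post back to the same view: apps.views.deploy dispatches on the submitted action field, either queueing the tasks.deploy Celery task or running ps:rebuild over SSH. A minimal sketch of driving it with Django's test client follows; the /apps/myapp/deploy path is an assumption, since apps/urls.py is not part of this excerpt:

    from django.test import Client

    client = Client()
    # "deploy" kicks off tasks.deploy (clone/pull, then git push to Dokku);
    # "rebuild" runs "ps:rebuild <app>" instead. The URL below is hypothetical.
    response = client.post(
        "/apps/myapp/deploy",
        {"action": "deploy", "url": "https://github.com/example/app.git"},
    )
    assert response.status_code == 302  # redirects to the wait_for_command page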

Task logs

38 | {% if task_logs %} 39 | 46 | {% else %} 47 | No tasks run yet 48 | {% endif %} 49 |

Domains

50 | {% if domains|length == 0 %} 51 | None 52 | {% else %} 53 |
    54 | {% for d in domains %} 55 |
  • 56 | {{ d }} 57 |
    60 | {% csrf_token %} 61 | 62 | 63 |
    64 |
  • 65 | {% endfor %} 66 |
67 | {% endif %} 68 |
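The list above is produced by apps.views.domains_list, which pulls the vhosts line out of "dokku domains:report <app>" output with a regex; a standalone illustration of that parsing:

    import re

    # Sample output in the shape domains_list expects.
    sample = "=====> test_app domains information\nDomains app vhosts: test_app.vagrant other.example.com"

    vhosts = re.search("Domains app vhosts: (.*)", sample)
    assert vhosts is not None
    domains = [x.strip() for x in vhosts.groups()[0].split(" ") if x != ""]
    print(domains)  # ['test_app.vagrant', 'other.example.com']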

New domain

69 |
70 | {% csrf_token %} 71 | {{ domain_form | bootstrap }} 72 | 73 |
74 |

Config

75 |
    76 | {% for (k,v) in config %} 77 |
  • 78 | {% if k in ['GIT_REV', 'REDIS_URL', 'DATABASE_URL'] %} 79 | {{ k }} = {{ v }} 80 | {% else %} 81 |
    84 | {{ k }} = {{ v }}  85 | {% csrf_token %} 86 | 87 | 88 |
    89 | {% endif %} 90 |
  • 91 | {% endfor %} 92 |
93 |
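Each key/value row here is parsed out of "dokku config:show <app>" by apps.views.generic_config: the first line is a banner, and every following line is a "NAME: value" pair. A self-contained sketch of the same parsing:

    def parse_env_vars(app_name: str, data: str) -> dict[str, str]:
        # Mirrors apps.views.generic_config: banner line, then "NAME: value" rows.
        lines = data.split("\n")
        assert lines[0] == "=====> %s env vars" % app_name
        return {
            name: value.lstrip()
            for name, value in (line.split(":", 1) for line in lines[1:])
        }

    sample = "=====> demo env vars\nDOKKU_APP_TYPE: dockerfile\nDOKKU_PROXY_PORT: 80"
    print(parse_env_vars("demo", sample))
    # {'DOKKU_APP_TYPE': 'dockerfile', 'DOKKU_PROXY_PORT': '80'}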

New item

94 |
95 | {% csrf_token %} 96 | {{ form | bootstrap }} 97 | 98 |
99 |

Postgres

100 | {% if postgres %} 101 | Status: {{ postgres['Status'] }} 102 |
105 | {% csrf_token %} 106 | 107 |
108 | {% else %} 109 |
112 | {% csrf_token %} 113 | 114 |
115 | {% endif %} 116 |
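Behind these buttons, create_postgres and remove_postgres in apps/views.py queue fixed pairs of dokku commands over SSH; in outline (the app name is just an example):

    app_name = "myapp"  # hypothetical app name
    create_commands = [
        "postgres:create %s" % app_name,               # provision the service
        "postgres:link %s %s" % (app_name, app_name),  # link it, injecting DATABASE_URL
    ]
    remove_commands = [
        "postgres:unlink %s %s" % (app_name, app_name),
        "postgres:destroy %s --force" % app_name,
    ]

The Redis section below works the same way with the redis:* equivalents.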

Redis

117 | {% if redis %} 118 | Status: {{ redis['Status'] }} 119 |
122 | {% csrf_token %} 123 | 124 |
125 | {% else %} 126 |
129 | {% csrf_token %} 130 | 131 |
132 | {% endif %} 133 |

Let's Encrypt

134 | {% if letsencrypt %} 135 |
    136 | {% for (k,v) in letsencrypt.items() if k != 'App name' %}
  • {{ k }}: {{ v }}
  • {% endfor %} 137 |
138 |
141 | {% csrf_token %} 142 | 143 |
144 | {% else %} 145 |
146 | {% csrf_token %} 147 | {{ letsencrypt_form | bootstrap }} 148 | 149 |
150 | {% endif %} 151 |
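Which listing subcommand backs this section depends on the installed plugin: apps.views.letsencrypt_command returns "letsencrypt:ls" for plugin versions up to 0.9.4 and "letsencrypt:list" for anything newer (and None when the plugin is absent). The same decision in isolation:

    from packaging.version import Version

    def list_command(plugin_version: Version | None) -> str | None:
        # Mirrors apps.views.letsencrypt_command.
        if plugin_version is None:
            return None  # letsencrypt plugin not installed
        if plugin_version <= Version("0.9.4"):
            return "letsencrypt:ls"
        return "letsencrypt:list"

    assert list_command(Version("0.9.4")) == "letsencrypt:ls"
    assert list_command(Version("0.14.1")) == "letsencrypt:list"
    assert list_command(None) is None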

Process Info

152 |
    153 | {% for (k,v) in process.items() if k != 'processes' %}
  • {{ k }}: {{ v }}
  • {% endfor %} 154 |
155 |

Processes

156 |
    157 | {% for (k,v) in process.get('processes',{}).items() %}
  • {{ k }}: {{ v }}
  • {% endfor %} 158 |
159 |
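These two lists are both built by apps.views.process_info, which treats most "ps:report" lines as plain "key: value" pairs but matches the per-process Status lines with a regex; for example:

    import re

    # The same pattern process_info uses for per-process status lines.
    process_re = re.compile(r"Status\s+(\S+ \d+):\s+(\S+) \(CID: [a-z0-9]+\)")

    match = process_re.search("Status web 1: running (CID: d536b673b49)")
    assert match is not None
    print(match.groups())  # ('web 1', 'running')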

Logs

160 |
161 | {{ logs }}
162 | 
163 | {% endblock body %} 164 | -------------------------------------------------------------------------------- /wharf/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for wharf project. 3 | 4 | Generated by 'django-admin startproject' using Django 2.0.2. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/2.0/topics/settings/ 8 | 9 | For the full list of settings and their values, see 10 | https://docs.djangoproject.com/en/2.0/ref/settings/ 11 | """ 12 | 13 | import os 14 | import re 15 | import subprocess 16 | 17 | import dj_database_url 18 | 19 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 20 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 21 | 22 | 23 | # Quick-start development settings - unsuitable for production 24 | # See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/ 25 | 26 | SECRET_KEY = os.environ.get( 27 | "SECRET_KEY", ")u-_udqved=rq9p3fc-6mv6xh7y%slo-5d=h1590(k19e+srxt" 28 | ) 29 | 30 | DEBUG = True 31 | 32 | ALLOWED_HOSTS = ["*"] 33 | if "CSRF_TRUSTED_ORIGIN" in os.environ: 34 | CSRF_TRUSTED_ORIGINS = [os.environ["CSRF_TRUSTED_ORIGIN"]] 35 | 36 | 37 | # Application definition 38 | 39 | INSTALLED_APPS = [ 40 | "django.contrib.admin", 41 | "django.contrib.auth", 42 | "django.contrib.contenttypes", 43 | "django.contrib.sessions", 44 | "django.contrib.messages", 45 | "django.contrib.staticfiles", 46 | "django_jinja", 47 | "bootstrapform_jinja", 48 | "django_celery_results", 49 | "apps", 50 | ] 51 | 52 | MIDDLEWARE = [ 53 | "django.middleware.security.SecurityMiddleware", 54 | "django.contrib.sessions.middleware.SessionMiddleware", 55 | "django.middleware.common.CommonMiddleware", 56 | "django.middleware.csrf.CsrfViewMiddleware", 57 | "django.contrib.auth.middleware.AuthenticationMiddleware", 58 | "django.contrib.messages.middleware.MessageMiddleware", 59 | "django.middleware.clickjacking.XFrameOptionsMiddleware", 60 | "wharf.auth.LoginRequiredMiddleware", 61 | ] 62 | 63 | AUTHENTICATION_BACKENDS = [ 64 | "wharf.auth.SettingsBackend", 65 | "django.contrib.auth.backends.ModelBackend", 66 | ] 67 | 68 | LOGIN_REDIRECT_URL = "/" 69 | LOGIN_EXEMPT_URLS = ["webhook", "favicon.ico", "status"] 70 | 71 | ROOT_URLCONF = "wharf.urls" 72 | 73 | TEMPLATES = [ 74 | { 75 | "BACKEND": "django_jinja.jinja2.Jinja2", 76 | "DIRS": [], 77 | "APP_DIRS": True, 78 | "OPTIONS": { 79 | "context_processors": [ 80 | "django.template.context_processors.debug", 81 | "django.template.context_processors.request", 82 | "django.contrib.auth.context_processors.auth", 83 | "django.contrib.messages.context_processors.messages", 84 | "wharf.context_processors.helpers", 85 | ], 86 | "match_extension": None, 87 | "app_dirname": "templates", 88 | }, 89 | }, 90 | { 91 | "BACKEND": "django.template.backends.django.DjangoTemplates", 92 | "DIRS": [], 93 | "OPTIONS": { 94 | "context_processors": [ 95 | "django.contrib.auth.context_processors.auth", 96 | "django.contrib.messages.context_processors.messages", 97 | ], 98 | }, 99 | }, 100 | ] 101 | 102 | if "CACHE_URL" in os.environ: 103 | cache_url = os.environ["CACHE_URL"] 104 | elif "REDIS_URL" in os.environ: 105 | cache_url = "%s/1" % os.environ["REDIS_URL"] 106 | else: 107 | raise Exception("Neither CACHE_URL nor REDIS_URL set in environment") 108 | 109 | CACHES = { 110 | "default": { 111 | "BACKEND": "django_redis.cache.RedisCache", 112 | "LOCATION": cache_url, 113 | "OPTIONS": { 114 | 
"CLIENT_CLASS": "django_redis.client.DefaultClient", 115 | }, 116 | "TIMEOUT": 60 * 60 * 24, # 1 day 117 | } 118 | } 119 | 120 | WSGI_APPLICATION = "wharf.wsgi.application" 121 | 122 | # Database 123 | # https://docs.djangoproject.com/en/2.0/ref/settings/#databases 124 | 125 | DATABASES = { 126 | "default": dj_database_url.config( 127 | default="sqlite:///" + os.path.join(BASE_DIR, "db.sqlite3") 128 | ) 129 | } 130 | 131 | # Password validation 132 | # https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators 133 | 134 | AUTH_PASSWORD_VALIDATORS = [ 135 | { 136 | "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", 137 | }, 138 | { 139 | "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", 140 | }, 141 | { 142 | "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", 143 | }, 144 | { 145 | "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", 146 | }, 147 | ] 148 | 149 | 150 | # Internationalization 151 | # https://docs.djangoproject.com/en/2.0/topics/i18n/ 152 | 153 | LANGUAGE_CODE = "en-us" 154 | 155 | TIME_ZONE = "UTC" 156 | 157 | USE_I18N = True 158 | 159 | USE_L10N = True 160 | 161 | USE_TZ = True 162 | 163 | 164 | # Static files (CSS, JavaScript, Images) 165 | # https://docs.djangoproject.com/en/2.0/howto/static-files/ 166 | 167 | STATIC_URL = "/static/" 168 | STATIC_ROOT = os.path.join(BASE_DIR, "static") 169 | 170 | # Wharf settings 171 | 172 | DOKKU_HOST = os.environ.get("DOKKU_SSH_HOST", None) 173 | if DOKKU_HOST is None: # default, so need to detect host 174 | ip_paths = ["/sbin/ip", "/usr/sbin/ip"] 175 | ip_path: str | None = None 176 | for possible_path in ip_paths: 177 | if os.path.exists(possible_path): 178 | ip_path = possible_path 179 | break 180 | else: 181 | raise Exception(ip_paths) 182 | route = subprocess.check_output([ip_path, "route"], encoding="utf-8") 183 | ip = re.match(r"default via (\d+\.\d+\.\d+.\d+)", route) 184 | assert ip is not None 185 | DOKKU_HOST = ip.groups()[0] 186 | 187 | DOKKU_SSH_PORT = int(os.environ.get("DOKKU_SSH_PORT", "22")) 188 | GITHUB_SECRET = os.environ.get("GITHUB_SECRET", "password") 189 | ADMIN_LOGIN = os.environ.get("ADMIN_LOGIN", "admin") 190 | ADMIN_PASSWORD = os.environ.get("ADMIN_PASSWORD", "password") 191 | 192 | # Celery settings 193 | 194 | if "BROKER_URL" in os.environ: 195 | broker_url = os.environ["BROKER_URL"] 196 | elif "REDIS_URL" in os.environ: 197 | broker_url = "%s/0" % os.environ["REDIS_URL"] 198 | else: 199 | raise Exception("Neither BROKER_URL nor REDIS_URL set in environment") 200 | 201 | CELERY_RESULT_BACKEND = "django-cache" 202 | CELERY_BROKER_URL = broker_url 203 | CELERY_TASK_TRACK_STARTED = True 204 | CELERY_TASK_SERIALISER = "pickle" # To fix exception serialisation. 
See https://github.com/celery/celery/pull/3592 205 | CELERY_ACCEPT_CONTENT = ["pickle", "json"] # workers must accept pickle for the serializer above to apply 206 | 207 | LOGGING = { 208 | "version": 1, 209 | "disable_existing_loggers": False, 210 | "handlers": { 211 | "console": { 212 | "class": "logging.StreamHandler", 213 | }, 214 | }, 215 | "loggers": { 216 | "django": { 217 | "handlers": ["console"], 218 | "level": os.getenv("DJANGO_LOG_LEVEL", "INFO"), 219 | }, 220 | }, 221 | } 222 | -------------------------------------------------------------------------------- /check_boot.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import sys 4 | import time 5 | import uuid 6 | from pathlib import Path 7 | from subprocess import check_call, check_output 8 | from typing import Callable, Literal 9 | 10 | from selenium import webdriver 11 | from selenium.common.exceptions import TimeoutException 12 | from selenium.webdriver.common.by import By 13 | from selenium.webdriver.firefox.service import Service 14 | from selenium.webdriver.remote.webdriver import WebDriver 15 | from selenium.webdriver.remote.webelement import WebElement 16 | from selenium.webdriver.support.ui import WebDriverWait 17 | 18 | 19 | class Tester: 20 | def __init__(self): 21 | os.environ["MOZ_REMOTE_SETTINGS_DEVTOOLS"] = "1" 22 | firefox_options = webdriver.FirefoxOptions() 23 | firefox_options.add_argument("-headless") 24 | firefox_options.accept_insecure_certs = True 25 | geckodriver_path = Path("/snap/bin/geckodriver") 26 | assert geckodriver_path.exists(), geckodriver_path 27 | self.driver = webdriver.Firefox( 28 | options=firefox_options, 29 | service=Service( 30 | executable_path=geckodriver_path.as_posix(), 31 | log_output=subprocess.STDOUT, 32 | ), 33 | ) 34 | self.driver.implicitly_wait(0) 35 | self.start = time.time() 36 | 37 | def log(self, message): 38 | print("%f: %s" % (time.time() - self.start, message)) 39 | 40 | def find_one(self, elements: list[WebElement]): 41 | if len(elements) == 1: 42 | return elements[0] 43 | elif len(elements) == 0: 44 | return None 45 | else: 46 | raise Exception(elements) 47 | 48 | def find_element(self, strat: str, id: str | None, allow_none: bool = False): 49 | self.log("Looking for %s: '%s'" % (strat, id)) 50 | ret = self.find_one(self.driver.find_elements(strat, id)) 51 | if ret is None and not allow_none: 52 | self.failure() 53 | raise Exception("No such element with %s and %s" % (strat, id)) 54 | return ret 55 | 56 | def wait_for_one(self, locators): 57 | for locator in locators: 58 | element = self.find_element(*locator, allow_none=True) 59 | if element is not None: 60 | return element 61 | return False 62 | 63 | def url(self) -> str: 64 | return self.driver.current_url 65 | 66 | def failure(self): 67 | self.driver.get_screenshot_as_file("screenshot.png") 68 | print(self.url()) 69 | print(self.page_source()) 70 | os.system("sudo docker logs wharf.web.1") 71 | os.system("sudo docker logs wharf.celery.1") 72 | os.system("dokku nginx:show-config wharf") 73 | 74 | def get(self, url): 75 | self.log("Went to %s" % url) 76 | return self.driver.get(url) 77 | 78 | def send_keys(self, strat, id, text): 79 | self.log("Send keys '%s' to %s: '%s'" % (text, strat, id)) 80 | return self.find_element(strat, id).send_keys(text) 81 | 82 | def click(self, strat, id): 83 | self.log("Click on %s: '%s'" % (strat, id)) 84 | return self.find_element(strat, id).click() 85 | 86 | def wait_for_lambda( 87 | self, 88 | func: Callable[[WebDriver], Literal[False] | WebElement], 89 | timeout: int = 10, 90 | ) -> WebElement: 91 | try: 92 |
return WebDriverWait(self.driver, timeout).until(func) 93 | except TimeoutException: 94 | self.failure() 95 | raise 96 | 97 | def wait_for_list(self, items, timeout: int = 10): 98 | return self.wait_for_lambda(lambda driver: self.wait_for_one(items), timeout) 99 | 100 | def get_main_id(self): 101 | res = self.wait_for_list( 102 | [(By.ID, "initial-setup-header"), (By.ID, "list_apps")] 103 | ) 104 | return res.get_attribute("id") 105 | 106 | def page_source(self): 107 | return self.driver.page_source 108 | 109 | 110 | tester = Tester() 111 | try: 112 | tester.get(sys.argv[1]) 113 | tester.send_keys(By.NAME, "username", "admin") 114 | tester.send_keys(By.NAME, "password", "password") 115 | tester.click(By.NAME, "submit") 116 | id = tester.get_main_id() 117 | if id == "list_apps": 118 | tester.log("Checking SSH status") 119 | tester.click(By.ID, "refresh_info") 120 | id = tester.get_main_id() # because keys might not work any more 121 | if id == "initial-setup-header": 122 | tester.log("Adding new keys") 123 | keys = check_output("sudo dokku ssh-keys:list".split(" ")).decode("utf-8") 124 | if "check_boot" in keys: 125 | check_call("sudo dokku ssh-keys:remove check_boot".split(" ")) 126 | element = tester.find_element(By.ID, "ssh-key") 127 | assert element is not None 128 | cmd = "echo " + element.text + " | sudo dokku ssh-keys:add check_boot" 129 | tester.log(cmd) 130 | ret = os.system(cmd) 131 | assert ret == 0 132 | tester.get(sys.argv[1]) 133 | elif id == "list_apps": 134 | pass 135 | else: 136 | raise Exception(id) 137 | app_name = uuid.uuid4().hex 138 | tester.log("Making new app %s" % app_name) 139 | tester.send_keys(By.ID, "id_name", app_name) 140 | tester.click(By.ID, "create_app") 141 | tester.wait_for_list([(By.ID, "app_page")]) 142 | assert tester.page_source().find(app_name) != -1 143 | 144 | tester.get(sys.argv[1]) 145 | tester.click(By.XPATH, f'//a[text()="{app_name}"]') 146 | tester.wait_for_list([(By.ID, "app_page")]) 147 | assert tester.page_source().find(f"Wharf: {app_name}") != -1 148 | 149 | github_text = "Can't deploy due to missing GITHUB_URL" 150 | if tester.page_source().find(github_text) != -1: 151 | tester.send_keys(By.ID, "id_key", "GITHUB_URL") 152 | tester.send_keys( 153 | By.ID, "id_value", "https://github.com/palfrey/python-getting-started.git" 154 | ) 155 | tester.click(By.ID, "config_add") 156 | 157 | def wait_for_no_github_text(driver: WebDriver) -> WebElement | Literal[False]: 158 | if tester.page_source().find(github_text) != -1: 159 | return False 160 | else: 161 | return tester.wait_for_list([(By.ID, "app_page")], timeout=900) 162 | 163 | tester.wait_for_lambda(wait_for_no_github_text, timeout=900) 164 | if tester.page_source().find(github_text) != -1: 165 | tester.failure() 166 | raise Exception 167 | 168 | tester.click(By.ID, "deploy_app") 169 | for x in range(30): 170 | try: 171 | tester.log("Attempt %d %s" % (x, tester.url())) 172 | if tester.url().startswith("https:"): 173 | tester.log("going to http page") 174 | tester.get(tester.url().replace("https", "http")) 175 | tester.wait_for_list([(By.ID, "app_page")], timeout=30) 176 | break 177 | except TimeoutException: 178 | continue 179 | if tester.page_source().find(f"Wharf: {app_name}") == -1: 180 | tester.failure() 181 | raise Exception 182 | 183 | finally: 184 | tester.driver.quit() 185 | -------------------------------------------------------------------------------- /wharf/tasks.py: -------------------------------------------------------------------------------- 1 | import json 2 | import
os.path 3 | import socket 4 | import subprocess 5 | import time 6 | from datetime import UTC, datetime 7 | from fcntl import F_GETFL, F_SETFL, fcntl 8 | from os import O_NONBLOCK, read 9 | from pathlib import Path 10 | from typing import cast 11 | 12 | from celery import Task 13 | from django.conf import settings 14 | from git import Repo 15 | from paramiko import RSAKey 16 | from paramiko.client import AutoAddPolicy, SSHClient 17 | from redis import StrictRedis 18 | 19 | import apps.models as models 20 | 21 | from .celery import app 22 | 23 | redis = StrictRedis.from_url(settings.CELERY_BROKER_URL) 24 | 25 | 26 | SSH_WORKS_KEY = "ssh-check" 27 | 28 | 29 | def handle_data(key, raw_data: bytes): 30 | data = raw_data.decode("utf-8", "replace") 31 | redis.append(key, data) 32 | print(data) 33 | 34 | 35 | def task_key(task_id: object) -> str: 36 | return "task:%s" % task_id 37 | 38 | 39 | keyfile = os.path.expanduser("~/.ssh/id_rsa") 40 | 41 | 42 | def generate_key(): 43 | if not os.path.exists(keyfile): 44 | keydir = os.path.dirname(keyfile) 45 | if not os.path.exists(keydir): 46 | os.mkdir(keydir) 47 | prv = RSAKey.generate(bits=1024) 48 | prv.write_private_key_file(keyfile) 49 | pub = RSAKey(filename=keyfile) 50 | with open("%s.pub" % keyfile, "w") as f: 51 | f.write("%s %s" % (pub.get_name(), pub.get_base64())) 52 | print("Made new Wharf SSH key") 53 | 54 | 55 | generate_key() 56 | 57 | 58 | @app.task 59 | def get_public_key(): 60 | return open("%s.pub" % keyfile).read() 61 | 62 | 63 | daemon_socket = "/var/run/dokku-daemon/dokku-daemon.sock" 64 | 65 | 66 | def has_daemon(): 67 | return os.path.exists(daemon_socket) and os.access(daemon_socket, os.W_OK) 68 | 69 | 70 | # From https://github.com/dokku/dokku-daemon?tab=readme-ov-file#usage-within-a-dokku-app 71 | def run_with_daemon(key: str, command: str, timeout=60) -> bool: 72 | client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 73 | client.connect(daemon_socket) 74 | client.sendall(command.encode()) 75 | output = b"" 76 | while True: 77 | new_output = client.recv(1024) 78 | output += new_output 79 | if len(new_output) == 0: 80 | break 81 | json_data = output.decode("utf-8", "replace") 82 | client.close() 83 | print(f"json_data: '{json_data}'") 84 | data = json.loads(json_data)["output"] 85 | redis.append(key, data) 86 | print(data) 87 | return True 88 | 89 | 90 | @app.task(bind=True) 91 | def run_ssh_command(self: Task, command: str | list[str]): 92 | print("Running command", command) 93 | key = task_key(self.request.id) 94 | redis.set(key, "") 95 | if not has_daemon(): 96 | client = SSHClient() 97 | client.set_missing_host_key_policy(AutoAddPolicy) 98 | known_hosts = Path("~/.ssh/known_hosts").expanduser() 99 | known_hosts_folder = known_hosts.parent 100 | if not known_hosts_folder.exists(): 101 | known_hosts_folder.mkdir() 102 | 103 | if known_hosts.exists(): 104 | client.load_host_keys( 105 | known_hosts.as_posix() 106 | ) # So that we also save back the new host 107 | else: 108 | with known_hosts.open("w") as f: 109 | f.write("") # so connect doesn't barf when trying to save 110 | else: 111 | client = None 112 | 113 | if isinstance(command, list): 114 | commands = command 115 | else: 116 | commands = [command] 117 | for c in commands: 118 | if client is None: 119 | run_with_daemon(key, c) 120 | else: 121 | if os.path.exists(keyfile): 122 | pkey = RSAKey.from_private_key_file(keyfile) 123 | else: 124 | pkey = None 125 | client.connect( 126 | settings.DOKKU_HOST, 127 | port=settings.DOKKU_SSH_PORT, 128 | username="dokku", 129 | 
pkey=pkey, 130 | allow_agent=False, 131 | look_for_keys=False, 132 | ) 133 | transport = client.get_transport() 134 | assert transport is not None 135 | channel = transport.open_session() 136 | channel.exec_command(c) 137 | while True: 138 | anything = False 139 | while channel.recv_ready(): 140 | data = channel.recv(1024) 141 | handle_data(key, data) 142 | anything = True 143 | while channel.recv_stderr_ready(): 144 | data = channel.recv_stderr(1024) 145 | handle_data(key, data) 146 | anything = True 147 | if not anything: 148 | if channel.exit_status_ready(): 149 | break 150 | time.sleep(0.1) 151 | return cast(bytes, redis.get(key)).decode("utf-8") 152 | 153 | 154 | def set_nb(pipe): 155 | flags = fcntl(pipe, F_GETFL) 156 | fcntl(pipe, F_SETFL, flags | O_NONBLOCK) 157 | 158 | 159 | class FailedCommand(Exception): 160 | pass 161 | 162 | 163 | def run_process(key, cmd, cwd=None): 164 | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd) 165 | set_nb(p.stdout) 166 | set_nb(p.stderr) 167 | while True: 168 | try: 169 | assert p.stdout is not None 170 | out = read(p.stdout.fileno(), 1024) 171 | except BlockingIOError: 172 | out = b"" 173 | try: 174 | assert p.stderr is not None 175 | err = read(p.stderr.fileno(), 1024) 176 | except BlockingIOError: 177 | err = b"" 178 | handle_data(key, out) 179 | handle_data(key, err) 180 | if out == b"" and err == b"": 181 | if p.poll() is not None: 182 | break 183 | time.sleep(0.1) 184 | if p.poll() != 0: 185 | raise FailedCommand 186 | 187 | 188 | def trust_dokku_host(): 189 | ssh_config_path = os.path.expanduser("~/.ssh/config") 190 | if not os.path.exists(ssh_config_path): 191 | ssh_config_dir = os.path.dirname(ssh_config_path) 192 | if not os.path.exists(ssh_config_dir): 193 | os.mkdir(ssh_config_dir) 194 | with open(ssh_config_path, "w") as f: 195 | f.write(f"""Host {settings.DOKKU_HOST} 196 | StrictHostKeyChecking no 197 | UserKnownHostsFile=/dev/null""") 198 | 199 | 200 | @app.task(bind=True) 201 | def deploy(self: Task, app_name: str, git_url: str, git_branch: str): 202 | models.TaskLog( 203 | task_id=self.request.id, 204 | when=datetime.now(tz=UTC), 205 | app=models.App.objects.get(name=app_name), 206 | description="Deploying %s" % app_name, 207 | ).save() 208 | key = task_key(self.request.id) 209 | app_repo_path = os.path.abspath(os.path.join("repos", app_name)) 210 | if not os.path.exists(app_repo_path): 211 | redis.append(key, "== Cloning ==\n") 212 | run_process(key, ["git", "clone", git_url, app_repo_path]) 213 | repo = Repo(app_repo_path) 214 | try: 215 | repo.remotes["dokku"] 216 | except IndexError: 217 | repo.create_remote( 218 | "dokku", 219 | "ssh://dokku@%s:%s/%s" 220 | % (settings.DOKKU_HOST, settings.DOKKU_SSH_PORT, app_name), 221 | ) 222 | trust_dokku_host() 223 | redis.append(key, "== Pulling ==\n") 224 | run_process(key, ["git", "pull"], cwd=app_repo_path) 225 | redis.append(key, "== Pushing to Dokku ==\n") 226 | run_process(key, ["git", "push", "-f", "dokku", git_branch], cwd=app_repo_path) 227 | 228 | 229 | @app.task(bind=True) 230 | def check_ssh(self: Task) -> bool: 231 | trust_dokku_host() 232 | try: 233 | redis.set(SSH_WORKS_KEY, "") 234 | run_process( 235 | SSH_WORKS_KEY, 236 | [ 237 | "ssh", 238 | "-p", 239 | str(settings.DOKKU_SSH_PORT), 240 | "-o", 241 | "PasswordAuthentication=no", 242 | f"dokku@{settings.DOKKU_HOST}", 243 | "version", 244 | ], 245 | ) 246 | return True 247 | except FailedCommand: 248 | return False 249 |
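Everything run_ssh_command and run_process emit is appended to a Redis key derived from the Celery task id (see task_key and handle_data above), which is how the web views tail live output. A minimal consumer sketch, assuming the same Django settings and a running worker:

    from django.conf import settings
    from redis import StrictRedis

    from wharf.tasks import run_ssh_command, task_key

    redis = StrictRedis.from_url(settings.CELERY_BROKER_URL)

    result = run_ssh_command.delay("config:show --global")
    # The key grows as output streams in, so polling mid-run yields a partial
    # log; None means no output yet (this mirrors get_log in apps/views.py).
    raw = redis.get(task_key(result.id))
    print((raw or b"").decode("utf-8"))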
-------------------------------------------------------------------------------- /apps/views.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import hmac 3 | import json 4 | import re 5 | from datetime import UTC, datetime 6 | from logging import getLogger 7 | from typing import Any, Sequence, cast 8 | 9 | import timeout_decorator 10 | from celery.result import AsyncResult 11 | from celery.states import FAILURE, PENDING, STARTED, SUCCESS, state 12 | from django.conf import settings 13 | from django.contrib import messages 14 | from django.core.cache import cache 15 | from django.http import ( 16 | HttpRequest, 17 | HttpResponse, 18 | HttpResponseBadRequest, 19 | HttpResponseServerError, 20 | ) 21 | from django.shortcuts import redirect, render 22 | from django.urls import reverse 23 | from django.views.decorators.csrf import csrf_exempt 24 | from packaging.version import Version 25 | from redis import StrictRedis 26 | 27 | import wharf.tasks as tasks 28 | 29 | from . import forms, models 30 | 31 | logger = getLogger(__name__) 32 | 33 | redis = StrictRedis.from_url(settings.CELERY_BROKER_URL) 34 | ansi_escape = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]") 35 | 36 | 37 | def run_cmd(cmd): 38 | res = tasks.run_ssh_command.delay(cmd) 39 | return res.get().strip() 40 | 41 | 42 | def cmd_key(cmd: str): 43 | return "cmd:%s" % cmd 44 | 45 | 46 | def run_cmd_with_cache(cmd: str): 47 | key = cmd_key(cmd) 48 | existing = cache.get(key) 49 | if existing: 50 | return existing 51 | res = run_cmd(cmd) 52 | cache.set(key, res, None) 53 | return res 54 | 55 | 56 | def clear_cache(cmd: str): 57 | key = cmd_key(cmd) 58 | cache.delete(key) 59 | 60 | 61 | def plugin_versions() -> dict[str, Version]: 62 | plugin_raw_list = run_cmd_with_cache("plugin:list") 63 | plugin_pattern = re.compile(r"([a-z\-_0-9]+?)\s+([\d\.]+)") 64 | plugin_groups = plugin_pattern.findall(plugin_raw_list) 65 | return dict([(k, Version(v)) for (k, v) in plugin_groups]) 66 | 67 | 68 | def redirect_reverse( 69 | view_name: str, 70 | kwargs: dict[str, Any] | None = None, 71 | args: Sequence[Any] | None = None, 72 | ): 73 | new_url = reverse(view_name, kwargs=kwargs, args=args) 74 | logger.warning(f"New url is {new_url}") 75 | return redirect(new_url) 76 | 77 | 78 | def run_cmd_with_log(app_name, description, cmd, after): 79 | res = tasks.run_ssh_command.delay(cmd) 80 | if app_name is None: # global 81 | app_name = "_" 82 | else: 83 | models.TaskLog( 84 | task_id=res.id, 85 | when=datetime.now(tz=UTC), 86 | app=models.App.objects.get(name=app_name), 87 | description=description, 88 | ).save() 89 | return redirect_reverse( 90 | "wait_for_command", 91 | kwargs={"app_name": app_name, "task_id": res.id, "after": after}, 92 | ) 93 | 94 | 95 | def get_log(res: AsyncResult): 96 | if res.state > state(PENDING): 97 | key = tasks.task_key(res.id) 98 | raw = cast(bytes | None, redis.get(key)) 99 | if raw is None: 100 | return "" 101 | return raw.decode("utf-8") 102 | else: 103 | return "" 104 | 105 | 106 | def wait_for_command(request: HttpRequest, app_name, task_id, after): 107 | res = AsyncResult(task_id) 108 | if app_name != "_": 109 | app = models.App.objects.get(name=app_name) 110 | task, created = models.TaskLog.objects.get_or_create( 111 | task_id=task_id, defaults={"app": app, "when": datetime.now(tz=UTC)} 112 | ) 113 | description = task.description 114 | else: 115 | description = "" 116 | if res.state == state(SUCCESS): 117 | return redirect_reverse( 118 | after, kwargs={"app_name": 
app_name, "task_id": task_id} 119 | ) 120 | log = ansi_escape.sub("", get_log(res)) 121 | if res.state == state(FAILURE): 122 | log += str(res.traceback) 123 | return render( 124 | request, 125 | "command_wait.html", 126 | { 127 | "app": app_name, 128 | "task_id": task_id, 129 | "log": log, 130 | "state": res.state, 131 | "running": res.state in [state(PENDING), state(STARTED)], 132 | "description": description, 133 | }, 134 | ) 135 | 136 | 137 | def show_log(request: HttpRequest, task_id: str): 138 | res = AsyncResult(task_id) 139 | task = models.TaskLog.objects.get(task_id=task_id) 140 | log = ansi_escape.sub("", get_log(res)) 141 | if res.state == state(FAILURE): 142 | log += str(res.traceback) 143 | return render( 144 | request, 145 | "command_wait.html", 146 | { 147 | "app": task.app.name, 148 | "task_id": task_id, 149 | "log": log, 150 | "state": res.state, 151 | "running": False, 152 | "description": task.description, 153 | }, 154 | ) 155 | 156 | 157 | def app_list(): 158 | data = run_cmd_with_cache("apps:list") 159 | lines = data.split("\n") 160 | if lines[0] != "=====> My Apps": 161 | raise Exception(data) 162 | return lines[1:] 163 | 164 | 165 | public_key = "" 166 | 167 | 168 | def index(request: HttpRequest): 169 | global public_key 170 | if public_key == "": 171 | public_key = tasks.get_public_key.delay().get() 172 | 173 | if redis.get(tasks.SSH_WORKS_KEY) is None: 174 | ssh_works = tasks.check_ssh.delay().get() 175 | if not ssh_works: 176 | return render(request, "setup_key.html", {"key": public_key}) 177 | 178 | try: 179 | apps = app_list() 180 | except Exception as e: 181 | if e.__class__.__name__ in [ 182 | "AuthenticationException" 183 | ]: # Can't use class directly as Celery mangles things 184 | return render(request, "setup_key.html", {"key": public_key}) 185 | else: 186 | raise 187 | if request.method == "POST": 188 | app_form = forms.CreateAppForm(request.POST) 189 | if app_form.is_valid(): 190 | return create_app(app_form.cleaned_data["name"]) 191 | else: 192 | app_form = forms.CreateAppForm() 193 | config_form = forms.ConfigForm() 194 | config = global_config() 195 | return render( 196 | request, 197 | "list_apps.html", 198 | { 199 | "apps": apps, 200 | "app_form": app_form, 201 | "config_form": config_form, 202 | "config": sorted(config.items()), 203 | }, 204 | ) 205 | 206 | 207 | def refresh_all(request: HttpRequest): 208 | cache.clear() 209 | return redirect_reverse("index") 210 | 211 | 212 | def refresh(request: HttpRequest, app_name: str): 213 | key_patterns = [ 214 | "config:show %s", 215 | "postgres:info %s", 216 | "redis:info %s", 217 | "ps:report %s", 218 | "domains:report %s", 219 | ] 220 | keys = [cmd_key(k % app_name) for k in key_patterns] 221 | lc = letsencrypt_command() 222 | if lc is not None: 223 | keys.append(cmd_key(lc)) 224 | cache.delete_many(keys) 225 | return redirect_reverse("app_info", args=[app_name]) 226 | 227 | 228 | def generic_config(app_name: str, data: str) -> dict[str, Any]: 229 | if "does not exist" in data: 230 | return {} 231 | lines = data.split("\n") 232 | if lines[0] != "=====> %s env vars" % app_name: 233 | raise Exception(data) 234 | config = {} 235 | for line in lines[1:]: 236 | (name, value) = line.split(":", 1) 237 | config[name] = value.lstrip() 238 | return config 239 | 240 | 241 | def app_config(app_name): 242 | data = run_cmd_with_cache("config:show %s" % app_name) 243 | return generic_config(app_name, data) 244 | 245 | 246 | def global_config(): 247 | data = run_cmd_with_cache("config:show --global") 248 | return 
generic_config("global", data) 249 | 250 | 251 | def app_config_set(app, key, value): 252 | return run_cmd_with_log( 253 | app, 254 | "Setting %s" % key, 255 | "config:set %s %s=%s" % (app, key, value), 256 | "check_app_config_set", 257 | ) 258 | 259 | 260 | def check_config_set(request: HttpRequest, task_id: str): 261 | res = AsyncResult(task_id) 262 | data = get_log(res) 263 | lines = data.split("\n") 264 | if lines[0] != "-----> Setting config vars": 265 | raise Exception(data) 266 | messages.success(request, "Config updated") 267 | 268 | 269 | def check_app_config_set(request: HttpRequest, app_name, task_id: str): 270 | check_config_set(request, task_id) 271 | clear_cache("config:show %s" % app_name) 272 | return redirect_reverse("app_info", args=[app_name]) 273 | 274 | 275 | def app_config_delete(request, app): 276 | key = request.POST["key"] 277 | return run_cmd_with_log( 278 | app, 279 | "Removing %s" % key, 280 | "config:unset %s %s" % (app, key), 281 | "check_app_config_delete", 282 | ) 283 | 284 | 285 | def check_app_config_delete(request: HttpRequest, app_name, task_id: str): 286 | res = AsyncResult(task_id) 287 | data = get_log(res) 288 | lines = data.split("\n") 289 | if "Unsetting" not in lines[0]: 290 | raise Exception(data) 291 | messages.success(request, "Config updated") 292 | clear_cache("config:show %s" % app_name) 293 | return redirect_reverse("app_info", args=[app_name]) 294 | 295 | 296 | def global_config_set(request): 297 | form = forms.ConfigForm(request.POST) 298 | if form.is_valid(): 299 | return run_cmd_with_log( 300 | None, 301 | "Setting %s" % form.cleaned_data["key"], 302 | "config:set --global %s=%s" 303 | % (form.cleaned_data["key"], form.cleaned_data["value"]), 304 | "check_global_config_set", 305 | ) 306 | else: 307 | raise Exception 308 | 309 | 310 | def check_global_config_set(request: HttpRequest, task_id: str): 311 | check_config_set(request, task_id) 312 | clear_cache("config --global") 313 | return redirect_reverse("index") 314 | 315 | 316 | def generic_list(app_name, data, name_field: str, field_names: list[str]): 317 | lines = data.split("\n") 318 | if lines[0].find("is not a dokku command") != -1: 319 | raise Exception("Neeed plugin!") 320 | if lines[0].find("does") != -1: 321 | return None 322 | fields = dict([(x, {}) for x in field_names]) 323 | last_field: str | None = None 324 | for f in fields.keys(): 325 | index = lines[0].find(f) 326 | if index == -1: 327 | raise Exception("Can't find '%s' in '%s'" % (f, lines[0].strip())) 328 | if f == name_field: 329 | index = 0 330 | fields[f]["start"] = index 331 | if last_field is not None: 332 | fields[last_field]["end"] = index 333 | last_field = f 334 | assert last_field is not None 335 | fields[last_field]["end"] = None 336 | results = [] 337 | for line in lines[1:]: 338 | info = {} 339 | for f in fields.keys(): 340 | if fields[f]["end"] is None: 341 | info[f] = line[fields[f]["start"] :].strip() 342 | else: 343 | info[f] = line[fields[f]["start"] : fields[f]["end"]].strip() 344 | results.append(info) 345 | results = dict([[x[name_field], x] for x in results]) 346 | if app_name in results: 347 | return results[app_name] 348 | else: 349 | return None 350 | 351 | 352 | def generic_info(data: str): 353 | lines = data.split("\n") 354 | if lines[0].find("is not a dokku command") != -1: 355 | raise Exception("Neeed plugin!") 356 | if lines[0].find("does not exist") != -1: 357 | return None 358 | results = {} 359 | for line in lines[1:]: 360 | key, value = line.split(":", 1) 361 | key = key.strip() 362 | 
value = value.strip() 363 | results[key] = value 364 | return results 365 | 366 | 367 | def db_info(cache_key: str): 368 | data = run_cmd_with_cache(cache_key) 369 | try: 370 | return generic_info(data) 371 | except: 372 | clear_cache(cache_key) 373 | raise 374 | 375 | 376 | def postgres_info(app_name: str): 377 | cache_key = "postgres:info %s" % app_name 378 | return db_info(cache_key) 379 | 380 | 381 | def redis_info(app_name: str): 382 | cache_key = "redis:info %s" % app_name 383 | return db_info(cache_key) 384 | 385 | 386 | def letsencrypt_command(): 387 | version = plugin_versions().get("letsencrypt") 388 | if version is None: 389 | return None 390 | if version <= Version("0.9.4"): 391 | return "letsencrypt:ls" 392 | else: 393 | return "letsencrypt:list" 394 | 395 | 396 | def letsencrypt(app_name: str): 397 | cmd = letsencrypt_command() 398 | assert cmd is not None 399 | data = run_cmd_with_cache(cmd) 400 | return generic_list( 401 | app_name, 402 | data, 403 | "App name", 404 | ["App name", "Certificate Expiry", "Time before expiry", "Time before renewal"], 405 | ) 406 | 407 | 408 | def process_info(app_name): 409 | data = run_cmd_with_cache("ps:report %s" % app_name) 410 | if "does not exist" in data: 411 | return {} 412 | lines = data.split("\n") 413 | if lines[0].find("exit status") != -1: 414 | lines = lines[1:] 415 | if lines[0].find("No such object") != -1: 416 | lines = lines[1:] 417 | if ( 418 | lines[0].find("%s process information" % app_name) == -1 419 | and lines[0].find("%s ps information" % app_name) == -1 420 | ): # Different versions 421 | raise Exception(lines) 422 | results = {} 423 | processes = {} 424 | process_re = re.compile(r"Status\s+(\S+ \d+):\s+(\S+) \(CID: [a-z0-9]+\)") 425 | for line in lines[1:]: 426 | if line.strip().startswith("Status "): 427 | matches = process_re.search(line) 428 | if matches is None: 429 | raise Exception(line) 430 | matches = matches.groups() 431 | processes[matches[0]] = matches[1] 432 | else: 433 | (name, rest) = line.split(":", 1) 434 | results[name.strip()] = rest.strip() 435 | results["processes"] = processes 436 | return results 437 | 438 | 439 | def domains_list(app_name: str) -> list[str]: 440 | data = run_cmd_with_cache("domains:report %s" % app_name) 441 | if "does not exist" in data: 442 | return [] 443 | vhosts = re.search("Domains app vhosts: (.*)", data) 444 | assert vhosts is not None 445 | return [x.strip() for x in vhosts.groups()[0].split(" ") if x != ""] 446 | 447 | 448 | def add_domain(request: HttpRequest, app_name: str): 449 | form = forms.CreateDomainForm(request.POST) 450 | if form.is_valid(): 451 | commands = ["domains:add %s %s" % (app_name, form.cleaned_data["name"])] 452 | if letsencrypt(app_name) is not None: 453 | commands.append("letsencrypt:enable %s" % app_name) 454 | return run_cmd_with_log( 455 | app_name, 456 | "Add domain %s" % form.cleaned_data["name"], 457 | commands, 458 | "check_domain", 459 | ) 460 | else: 461 | raise Exception 462 | 463 | 464 | def check_domain(request: HttpRequest, app_name, task_id: str): 465 | res = AsyncResult(task_id) 466 | data = get_log(res) 467 | if data.find("Reloading nginx") != -1: 468 | clear_cache("domains:report %s" % app_name) 469 | messages.success(request, "Added domain name to %s" % app_name) 470 | return redirect_reverse("app_info", args=[app_name]) 471 | else: 472 | raise Exception(data) 473 | 474 | 475 | def remove_domain(request: HttpRequest, app_name): 476 | name = request.POST["name"] 477 | commands = ["domains:remove %s %s" % (app_name, name)] 478 | 
if letsencrypt(app_name) is not None: 479 | commands.append("letsencrypt %s" % app_name) 480 | return run_cmd_with_log( 481 | app_name, "Remove domain %s" % name, commands, "check_domain" 482 | ) 483 | 484 | 485 | def app_info(request: HttpRequest, app_name): 486 | app, _ = models.App.objects.get_or_create(name=app_name) 487 | config = app_config(app_name) 488 | if "GITHUB_URL" in config: 489 | app.github_url = config["GITHUB_URL"] 490 | app.save() 491 | if request.method == "POST": 492 | form = forms.ConfigForm(request.POST) 493 | if form.is_valid(): 494 | return app_config_set( 495 | app_name, form.cleaned_data["key"], form.cleaned_data["value"] 496 | ) 497 | else: 498 | form = forms.ConfigForm() 499 | return render( 500 | request, 501 | "app_info.html", 502 | { 503 | "postgres": postgres_info(app_name), 504 | "redis": redis_info(app_name), 505 | "letsencrypt": letsencrypt(app_name), 506 | "process": process_info(app_name), 507 | "logs": ansi_escape.sub("", run_cmd("logs %s --num 100" % app_name)), 508 | "domains": domains_list(app_name), 509 | "domain_form": forms.CreateDomainForm(), 510 | "letsencrypt_form": forms.SetupLetsEncrypt(), 511 | "form": form, 512 | "app": app_name, 513 | "git_url": config.get("GITHUB_URL", None), 514 | "config": sorted(config.items()), 515 | "task_logs": models.TaskLog.objects.filter(app=app).order_by("-when")[0:10], 516 | }, 517 | ) 518 | 519 | 520 | def deploy(request: HttpRequest, app_name): 521 | if request.POST["action"] == "deploy": 522 | config = app_config(app_name) 523 | res = tasks.deploy.delay( 524 | app_name, request.POST["url"], config.get("GIT_BRANCH", "master") 525 | ) 526 | clear_cache("config:show %s" % app_name) 527 | clear_cache("domains:report %s" % app_name) 528 | clear_cache("ps:report %s" % app_name) 529 | return redirect_reverse( 530 | "wait_for_command", 531 | kwargs={"app_name": app_name, "task_id": res.id, "after": "check_deploy"}, 532 | ) 533 | elif request.POST["action"] == "rebuild": 534 | return run_cmd_with_log( 535 | app_name, "Rebuilding", "ps:rebuild %s" % app_name, "check_rebuild" 536 | ) 537 | else: 538 | raise Exception(request.POST["action"]) 539 | 540 | 541 | def create_postgres(request: HttpRequest, app_name): 542 | return run_cmd_with_log( 543 | app_name, 544 | "Add Postgres", 545 | ["postgres:create %s" % app_name, "postgres:link %s %s" % (app_name, app_name)], 546 | "check_postgres", 547 | ) 548 | 549 | 550 | def remove_postgres(request: HttpRequest, app_name): 551 | return run_cmd_with_log( 552 | app_name, 553 | "Remove Postgres", 554 | [ 555 | "postgres:unlink %s %s" % (app_name, app_name), 556 | "postgres:destroy %s --force" % app_name, 557 | ], 558 | "check_remove_postgres", 559 | ) 560 | 561 | 562 | def create_redis(request: HttpRequest, app_name): 563 | return run_cmd_with_log( 564 | app_name, 565 | "Add Redis", 566 | ["redis:create %s" % app_name, "redis:link %s %s" % (app_name, app_name)], 567 | "check_redis", 568 | ) 569 | 570 | 571 | def remove_redis(request: HttpRequest, app_name): 572 | return run_cmd_with_log( 573 | app_name, 574 | "Remove Redis", 575 | [ 576 | "redis:unlink %s %s" % (app_name, app_name), 577 | "redis:destroy %s --force" % app_name, 578 | ], 579 | "check_remove_redis", 580 | ) 581 | 582 | 583 | def check_deploy(request: HttpRequest, app_name, task_id: str): 584 | clear_cache("config:show %s" % app_name) 585 | messages.success(request, "%s redeployed" % app_name) 586 | return redirect_reverse("app_info", args=[app_name]) 587 | 588 | 589 | def check_rebuild(request: HttpRequest, 
app_name, task_id: str): 590 | res = AsyncResult(task_id) 591 | data = get_log(res) 592 | if data.find("Application deployed:") == -1: 593 | raise Exception(data) 594 | messages.success(request, "%s rebuilt" % app_name) 595 | clear_cache("config:show %s" % app_name) 596 | return redirect_reverse("app_info", args=[app_name]) 597 | 598 | 599 | def check_postgres(request: HttpRequest, app_name, task_id: str): 600 | res = AsyncResult(task_id) 601 | data = get_log(res) 602 | if data.find("Postgres container created") == -1: 603 | raise Exception(data) 604 | messages.success(request, "Postgres added to %s" % app_name) 605 | clear_cache("postgres:info %s" % app_name) 606 | clear_cache("config:show %s" % app_name) 607 | return redirect_reverse("app_info", args=[app_name]) 608 | 609 | 610 | def check_remove_postgres(request: HttpRequest, app_name, task_id: str): 611 | res = AsyncResult(task_id) 612 | data = get_log(res) 613 | if data.find("Postgres container deleted: %s" % app_name) == -1: 614 | raise Exception(data) 615 | messages.success(request, "Postgres removed from %s" % app_name) 616 | clear_cache("postgres:info %s" % app_name) 617 | clear_cache("config:show %s" % app_name) 618 | return redirect_reverse("app_info", args=[app_name]) 619 | 620 | 621 | def check_redis(request: HttpRequest, app_name, task_id: str): 622 | res = AsyncResult(task_id) 623 | data = get_log(res) 624 | if data.find("Redis container created") == -1: 625 | raise Exception(data) 626 | messages.success(request, "Redis added to %s" % app_name) 627 | clear_cache("redis:info %s" % app_name) 628 | clear_cache("config:show %s" % app_name) 629 | return redirect_reverse("app_info", args=[app_name]) 630 | 631 | 632 | def check_remove_redis(request: HttpRequest, app_name, task_id: str): 633 | res = AsyncResult(task_id) 634 | data = get_log(res) 635 | if data.find("Redis container deleted: %s" % app_name) == -1: 636 | raise Exception(data) 637 | messages.success(request, "Redis removed from %s" % app_name) 638 | clear_cache("redis:info %s" % app_name) 639 | clear_cache("config:show %s" % app_name) 640 | return redirect_reverse("app_info", args=[app_name]) 641 | 642 | 643 | def create_app(app_name: str): 644 | if models.App.objects.filter(name=app_name).exists(): 645 | return HttpResponseBadRequest(f"You already have an app called '{app_name}'") 646 | models.App(name=app_name).save() 647 | return run_cmd_with_log( 648 | app_name, "Add app %s" % app_name, "apps:create %s" % app_name, "check_app" 649 | ) 650 | 651 | 652 | def check_app(request: HttpRequest, app_name: str, task_id: str): 653 | res = AsyncResult(task_id) 654 | data = get_log(res) 655 | if data.find("Creating %s..." 
% app_name) == -1: 656 | raise Exception(data) 657 | messages.success(request, "Created %s" % app_name) 658 | clear_cache("apps:list") 659 | return redirect_reverse("app_info", args=[app_name]) 660 | 661 | 662 | def setup_letsencrypt(request: HttpRequest, app_name: str): 663 | form = forms.SetupLetsEncrypt(request.POST) 664 | if form.is_valid(): 665 | commands = [ 666 | f"letsencrypt:set {app_name} email %s" % form.cleaned_data["email"], 667 | f"letsencrypt:enable {app_name}", 668 | ] 669 | return run_cmd_with_log( 670 | app_name, 671 | "Enable Let's Encrypt", 672 | commands, 673 | "check_letsencrypt", 674 | ) 675 | else: 676 | raise Exception(form.errors) 677 | 678 | 679 | def remove_letsencrypt(request: HttpRequest, app_name): 680 | return run_cmd_with_log( 681 | app_name, 682 | "Remove Letsencrypt", 683 | [ 684 | f"letsencrypt:disable {app_name} --force", 685 | ], 686 | "check_remove_letsencrypt", 687 | ) 688 | 689 | 690 | def check_letsencrypt(request: HttpRequest, app_name: str, task_id: str): 691 | res = AsyncResult(task_id) 692 | app = models.App.objects.get(name=app_name) 693 | task, _created = models.TaskLog.objects.get_or_create( 694 | task_id=task_id, defaults={"app": app, "when": datetime.now(tz=UTC)} 695 | ) 696 | log = get_log(res) 697 | if log.find("Certificate retrieved successfully") != -1: 698 | cmd = letsencrypt_command() 699 | assert cmd is not None 700 | clear_cache(cmd) 701 | return redirect_reverse("app_info", args=[app_name]) 702 | else: 703 | return render( 704 | request, 705 | "command_wait.html", 706 | { 707 | "app": app_name, 708 | "task_id": task_id, 709 | "log": log, 710 | "state": res.state, 711 | "running": res.state in [state(PENDING), state(STARTED)], 712 | "description": task.description, 713 | }, 714 | ) 715 | 716 | 717 | def check_remove_letsencrypt(request: HttpRequest, app_name: str, task_id: str): 718 | res = AsyncResult(task_id) 719 | app = models.App.objects.get(name=app_name) 720 | task, _created = models.TaskLog.objects.get_or_create( 721 | task_id=task_id, defaults={"app": app, "when": datetime.now(tz=UTC)} 722 | ) 723 | log = get_log(res) 724 | if log.find(f"Removing letsencrypt files for {app_name}") != -1: 725 | cmd = letsencrypt_command() 726 | assert cmd is not None 727 | clear_cache(cmd) 728 | return redirect_reverse("app_info", args=[app_name]) 729 | else: 730 | return render( 731 | request, 732 | "command_wait.html", 733 | { 734 | "app": app_name, 735 | "task_id": task_id, 736 | "log": log, 737 | "state": res.state, 738 | "running": res.state in [state(PENDING), state(STARTED)], 739 | "description": task.description, 740 | }, 741 | ) 742 | 743 | 744 | @csrf_exempt 745 | def github_webhook(request: HttpRequest): 746 | secret = settings.GITHUB_SECRET.encode("utf-8") 747 | hash = "sha1=%s" % hmac.new(secret, request.body, hashlib.sha1).hexdigest() 748 | if "HTTP_X_HUB_SIGNATURE" not in request.META: 749 | return HttpResponseBadRequest("No X-Hub-Signature header") 750 | header = request.META["HTTP_X_HUB_SIGNATURE"] 751 | if not hmac.compare_digest(header, hash): 752 | return HttpResponseBadRequest("X-Hub-Signature header doesn't match") 753 | data = json.loads(request.read()) 754 | if "hook_id" in data: # assume Ping 755 | if "push" not in data["hook"]["events"]: 756 | return HttpResponseBadRequest("No Push event set!") 757 | return HttpResponse("All good") 758 | default_ref = "refs/heads/%s" % data["repository"]["default_branch"] 759 | if data["ref"] != default_ref: 760 | return HttpResponse( 761 | "Push to non-default branch (saw %s, expected %s)" 762 | %
(data["ref"], default_ref) 763 | ) 764 | clone_url = data["repository"]["clone_url"] 765 | apps = models.App.objects.filter(github_url=clone_url) 766 | if not apps.exists(): 767 | return HttpResponseBadRequest( 768 | "Can't find an entry for clone URL %s" % clone_url 769 | ) 770 | app = apps.first() 771 | assert app is not None 772 | config = app_config(app.name) 773 | res = tasks.deploy.delay(app.name, clone_url, config.get("GIT_BRANCH", "master")) 774 | clear_cache("config:show %s" % app.name) 775 | return HttpResponse( 776 | "Running deploy. Deploy log is at %s" 777 | % request.build_absolute_uri(reverse("show_log", kwargs={"task_id": res.id})) 778 | ) 779 | 780 | 781 | @timeout_decorator.timeout(5, use_signals=False) 782 | def check_status(): 783 | # Clearing the cache and then trying a command makes sure that 784 | # - The cache is up 785 | # - Celery is up 786 | # - We can run dokku commands 787 | clear_cache("config --global") 788 | run_cmd_with_cache("config --global") 789 | 790 | 791 | def status(request: HttpRequest): 792 | try: 793 | check_status() 794 | return HttpResponse("All good") 795 | except timeout_decorator.TimeoutError: 796 | return HttpResponseServerError("Timeout trying to get status") 797 | -------------------------------------------------------------------------------- /tests/test_views.py: -------------------------------------------------------------------------------- 1 | import re 2 | import uuid 3 | from typing import Any, Callable, cast 4 | from unittest.mock import MagicMock, Mock, patch 5 | 6 | import pytest 7 | from celery.result import AsyncResult 8 | from celery.states import SUCCESS, state 9 | from django.conf import LazySettings 10 | from django.core.cache import cache 11 | from django.http import HttpRequest, HttpResponse, HttpResponseRedirect 12 | from django.test import Client 13 | from model_bakery import baker 14 | from redis import StrictRedis 15 | 16 | from apps import models 17 | from apps.views import ( 18 | app_config_delete, 19 | app_info, 20 | app_list, 21 | check_app, 22 | check_app_config_delete, 23 | check_letsencrypt, 24 | check_postgres, 25 | check_redis, 26 | check_remove_letsencrypt, 27 | check_remove_postgres, 28 | check_remove_redis, 29 | create_app, 30 | create_postgres, 31 | create_redis, 32 | global_config, 33 | index, 34 | letsencrypt, 35 | process_info, 36 | refresh, 37 | refresh_all, 38 | remove_letsencrypt, 39 | remove_postgres, 40 | remove_redis, 41 | setup_letsencrypt, 42 | ) 43 | from tests.recording_cache import RecordingCache 44 | 45 | 46 | class MockCelery: 47 | def __init__(self, res: object): 48 | self.res = res 49 | self.id = uuid.uuid4() 50 | 51 | def get(self): 52 | return self.res 53 | 54 | 55 | commands = { 56 | ("apps:list",): """=====> My Apps 57 | wharf""", 58 | ("config:show test_app",): """=====> test_app env vars 59 | DOKKU_APP_RESTORE: 1 60 | DOKKU_APP_TYPE: dockerfile 61 | DOKKU_PROXY_PORT: 80""", 62 | ("config:show missing",): " ! 
App missing does not exist", 63 | ("postgres:list",): """=====> Postgres services 64 | wharf""", 65 | ("postgres:info test_app",): """=====> test_app postgres service information 66 | Config dir: /var/lib/dokku/services/postgres/test_app/data 67 | Config options: 68 | Data dir: /var/lib/dokku/services/postgres/test_app/data 69 | Dsn: postgres://postgres:aa23a509ff7443011ebfa49e3c3a582a@dokku-postgres-test_app:5432/test_app 70 | Exposed ports: - 71 | Id: 3a07c995d32e13766d3ebc44d040391f434e234d3d9c6021410eff4a130af656 72 | Internal ip: 172.17.0.3 73 | Initial network: 74 | Links: wharf 75 | Post create network: 76 | Post start network: 77 | Service root: /var/lib/dokku/services/postgres/test_app 78 | Status: running 79 | Version: postgres:17.4""", 80 | ("redis:info test_app",): """=====> test_app redis service information 81 | Config dir: /var/lib/dokku/services/redis/test_app/config 82 | Config options: 83 | Data dir: /var/lib/dokku/services/redis/test_app/data 84 | Dsn: redis://:6654f1fd4527260516b99ea515f5d283e9ab887822f7e3c9d5d37ac4815b73d2@dokku-redis-wharf:6379 85 | Exposed ports: - 86 | Id: 12d11c44ecb0f75175ab4c15a853d9b2801d1349f5ff16610dadf154184256d7 87 | Internal ip: 172.17.0.2 88 | Initial network: 89 | Links: test_app 90 | Post create network: 91 | Post start network: 92 | Service root: /var/lib/dokku/services/redis/test_app 93 | Status: running 94 | Version: redis:7.4.2""", 95 | ("ps:report test_app",): """=====> test_app ps information 96 | Deployed: true 97 | Processes: 2 98 | Ps can scale: true 99 | Ps computed procfile path: Procfile 100 | Ps global procfile path: Procfile 101 | Ps procfile path: 102 | Ps restart policy: on-failure:10 103 | Restore: true 104 | Running: true 105 | Status celery 1: running (CID: 68b2897a761) 106 | Status web 1: running (CID: d536b673b49)""", 107 | ( 108 | "logs test_app --num 100", 109 | ): """2025-04-25T23:07:53.894820268Z app[celery.1]: System check identified some issues: 110 | 2025-04-25T23:07:53.895026545Z app[celery.1]: 111 | 2025-04-25T23:07:53.895030874Z app[celery.1]: WARNINGS:""", 112 | ("domains:report test_app",): """"=====> test_app domains information 113 | Domains app enabled: true 114 | Domains app vhosts: test_app.vagrant 115 | Domains global enabled: true 116 | Domains global vhosts: vagrant""", 117 | ("postgres:info missing",): " ! Postgres service missing does not exist", 118 | ("redis:info missing",): " ! Redis service missing does not exist", 119 | ( 120 | "letsencrypt:ls", 121 | ): "-----> App name Certificate Expiry Time before expiry Time before renewal", 122 | ( 123 | "letsencrypt:list", 124 | ): "-----> App name Certificate Expiry Time before expiry Time before renewal", 125 | ("ps:report missing",): " ! App missing does not exist", 126 | ("logs missing --num 100",): " ! App missing does not exist", 127 | ("domains:report missing",): " ! 
App missing does not exist", 128 | ( 129 | "plugin:list", 130 | ): """ letsencrypt 0.9.4 enabled Automated installation of let's encrypt TLS certificates 131 | logs 0.35.18 enabled dokku core logs plugin 132 | network 0.35.18 enabled dokku core network plugin""", 133 | ("apps:create foo",): "", 134 | ("config:show --global",): """=====> global env vars 135 | CURL_CONNECT_TIMEOUT: 90 136 | CURL_TIMEOUT: 600""", 137 | ("postgres:create foo",): """Waiting for container to be ready 138 | Creating container database 139 | Securing connection to database 140 | =====> Postgres container created: foo""", 141 | ("postgres:link foo foo",): """-----> Setting config vars 142 | DATABASE_URL: postgres://postgres:2a871f1589b4519719428602980939bb@dokku-postgres-foo:5432/foo""", 143 | ("postgres:unlink foo foo",): "-----> Unsetting DATABASE_URL", 144 | ("postgres:destroy foo --force",): """=====> Pausing container 145 | Container paused 146 | Removing container 147 | Removing data 148 | =====> Postgres container deleted: foo""", 149 | ( 150 | "ps:report non-running-app", 151 | ): """No such object: cdbd631f11431826fb7ccfd257921e6b0ac1e6fc7986948e44e0d49609e11123 152 | =====> non-running-app ps information 153 | Deployed: true 154 | Processes: 1 155 | Ps can scale: true 156 | Ps computed procfile path: Procfile 157 | Ps global procfile path: Procfile 158 | Ps procfile path: 159 | Ps restart policy: on-failure:10 160 | Restore: true 161 | Running: false 162 | Status web 1: missing (CID: cdbd631f114)""", 163 | ("redis:create foo",): """ Waiting for container to be ready 164 | =====> Redis container created: foo 165 | =====> foo redis service information 166 | Config dir: /var/lib/dokku/services/redis/foo/config 167 | Config options: 168 | Data dir: /var/lib/dokku/services/redis/foo/data 169 | Dsn: redis://:584fb7aa7ca03acda2bc8c81c056ac81e0ec59d10efec8137cfcf893854f5570@dokku-redis-foo:6379 170 | Exposed ports: - 171 | Id: 37f96e39797e4a731d750a19ae0e3255fb84fede13d1e704ae61d18ac037e4ad 172 | Internal ip: 172.17.0.4 173 | Initial network: 174 | Links: - 175 | Post create network: 176 | Post start network: 177 | Service root: /var/lib/dokku/services/redis/foo 178 | Status: running 179 | Version: redis:8.4.0""", 180 | ("redis:link foo foo",): """----> Setting config vars 181 | REDIS_URL: redis://:584fb7aa7ca03acda2bc8c81c056ac81e0ec59d10efec8137cfcf893854f5570@dokku-redis-foo:6379""", 182 | ("redis:unlink foo foo",): """-----> Unsetting REDIS_URL""", 183 | ("redis:destroy foo --force",): """=====> Deleting foo 184 | =====> Pausing container 185 | Container paused 186 | Removing container 187 | Removing data 188 | =====> Redis container deleted: foo""", 189 | ( 190 | "letsencrypt:set test_app email foo@bar.com", 191 | ): "=====> Setting email to foo@bar.com", 192 | ("letsencrypt:enable test_app",): """=====> Enabling letsencrypt for test_app 193 | -----> Enabling ACME proxy for test_app... 194 | -----> Getting letsencrypt certificate for test_app via HTTP-01 195 | - Domain 'test_app.vagrant' 196 | 2025/12/20 21:38:20 No key found for account foo@bar.com. Generating a P256 key. 197 | 2025/12/20 21:38:20 Saved key to /certs/accounts/acme-v02.api.letsencrypt.org/foo@bar.com/keys/foo@bar.com.key 198 | 2025/12/20 21:38:20 [INFO] acme: Registering account for foo@bar.com 199 | 2025/12/20 21:38:20 [INFO] [test_app.vagrant] acme: Obtaining bundled SAN certificate 200 | !!!! HEADS UP !!!! 201 | 202 | Your account credentials have been saved in your Let's Encrypt 203 | configuration directory at "/certs/accounts". 
204 | 205 | You should make a secure backup of this folder now. This 206 | configuration directory will also contain certificates and 207 | private keys obtained from Let's Encrypt so making regular 208 | backups of this folder is ideal. 209 | 2025/12/20 21:38:21 [INFO] [test_app.vagrant] AuthURL: https://acme-v02.api.letsencrypt.org/acme/authz/12345/6789 210 | 2025/12/20 21:38:21 [INFO] [test_app.vagrant] acme: Could not find solver for: tls-alpn-01 211 | 2025/12/20 21:38:21 [INFO] [test_app.vagrant] acme: use http-01 solver 212 | 2025/12/20 21:38:21 [INFO] [test_app.vagrant] acme: Trying to solve HTTP-01 213 | 2025/12/20 21:38:29 [INFO] [test_app.vagrant] The server validated our request 214 | 2025/12/20 21:38:29 [INFO] [test_app.vagrant] acme: Validations succeeded; requesting certificates 215 | 2025/12/20 21:38:29 [INFO] [test_app.vagrant] Server responded with a certificate. 216 | -----> Certificate retrieved successfully. 217 | -----> Installing let's encrypt certificates 218 | -----> Unsetting DOKKU_PROXY_PORT 219 | -----> Setting config vars 220 | DOKKU_PROXY_PORT_MAP: http:80:5000 221 | -----> Setting config vars 222 | DOKKU_PROXY_PORT_MAP: http:80:5000 https:443:5000 223 | -----> Configuring test_app.vagrant...(using built-in template) 224 | -----> Creating https nginx.conf 225 | Enabling HSTS 226 | Reloading nginx 227 | -----> Ensuring network configuration is in sync for test_app 228 | -----> Configuring test_app.vagrant...(using built-in template) 229 | -----> Creating https nginx.conf 230 | Enabling HSTS 231 | Reloading nginx 232 | -----> Disabling ACME proxy for test_app... 233 | -----> Done""", 234 | ("letsencrypt:disable test_app --force",): """-----> Disabling letsencrypt for app 235 | Removing letsencrypt files for test_app 236 | Removing SSL endpoint from test_app 237 | -----> Unsetting DOKKU_PROXY_SSL_PORT 238 | -----> Setting config vars 239 | DOKKU_PROXY_PORT_MAP: http:80:5000 240 | -----> Configuring test_app.vagrant...(using built-in template) 241 | -----> Creating http nginx.conf 242 | Reloading nginx 243 | -----> Done""", 244 | ("config:unset test_app FOO_KEY",): """-----> Unsetting FOO_KEY""", 245 | } 246 | 247 | 248 | def custom_mock_commands(override_commands: dict[Any, str]) -> Callable: 249 | def _internal(*args): 250 | if type(args[0]) is list: 251 | all_celerys = [_internal(x) for x in args[0]] 252 | return MockCelery("\n".join([c.res for c in all_celerys])) 253 | if args in override_commands: 254 | return MockCelery(override_commands[args]) 255 | if args in commands: 256 | return MockCelery(commands[args]) 257 | print(args) 258 | raise Exception(args) 259 | 260 | return _internal 261 | 262 | 263 | mock_commands = custom_mock_commands({}) 264 | 265 | 266 | @pytest.fixture 267 | def mock_request() -> HttpRequest: 268 | mr = MagicMock(spec=HttpRequest) 269 | mr.META = MagicMock() 270 | mr._messages = MagicMock() 271 | mr.method = MagicMock() 272 | return mr 273 | 274 | 275 | @pytest.fixture(autouse=True) 276 | def disable_cache( 277 | monkeypatch: pytest.MonkeyPatch, 278 | recording_cache: RecordingCache, 279 | ): 280 | monkeypatch.setattr(cache, "set", lambda _key, _value, _timeout: None) 281 | 282 | 283 | @pytest.fixture(autouse=True) 284 | def patch_csrf_token(monkeypatch: pytest.MonkeyPatch): 285 | monkeypatch.setattr( 286 | "django.middleware.csrf.get_token", Mock(return_value="predictabletoken") 287 | ) 288 | yield 289 | 290 | 291 | @patch("wharf.tasks.run_ssh_command.delay") 292 | def test_app_list(patched_delay: MagicMock): 293 | 
262 | 263 | mock_commands = custom_mock_commands({})  # the default mock: canned output only, no overrides 264 | 265 | 266 | @pytest.fixture 267 | def mock_request() -> HttpRequest:  # a MagicMock double for calling views directly, bypassing the test client 268 | mr = MagicMock(spec=HttpRequest) 269 | mr.META = MagicMock() 270 | mr._messages = MagicMock() 271 | mr.method = MagicMock() 272 | return mr 273 | 274 | 275 | @pytest.fixture(autouse=True) 276 | def disable_cache(  # cache writes become no-ops; recording_cache captures cache calls for assertions 277 | monkeypatch: pytest.MonkeyPatch, 278 | recording_cache: RecordingCache, 279 | ): 280 | monkeypatch.setattr(cache, "set", lambda _key, _value, _timeout: None) 281 | 282 | 283 | @pytest.fixture(autouse=True) 284 | def patch_csrf_token(monkeypatch: pytest.MonkeyPatch):  # a fixed CSRF token keeps rendered form HTML deterministic 285 | monkeypatch.setattr( 286 | "django.middleware.csrf.get_token", Mock(return_value="predictabletoken") 287 | ) 288 | yield 289 | 290 | 291 | @patch("wharf.tasks.run_ssh_command.delay") 292 | def test_app_list(patched_delay: MagicMock): 293 | patched_delay.side_effect = mock_commands 294 | assert app_list() == ["wharf"] 295 | 296 | 297 | def finished_log(monkeypatch: pytest.MonkeyPatch, contents: str):  # simulate a finished Celery task: its state reports SUCCESS and Redis serves 'contents' as the stored log 298 | monkeypatch.setattr(AsyncResult, "state", state(SUCCESS)) 299 | monkeypatch.setattr( 300 | StrictRedis, 301 | "get", 302 | lambda _self, _key: contents.encode("utf-8"), 303 | ) 304 | 305 | 306 | @patch("wharf.tasks.run_ssh_command.delay") 307 | def test_check_app( 308 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 309 | ): 310 | finished_log(  # the app-creation task has already completed successfully 311 | monkeypatch, 312 | """Creating test_app... 313 | -----> Creating new app virtual host file...""", 314 | ) 315 | 316 | patched_delay.side_effect = mock_commands 317 | resp = check_app(mock_request, "test_app", "1234") 318 | assert resp.status_code == 302, resp 319 | assert resp.url == "/apps/test_app" 320 | 321 | 322 | @pytest.mark.django_db 323 | @patch("wharf.tasks.run_ssh_command.delay") 324 | def test_app_info(patched_delay: MagicMock, mock_request: HttpRequest): 325 | patched_delay.side_effect = mock_commands 326 | resp = app_info(mock_request, "test_app") 327 | assert resp.status_code == 200, resp 328 | content = resp.content.decode("utf-8") 329 | 330 | expected_contents = [ 331 | """

Wharf: test_app

\n Return to apps index\n
\n

Actions

\n
\n
\n \n \n
\n \n Can\'t deploy due to missing GITHUB_URL in config (which should be set to the "Clone with HTTPS" url from Github)\n \n
\n

Task logs

\n \n No tasks run yet\n \n

Domains

\n \n
    \n \n
  • \n test_app.vagrant\n
    \n \n \n \n
    \n
  • \n \n
\n \n

New domain

\n
""", 332 | """
\n \n \n \n \n\n
\n \n \n \n
\n \n
\n\n \n
\n

Config

\n
    \n \n
  • \n \n
    \n DOKKU_APP_RESTORE = 1 \n \n \n \n
    \n \n
  • \n \n
  • \n \n
    \n DOKKU_APP_TYPE = dockerfile \n \n \n \n
    \n \n
  • \n \n
  • \n \n
    \n DOKKU_PROXY_PORT = 80 \n \n \n \n
    \n \n
  • \n \n
\n

New item

""", 333 | """
\n \n \n \n \n\n
\n \n \n \n
\n \n
\n\n
\n \n \n \n \n\n
\n \n \n \n
\n \n
\n\n \n \n

Postgres

\n \n Status: running\n
\n \n \n
\n \n

Redis

\n \n Status: running\n
\n \n \n
\n \n

Let\'s Encrypt

\n \n
\n \n \n
\n \n \n \n \n\n
\n \n \n \n
\n \n
\n\n \n
\n \n

Process Info

\n
    \n
  • Deployed: true
  • Processes: 2
  • Ps can scale: true
  • Ps computed procfile path: Procfile
  • Ps global procfile path: Procfile
  • Ps procfile path:
  • Ps restart policy: on-failure:10
  • Restore: true
  • Running: true
  • \n
\n

Processes

\n
    \n
  • celery 1: running
  • web 1: running
  • \n
\n

Logs

\n
\n2025-04-25T23:07:53.894820268Z app[celery.1]: System check identified some issues:\n2025-04-25T23:07:53.895026545Z app[celery.1]:\n2025-04-25T23:07:53.895030874Z app[celery.1]: WARNINGS:\n
""", 334 | ] 335 | for expected_content in expected_contents: 336 | assert expected_content in content 337 | 338 | 339 | @pytest.mark.django_db 340 | @patch("wharf.tasks.run_ssh_command.delay") 341 | def test_missing_app_info(patched_delay: MagicMock, mock_request: HttpRequest): 342 | patched_delay.side_effect = mock_commands 343 | resp = app_info(mock_request, "missing") 344 | assert resp.status_code == 200, resp 345 | content = resp.content.decode("utf-8") 346 | expected_contents = [ 347 | """

Wharf: missing

\n Return to apps index\n
\n

Actions

\n
\n
\n \n \n
\n \n Can\'t deploy due to missing GITHUB_URL in config (which should be set to the "Clone with HTTPS" url from Github)\n \n
\n

Task logs

\n \n No tasks run yet\n \n

Domains

\n \n
    \n \n
\n \n

New domain

\n
""", 348 | """
\n \n \n \n \n\n
\n \n \n \n
\n \n
\n\n \n
\n

Config

\n
    \n \n
\n

New item

\n
\n \n \n
\n \n \n \n \n\n
\n \n \n \n
\n \n
\n\n
\n \n \n \n \n\n
\n \n \n \n
\n \n
\n\n \n
\n

Postgres

\n \n
""", 349 | """

Redis

\n \n """, 350 | """\n
\n \n

Let\'s Encrypt

\n \n
\n \n \n
\n \n \n \n \n\n
\n \n \n \n
\n \n
\n\n \n
\n \n

Process Info

\n
    \n \n
\n

Processes

\n
    \n \n
\n

Logs

\n
\n!     App missing does not exist\n
""", 351 | ] 352 | for expected_content in expected_contents: 353 | assert expected_content in content 354 | 355 | 356 | @pytest.mark.django_db 357 | @patch("wharf.tasks.run_ssh_command.delay") 358 | def test_newer_letsencrypt(patched_delay: MagicMock): 359 | patched_delay.side_effect = custom_mock_commands( 360 | { 361 | ( 362 | "plugin:list", 363 | ): " letsencrypt 0.22.0 enabled Automated installation of let's encrypt TLS certificates" 364 | } 365 | ) 366 | assert letsencrypt("wharf") is None  # a newer plugin version is handled without errors 367 | 368 | 369 | @pytest.mark.django_db 370 | @patch("wharf.tasks.run_ssh_command.delay") 371 | def test_create_app(patched_delay: MagicMock): 372 | patched_delay.side_effect = mock_commands 373 | res = create_app("foo") 374 | assert res.status_code == 302, res 375 | assert isinstance(res, HttpResponseRedirect) 376 | assert res.url.startswith("/apps/foo/wait/"), res 377 | 378 | 379 | @pytest.mark.django_db 380 | def test_create_duplicate_app(): 381 | models.App.objects.create(name="foo") 382 | res = create_app("foo") 383 | assert res.status_code == 400, res 384 | assert res.content == b"You already have an app called 'foo'", res 385 | 386 | 387 | def test_login_change(client: Client): 388 | response = client.get("/", follow=True) 389 | assert "Initial login is admin/password" in response.text  # the default-credentials warning shows until ADMIN_PASSWORD is changed 390 | 391 | 392 | def test_login_no_change(client: Client, settings: LazySettings): 393 | settings.ADMIN_PASSWORD = "testpassword" 394 | response = client.get("/", follow=True) 395 | assert "Initial login is admin/password" not in response.text 396 | 397 | 398 | @patch("wharf.tasks.run_ssh_command.delay") 399 | def test_global_config(patched_delay: MagicMock): 400 | patched_delay.side_effect = mock_commands 401 | assert global_config() == {"CURL_CONNECT_TIMEOUT": "90", "CURL_TIMEOUT": "600"} 402 | 403 | 404 | @pytest.mark.django_db 405 | def test_refresh_all(mock_request: HttpRequest, recording_cache: RecordingCache): 406 | resp = refresh_all(mock_request) 407 | assert resp.status_code == 302, resp 408 | assert resp.url == "/", resp 409 | assert recording_cache.actions == ["clear"]  # a full refresh simply empties the whole cache 410 | 411 | 412 | @pytest.mark.django_db 413 | @patch("wharf.tasks.run_ssh_command.delay") 414 | def test_refresh_one( 415 | patched_delay: MagicMock, mock_request: HttpRequest, recording_cache: RecordingCache 416 | ): 417 | patched_delay.side_effect = mock_commands 418 | resp = refresh(mock_request, "foo") 419 | assert resp.status_code == 302, resp 420 | assert resp.url == "/apps/foo", resp 421 | assert recording_cache.actions == [  # refreshing one app drops all of its cached command output 422 | ("get", ("cmd:plugin:list",)), 423 | ( 424 | "delete_many", 425 | ( 426 | [ 427 | "cmd:config:show foo", 428 | "cmd:postgres:info foo", 429 | "cmd:redis:info foo", 430 | "cmd:ps:report foo", 431 | "cmd:domains:report foo", 432 | "cmd:letsencrypt:ls", 433 | ], 434 | ), 435 | ), 436 | ] 437 | 438 | 439 | @pytest.mark.django_db 440 | @patch("wharf.tasks.run_ssh_command.delay") 441 | def test_task_logs_limit(patched_delay: MagicMock, mock_request: HttpRequest): 442 | patched_delay.side_effect = mock_commands 443 | test_app = baker.make(models.App, name="test_app") 444 | baker.make(models.TaskLog, app=test_app, _quantity=20)  # more logs than the page should render 445 | resp = app_info(mock_request, "test_app") 446 | 447 | assert isinstance(resp, HttpResponse) 448 | log_links = re.findall(r"/logs/[^\"]+", resp.text)  # one link per rendered task log 449 | assert len(log_links) == 10, resp.text  # only 10 of the 20 logs appear 450 | 451 |
452 | @pytest.mark.django_db 453 | @patch("wharf.tasks.run_ssh_command.delay") 454 | def test_create_postgres( 455 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 456 | ): 457 | patched_delay.side_effect = mock_commands 458 | models.App.objects.get_or_create(name="foo") 459 | res = create_postgres(mock_request, "foo") 460 | assert res.status_code == 302, res 461 | assert res.url.startswith("/apps/foo/wait/"), res 462 | 463 | finished_log(monkeypatch, commands[("postgres:create foo",)])  # replay the canned create output as the finished task log 464 | check_postgres(mock_request, "foo", "1234")  # the wait-page check should accept it without raising 465 | 466 | 467 | @pytest.mark.django_db 468 | @patch("wharf.tasks.run_ssh_command.delay") 469 | def test_remove_postgres( 470 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 471 | ): 472 | patched_delay.side_effect = mock_commands 473 | models.App.objects.get_or_create(name="foo") 474 | res = remove_postgres(mock_request, "foo") 475 | assert res.status_code == 302, res 476 | assert res.url.startswith("/apps/foo/wait/"), res 477 | 478 | finished_log(monkeypatch, commands[("postgres:destroy foo --force",)]) 479 | 480 | check_remove_postgres(mock_request, "foo", "1234") 481 | 482 | 483 | @pytest.mark.django_db 484 | @patch("wharf.tasks.run_ssh_command.delay") 485 | def test_create_redis( 486 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 487 | ): 488 | patched_delay.side_effect = mock_commands 489 | models.App.objects.get_or_create(name="foo") 490 | res = create_redis(mock_request, "foo") 491 | assert res.status_code == 302, res 492 | assert res.url.startswith("/apps/foo/wait/"), res 493 | 494 | finished_log(monkeypatch, commands[("redis:create foo",)]) 495 | check_redis(mock_request, "foo", "1234") 496 | 497 | 498 | @pytest.mark.django_db 499 | @patch("wharf.tasks.run_ssh_command.delay") 500 | def test_remove_redis( 501 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 502 | ): 503 | patched_delay.side_effect = mock_commands 504 | models.App.objects.get_or_create(name="foo") 505 | res = remove_redis(mock_request, "foo") 506 | assert res.status_code == 302, res 507 | assert res.url.startswith("/apps/foo/wait/"), res 508 | 509 | finished_log(monkeypatch, commands[("redis:destroy foo --force",)]) 510 | 511 | check_remove_redis(mock_request, "foo", "1234") 512 | 513 | 514 | @pytest.mark.django_db 515 | @patch("wharf.tasks.run_ssh_command.delay") 516 | def test_non_running_app(patched_delay: MagicMock): 517 | patched_delay.side_effect = mock_commands 518 | res = process_info("non-running-app")  # parses the canned ps:report output above 519 | assert res == { 520 | "Deployed": "true", 521 | "Processes": "1", 522 | "Ps can scale": "true", 523 | "Ps computed procfile path": "Procfile", 524 | "Ps global procfile path": "Procfile", 525 | "Ps procfile path": "", 526 | "Ps restart policy": "on-failure:10", 527 | "Restore": "true", 528 | "Running": "false", 529 | "processes": {"web 1": "missing"}, 530 | } 531 | 532 |
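# process_info() above is driven entirely by the canned
# "ps:report non-running-app" output. The index tests below instead stub
# StrictRedis.get directly, so the view either finds a cached ssh-check result
# (the normal index page) or misses it (the initial-setup flow).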
533 | @pytest.mark.django_db 534 | @patch("wharf.tasks.run_ssh_command.delay") 535 | @patch("wharf.tasks.get_public_key.delay") 536 | def test_index( 537 | patched_public_key: MagicMock, 538 | patched_delay: MagicMock, 539 | mock_request: HttpRequest, 540 | monkeypatch: pytest.MonkeyPatch, 541 | ): 542 | patched_delay.side_effect = mock_commands 543 | patched_public_key.return_value = MockCelery("demo-key") 544 | 545 | def redis_keys(self, key):  # the index view should only ever read the ssh-check key 546 | if key == "ssh-check": 547 | return "ok version"  # a previously successful SSH check 548 | raise Exception(key) 549 | 550 | monkeypatch.setattr( 551 | StrictRedis, 552 | "get", 553 | redis_keys, 554 | ) 555 | resp = index(mock_request) 556 | assert resp.status_code == 200, resp 557 | content = resp.content.decode("utf-8") 558 | assert content.find('

Wharf

') != -1, content 559 | 560 | 561 | @pytest.mark.django_db 562 | @patch("wharf.tasks.run_ssh_command.delay") 563 | @patch("wharf.tasks.get_public_key.delay") 564 | @patch("wharf.tasks.check_ssh.delay") 565 | def test_index_no_ssh_check( 566 | patched_check_ssh: MagicMock, 567 | patched_public_key: MagicMock, 568 | patched_delay: MagicMock, 569 | mock_request: HttpRequest, 570 | monkeypatch: pytest.MonkeyPatch, 571 | ): 572 | patched_delay.side_effect = mock_commands 573 | patched_check_ssh.return_value = MockCelery(False)  # the queued SSH check has not succeeded 574 | patched_public_key.return_value = MockCelery("demo-key") 575 | 576 | def redis_keys(self, key): 577 | if key == "ssh-check": 578 | return None  # no cached SSH check yet, so the initial-setup page is shown 579 | raise Exception(key) 580 | 581 | monkeypatch.setattr( 582 | StrictRedis, 583 | "get", 584 | redis_keys, 585 | ) 586 | resp = index(mock_request) 587 | assert resp.status_code == 200, resp 588 | content = resp.content.decode("utf-8") 589 | assert ( 590 | content.find('

Wharf: Initial setup

') != -1 591 | ), content 592 | assert content.find('\n demo-key\n ') != -1, ( 593 | content 594 | ) 595 | 596 | 597 | @pytest.mark.django_db 598 | @patch("wharf.tasks.run_ssh_command.delay") 599 | def test_setup_letsencrypt( 600 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 601 | ): 602 | models.App.objects.create(name="test_app") 603 | cast(MagicMock, mock_request).POST = {"email": "foo@bar.com"} 604 | patched_delay.side_effect = mock_commands 605 | res = setup_letsencrypt(mock_request, "test_app") 606 | assert res.status_code == 302, res 607 | assert isinstance(res, HttpResponseRedirect) 608 | assert res.url.startswith("/apps/test_app/wait/"), res 609 | 610 | finished_log( 611 | monkeypatch, 612 | commands[("letsencrypt:set test_app email foo@bar.com",)] 613 | + commands[("letsencrypt:enable test_app",)], 614 | ) 615 | 616 | check_res = check_letsencrypt(mock_request, "test_app", "1234") 617 | assert check_res.status_code == 302, check_res 618 | assert isinstance(check_res, HttpResponseRedirect) 619 | assert check_res.url == "/apps/test_app", check_res 620 | 621 | 622 | @pytest.mark.django_db 623 | @patch("wharf.tasks.run_ssh_command.delay") 624 | def test_remove_letsencrypt( 625 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 626 | ): 627 | models.App.objects.create(name="test_app") 628 | cast(MagicMock, mock_request).POST = {"email": "foo@bar.com"} 629 | patched_delay.side_effect = mock_commands 630 | res = remove_letsencrypt(mock_request, "test_app") 631 | assert res.status_code == 302, res 632 | assert isinstance(res, HttpResponseRedirect) 633 | assert res.url.startswith("/apps/test_app/wait/"), res 634 | 635 | finished_log(monkeypatch, commands[("letsencrypt:disable test_app --force",)]) 636 | 637 | check_res = check_remove_letsencrypt(mock_request, "test_app", "1234") 638 | assert check_res.status_code == 302, check_res 639 | assert isinstance(check_res, HttpResponseRedirect) 640 | assert check_res.url == "/apps/test_app", check_res 641 | 642 | 643 | @pytest.mark.django_db 644 | @patch("wharf.tasks.run_ssh_command.delay") 645 | def test_app_config_delete( 646 | patched_delay: MagicMock, mock_request: HttpRequest, monkeypatch: pytest.MonkeyPatch 647 | ): 648 | models.App.objects.create(name="test_app") 649 | cast(MagicMock, mock_request).POST = {"key": "FOO_KEY"} 650 | patched_delay.side_effect = mock_commands 651 | res = app_config_delete(mock_request, "test_app") 652 | assert res.status_code == 302, res 653 | assert isinstance(res, HttpResponseRedirect) 654 | assert res.url.startswith("/apps/test_app/wait/"), res 655 | 656 | finished_log(monkeypatch, commands[("config:unset test_app FOO_KEY",)]) 657 | 658 | check_res = check_app_config_delete(mock_request, "test_app", "1234") 659 | assert check_res.status_code == 302, check_res 660 | assert isinstance(check_res, HttpResponseRedirect) 661 | assert check_res.url == "/apps/test_app", check_res 662 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU AFFERO GENERAL PUBLIC LICENSE 2 | Version 3, 19 November 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 
7 | 8 | Preamble 9 | 10 | The GNU Affero General Public License is a free, copyleft license for 11 | software and other kinds of works, specifically designed to ensure 12 | cooperation with the community in the case of network server software. 13 | 14 | The licenses for most software and other practical works are designed 15 | to take away your freedom to share and change the works. By contrast, 16 | our General Public Licenses are intended to guarantee your freedom to 17 | share and change all versions of a program--to make sure it remains free 18 | software for all its users. 19 | 20 | When we speak of free software, we are referring to freedom, not 21 | price. Our General Public Licenses are designed to make sure that you 22 | have the freedom to distribute copies of free software (and charge for 23 | them if you wish), that you receive source code or can get it if you 24 | want it, that you can change the software or use pieces of it in new 25 | free programs, and that you know you can do these things. 26 | 27 | Developers that use our General Public Licenses protect your rights 28 | with two steps: (1) assert copyright on the software, and (2) offer 29 | you this License which gives you legal permission to copy, distribute 30 | and/or modify the software. 31 | 32 | A secondary benefit of defending all users' freedom is that 33 | improvements made in alternate versions of the program, if they 34 | receive widespread use, become available for other developers to 35 | incorporate. Many developers of free software are heartened and 36 | encouraged by the resulting cooperation. However, in the case of 37 | software used on network servers, this result may fail to come about. 38 | The GNU General Public License permits making a modified version and 39 | letting the public access it on a server without ever releasing its 40 | source code to the public. 41 | 42 | The GNU Affero General Public License is designed specifically to 43 | ensure that, in such cases, the modified source code becomes available 44 | to the community. It requires the operator of a network server to 45 | provide the source code of the modified version running there to the 46 | users of that server. Therefore, public use of a modified version, on 47 | a publicly accessible server, gives the public access to the source 48 | code of the modified version. 49 | 50 | An older license, called the Affero General Public License and 51 | published by Affero, was designed to accomplish similar goals. This is 52 | a different license, not a version of the Affero GPL, but Affero has 53 | released a new version of the Affero GPL which permits relicensing under 54 | this license. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | TERMS AND CONDITIONS 60 | 61 | 0. Definitions. 62 | 63 | "This License" refers to version 3 of the GNU Affero General Public License. 64 | 65 | "Copyright" also means copyright-like laws that apply to other kinds of 66 | works, such as semiconductor masks. 67 | 68 | "The Program" refers to any copyrightable work licensed under this 69 | License. Each licensee is addressed as "you". "Licensees" and 70 | "recipients" may be individuals or organizations. 71 | 72 | To "modify" a work means to copy from or adapt all or part of the work 73 | in a fashion requiring copyright permission, other than the making of an 74 | exact copy. The resulting work is called a "modified version" of the 75 | earlier work or a work "based on" the earlier work. 
76 | 77 | A "covered work" means either the unmodified Program or a work based 78 | on the Program. 79 | 80 | To "propagate" a work means to do anything with it that, without 81 | permission, would make you directly or secondarily liable for 82 | infringement under applicable copyright law, except executing it on a 83 | computer or modifying a private copy. Propagation includes copying, 84 | distribution (with or without modification), making available to the 85 | public, and in some countries other activities as well. 86 | 87 | To "convey" a work means any kind of propagation that enables other 88 | parties to make or receive copies. Mere interaction with a user through 89 | a computer network, with no transfer of a copy, is not conveying. 90 | 91 | An interactive user interface displays "Appropriate Legal Notices" 92 | to the extent that it includes a convenient and prominently visible 93 | feature that (1) displays an appropriate copyright notice, and (2) 94 | tells the user that there is no warranty for the work (except to the 95 | extent that warranties are provided), that licensees may convey the 96 | work under this License, and how to view a copy of this License. If 97 | the interface presents a list of user commands or options, such as a 98 | menu, a prominent item in the list meets this criterion. 99 | 100 | 1. Source Code. 101 | 102 | The "source code" for a work means the preferred form of the work 103 | for making modifications to it. "Object code" means any non-source 104 | form of a work. 105 | 106 | A "Standard Interface" means an interface that either is an official 107 | standard defined by a recognized standards body, or, in the case of 108 | interfaces specified for a particular programming language, one that 109 | is widely used among developers working in that language. 110 | 111 | The "System Libraries" of an executable work include anything, other 112 | than the work as a whole, that (a) is included in the normal form of 113 | packaging a Major Component, but which is not part of that Major 114 | Component, and (b) serves only to enable use of the work with that 115 | Major Component, or to implement a Standard Interface for which an 116 | implementation is available to the public in source code form. A 117 | "Major Component", in this context, means a major essential component 118 | (kernel, window system, and so on) of the specific operating system 119 | (if any) on which the executable work runs, or a compiler used to 120 | produce the work, or an object code interpreter used to run it. 121 | 122 | The "Corresponding Source" for a work in object code form means all 123 | the source code needed to generate, install, and (for an executable 124 | work) run the object code and to modify the work, including scripts to 125 | control those activities. However, it does not include the work's 126 | System Libraries, or general-purpose tools or generally available free 127 | programs which are used unmodified in performing those activities but 128 | which are not part of the work. For example, Corresponding Source 129 | includes interface definition files associated with source files for 130 | the work, and the source code for shared libraries and dynamically 131 | linked subprograms that the work is specifically designed to require, 132 | such as by intimate data communication or control flow between those 133 | subprograms and other parts of the work. 
134 | 135 | The Corresponding Source need not include anything that users 136 | can regenerate automatically from other parts of the Corresponding 137 | Source. 138 | 139 | The Corresponding Source for a work in source code form is that 140 | same work. 141 | 142 | 2. Basic Permissions. 143 | 144 | All rights granted under this License are granted for the term of 145 | copyright on the Program, and are irrevocable provided the stated 146 | conditions are met. This License explicitly affirms your unlimited 147 | permission to run the unmodified Program. The output from running a 148 | covered work is covered by this License only if the output, given its 149 | content, constitutes a covered work. This License acknowledges your 150 | rights of fair use or other equivalent, as provided by copyright law. 151 | 152 | You may make, run and propagate covered works that you do not 153 | convey, without conditions so long as your license otherwise remains 154 | in force. You may convey covered works to others for the sole purpose 155 | of having them make modifications exclusively for you, or provide you 156 | with facilities for running those works, provided that you comply with 157 | the terms of this License in conveying all material for which you do 158 | not control copyright. Those thus making or running the covered works 159 | for you must do so exclusively on your behalf, under your direction 160 | and control, on terms that prohibit them from making any copies of 161 | your copyrighted material outside their relationship with you. 162 | 163 | Conveying under any other circumstances is permitted solely under 164 | the conditions stated below. Sublicensing is not allowed; section 10 165 | makes it unnecessary. 166 | 167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 168 | 169 | No covered work shall be deemed part of an effective technological 170 | measure under any applicable law fulfilling obligations under article 171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 172 | similar laws prohibiting or restricting circumvention of such 173 | measures. 174 | 175 | When you convey a covered work, you waive any legal power to forbid 176 | circumvention of technological measures to the extent such circumvention 177 | is effected by exercising rights under this License with respect to 178 | the covered work, and you disclaim any intention to limit operation or 179 | modification of the work as a means of enforcing, against the work's 180 | users, your or third parties' legal rights to forbid circumvention of 181 | technological measures. 182 | 183 | 4. Conveying Verbatim Copies. 184 | 185 | You may convey verbatim copies of the Program's source code as you 186 | receive it, in any medium, provided that you conspicuously and 187 | appropriately publish on each copy an appropriate copyright notice; 188 | keep intact all notices stating that this License and any 189 | non-permissive terms added in accord with section 7 apply to the code; 190 | keep intact all notices of the absence of any warranty; and give all 191 | recipients a copy of this License along with the Program. 192 | 193 | You may charge any price or no price for each copy that you convey, 194 | and you may offer support or warranty protection for a fee. 195 | 196 | 5. Conveying Modified Source Versions. 
197 | 198 | You may convey a work based on the Program, or the modifications to 199 | produce it from the Program, in the form of source code under the 200 | terms of section 4, provided that you also meet all of these conditions: 201 | 202 | a) The work must carry prominent notices stating that you modified 203 | it, and giving a relevant date. 204 | 205 | b) The work must carry prominent notices stating that it is 206 | released under this License and any conditions added under section 207 | 7. This requirement modifies the requirement in section 4 to 208 | "keep intact all notices". 209 | 210 | c) You must license the entire work, as a whole, under this 211 | License to anyone who comes into possession of a copy. This 212 | License will therefore apply, along with any applicable section 7 213 | additional terms, to the whole of the work, and all its parts, 214 | regardless of how they are packaged. This License gives no 215 | permission to license the work in any other way, but it does not 216 | invalidate such permission if you have separately received it. 217 | 218 | d) If the work has interactive user interfaces, each must display 219 | Appropriate Legal Notices; however, if the Program has interactive 220 | interfaces that do not display Appropriate Legal Notices, your 221 | work need not make them do so. 222 | 223 | A compilation of a covered work with other separate and independent 224 | works, which are not by their nature extensions of the covered work, 225 | and which are not combined with it such as to form a larger program, 226 | in or on a volume of a storage or distribution medium, is called an 227 | "aggregate" if the compilation and its resulting copyright are not 228 | used to limit the access or legal rights of the compilation's users 229 | beyond what the individual works permit. Inclusion of a covered work 230 | in an aggregate does not cause this License to apply to the other 231 | parts of the aggregate. 232 | 233 | 6. Conveying Non-Source Forms. 234 | 235 | You may convey a covered work in object code form under the terms 236 | of sections 4 and 5, provided that you also convey the 237 | machine-readable Corresponding Source under the terms of this License, 238 | in one of these ways: 239 | 240 | a) Convey the object code in, or embodied in, a physical product 241 | (including a physical distribution medium), accompanied by the 242 | Corresponding Source fixed on a durable physical medium 243 | customarily used for software interchange. 244 | 245 | b) Convey the object code in, or embodied in, a physical product 246 | (including a physical distribution medium), accompanied by a 247 | written offer, valid for at least three years and valid for as 248 | long as you offer spare parts or customer support for that product 249 | model, to give anyone who possesses the object code either (1) a 250 | copy of the Corresponding Source for all the software in the 251 | product that is covered by this License, on a durable physical 252 | medium customarily used for software interchange, for a price no 253 | more than your reasonable cost of physically performing this 254 | conveying of source, or (2) access to copy the 255 | Corresponding Source from a network server at no charge. 256 | 257 | c) Convey individual copies of the object code with a copy of the 258 | written offer to provide the Corresponding Source. 
This 259 | alternative is allowed only occasionally and noncommercially, and 260 | only if you received the object code with such an offer, in accord 261 | with subsection 6b. 262 | 263 | d) Convey the object code by offering access from a designated 264 | place (gratis or for a charge), and offer equivalent access to the 265 | Corresponding Source in the same way through the same place at no 266 | further charge. You need not require recipients to copy the 267 | Corresponding Source along with the object code. If the place to 268 | copy the object code is a network server, the Corresponding Source 269 | may be on a different server (operated by you or a third party) 270 | that supports equivalent copying facilities, provided you maintain 271 | clear directions next to the object code saying where to find the 272 | Corresponding Source. Regardless of what server hosts the 273 | Corresponding Source, you remain obligated to ensure that it is 274 | available for as long as needed to satisfy these requirements. 275 | 276 | e) Convey the object code using peer-to-peer transmission, provided 277 | you inform other peers where the object code and Corresponding 278 | Source of the work are being offered to the general public at no 279 | charge under subsection 6d. 280 | 281 | A separable portion of the object code, whose source code is excluded 282 | from the Corresponding Source as a System Library, need not be 283 | included in conveying the object code work. 284 | 285 | A "User Product" is either (1) a "consumer product", which means any 286 | tangible personal property which is normally used for personal, family, 287 | or household purposes, or (2) anything designed or sold for incorporation 288 | into a dwelling. In determining whether a product is a consumer product, 289 | doubtful cases shall be resolved in favor of coverage. For a particular 290 | product received by a particular user, "normally used" refers to a 291 | typical or common use of that class of product, regardless of the status 292 | of the particular user or of the way in which the particular user 293 | actually uses, or expects or is expected to use, the product. A product 294 | is a consumer product regardless of whether the product has substantial 295 | commercial, industrial or non-consumer uses, unless such uses represent 296 | the only significant mode of use of the product. 297 | 298 | "Installation Information" for a User Product means any methods, 299 | procedures, authorization keys, or other information required to install 300 | and execute modified versions of a covered work in that User Product from 301 | a modified version of its Corresponding Source. The information must 302 | suffice to ensure that the continued functioning of the modified object 303 | code is in no case prevented or interfered with solely because 304 | modification has been made. 305 | 306 | If you convey an object code work under this section in, or with, or 307 | specifically for use in, a User Product, and the conveying occurs as 308 | part of a transaction in which the right of possession and use of the 309 | User Product is transferred to the recipient in perpetuity or for a 310 | fixed term (regardless of how the transaction is characterized), the 311 | Corresponding Source conveyed under this section must be accompanied 312 | by the Installation Information. 
But this requirement does not apply 313 | if neither you nor any third party retains the ability to install 314 | modified object code on the User Product (for example, the work has 315 | been installed in ROM). 316 | 317 | The requirement to provide Installation Information does not include a 318 | requirement to continue to provide support service, warranty, or updates 319 | for a work that has been modified or installed by the recipient, or for 320 | the User Product in which it has been modified or installed. Access to a 321 | network may be denied when the modification itself materially and 322 | adversely affects the operation of the network or violates the rules and 323 | protocols for communication across the network. 324 | 325 | Corresponding Source conveyed, and Installation Information provided, 326 | in accord with this section must be in a format that is publicly 327 | documented (and with an implementation available to the public in 328 | source code form), and must require no special password or key for 329 | unpacking, reading or copying. 330 | 331 | 7. Additional Terms. 332 | 333 | "Additional permissions" are terms that supplement the terms of this 334 | License by making exceptions from one or more of its conditions. 335 | Additional permissions that are applicable to the entire Program shall 336 | be treated as though they were included in this License, to the extent 337 | that they are valid under applicable law. If additional permissions 338 | apply only to part of the Program, that part may be used separately 339 | under those permissions, but the entire Program remains governed by 340 | this License without regard to the additional permissions. 341 | 342 | When you convey a copy of a covered work, you may at your option 343 | remove any additional permissions from that copy, or from any part of 344 | it. (Additional permissions may be written to require their own 345 | removal in certain cases when you modify the work.) You may place 346 | additional permissions on material, added by you to a covered work, 347 | for which you have or can give appropriate copyright permission. 348 | 349 | Notwithstanding any other provision of this License, for material you 350 | add to a covered work, you may (if authorized by the copyright holders of 351 | that material) supplement the terms of this License with terms: 352 | 353 | a) Disclaiming warranty or limiting liability differently from the 354 | terms of sections 15 and 16 of this License; or 355 | 356 | b) Requiring preservation of specified reasonable legal notices or 357 | author attributions in that material or in the Appropriate Legal 358 | Notices displayed by works containing it; or 359 | 360 | c) Prohibiting misrepresentation of the origin of that material, or 361 | requiring that modified versions of such material be marked in 362 | reasonable ways as different from the original version; or 363 | 364 | d) Limiting the use for publicity purposes of names of licensors or 365 | authors of the material; or 366 | 367 | e) Declining to grant rights under trademark law for use of some 368 | trade names, trademarks, or service marks; or 369 | 370 | f) Requiring indemnification of licensors and authors of that 371 | material by anyone who conveys the material (or modified versions of 372 | it) with contractual assumptions of liability to the recipient, for 373 | any liability that these contractual assumptions directly impose on 374 | those licensors and authors. 
375 | 376 | All other non-permissive additional terms are considered "further 377 | restrictions" within the meaning of section 10. If the Program as you 378 | received it, or any part of it, contains a notice stating that it is 379 | governed by this License along with a term that is a further 380 | restriction, you may remove that term. If a license document contains 381 | a further restriction but permits relicensing or conveying under this 382 | License, you may add to a covered work material governed by the terms 383 | of that license document, provided that the further restriction does 384 | not survive such relicensing or conveying. 385 | 386 | If you add terms to a covered work in accord with this section, you 387 | must place, in the relevant source files, a statement of the 388 | additional terms that apply to those files, or a notice indicating 389 | where to find the applicable terms. 390 | 391 | Additional terms, permissive or non-permissive, may be stated in the 392 | form of a separately written license, or stated as exceptions; 393 | the above requirements apply either way. 394 | 395 | 8. Termination. 396 | 397 | You may not propagate or modify a covered work except as expressly 398 | provided under this License. Any attempt otherwise to propagate or 399 | modify it is void, and will automatically terminate your rights under 400 | this License (including any patent licenses granted under the third 401 | paragraph of section 11). 402 | 403 | However, if you cease all violation of this License, then your 404 | license from a particular copyright holder is reinstated (a) 405 | provisionally, unless and until the copyright holder explicitly and 406 | finally terminates your license, and (b) permanently, if the copyright 407 | holder fails to notify you of the violation by some reasonable means 408 | prior to 60 days after the cessation. 409 | 410 | Moreover, your license from a particular copyright holder is 411 | reinstated permanently if the copyright holder notifies you of the 412 | violation by some reasonable means, this is the first time you have 413 | received notice of violation of this License (for any work) from that 414 | copyright holder, and you cure the violation prior to 30 days after 415 | your receipt of the notice. 416 | 417 | Termination of your rights under this section does not terminate the 418 | licenses of parties who have received copies or rights from you under 419 | this License. If your rights have been terminated and not permanently 420 | reinstated, you do not qualify to receive new licenses for the same 421 | material under section 10. 422 | 423 | 9. Acceptance Not Required for Having Copies. 424 | 425 | You are not required to accept this License in order to receive or 426 | run a copy of the Program. Ancillary propagation of a covered work 427 | occurring solely as a consequence of using peer-to-peer transmission 428 | to receive a copy likewise does not require acceptance. However, 429 | nothing other than this License grants you permission to propagate or 430 | modify any covered work. These actions infringe copyright if you do 431 | not accept this License. Therefore, by modifying or propagating a 432 | covered work, you indicate your acceptance of this License to do so. 433 | 434 | 10. Automatic Licensing of Downstream Recipients. 435 | 436 | Each time you convey a covered work, the recipient automatically 437 | receives a license from the original licensors, to run, modify and 438 | propagate that work, subject to this License. 
You are not responsible 439 | for enforcing compliance by third parties with this License. 440 | 441 | An "entity transaction" is a transaction transferring control of an 442 | organization, or substantially all assets of one, or subdividing an 443 | organization, or merging organizations. If propagation of a covered 444 | work results from an entity transaction, each party to that 445 | transaction who receives a copy of the work also receives whatever 446 | licenses to the work the party's predecessor in interest had or could 447 | give under the previous paragraph, plus a right to possession of the 448 | Corresponding Source of the work from the predecessor in interest, if 449 | the predecessor has it or can get it with reasonable efforts. 450 | 451 | You may not impose any further restrictions on the exercise of the 452 | rights granted or affirmed under this License. For example, you may 453 | not impose a license fee, royalty, or other charge for exercise of 454 | rights granted under this License, and you may not initiate litigation 455 | (including a cross-claim or counterclaim in a lawsuit) alleging that 456 | any patent claim is infringed by making, using, selling, offering for 457 | sale, or importing the Program or any portion of it. 458 | 459 | 11. Patents. 460 | 461 | A "contributor" is a copyright holder who authorizes use under this 462 | License of the Program or a work on which the Program is based. The 463 | work thus licensed is called the contributor's "contributor version". 464 | 465 | A contributor's "essential patent claims" are all patent claims 466 | owned or controlled by the contributor, whether already acquired or 467 | hereafter acquired, that would be infringed by some manner, permitted 468 | by this License, of making, using, or selling its contributor version, 469 | but do not include claims that would be infringed only as a 470 | consequence of further modification of the contributor version. For 471 | purposes of this definition, "control" includes the right to grant 472 | patent sublicenses in a manner consistent with the requirements of 473 | this License. 474 | 475 | Each contributor grants you a non-exclusive, worldwide, royalty-free 476 | patent license under the contributor's essential patent claims, to 477 | make, use, sell, offer for sale, import and otherwise run, modify and 478 | propagate the contents of its contributor version. 479 | 480 | In the following three paragraphs, a "patent license" is any express 481 | agreement or commitment, however denominated, not to enforce a patent 482 | (such as an express permission to practice a patent or covenant not to 483 | sue for patent infringement). To "grant" such a patent license to a 484 | party means to make such an agreement or commitment not to enforce a 485 | patent against the party. 486 | 487 | If you convey a covered work, knowingly relying on a patent license, 488 | and the Corresponding Source of the work is not available for anyone 489 | to copy, free of charge and under the terms of this License, through a 490 | publicly available network server or other readily accessible means, 491 | then you must either (1) cause the Corresponding Source to be so 492 | available, or (2) arrange to deprive yourself of the benefit of the 493 | patent license for this particular work, or (3) arrange, in a manner 494 | consistent with the requirements of this License, to extend the patent 495 | license to downstream recipients. 
"Knowingly relying" means you have 496 | actual knowledge that, but for the patent license, your conveying the 497 | covered work in a country, or your recipient's use of the covered work 498 | in a country, would infringe one or more identifiable patents in that 499 | country that you have reason to believe are valid. 500 | 501 | If, pursuant to or in connection with a single transaction or 502 | arrangement, you convey, or propagate by procuring conveyance of, a 503 | covered work, and grant a patent license to some of the parties 504 | receiving the covered work authorizing them to use, propagate, modify 505 | or convey a specific copy of the covered work, then the patent license 506 | you grant is automatically extended to all recipients of the covered 507 | work and works based on it. 508 | 509 | A patent license is "discriminatory" if it does not include within 510 | the scope of its coverage, prohibits the exercise of, or is 511 | conditioned on the non-exercise of one or more of the rights that are 512 | specifically granted under this License. You may not convey a covered 513 | work if you are a party to an arrangement with a third party that is 514 | in the business of distributing software, under which you make payment 515 | to the third party based on the extent of your activity of conveying 516 | the work, and under which the third party grants, to any of the 517 | parties who would receive the covered work from you, a discriminatory 518 | patent license (a) in connection with copies of the covered work 519 | conveyed by you (or copies made from those copies), or (b) primarily 520 | for and in connection with specific products or compilations that 521 | contain the covered work, unless you entered into that arrangement, 522 | or that patent license was granted, prior to 28 March 2007. 523 | 524 | Nothing in this License shall be construed as excluding or limiting 525 | any implied license or other defenses to infringement that may 526 | otherwise be available to you under applicable patent law. 527 | 528 | 12. No Surrender of Others' Freedom. 529 | 530 | If conditions are imposed on you (whether by court order, agreement or 531 | otherwise) that contradict the conditions of this License, they do not 532 | excuse you from the conditions of this License. If you cannot convey a 533 | covered work so as to satisfy simultaneously your obligations under this 534 | License and any other pertinent obligations, then as a consequence you may 535 | not convey it at all. For example, if you agree to terms that obligate you 536 | to collect a royalty for further conveying from those to whom you convey 537 | the Program, the only way you could satisfy both those terms and this 538 | License would be to refrain entirely from conveying the Program. 539 | 540 | 13. Remote Network Interaction; Use with the GNU General Public License. 541 | 542 | Notwithstanding any other provision of this License, if you modify the 543 | Program, your modified version must prominently offer all users 544 | interacting with it remotely through a computer network (if your version 545 | supports such interaction) an opportunity to receive the Corresponding 546 | Source of your version by providing access to the Corresponding Source 547 | from a network server at no charge, through some standard or customary 548 | means of facilitating copying of software. 
This Corresponding Source 549 | shall include the Corresponding Source for any work covered by version 3 550 | of the GNU General Public License that is incorporated pursuant to the 551 | following paragraph. 552 | 553 | Notwithstanding any other provision of this License, you have 554 | permission to link or combine any covered work with a work licensed 555 | under version 3 of the GNU General Public License into a single 556 | combined work, and to convey the resulting work. The terms of this 557 | License will continue to apply to the part which is the covered work, 558 | but the work with which it is combined will remain governed by version 559 | 3 of the GNU General Public License. 560 | 561 | 14. Revised Versions of this License. 562 | 563 | The Free Software Foundation may publish revised and/or new versions of 564 | the GNU Affero General Public License from time to time. Such new versions 565 | will be similar in spirit to the present version, but may differ in detail to 566 | address new problems or concerns. 567 | 568 | Each version is given a distinguishing version number. If the 569 | Program specifies that a certain numbered version of the GNU Affero General 570 | Public License "or any later version" applies to it, you have the 571 | option of following the terms and conditions either of that numbered 572 | version or of any later version published by the Free Software 573 | Foundation. If the Program does not specify a version number of the 574 | GNU Affero General Public License, you may choose any version ever published 575 | by the Free Software Foundation. 576 | 577 | If the Program specifies that a proxy can decide which future 578 | versions of the GNU Affero General Public License can be used, that proxy's 579 | public statement of acceptance of a version permanently authorizes you 580 | to choose that version for the Program. 581 | 582 | Later license versions may give you additional or different 583 | permissions. However, no additional obligations are imposed on any 584 | author or copyright holder as a result of your choosing to follow a 585 | later version. 586 | 587 | 15. Disclaimer of Warranty. 588 | 589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 597 | 598 | 16. Limitation of Liability. 599 | 600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 608 | SUCH DAMAGES. 609 | 610 | 17. Interpretation of Sections 15 and 16. 
611 | 612 | If the disclaimer of warranty and limitation of liability provided 613 | above cannot be given local legal effect according to their terms, 614 | reviewing courts shall apply local law that most closely approximates 615 | an absolute waiver of all civil liability in connection with the 616 | Program, unless a warranty or assumption of liability accompanies a 617 | copy of the Program in return for a fee. 618 | 619 | END OF TERMS AND CONDITIONS 620 | 621 | How to Apply These Terms to Your New Programs 622 | 623 | If you develop a new program, and you want it to be of the greatest 624 | possible use to the public, the best way to achieve this is to make it 625 | free software which everyone can redistribute and change under these terms. 626 | 627 | To do so, attach the following notices to the program. It is safest 628 | to attach them to the start of each source file to most effectively 629 | state the exclusion of warranty; and each file should have at least 630 | the "copyright" line and a pointer to where the full notice is found. 631 | 632 | 633 | Copyright (C) 634 | 635 | This program is free software: you can redistribute it and/or modify 636 | it under the terms of the GNU Affero General Public License as published by 637 | the Free Software Foundation, either version 3 of the License, or 638 | (at your option) any later version. 639 | 640 | This program is distributed in the hope that it will be useful, 641 | but WITHOUT ANY WARRANTY; without even the implied warranty of 642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 643 | GNU Affero General Public License for more details. 644 | 645 | You should have received a copy of the GNU Affero General Public License 646 | along with this program. If not, see . 647 | 648 | Also add information on how to contact you by electronic and paper mail. 649 | 650 | If your software can interact with users remotely through a computer 651 | network, you should also make sure that it provides a way for users to 652 | get its source. For example, if your program is a web application, its 653 | interface could display a "Source" link that leads users to an archive 654 | of the code. There are many ways you could offer source, and different 655 | solutions will be better for different programs; see section 13 for the 656 | specific requirements. 657 | 658 | You should also get your employer (if you work as a programmer) or school, 659 | if any, to sign a "copyright disclaimer" for the program, if necessary. 660 | For more information on this, and how to apply and follow the GNU AGPL, see 661 | . 662 | --------------------------------------------------------------------------------