├── .gitignore ├── LICENSE ├── README.md ├── __init__.py ├── admin_routes.py ├── assets ├── create.html ├── create.js ├── update.html ├── update.js ├── view.html └── view.js ├── config.json ├── container_manager.py ├── helpers.py ├── image-readme ├── 1.png ├── 2.png ├── 3.png ├── 4.png ├── demo.gif ├── http.png ├── main.png ├── manage.png └── tcp.png ├── models.py ├── requirements.txt ├── settings.json ├── templates ├── config │ └── container_status.html ├── container_base.html ├── container_cheat.html ├── container_dashboard.html └── container_settings.html └── user_routes.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Phan Nhat 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CTFd Docker Containers Plugin 2 | 3 |
4 |

CTFd Docker Containers Plugin

5 |

6 | A plugin to create containerized challenges for your CTF contest. 7 |

8 |
9 | 10 | ## Table of Contents 11 | 1. [Getting Started](#getting-started) 12 | - [Prerequisites](#prerequisites) 13 | - [Installation](#installation) 14 | 2. [Usage](#usage) 15 | - [Using Local Docker Daemon](#using-local-docker-daemon) 16 | - [Using Remote Docker via SSH](#using-remote-docker-via-ssh) 17 | 3. [Demo](#demo) 18 | 4. [Roadmap](#roadmap) 19 | 5. [License](#license) 20 | 6. [Contact](#contact) 21 | 22 | --- 23 | 24 | ## Getting Started 25 | 26 | This section provides instructions for setting up the project locally. 27 | 28 | ### Prerequisites 29 | 30 | To use this plugin, you should have: 31 | - Experience hosting CTFd with Docker 32 | - Basic knowledge of Docker 33 | - SSH access to remote servers (if using remote Docker) 34 | 35 | ### Installation 36 | 37 | 1. **Clone this repository:** 38 | ```bash 39 | git clone https://github.com/phannhat17/CTFd-Docker-Plugin.git 40 | ``` 41 | 2. **Rename the folder:** 42 | ```bash 43 | mv CTFd-Docker-Plugin containers 44 | ``` 45 | 3. **Move the folder to the CTFd plugins directory:** 46 | ```bash 47 | mv containers /path/to/CTFd/plugins/ 48 | ``` 49 | 50 | [Back to top](#ctfd-docker-containers-plugin) 51 | 52 | --- 53 | 54 | ## Usage 55 | 56 | ### Using Local Docker Daemon 57 | 58 | #### Case A: **CTFd Running Directly on Host:** 59 | - Go to the plugin settings page: `/containers/settings` 60 | - Fill in all fields except the `Base URL`. 61 | 62 | ![Settings Example](./image-readme/1.png) 63 | 64 | #### Case B: **CTFd Running via Docker:** 65 | - Map the Docker socket into the CTFd container by modifying the `docker-compose.yml` file: 66 | ```bash 67 | services: 68 | ctfd: 69 | ... 70 | volumes: 71 | - /var/run/docker.sock:/var/run/docker.sock 72 | ... 73 | ``` 74 | - Restart CTFd 75 | - Go to the plugin settings page: `/containers/settings` 76 | - Fill in all fields except the `Base URL`. 
77 | 78 | ### Using Remote Docker via SSH 79 | 80 | For remote Docker, the CTFd host must have SSH access to the remote server. 81 | 82 | #### Prerequisites: 83 | - **SSH access** from the CTFd host to the Docker server 84 | - The remote server's fingerprint should be in the `known_hosts` file 85 | - SSH key files (`id_rsa`) and an SSH config file should be available 86 | 87 | #### Case A: **CTFd Running via Docker** 88 | 89 | 1. **Prepare SSH Config:** 90 | ```bash 91 | mkdir ssh_config 92 | cp ~/.ssh/id_rsa ~/.ssh/known_hosts ~/.ssh/config ssh_config/ 93 | ``` 94 | 95 | 2. **Mount SSH Config into the CTFd container:** 96 | ```yaml 97 | services: 98 | ctfd: 99 | ... 100 | volumes: 101 | - ./ssh_config:/root/.ssh:ro 102 | ... 103 | ``` 104 | 105 | 3. **Restart CTFd:** 106 | ```bash 107 | docker-compose down 108 | docker-compose up -d 109 | ``` 110 | 111 | #### Case B: **CTFd Running Directly on Host** 112 | 113 | 1. **Ensure SSH Access:** 114 | - Test the connection: 115 | ```bash 116 | ssh user@remote-server 117 | ``` 118 | 119 | 2. **Configure Docker Base URL:** 120 | - In the CTFd plugin settings page (`/containers/settings`), set: 121 | ``` 122 | Base URL: ssh://user@remote-server 123 | ``` 124 | 125 | 3. 
**Restart CTFd:** 126 | ```bash 127 | sudo systemctl restart ctfd 128 | ``` 129 | 130 | [Back to top](#ctfd-docker-containers-plugin) 131 | 132 | --- 133 | 134 | ## Demo 135 | 136 | ### Admin Dashboard 137 | - Manage running containers 138 | - Filter by challenge or player 139 | 140 | ![Manage Containers](./image-readme/manage.png) 141 | 142 | ### Challenge View 143 | 144 | **Web Access** | **TCP Access** 145 | :-------------:|:-------------: 146 | ![Web](./image-readme/http.png) | ![TCP](./image-readme/tcp.png) 147 | 148 | ### Live Demo 149 | 150 | ![Live Demo](./image-readme/demo.gif) 151 | 152 | [Back to top](#ctfd-docker-containers-plugin) 153 | 154 | --- 155 | 156 | ## Roadmap 157 | 158 | - [x] Support for user mode 159 | - [x] Admin dashboard with team/user filtering 160 | - [x] Compatibility with the core-beta theme 161 | - [x] Monitor share flag 162 | - [ ] Monitor detail on share flag 163 | - [ ] Prevent container creation on solved challenge 164 | 165 | For more features and known issues, check the [open issues](https://github.com/phannhat17/CTFd-Docker-Plugin/issues). 166 | 167 | [Back to top](#ctfd-docker-containers-plugin) 168 | 169 | --- 170 | 171 | ## License 172 | 173 | Distributed under the MIT License. See `LICENSE` for details. 174 | 175 | > This plugin is an upgrade of [andyjsmith's plugin](https://github.com/andyjsmith/CTFd-Docker-Plugin) with additional features. 176 | 177 | If there are licensing concerns, please reach out via email (contact below). 
178 | 179 | [Back to top](#ctfd-docker-containers-plugin) 180 | 181 | --- 182 | 183 | ## Contact 184 | 185 | **Phan Nhat** 186 | - **Discord:** ftpotato 187 | - **Email:** contact@phannhat.id.vn 188 | - **Project Link:** [CTFd Docker Plugin](https://github.com/phannhat17/CTFd-Docker-Plugin) 189 | 190 | [Back to top](#ctfd-docker-containers-plugin) 191 | 192 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | import time 4 | import json 5 | import datetime 6 | import math 7 | 8 | from flask import Blueprint, request, Flask, render_template, url_for, redirect, flash 9 | 10 | from CTFd.models import db, Solves, Teams, Users 11 | from CTFd.plugins import register_plugin_assets_directory 12 | from CTFd.plugins.challenges import CHALLENGE_CLASSES, BaseChallenge 13 | from CTFd.utils.modes import get_model 14 | from .models import ContainerChallengeModel, ContainerInfoModel, ContainerSettingsModel, ContainerFlagModel, ContainerCheatLog 15 | from .container_manager import ContainerManager, ContainerException 16 | from .admin_routes import admin_bp, set_container_manager as set_admin_manager 17 | from .user_routes import containers_bp, set_container_manager as set_user_manager 18 | from .helpers import * 19 | from CTFd.utils.user import get_current_user 20 | 21 | settings = json.load(open(get_settings_path())) 22 | 23 | class ContainerChallenge(BaseChallenge): 24 | id = settings["plugin-info"]["id"] 25 | name = settings["plugin-info"]["name"] 26 | templates = settings["plugin-info"]["templates"] 27 | scripts = settings["plugin-info"]["scripts"] 28 | route = settings["plugin-info"]["base_path"] 29 | 30 | challenge_model = ContainerChallengeModel 31 | 32 | @classmethod 33 | def read(cls, challenge): 34 | """ 35 | This method is in used to access the data of a challenge in a format processable by the front end. 
36 | 37 | :param challenge: 38 | :return: Challenge object, data dictionary to be returned to the user 39 | """ 40 | data = { 41 | "id": challenge.id, 42 | "name": challenge.name, 43 | "value": challenge.value, 44 | "image": challenge.image, 45 | "port": challenge.port, 46 | "command": challenge.command, 47 | "connection_type": challenge.connection_type, 48 | "initial": challenge.initial, 49 | "decay": challenge.decay, 50 | "minimum": challenge.minimum, 51 | "description": challenge.description, 52 | "connection_info": challenge.connection_info, 53 | "category": challenge.category, 54 | "state": challenge.state, 55 | "max_attempts": challenge.max_attempts, 56 | "type": challenge.type, 57 | "type_data": { 58 | "id": cls.id, 59 | "name": cls.name, 60 | "templates": cls.templates, 61 | "scripts": cls.scripts, 62 | }, 63 | } 64 | return data 65 | 66 | @classmethod 67 | def calculate_value(cls, challenge): 68 | Model = get_model() 69 | 70 | solve_count = ( 71 | Solves.query.join(Model, Solves.account_id == Model.id) 72 | .filter( 73 | Solves.challenge_id == challenge.id, 74 | Model.hidden == False, 75 | Model.banned == False, 76 | ) 77 | .count() 78 | ) 79 | 80 | # If the solve count is 0 we shouldn't manipulate the solve count to 81 | # let the math update back to normal 82 | if solve_count != 0: 83 | # We subtract -1 to allow the first solver to get max point value 84 | solve_count -= 1 85 | 86 | # It is important that this calculation takes into account floats. 
87 | # Hence this file uses from __future__ import division 88 | value = ( 89 | ((challenge.minimum - challenge.initial) / (challenge.decay**2)) 90 | * (solve_count**2) 91 | ) + challenge.initial 92 | 93 | value = math.ceil(value) 94 | 95 | if value < challenge.minimum: 96 | value = challenge.minimum 97 | 98 | challenge.value = value 99 | db.session.commit() 100 | return challenge 101 | 102 | @classmethod 103 | def update(cls, challenge, request): 104 | """ 105 | This method is used to update the information associated with a challenge. This should be kept strictly to the 106 | Challenges table and any child tables. 107 | :param challenge: 108 | :param request: 109 | :return: 110 | """ 111 | data = request.form or request.get_json() 112 | 113 | for attr, value in data.items(): 114 | # We need to set these to floats so that the next operations don't operate on strings 115 | if attr in ("initial", "minimum", "decay"): 116 | value = float(value) 117 | setattr(challenge, attr, value) 118 | 119 | return ContainerChallenge.calculate_value(challenge) 120 | 121 | @classmethod 122 | def solve(cls, user, team, challenge, request): 123 | super().solve(user, team, challenge, request) 124 | 125 | cls.calculate_value(challenge) 126 | 127 | @classmethod 128 | def attempt(cls, challenge, request): 129 | # 1) Gather user/team & submitted_flag 130 | try: 131 | user, x_id, submitted_flag = get_xid_and_flag() 132 | except ValueError as e: 133 | return False, str(e) 134 | 135 | # 2) Get running container 136 | container_info = None 137 | try: 138 | container_info = get_active_container(challenge.id, x_id) 139 | except ValueError as e: 140 | return False, str(e) 141 | 142 | # 3) Check if container is actually running 143 | from . import container_manager 144 | if not container_manager or not container_manager.is_container_running(container_info.container_id): 145 | return False, "Your container is not running; you cannot submit yet." 
146 | 147 | # Validate the flag belongs to the user/team 148 | try: 149 | container_flag = get_container_flag(submitted_flag, user, container_manager, container_info, challenge) 150 | except ValueError as e: 151 | return False, str(e) # Return incorrect flag message if not cheating 152 | 153 | # 6) Mark used & kill container => success 154 | container_flag.used = True 155 | db.session.commit() 156 | 157 | # **If the challenge is static, delete both flag and container records** 158 | if challenge.flag_mode == "static": 159 | db.session.delete(container_flag) 160 | db.session.commit() 161 | 162 | # **If the challenge is random, keep the flag but delete only the container info** 163 | if challenge.flag_mode == "random": 164 | db.session.query(ContainerFlagModel).filter_by(container_id=container_info.container_id).update({"container_id": None}) 165 | db.session.commit() 166 | 167 | # Remove container info record 168 | container = ContainerInfoModel.query.filter_by(container_id=container_info.container_id).first() 169 | if container: 170 | db.session.delete(container) 171 | db.session.commit() 172 | 173 | # Kill the container 174 | container_manager.kill_container(container_info.container_id) 175 | 176 | return True, "Correct" 177 | 178 | container_manager = None # Global 179 | 180 | def load(app: Flask): 181 | # Ensure database is initialized 182 | app.db.create_all() 183 | 184 | # Register the challenge type 185 | CHALLENGE_CLASSES["container"] = ContainerChallenge 186 | 187 | register_plugin_assets_directory( 188 | app, base_path=settings["plugin-info"]["base_path"] 189 | ) 190 | 191 | global container_manager 192 | container_settings = settings_to_dict(ContainerSettingsModel.query.all()) 193 | container_manager = ContainerManager(container_settings, app) 194 | 195 | base_bp = Blueprint( 196 | "containers", 197 | __name__, 198 | template_folder=settings["blueprint"]["template_folder"], 199 | static_folder=settings["blueprint"]["static_folder"] 200 | ) 201 | 202 | 
set_admin_manager(container_manager) 203 | set_user_manager(container_manager) 204 | 205 | # Register the blueprints 206 | app.register_blueprint(admin_bp) # Admin APIs 207 | app.register_blueprint(containers_bp) # User APIs 208 | 209 | 210 | app.register_blueprint(base_bp) 211 | -------------------------------------------------------------------------------- /admin_routes.py: -------------------------------------------------------------------------------- 1 | import json 2 | from flask import Blueprint, request, jsonify, render_template, url_for, redirect, Flask, flash 3 | from CTFd.models import db 4 | from .models import ContainerChallengeModel, ContainerInfoModel, ContainerSettingsModel, ContainerCheatLog 5 | from .container_manager import ContainerManager, ContainerException 6 | from CTFd.utils.decorators import admins_only 7 | from .helpers import * 8 | 9 | admin_bp = Blueprint("container_admin", __name__, url_prefix="/containers/admin") 10 | 11 | container_manager = None 12 | 13 | def set_container_manager(manager): 14 | global container_manager 15 | container_manager = manager 16 | 17 | # Admin dashboard 18 | @admin_bp.route("/dashboard", methods=["GET"]) 19 | @admins_only 20 | def route_containers_dashboard(): 21 | connected = False 22 | try: 23 | connected = container_manager.is_connected() 24 | except ContainerException: 25 | pass 26 | 27 | running_containers = ContainerInfoModel.query.order_by( 28 | ContainerInfoModel.timestamp.desc() 29 | ).all() 30 | 31 | for i, container in enumerate(running_containers): 32 | try: 33 | running_containers[i].is_running = container_manager.is_container_running( 34 | container.container_id 35 | ) 36 | except ContainerException: 37 | running_containers[i].is_running = False 38 | 39 | return render_template( 40 | "container_dashboard.html", 41 | containers=running_containers, 42 | connected=connected, 43 | ) 44 | 45 | @admin_bp.route("/settings", methods=["GET"]) 46 | @admins_only 47 | def route_containers_settings(): 48 
| connected = False 49 | try: 50 | connected = container_manager.is_connected() 51 | except ContainerException: 52 | pass 53 | 54 | return render_template( 55 | "container_settings.html", 56 | settings=container_manager.settings, 57 | connected=connected, 58 | ) 59 | 60 | @admin_bp.route("/cheat", methods=["GET"]) 61 | @admins_only 62 | def route_containers_cheat(): 63 | connected = False 64 | try: 65 | connected = container_manager.is_connected() 66 | except ContainerException: 67 | pass 68 | 69 | cheat_logs = ContainerCheatLog.query.order_by(ContainerCheatLog.timestamp.desc()).all() 70 | 71 | return render_template( 72 | "container_cheat.html", 73 | connected=connected, 74 | cheat_logs=cheat_logs 75 | ) 76 | 77 | # Admin API 78 | @admin_bp.route("/api/settings", methods=["POST"]) 79 | @admins_only 80 | def route_update_settings(): 81 | 82 | required_fields = [ 83 | "docker_base_url", 84 | "docker_hostname", 85 | "container_expiration", 86 | "container_maxmemory", 87 | "container_maxcpu", 88 | "max_containers", 89 | ] 90 | 91 | # Validate required fields 92 | for field in required_fields: 93 | if request.form.get(field) is None: 94 | return {"error": f"{field} is required."}, 400 95 | 96 | # Update settings dynamically 97 | for key in required_fields: 98 | value = request.form.get(key) 99 | setting = ContainerSettingsModel.query.filter_by(key=key).first() 100 | 101 | if not setting: 102 | setting = ContainerSettingsModel(key=key, value=value) 103 | db.session.add(setting) 104 | else: 105 | setting.value = value 106 | 107 | db.session.commit() 108 | 109 | # Refresh container manager settings 110 | container_manager.settings = settings_to_dict( 111 | ContainerSettingsModel.query.all() 112 | ) 113 | 114 | if container_manager.settings.get("docker_base_url") is not None: 115 | try: 116 | container_manager.initialize_connection(container_manager.settings, Flask) 117 | except ContainerException as err: 118 | flash(str(err), "error") 119 | return 
redirect(url_for(".route_containers_settings")) 120 | 121 | return redirect(url_for(".route_containers_dashboard")) 122 | 123 | @admin_bp.route("/api/kill", methods=["POST"]) 124 | @admins_only 125 | def route_admin_kill_container(): 126 | try: 127 | validate_request(request.json, ["container_id"]) 128 | return kill_container(container_manager, request.json.get("container_id")) 129 | except ValueError as err: 130 | return {"error": str(err)}, 400 131 | 132 | @admin_bp.route("/api/purge", methods=["POST"]) 133 | @admins_only 134 | def route_purge_containers(): 135 | """Bulk delete multiple containers""" 136 | try: 137 | validate_request(request.json, ["container_ids"]) 138 | container_ids = request.json.get("container_ids", []) 139 | if not container_ids: 140 | return {"error": "No containers selected"}, 400 141 | 142 | deleted_count = 0 143 | for container_id in container_ids: 144 | container = ContainerInfoModel.query.filter_by(container_id=container_id).first() 145 | if container: 146 | try: 147 | container_manager.kill_container(container_id) 148 | db.session.delete(container) 149 | deleted_count += 1 150 | except ContainerException: 151 | continue 152 | 153 | db.session.commit() 154 | return {"success": f"Deleted {deleted_count} container(s)"} 155 | except ValueError as err: 156 | return {"error": str(err)}, 400 157 | 158 | @admin_bp.route("/api/images", methods=["GET"]) 159 | @admins_only 160 | def route_get_images(): 161 | try: 162 | images = container_manager.get_images() 163 | except ContainerException as err: 164 | return {"error": str(err)} 165 | 166 | return {"images": images} 167 | 168 | @admin_bp.route("/api/running_containers", methods=["GET"]) 169 | @admins_only 170 | def route_get_running_containers(): 171 | running_containers = ContainerInfoModel.query.order_by( 172 | ContainerInfoModel.timestamp.desc() 173 | ).all() 174 | 175 | connected = False 176 | try: 177 | connected = container_manager.is_connected() 178 | except ContainerException: 179 | 
pass 180 | 181 | # Create lists to store unique teams and challenges 182 | unique_teams = set() 183 | unique_challenges = set() 184 | 185 | for i, container in enumerate(running_containers): 186 | try: 187 | running_containers[i].is_running = ( 188 | container_manager.is_container_running(container.container_id) 189 | ) 190 | except ContainerException: 191 | running_containers[i].is_running = False 192 | 193 | # Add team and challenge to the unique sets 194 | if is_team_mode() is True: 195 | unique_teams.add(f"{container.team.name} [{container.team_id}]") 196 | else: 197 | unique_teams.add(f"{container.user.name} [{container.user_id}]") 198 | unique_challenges.add( 199 | f"{container.challenge.name} [{container.challenge_id}]" 200 | ) 201 | 202 | # Convert unique sets to lists 203 | unique_teams_list = list(unique_teams) 204 | unique_challenges_list = list(unique_challenges) 205 | 206 | # Create a list of dictionaries containing running_containers data 207 | running_containers_data = [] 208 | for container in running_containers: 209 | if is_team_mode() is True: 210 | container_data = { 211 | "container_id": container.container_id, 212 | "image": container.challenge.image, 213 | "challenge": f"{container.challenge.name} [{container.challenge_id}]", 214 | "team": f"{container.team.name} [{container.team_id}]", 215 | "port": container.port, 216 | "created": container.timestamp, 217 | "expires": container.expires, 218 | "is_running": container.is_running, 219 | } 220 | else: 221 | container_data = { 222 | "container_id": container.container_id, 223 | "image": container.challenge.image, 224 | "challenge": f"{container.challenge.name} [{container.challenge_id}]", 225 | "user": f"{container.user.name} [{container.user_id}]", 226 | "port": container.port, 227 | "created": container.timestamp, 228 | "expires": container.expires, 229 | "is_running": container.is_running, 230 | } 231 | running_containers_data.append(container_data) 232 | 233 | # Create a JSON response 
containing running_containers_data, unique teams, and unique challenges 234 | response_data = { 235 | "containers": running_containers_data, 236 | "connected": connected, 237 | "teams": unique_teams_list, 238 | "challenges": unique_challenges_list, 239 | } 240 | 241 | # Return the JSON response 242 | return jsonify(response_data) 243 | -------------------------------------------------------------------------------- /assets/create.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/challenges/create.html" %} 2 | 3 | {% block header %} 4 | 7 | {% endblock %} 8 | 9 | {% block value %} 10 |
11 | 17 | 18 |
19 |
20 | 25 | 26 | 27 |
28 | 29 |
30 | 35 | 36 |
37 | 38 |
39 | 44 | 45 |
46 | 47 |
48 | 54 | 58 |
59 | 60 |
61 | 67 | 73 |
74 | 75 |
76 | 82 | 83 |
84 | 85 |
86 | 93 | 94 |
95 | 96 |
97 | 136 | 140 |
141 | 142 |
143 | 144 | 145 |
146 | 147 |
148 | 149 | 150 |
151 | 152 |
153 | 154 | 155 |
156 | 157 | {% endblock %} 158 | 159 | {% block type %} 160 | 161 | {% endblock %} -------------------------------------------------------------------------------- /assets/create.js: -------------------------------------------------------------------------------- 1 | CTFd.plugin.run((_CTFd) => { 2 | const $ = _CTFd.lib.$; 3 | const md = _CTFd.lib.markdown(); 4 | }); 5 | 6 | var containerImage = document.getElementById("container-image"); 7 | var containerImageDefault = document.getElementById("container-image-default"); 8 | var path = "/containers/admin/api/images"; 9 | 10 | fetch(path, { 11 | method: "GET", 12 | headers: { 13 | "Accept": "application/json", 14 | "CSRF-Token": init.csrfNonce 15 | } 16 | }) 17 | .then(response => { 18 | if (!response.ok) { 19 | // Handle error response 20 | return Promise.reject("Error fetching data"); 21 | } 22 | return response.json(); 23 | }) 24 | .then(data => { 25 | if (data.error != undefined) { 26 | // Error 27 | containerImageDefault.innerHTML = data.error; 28 | } else { 29 | // Success 30 | for (var i = 0; i < data.images.length; i++) { 31 | var opt = document.createElement("option"); 32 | opt.value = data.images[i]; 33 | opt.innerHTML = data.images[i]; 34 | containerImage.appendChild(opt); 35 | } 36 | containerImageDefault.innerHTML = "Choose an image..."; 37 | containerImage.removeAttribute("disabled"); 38 | } 39 | console.log(data); 40 | }) 41 | .catch(error => { 42 | // Handle fetch error 43 | console.error(error); 44 | }); 45 | -------------------------------------------------------------------------------- /assets/update.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/challenges/update.html" %} 2 | 3 | {% block connection_info %} 4 |
5 | 11 | 13 |
14 | {% endblock %} 15 | 16 | {% block value %} 17 |
18 | 23 | 24 |
25 | 26 |
27 | 32 | 33 |
34 | 35 |
36 | 41 | 42 |
43 | 44 |
45 | 50 | 51 |
52 | 53 |
54 | 57 | 63 | 67 |
68 | 69 |
70 | 76 | 82 |
83 | 84 |
85 | 91 | 92 |
93 | 94 |
95 | 102 | 103 |
104 | 105 |
106 | 147 | 155 |
156 | 157 |
158 | 159 | 161 |
162 | 163 |
164 | 165 | 167 |
168 | 169 |
170 | 171 | 173 |
174 | 175 | {% endblock %} -------------------------------------------------------------------------------- /assets/update.js: -------------------------------------------------------------------------------- 1 | var containerImage = document.getElementById("container-image"); 2 | var containerImageDefault = document.getElementById("container-image-default"); 3 | var path = "/containers/admin/api/images"; 4 | 5 | fetch(path, { 6 | method: "GET", 7 | headers: { 8 | "Accept": "application/json", 9 | "CSRF-Token": init.csrfNonce 10 | } 11 | }) 12 | .then(response => response.json()) 13 | .then(data => { 14 | if (data.error !== undefined) { 15 | // Error 16 | containerImageDefault.innerHTML = data.error; 17 | } else { 18 | // Success 19 | for (var i = 0; i < data.images.length; i++) { 20 | var opt = document.createElement("option"); 21 | opt.value = data.images[i]; 22 | opt.innerHTML = data.images[i]; 23 | containerImage.appendChild(opt); 24 | } 25 | containerImageDefault.innerHTML = "Choose an image..."; 26 | containerImage.removeAttribute("disabled"); 27 | containerImage.value = container_image_selected; 28 | } 29 | console.log(data); 30 | }) 31 | .catch(error => { 32 | console.error("Fetch error:", error); 33 | }); 34 | 35 | var currentURL = window.location.href; 36 | var match = currentURL.match(/\/challenges\/(\d+)/); 37 | 38 | if (match && match[1]) { 39 | var challenge_id = parseInt(match[1]); 40 | 41 | var connectType = document.getElementById("connect-type"); 42 | var connectTypeDefault = document.getElementById("connect-type-default"); 43 | 44 | var connectTypeEndpoint = "/containers/api/get_connect_type/" + challenge_id; 45 | 46 | fetch(connectTypeEndpoint, { 47 | method: "GET", 48 | headers: { 49 | "Accept": "application/json", 50 | "CSRF-Token": init.csrfNonce 51 | } 52 | }) 53 | .then(response => response.json()) 54 | .then(connectTypeData => { 55 | if (connectTypeData.error !== undefined) { 56 | console.error("Error:", connectTypeData.error); 57 | } else 
{ 58 | var connectTypeValue = connectTypeData.connect; 59 | 60 | connectTypeDefault.innerHTML = "Choose..."; 61 | connectType.removeAttribute("disabled"); 62 | connectType.value = connectTypeValue; 63 | } 64 | console.log(connectTypeData); 65 | }) 66 | .catch(error => { 67 | console.error("Fetch error:", error); 68 | }); 69 | } else { 70 | console.error("Challenge ID not found in the URL."); 71 | } 72 | -------------------------------------------------------------------------------- /assets/view.html: -------------------------------------------------------------------------------- 1 | {% extends "challenge.html" %} {% block connection_info %} 2 |
3 | 4 | 5 |
6 | 7 | 14 | 21 | 28 | 29 |
30 | {% endblock %} 31 | -------------------------------------------------------------------------------- /assets/view.js: -------------------------------------------------------------------------------- 1 | CTFd._internal.challenge.data = undefined; 2 | CTFd._internal.challenge.renderer = null; 3 | CTFd._internal.challenge.preRender = function () {}; 4 | CTFd._internal.challenge.render = null; 5 | CTFd._internal.challenge.postRender = function () {}; 6 | 7 | CTFd._internal.challenge.submit = function (preview) { 8 | var challenge_id = parseInt(CTFd.lib.$("#challenge-id").val()); 9 | var submission = CTFd.lib.$("#challenge-input").val(); 10 | 11 | let alert = resetAlert(); 12 | 13 | var body = { 14 | challenge_id: challenge_id, 15 | submission: submission, 16 | }; 17 | var params = {}; 18 | if (preview) { 19 | params["preview"] = true; 20 | } 21 | 22 | return CTFd.api 23 | .post_challenge_attempt(params, body) 24 | .then(function (response) { 25 | if (response.status === 429) return response; // Rate limit 26 | if (response.status === 403) return response; // Not logged in / CTF paused 27 | return response; 28 | }); 29 | }; 30 | 31 | function mergeQueryParams(parameters, queryParameters) { 32 | if (parameters.$queryParameters) { 33 | Object.keys(parameters.$queryParameters).forEach(function (parameterName) { 34 | queryParameters[parameterName] = parameters.$queryParameters[parameterName]; 35 | }); 36 | } 37 | return queryParameters; 38 | } 39 | 40 | function resetAlert() { 41 | let alert = document.getElementById("deployment-info"); 42 | alert.innerHTML = '
Loading...
'; 43 | alert.classList.remove("alert-danger"); 44 | 45 | // Disable buttons while loading 46 | document.getElementById("create-chal").disabled = true; 47 | document.getElementById("extend-chal").disabled = true; 48 | document.getElementById("terminate-chal").disabled = true; 49 | 50 | return alert; 51 | } 52 | 53 | function enableButtons() { 54 | document.getElementById("create-chal").disabled = false; 55 | document.getElementById("extend-chal").disabled = false; 56 | document.getElementById("terminate-chal").disabled = false; 57 | } 58 | 59 | function toggleChallengeCreate() { 60 | document.getElementById("create-chal").classList.toggle('d-none'); 61 | } 62 | 63 | function toggleChallengeUpdate() { 64 | document.getElementById("extend-chal").classList.toggle('d-none'); 65 | document.getElementById("terminate-chal").classList.toggle('d-none'); 66 | } 67 | 68 | function calculateExpiry(date) { 69 | return Math.ceil((new Date(date * 1000) - new Date()) / 1000 / 60); 70 | } 71 | 72 | function createChallengeLinkElement(data, parent) { 73 | parent.innerHTML = ""; 74 | 75 | let expires = document.createElement('span'); 76 | expires.textContent = "Expires in " + calculateExpiry(new Date(data.expires)) + " minutes."; 77 | parent.append(expires, document.createElement('br')); 78 | 79 | if (data.connect == "tcp") { 80 | let codeElement = document.createElement('code'); 81 | codeElement.textContent = 'nc ' + data.hostname + " " + data.port; 82 | parent.append(codeElement); 83 | } else { 84 | let link = document.createElement('a'); 85 | link.href = 'http://' + data.hostname + ":" + data.port; 86 | link.textContent = 'http://' + data.hostname + ":" + data.port; 87 | link.target = '_blank'; 88 | parent.append(link); 89 | } 90 | } 91 | 92 | function view_container_info(challenge_id) { 93 | let alert = resetAlert(); 94 | 95 | fetch("/containers/api/view_info", { 96 | method: "POST", 97 | headers: { 98 | "Content-Type": "application/json", 99 | "Accept": "application/json", 100 | 
"CSRF-Token": init.csrfNonce 101 | }, 102 | body: JSON.stringify({ chal_id: challenge_id }) 103 | }) 104 | .then(response => response.json()) 105 | .then(data => { 106 | alert.innerHTML = ""; // Remove spinner 107 | if (data.status == "Challenge not started") { 108 | alert.innerHTML = data.status; 109 | toggleChallengeCreate(); 110 | } else if (data.status == "already_running") { 111 | createChallengeLinkElement(data, alert); 112 | toggleChallengeUpdate(); 113 | } else { 114 | alert.innerHTML = data.message; 115 | alert.classList.add("alert-danger"); 116 | toggleChallengeUpdate(); 117 | } 118 | }) 119 | .catch(error => { 120 | alert.innerHTML = "Error fetching container info."; 121 | alert.classList.add("alert-danger"); 122 | console.error("Fetch error:", error); 123 | }) 124 | .finally(enableButtons); 125 | } 126 | 127 | function container_request(challenge_id) { 128 | let alert = resetAlert(); 129 | 130 | fetch("/containers/api/request", { 131 | method: "POST", 132 | headers: { 133 | "Content-Type": "application/json", 134 | "Accept": "application/json", 135 | "CSRF-Token": init.csrfNonce 136 | }, 137 | body: JSON.stringify({ chal_id: challenge_id }) 138 | }) 139 | .then(response => response.json()) 140 | .then(data => { 141 | alert.innerHTML = ""; // Remove spinner 142 | if (data.error) { 143 | alert.innerHTML = data.error; 144 | alert.classList.add("alert-danger"); 145 | toggleChallengeCreate(); 146 | } else if (data.message) { 147 | alert.innerHTML = data.message; 148 | alert.classList.add("alert-danger"); 149 | toggleChallengeCreate(); 150 | } else { 151 | createChallengeLinkElement(data, alert); 152 | toggleChallengeUpdate(); 153 | toggleChallengeCreate(); 154 | } 155 | }) 156 | .catch(error => { 157 | alert.innerHTML = "Error requesting container."; 158 | alert.classList.add("alert-danger"); 159 | console.error("Fetch error:", error); 160 | }) 161 | .finally(enableButtons); 162 | } 163 | 164 | function container_renew(challenge_id) { 165 | let alert = 
resetAlert(); 166 | 167 | fetch("/containers/api/renew", { 168 | method: "POST", 169 | headers: { 170 | "Content-Type": "application/json", 171 | "Accept": "application/json", 172 | "CSRF-Token": init.csrfNonce 173 | }, 174 | body: JSON.stringify({ chal_id: challenge_id }) 175 | }) 176 | .then(response => response.json()) 177 | .then(data => { 178 | alert.innerHTML = ""; // Remove spinner 179 | if (data.error) { 180 | alert.innerHTML = data.error; 181 | alert.classList.add("alert-danger"); 182 | } else if (data.message) { 183 | alert.innerHTML = data.message; 184 | alert.classList.add("alert-danger"); 185 | } else { 186 | createChallengeLinkElement(data, alert); 187 | } 188 | }) 189 | .catch(error => { 190 | alert.innerHTML = "Error renewing container."; 191 | alert.classList.add("alert-danger"); 192 | console.error("Fetch error:", error); 193 | }) 194 | .finally(enableButtons); 195 | } 196 | 197 | function container_stop(challenge_id) { 198 | let alert = resetAlert(); 199 | 200 | fetch("/containers/api/stop", { 201 | method: "POST", 202 | headers: { 203 | "Content-Type": "application/json", 204 | "Accept": "application/json", 205 | "CSRF-Token": init.csrfNonce 206 | }, 207 | body: JSON.stringify({ chal_id: challenge_id }) 208 | }) 209 | .then(response => response.json()) 210 | .then(data => { 211 | alert.innerHTML = ""; // Remove spinner 212 | if (data.error) { 213 | alert.innerHTML = data.error; 214 | alert.classList.add("alert-danger"); 215 | toggleChallengeCreate(); 216 | } else if (data.message) { 217 | alert.innerHTML = data.message; 218 | alert.classList.add("alert-danger"); 219 | toggleChallengeCreate(); 220 | } else { 221 | alert.innerHTML = "Challenge Terminated."; 222 | toggleChallengeCreate(); 223 | toggleChallengeUpdate(); 224 | } 225 | }) 226 | .catch(error => { 227 | alert.innerHTML = "Error stopping container."; 228 | alert.classList.add("alert-danger"); 229 | console.error("Fetch error:", error); 230 | }) 231 | .finally(enableButtons); 232 | } 
-------------------------------------------------------------------------------- /config.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Containers", 3 | "route": "/containers/admin/dashboard" 4 | } 5 | -------------------------------------------------------------------------------- /container_manager.py: -------------------------------------------------------------------------------- 1 | import atexit 2 | import time 3 | import json 4 | import random 5 | import string 6 | 7 | from flask import Flask 8 | from apscheduler.schedulers.background import BackgroundScheduler 9 | from apscheduler.schedulers import SchedulerNotRunningError 10 | import docker 11 | import paramiko.ssh_exception 12 | import requests 13 | 14 | from CTFd.models import db 15 | from .models import ContainerInfoModel, ContainerFlagModel, ContainerFlagModel 16 | 17 | 18 | def generate_random_flag(challenge): 19 | """Generate a random flag with the given length and format""" 20 | flag_length = challenge.random_flag_length 21 | random_part = "".join( 22 | random.choices(string.ascii_letters + string.digits, k=flag_length) 23 | ) 24 | return f"{challenge.flag_prefix}{random_part}{challenge.flag_suffix}" 25 | 26 | 27 | class ContainerException(Exception): 28 | def __init__(self, *args: object) -> None: 29 | super().__init__(*args) 30 | if args: 31 | self.message = args[0] 32 | else: 33 | self.message = None 34 | 35 | def __str__(self) -> str: 36 | if self.message: 37 | return self.message 38 | else: 39 | return "Unknown Container Exception" 40 | 41 | 42 | class ContainerManager: 43 | def __init__(self, settings, app): 44 | self.settings = settings 45 | self.client = None 46 | self.app = app 47 | if ( 48 | settings.get("docker_base_url") is None 49 | or settings.get("docker_base_url") == "" 50 | ): 51 | return 52 | 53 | # Connect to the docker daemon 54 | try: 55 | self.initialize_connection(settings, app) 56 | except ContainerException: 57 | 
print("Docker could not initialize or connect.") 58 | return 59 | 60 | def initialize_connection(self, settings, app) -> None: 61 | self.settings = settings 62 | self.app = app 63 | 64 | # Remove any leftover expiration schedulers 65 | try: 66 | self.expiration_scheduler.shutdown() 67 | except (SchedulerNotRunningError, AttributeError): 68 | # Scheduler was never running 69 | pass 70 | 71 | if settings.get("docker_base_url") is None: 72 | self.client = None 73 | return 74 | 75 | try: 76 | self.client = docker.DockerClient(base_url=settings.get("docker_base_url")) 77 | except docker.errors.DockerException as e: 78 | self.client = None 79 | raise ContainerException("CTFd could not connect to Docker") 80 | except TimeoutError as e: 81 | self.client = None 82 | raise ContainerException("CTFd timed out when connecting to Docker") 83 | except paramiko.ssh_exception.NoValidConnectionsError as e: 84 | self.client = None 85 | raise ContainerException( 86 | "CTFd timed out when connecting to Docker: " + str(e) 87 | ) 88 | except paramiko.ssh_exception.AuthenticationException as e: 89 | self.client = None 90 | raise ContainerException( 91 | "CTFd had an authentication error when connecting to Docker: " + str(e) 92 | ) 93 | 94 | # Set up expiration scheduler 95 | try: 96 | self.expiration_seconds = int(settings.get("container_expiration", 0)) * 60 97 | except (ValueError, AttributeError): 98 | self.expiration_seconds = 0 99 | 100 | EXPIRATION_CHECK_INTERVAL = 5 101 | 102 | if self.expiration_seconds > 0: 103 | self.expiration_scheduler = BackgroundScheduler() 104 | self.expiration_scheduler.add_job( 105 | func=self.kill_expired_containers, 106 | args=(app,), 107 | trigger="interval", 108 | seconds=EXPIRATION_CHECK_INTERVAL, 109 | ) 110 | self.expiration_scheduler.start() 111 | 112 | # Shut down the scheduler when exiting the app 113 | atexit.register(lambda: self.expiration_scheduler.shutdown()) 114 | 115 | # TODO: Fix this cause it doesn't work 116 | def run_command(func): 
117 | def wrapper_run_command(self, *args, **kwargs): 118 | if self.client is None: 119 | try: 120 | self.__init__(self.settings, self.app) 121 | except: 122 | raise ContainerException("Docker is not connected") 123 | try: 124 | if self.client is None: 125 | raise ContainerException("Docker is not connected") 126 | if self.client.ping(): 127 | return func(self, *args, **kwargs) 128 | except ( 129 | paramiko.ssh_exception.SSHException, 130 | ConnectionError, 131 | requests.exceptions.ConnectionError, 132 | ) as e: 133 | # Try to reconnect before failing 134 | try: 135 | self.__init__(self.settings, self.app) 136 | except: 137 | pass 138 | raise ContainerException( 139 | "Docker connection was lost. Please try your request again later." 140 | ) 141 | 142 | return wrapper_run_command 143 | 144 | @run_command 145 | def kill_expired_containers(self, app: Flask): 146 | with app.app_context(): 147 | containers: "list[ContainerInfoModel]" = ContainerInfoModel.query.all() 148 | 149 | for container in containers: 150 | delta_seconds = container.expires - int(time.time()) 151 | if delta_seconds < 0: 152 | try: 153 | self.kill_container(container.container_id) 154 | except ContainerException: 155 | print( 156 | "[Container Expiry Job] Docker is not initialized. Please check your settings." 
157 | ) 158 | 159 | db.session.delete(container) 160 | db.session.commit() 161 | 162 | @run_command 163 | def is_container_running(self, container_id: str) -> bool: 164 | container = self.client.containers.list(filters={"id": container_id}) 165 | if len(container) == 0: 166 | return False 167 | return container[0].status == "running" 168 | 169 | @run_command 170 | def create_container(self, challenge, xid, is_team): 171 | kwargs = {} 172 | 173 | flag = ( 174 | generate_random_flag(challenge) 175 | if challenge.flag_mode == "random" 176 | else challenge.flag_prefix + challenge.flag_suffix 177 | ) 178 | 179 | # Set the memory and CPU limits for the container 180 | if self.settings.get("container_maxmemory"): 181 | try: 182 | mem_limit = int(self.settings.get("container_maxmemory")) 183 | if mem_limit > 0: 184 | kwargs["mem_limit"] = f"{mem_limit}m" 185 | except ValueError: 186 | ContainerException( 187 | "Configured container memory limit must be an integer" 188 | ) 189 | if self.settings.get("container_maxcpu"): 190 | try: 191 | cpu_period = float(self.settings.get("container_maxcpu")) 192 | if cpu_period > 0: 193 | kwargs["cpu_quota"] = int(cpu_period * 100000) 194 | kwargs["cpu_period"] = 100000 195 | except ValueError: 196 | ContainerException("Configured container CPU limit must be a number") 197 | 198 | volumes = challenge.volumes 199 | if volumes is not None and volumes != "": 200 | print("Volumes:", volumes) 201 | try: 202 | volumes_dict = json.loads(volumes) 203 | kwargs["volumes"] = volumes_dict 204 | except json.decoder.JSONDecodeError: 205 | raise ContainerException("Volumes JSON string is invalid") 206 | 207 | try: 208 | container = self.client.containers.run( 209 | challenge.image, 210 | ports={str(challenge.port): None}, 211 | command=challenge.command, 212 | detach=True, 213 | auto_remove=True, 214 | environment={"FLAG": flag}, 215 | **kwargs, 216 | ) 217 | 218 | port = self.get_container_port(container.id) 219 | if port is None: 220 | raise 
ContainerException("Could not get container port") 221 | expires = int(time.time() + self.expiration_seconds) 222 | 223 | new_container_entry = ContainerInfoModel( 224 | container_id=container.id, 225 | challenge_id=challenge.id, 226 | team_id=xid if is_team else None, 227 | user_id=None if is_team else xid, 228 | port=port, 229 | flag=flag, 230 | timestamp=int(time.time()), 231 | expires=expires, 232 | ) 233 | db.session.add(new_container_entry) 234 | db.session.commit() 235 | 236 | # Save the flag in the database 237 | new_flag_entry = ContainerFlagModel( 238 | challenge_id=challenge.id, 239 | container_id=container.id, 240 | flag=flag, 241 | team_id=xid if is_team else None, 242 | user_id=None if is_team else xid, 243 | ) 244 | db.session.add(new_flag_entry) 245 | db.session.commit() 246 | 247 | return {"container": container, "expires": expires, "port": port} 248 | except docker.errors.ImageNotFound: 249 | raise ContainerException("Docker image not found") 250 | 251 | @run_command 252 | def get_container_port(self, container_id: str) -> "str|None": 253 | try: 254 | for port in list(self.client.containers.get(container_id).ports.values()): 255 | if port is not None: 256 | return port[0]["HostPort"] 257 | except (KeyError, IndexError): 258 | return None 259 | 260 | @run_command 261 | def get_images(self) -> "list[str]|None": 262 | try: 263 | images = self.client.images.list() 264 | except (KeyError, IndexError): 265 | return [] 266 | 267 | images_list = [] 268 | for image in images: 269 | if len(image.tags) > 0: 270 | images_list.append(image.tags[0]) 271 | 272 | images_list.sort() 273 | return images_list 274 | 275 | @run_command 276 | def kill_container(self, container_id: str): 277 | try: 278 | self.client.containers.get(container_id).kill() 279 | 280 | container_info = ContainerInfoModel.query.filter_by( 281 | container_id=container_id 282 | ).first() 283 | if not container_info: 284 | return # No matching record => nothing else to do 285 | 286 | challenge = 
container_info.challenge 287 | 288 | used_flags = ContainerFlagModel.query.filter_by( 289 | container_id=container_id 290 | ).all() 291 | 292 | if challenge.flag_mode == "static": 293 | # Remove all flags for static-mode challenges (ignore used or not used) 294 | for f in used_flags: 295 | db.session.delete(f) 296 | else: 297 | for f in used_flags: 298 | if f.used: 299 | # Keep this flag, but remove its container reference 300 | f.container_id = None 301 | else: 302 | # If the flag wasn't used, delete it 303 | db.session.delete(f) 304 | 305 | except docker.errors.NotFound: 306 | pass 307 | 308 | def is_connected(self) -> bool: 309 | try: 310 | self.client.ping() 311 | except: 312 | return False 313 | return True 314 | -------------------------------------------------------------------------------- /helpers.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import time 4 | from flask import jsonify, request 5 | from CTFd.utils import get_config 6 | from .models import ContainerChallengeModel, ContainerInfoModel, ContainerSettingsModel, ContainerFlagModel, ContainerCheatLog 7 | from .container_manager import ContainerManager, ContainerException 8 | from CTFd.models import db, Teams, Users, Solves 9 | from CTFd.utils.user import get_current_user 10 | 11 | 12 | def get_settings_path(): 13 | """Retrieve the path to settings.json""" 14 | # Thanks https://github.com/TheFlash2k 15 | return os.path.join(os.path.dirname(os.path.abspath(__file__)), "settings.json") 16 | 17 | 18 | settings = json.load(open(get_settings_path())) 19 | USERS_MODE = settings["modes"]["USERS_MODE"] 20 | TEAMS_MODE = settings["modes"]["TEAMS_MODE"] 21 | 22 | 23 | def settings_to_dict(settings): 24 | """Convert settings table records into a dictionary""" 25 | return {setting.key: setting.value for setting in settings} 26 | 27 | 28 | def is_team_mode(): 29 | """Determine if CTF is running in team mode""" 30 | mode = 
get_config("user_mode") 31 | return mode == TEAMS_MODE 32 | 33 | 34 | def kill_container(container_manager, container_id): 35 | """Kill and remove a running container""" 36 | container = ContainerInfoModel.query.filter_by(container_id=container_id).first() 37 | 38 | if not container: 39 | return jsonify({"error": "Container not found"}), 400 40 | 41 | try: 42 | container_manager.kill_container(container_id) 43 | except ContainerException: 44 | return jsonify( 45 | {"error": "Docker is not initialized. Please check your settings."} 46 | ) 47 | 48 | db.session.delete(container) 49 | db.session.commit() 50 | 51 | return jsonify({"success": "Container killed"}) 52 | 53 | 54 | def renew_container(container_manager, chal_id, xid, is_team): 55 | """Extend the expiration time of an active container""" 56 | challenge = ContainerChallengeModel.query.filter_by(id=chal_id).first() 57 | 58 | if challenge is None: 59 | return jsonify({"error": "Challenge not found"}), 400 60 | 61 | running_container = ContainerInfoModel.query.filter_by( 62 | challenge_id=challenge.id, 63 | team_id=xid if is_team else None, 64 | user_id=None if is_team else xid, 65 | ).first() 66 | 67 | if running_container is None: 68 | return jsonify({"error": "Container not found, try resetting the container."}) 69 | 70 | try: 71 | running_container.expires = int( 72 | time.time() + container_manager.expiration_seconds 73 | ) 74 | db.session.commit() 75 | except ContainerException: 76 | return jsonify({"error": "Database error occurred, please try again."}) 77 | 78 | return jsonify( 79 | { 80 | "success": "Container renewed", 81 | "expires": running_container.expires, 82 | "hostname": container_manager.settings.get("docker_hostname", ""), 83 | "port": running_container.port, 84 | "connect": challenge.connection_type, 85 | } 86 | ) 87 | 88 | 89 | def create_container(container_manager, chal_id, xid, is_team): 90 | """Create a new challenge container""" 91 | challenge = 
ContainerChallengeModel.query.filter_by(id=chal_id).first() 92 | 93 | if challenge is None: 94 | return jsonify({"error": "Challenge not found"}), 400 95 | 96 | if Solves.query.filter_by(challenge_id=chal_id, account_id=xid).first(): 97 | return jsonify({"error": "Challenge already solved"}), 400 98 | 99 | 100 | max_containers = int(container_manager.settings.get("max_containers", 3)) 101 | 102 | # Check if user/team has reached the max container limit 103 | running_container = ContainerInfoModel.query.filter_by( 104 | challenge_id=challenge.id, 105 | team_id=xid if is_team else None, 106 | user_id=None if is_team else xid, 107 | ).first() 108 | 109 | container_count = ContainerInfoModel.query.filter_by( 110 | team_id=xid if is_team else None, 111 | user_id=None if is_team else xid, 112 | ).count() 113 | 114 | if container_count >= max_containers: 115 | return ( 116 | jsonify( 117 | { 118 | "error": f"Max containers ({max_containers}) reached. Please stop a running container before starting a new one." 
119 | } 120 | ), 121 | 400, 122 | ) 123 | 124 | if running_container: 125 | # Check if the container is still running 126 | try: 127 | if container_manager.is_container_running(running_container.container_id): 128 | return jsonify( 129 | { 130 | "status": "already_running", 131 | "hostname": container_manager.settings.get( 132 | "docker_hostname", "" 133 | ), 134 | "port": running_container.port, 135 | "connect": challenge.connection_type, 136 | "expires": running_container.expires, 137 | } 138 | ) 139 | else: 140 | db.session.delete(running_container) 141 | db.session.commit() 142 | except ContainerException as err: 143 | return jsonify({"error": str(err)}), 500 144 | 145 | # Start a new Docker container 146 | try: 147 | created_container = container_manager.create_container(challenge, xid, is_team) 148 | except ContainerException as err: 149 | return jsonify({"error": str(err)}) 150 | 151 | return jsonify( 152 | { 153 | "status": "created", 154 | "hostname": container_manager.settings.get("docker_hostname", ""), 155 | "port": created_container["port"], 156 | "connect": challenge.connection_type, 157 | "expires": created_container["expires"], 158 | } 159 | ) 160 | 161 | 162 | def view_container_info(container_manager, chal_id, xid, is_team): 163 | """Retrieve information about a running container""" 164 | challenge = ContainerChallengeModel.query.filter_by(id=chal_id).first() 165 | 166 | if challenge is None: 167 | return jsonify({"error": "Challenge not found"}), 400 168 | 169 | running_container = ContainerInfoModel.query.filter_by( 170 | challenge_id=challenge.id, 171 | team_id=xid if is_team else None, 172 | user_id=None if is_team else xid, 173 | ).first() 174 | 175 | if running_container: 176 | try: 177 | if container_manager.is_container_running(running_container.container_id): 178 | return jsonify( 179 | { 180 | "status": "already_running", 181 | "hostname": container_manager.settings.get( 182 | "docker_hostname", "" 183 | ), 184 | "port": 
running_container.port, 185 | "connect": challenge.connection_type, 186 | "expires": running_container.expires, 187 | } 188 | ) 189 | else: 190 | db.session.delete(running_container) 191 | db.session.commit() 192 | except ContainerException as err: 193 | return jsonify({"error": str(err)}), 500 194 | else: 195 | return jsonify({"status": "Challenge not started"}) 196 | 197 | 198 | def connect_type(chal_id): 199 | """Get the connection type for a challenge""" 200 | challenge = ContainerChallengeModel.query.filter_by(id=chal_id).first() 201 | 202 | if challenge is None: 203 | return jsonify({"error": "Challenge not found"}), 400 204 | 205 | return jsonify({"status": "Ok", "connect": challenge.connection_type}) 206 | 207 | def get_xid_and_flag(): 208 | """ 209 | 1) Returns (x_id, submitted_flag) from the current request 210 | 2) Raises ValueError with an error message if something is missing 211 | """ 212 | user = get_current_user() 213 | if not user: 214 | raise ValueError("You must be logged in to attempt this challenge.") 215 | 216 | if is_team_mode(): 217 | if not user.team_id: 218 | raise ValueError("You must belong to a team to solve this challenge.") 219 | x_id = user.team_id 220 | else: 221 | x_id = user.id 222 | 223 | # Parse flag from JSON or form 224 | data = request.get_json() or request.form 225 | submitted_flag = data.get("submission", "").strip() 226 | if not submitted_flag: 227 | raise ValueError("No flag provided.") 228 | 229 | return user, x_id, submitted_flag 230 | 231 | 232 | def get_active_container(challenge_id, x_id): 233 | """ 234 | Returns the ContainerInfoModel if found and running, else raises ValueError with a message. 
235 | """ 236 | container_info = ContainerInfoModel.query.filter_by( 237 | challenge_id=challenge_id, 238 | team_id=x_id if is_team_mode() else None, 239 | user_id=None if is_team_mode() else x_id, 240 | ).first() 241 | 242 | if not container_info: 243 | raise ValueError("No container is currently active for this challenge.") 244 | 245 | return container_info 246 | 247 | 248 | def get_container_flag(submitted_flag, user, container_manager, container_info, challenge): 249 | """ 250 | Fetches the ContainerFlagModel for the given submitted_flag. 251 | Ensures the flag belongs to the user or team (in team mode). 252 | If the flag was already used by another user/team, trigger a ban. 253 | """ 254 | if is_team_mode(): 255 | # In team mode, check if the flag belongs to the user's team 256 | container_flag = ContainerFlagModel.query.filter_by(flag=submitted_flag).first() 257 | if challenge.flag_mode == "random" and container_flag and container_flag.team_id != user.team_id: 258 | # Flag belongs to another team and is reused → cheating detected 259 | ban_team_and_original_owner(container_flag, user, container_manager, container_info) 260 | else: 261 | # In individual mode, check if the flag belongs to the user 262 | container_flag = ContainerFlagModel.query.filter_by(flag=submitted_flag).first() 263 | if challenge.flag_mode == "random" and container_flag and container_flag.user_id != user.id: 264 | # Flag belongs to another user and is reused → cheating detected 265 | ban_team_and_original_owner(container_flag, user, container_manager, container_info) 266 | 267 | # If no flag is found, return incorrect flag error 268 | if not container_flag: 269 | raise ValueError("Incorrect") 270 | 271 | return container_flag 272 | 273 | 274 | 275 | def ban_team_and_original_owner(container_flag, user, container_manager, container_info): 276 | """ 277 | If flag swapping or cheating is detected, ban both the original owner and the submitter. 
278 | Deletes the container record and kills the container. 279 | """ 280 | if not container_flag: 281 | raise ValueError("Cannot ban without a valid container flag.") 282 | 283 | cheat_log = ContainerCheatLog( 284 | reused_flag=container_flag.flag, 285 | challenge_id=container_flag.challenge_id, 286 | original_team_id=container_flag.team_id, 287 | original_user_id=container_flag.user_id, 288 | second_team_id=user.team_id if is_team_mode() else None, 289 | second_user_id=user.id if not is_team_mode() else None, 290 | timestamp=int(time.time()) 291 | ) 292 | db.session.add(cheat_log) 293 | db.session.commit() 294 | 295 | # Ban logic 296 | if is_team_mode(): 297 | original_team = Teams.query.filter_by(id=container_flag.team_id).first() 298 | submit_team = Teams.query.filter_by(id=user.team_id).first() 299 | 300 | if original_team: 301 | original_team.banned = True 302 | for member in original_team.members: 303 | member.banned = True 304 | if submit_team: 305 | submit_team.banned = True 306 | for member in submit_team.members: 307 | member.banned = True 308 | else: 309 | if container_flag.user_id: 310 | original_user = Users.query.filter_by(id=container_flag.user_id).first() 311 | if original_user: 312 | original_user.banned = True 313 | 314 | user.banned = True 315 | 316 | db.session.commit() 317 | 318 | # **If static mode, delete both flag and container info** 319 | if container_flag.challenge.flag_mode == "static": 320 | db.session.delete(container_flag) 321 | db.session.commit() 322 | 323 | # **If random mode, only delete container info but keep the flag** 324 | elif container_flag.challenge.flag_mode == "random": 325 | db.session.query(ContainerFlagModel).filter_by(container_id=container_info.container_id).update({"container_id": None}) 326 | db.session.commit() 327 | 328 | # Remove container info record 329 | container = ContainerInfoModel.query.filter_by(container_id=container_info.container_id).first() 330 | if container: 331 | db.session.delete(container) 332 
| db.session.commit() 333 | 334 | # Kill the container 335 | container_manager.kill_container(container_info.container_id) 336 | 337 | # Kill the container 338 | container_manager.kill_container(container_info.container_id) 339 | raise ValueError("Cheating detected!") 340 | 341 | def get_current_user_or_team(): 342 | user = get_current_user() 343 | if user is None: 344 | raise ValueError("User not found") 345 | if user.team is None and is_team_mode(): 346 | raise ValueError("User not a member of a team") 347 | return user.team.id if is_team_mode() else user.id 348 | 349 | def validate_request(json_data, required_fields): 350 | if json_data is None: 351 | raise ValueError("Invalid request") 352 | for field in required_fields: 353 | if json_data.get(field) is None: 354 | raise ValueError(f"No {field} specified") -------------------------------------------------------------------------------- /image-readme/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/phannhat17/CTFd-Docker-Plugin/5b662872087d90e2f18fe2dfc2386a7315fcfa97/image-readme/1.png -------------------------------------------------------------------------------- /image-readme/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/phannhat17/CTFd-Docker-Plugin/5b662872087d90e2f18fe2dfc2386a7315fcfa97/image-readme/2.png -------------------------------------------------------------------------------- /image-readme/3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/phannhat17/CTFd-Docker-Plugin/5b662872087d90e2f18fe2dfc2386a7315fcfa97/image-readme/3.png -------------------------------------------------------------------------------- /image-readme/4.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/phannhat17/CTFd-Docker-Plugin/5b662872087d90e2f18fe2dfc2386a7315fcfa97/image-readme/4.png -------------------------------------------------------------------------------- /image-readme/demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/phannhat17/CTFd-Docker-Plugin/5b662872087d90e2f18fe2dfc2386a7315fcfa97/image-readme/demo.gif -------------------------------------------------------------------------------- /image-readme/http.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/phannhat17/CTFd-Docker-Plugin/5b662872087d90e2f18fe2dfc2386a7315fcfa97/image-readme/http.png -------------------------------------------------------------------------------- /image-readme/main.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/phannhat17/CTFd-Docker-Plugin/5b662872087d90e2f18fe2dfc2386a7315fcfa97/image-readme/main.png -------------------------------------------------------------------------------- /image-readme/manage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/phannhat17/CTFd-Docker-Plugin/5b662872087d90e2f18fe2dfc2386a7315fcfa97/image-readme/manage.png -------------------------------------------------------------------------------- /image-readme/tcp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/phannhat17/CTFd-Docker-Plugin/5b662872087d90e2f18fe2dfc2386a7315fcfa97/image-readme/tcp.png -------------------------------------------------------------------------------- /models.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.sql import func 2 | from sqlalchemy.orm import relationship 3 | 4 | from CTFd.models import db 5 | from 
CTFd.models import Challenges 6 | 7 | 8 | class ContainerChallengeModel(Challenges): 9 | __mapper_args__ = {"polymorphic_identity": "container"} 10 | id = db.Column( 11 | db.Integer, db.ForeignKey("challenges.id", ondelete="CASCADE"), primary_key=True 12 | ) 13 | image = db.Column(db.Text) 14 | port = db.Column(db.Integer) 15 | command = db.Column(db.Text, default="") 16 | volumes = db.Column(db.Text, default="") 17 | connection_type = db.Column(db.Text) 18 | 19 | # Dynamic challenge properties 20 | initial = db.Column(db.Integer, default=0) 21 | minimum = db.Column(db.Integer, default=0) 22 | decay = db.Column(db.Integer, default=0) 23 | 24 | # Random flag properties 25 | flag_mode = db.Column(db.Text, default="static") 26 | random_flag_length = db.Column(db.Integer, default=10) 27 | flag_prefix = db.Column(db.Text, default="CTF{") 28 | flag_suffix = db.Column(db.Text, default="}") 29 | 30 | def __init__(self, *args, **kwargs): 31 | super(ContainerChallengeModel, self).__init__(**kwargs) 32 | self.value = kwargs["initial"] 33 | 34 | 35 | class ContainerInfoModel(db.Model): 36 | __mapper_args__ = {"polymorphic_identity": "container_info"} 37 | container_id = db.Column(db.String(512), primary_key=True) 38 | challenge_id = db.Column(db.Integer, db.ForeignKey("challenges.id", ondelete="CASCADE"), index=True) 39 | team_id = db.Column(db.Integer, db.ForeignKey("teams.id", ondelete="CASCADE"), index=True) 40 | user_id = db.Column(db.Integer, db.ForeignKey("users.id", ondelete="CASCADE"), index=True) 41 | port = db.Column(db.Integer) 42 | timestamp = db.Column(db.Integer) 43 | expires = db.Column(db.Integer) 44 | flag = db.Column(db.Text, default="") 45 | 46 | team = relationship("Teams", foreign_keys=[team_id]) 47 | user = relationship("Users", foreign_keys=[user_id]) 48 | 49 | challenge = relationship(ContainerChallengeModel, foreign_keys=[challenge_id]) 50 | 51 | 52 | class ContainerSettingsModel(db.Model): 53 | __mapper_args__ = {"polymorphic_identity": 
"container_settings"} 54 | key = db.Column(db.String(512), primary_key=True) 55 | value = db.Column(db.Text) 56 | 57 | 58 | class ContainerFlagModel(db.Model): 59 | __mapper_args__ = {"polymorphic_identity": "container_flags"} 60 | id = db.Column(db.Integer, primary_key=True, autoincrement=True) 61 | 62 | challenge_id = db.Column( 63 | db.Integer, db.ForeignKey("challenges.id", ondelete="CASCADE") 64 | ) 65 | container_id = db.Column( 66 | db.String(512), 67 | db.ForeignKey("container_info_model.container_id"), 68 | nullable=True, 69 | ) 70 | flag = db.Column(db.Text) 71 | user_id = db.Column(db.Integer, db.ForeignKey("users.id", ondelete="CASCADE")) 72 | team_id = db.Column(db.Integer, db.ForeignKey("teams.id", ondelete="CASCADE")) 73 | used = db.Column(db.Boolean, default=False) 74 | 75 | container = relationship(ContainerInfoModel, foreign_keys=[container_id]) 76 | challenge = relationship(ContainerChallengeModel, foreign_keys=[challenge_id]) 77 | user = relationship("Users", foreign_keys=[user_id]) 78 | team = relationship("Teams", foreign_keys=[team_id]) 79 | 80 | 81 | class ContainerCheatLog(db.Model): 82 | __mapper_args__ = {"polymorphic_identity": "container_cheat_logs"} 83 | 84 | id = db.Column(db.Integer, primary_key=True, autoincrement=True) 85 | 86 | # The reused flag 87 | reused_flag = db.Column(db.Text) 88 | 89 | # Which challenge was it from? 
90 | challenge_id = db.Column( 91 | db.Integer, db.ForeignKey("challenges.id", ondelete="CASCADE") 92 | ) 93 | # We'll store the relevant relationships if needed 94 | challenge = db.relationship("ContainerChallengeModel", foreign_keys=[challenge_id]) 95 | 96 | # Original owners 97 | original_team_id = db.Column(db.Integer, db.ForeignKey("teams.id"), nullable=True) 98 | original_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True) 99 | 100 | # The second submitter who tried reusing the flag 101 | second_team_id = db.Column(db.Integer, db.ForeignKey("teams.id"), nullable=True) 102 | second_user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True) 103 | 104 | # Time of the cheating attempt 105 | timestamp = db.Column(db.Integer) 106 | 107 | # Relationship to help retrieve team/user if needed 108 | original_team = db.relationship("Teams", foreign_keys=[original_team_id]) 109 | original_user = db.relationship("Users", foreign_keys=[original_user_id]) 110 | second_team = db.relationship("Teams", foreign_keys=[second_team_id]) 111 | second_user = db.relationship("Users", foreign_keys=[second_user_id]) 112 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | docker 2 | paramiko 3 | apscheduler -------------------------------------------------------------------------------- /settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "modes": { 3 | "USERS_MODE": "users", 4 | "TEAMS_MODE": "teams" 5 | }, 6 | "plugin-info": { 7 | "id": "container", 8 | "name": "container", 9 | "templates": { 10 | "create": "/plugins/containers/assets/create.html", 11 | "update": "/plugins/containers/assets/update.html", 12 | "view": "/plugins/containers/assets/view.html" 13 | }, 14 | "scripts": { 15 | "create": "/plugins/containers/assets/create.js", 16 | "update": 
"/plugins/containers/assets/update.js", 17 | "view": "/plugins/containers/assets/view.js" 18 | }, 19 | "base_path": "/plugins/containers/assets/" 20 | }, 21 | "blueprint": { 22 | "template_folder": "templates", 23 | "static_folder": "assets" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /templates/config/container_status.html: -------------------------------------------------------------------------------- 1 |
2 |
Status
3 | 4 | Status of communication with Docker service : 5 | {% if connected %} 6 | reachable 7 | {% else %} 8 | unreachable 9 | {% endif %} 10 |
11 | 12 |
-------------------------------------------------------------------------------- /templates/container_base.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/base.html" %} 2 | 3 | {% block content %} 4 |
5 |
6 |

Containers Challenge Management

7 |
8 |
9 |
10 |
11 |
12 | 16 |
17 |
18 |
19 | {% block panel %} 20 | {% endblock %} 21 |
22 |
23 |
24 |
25 | {% endblock %} -------------------------------------------------------------------------------- /templates/container_cheat.html: -------------------------------------------------------------------------------- 1 | {% extends "container_base.html" %} 2 | 3 | {% block menu %} 4 | 7 | 10 | 13 | {% endblock %} 14 | 15 | {% block panel %} 16 | {% include "components/errors.html" %} 17 | {% if cheat_logs|length == 0 %} 18 |

No cheating events recorded yet.

19 | {% else %} 20 | 21 | 22 | 23 | 39 | 40 | 41 | {% for log in cheat_logs %} 42 | 43 | 46 | 53 | 68 | 81 | 84 | 85 | {% endfor %} 86 | 87 |
24 | Time 25 | 26 | 27 | Challenge 28 | 29 | 30 | Original Owner 31 | 32 | 33 | Second Submitter 34 | 35 | 36 | Flag 37 | 38 |
44 | {{ log.timestamp }} 45 | 47 | 52 | 54 |
55 | {% if log.original_team_id %} 56 | 57 | {{ log.original_team.name }} 58 | 59 | {% elif log.original_user_id %} 60 | 61 | {{ log.original_user.name }} 62 | 63 | {% else %} 64 | Unknown 65 | {% endif %} 66 |
67 |
69 | {% if log.second_team_id %} 70 | 71 | {{ log.second_team.name }} 72 | 73 | {% elif log.second_user_id %} 74 | 75 | {{ log.second_user.name }} 76 | 77 | {% else %} 78 | Unknown 79 | {% endif %} 80 | 82 | {{ log.reused_flag }} 83 |
88 | {% include "config/container_status.html" %} 89 | 90 | {% endif %} 91 | {% endblock %} 92 | 93 | 94 | {% block scripts %} 95 | 128 | {% endblock %} -------------------------------------------------------------------------------- /templates/container_dashboard.html: -------------------------------------------------------------------------------- 1 | {% extends "container_base.html" %} 2 | 3 | {% block menu %} 4 | 7 | 10 | 13 | {% endblock %} 14 | 15 | {% block panel %} 16 |
17 | 21 |
22 | 23 | 24 | 25 | 26 | 31 | 56 | 57 | 58 | {% if containers %} 59 | {% for c in containers %} 60 | 61 | 66 | 73 | 78 | 88 | 93 | 96 | 99 | 104 | 110 | 111 | {% endfor %} 112 | {% endif %} 113 | 114 |
27 |
  29 |
30 |
32 | Challenge 33 | 34 | 35 | Container ID 36 | 37 | 38 | Team/User 39 | 40 | 41 | Flag 42 | 43 | 44 | Created 45 | 46 | 47 | End 48 | 49 | 50 | Connect Info 51 | 52 | 53 | Destroy 54 | 55 |
62 |
  64 |
65 |
67 | 72 | 74 |
75 | {{ c.container_id[:12] }} 76 |
77 |
79 | {% if c.team == None %} 80 | 81 | {{ c.user.name }} 82 | 83 | {% else %} 84 | 85 | {{ c.team.name }} 86 | {% endif %} 87 | 89 |
90 | {{ c.flag }} 91 |
92 |
94 | {{ c.timestamp }} 95 | 97 | {{ c.expires }} 98 | 100 |
101 | {{ c.challenge.connection_type }} - {{ c.port }} 102 |
103 |
105 | 109 |
115 | {% include "config/container_status.html" %} 116 | 117 | {% endblock %} 118 | 119 | 120 | {% block scripts %} 121 | 241 | {% endblock %} -------------------------------------------------------------------------------- /templates/container_settings.html: -------------------------------------------------------------------------------- 1 | {% extends "container_base.html" %} 2 | 3 | 4 | {% block menu %} 5 | 8 | 11 | 14 | {% endblock %} 15 | 16 | {% block panel %} 17 | {% include "components/errors.html" %} 18 |
19 |
21 |
22 |
23 |
Instructions
24 |

25 | The Base URL should be the local socket address of the Docker daemon, i.e. unix://var/run/docker.sock, or it can be a remote SSH address, e.g. ssh://root@example.com. In either case, sudo will not be executed. For a local socket, the user CTFd is running as should have permissions for Docker; for SSH connections, the SSH user in the Base URL should be root or have Docker permissions. 26 |

27 | 28 |
29 | 32 | 35 |
36 | 37 |
38 | 41 | 43 |
44 |
45 | 48 | 50 |
51 |
52 | 55 | 57 |
58 |
59 | 62 | 64 |
65 |
66 | 69 | 71 |
72 | 73 |
74 | 77 | 78 |
79 |
80 |
81 |
82 | {% include "config/container_status.html" %} 83 |
84 | {% endblock %} -------------------------------------------------------------------------------- /user_routes.py: -------------------------------------------------------------------------------- 1 | import json 2 | from flask import Blueprint, request, jsonify, render_template, url_for, redirect, Flask, flash 3 | from CTFd.models import db 4 | from .models import ContainerChallengeModel, ContainerInfoModel, ContainerSettingsModel 5 | from .container_manager import ContainerManager, ContainerException 6 | from CTFd.utils.decorators import ( 7 | authed_only, 8 | admins_only, 9 | during_ctf_time_only, 10 | ratelimit, 11 | require_verified_emails, 12 | ) 13 | from .helpers import * 14 | from CTFd.utils.user import get_current_user 15 | from CTFd.utils import get_config 16 | 17 | containers_bp = Blueprint("container_user", __name__, url_prefix="/containers") 18 | 19 | container_manager = None 20 | 21 | def set_container_manager(manager): 22 | global container_manager 23 | container_manager = manager 24 | 25 | 26 | @containers_bp.route("/api/get_connect_type/", methods=["GET"]) 27 | @authed_only 28 | @during_ctf_time_only 29 | @require_verified_emails 30 | @ratelimit(method="GET", limit=15, interval=60) 31 | def get_connect_type(challenge_id): 32 | try: 33 | return connect_type(challenge_id) 34 | except ContainerException as err: 35 | return {"error": str(err)}, 500 36 | 37 | @containers_bp.route("/api/view_info", methods=["POST"]) 38 | @authed_only 39 | @during_ctf_time_only 40 | @require_verified_emails 41 | @ratelimit(method="POST", limit=15, interval=60) 42 | def route_view_info(): 43 | try: 44 | validate_request(request.json, ["chal_id"]) 45 | xid = get_current_user_or_team() 46 | return view_container_info(container_manager, request.json.get("chal_id"), xid, is_team_mode()) 47 | except ValueError as err: 48 | return {"error": str(err)}, 400 49 | 50 | @containers_bp.route("/api/request", methods=["POST"]) 51 | @authed_only 52 | @during_ctf_time_only 53 | 
@require_verified_emails 54 | @ratelimit(method="POST", limit=6, interval=60) 55 | def route_request_container(): 56 | try: 57 | validate_request(request.json, ["chal_id"]) 58 | xid = get_current_user_or_team() 59 | return create_container(container_manager, request.json.get("chal_id"), xid, is_team_mode()) 60 | except ValueError as err: 61 | return {"error": str(err)}, 400 62 | 63 | @containers_bp.route("/api/renew", methods=["POST"]) 64 | @authed_only 65 | @during_ctf_time_only 66 | @require_verified_emails 67 | @ratelimit(method="POST", limit=6, interval=60) 68 | def route_renew_container(): 69 | try: 70 | validate_request(request.json, ["chal_id"]) 71 | xid = get_current_user_or_team() 72 | return renew_container(container_manager, request.json.get("chal_id"), xid, is_team_mode()) 73 | except ValueError as err: 74 | return {"error": str(err)}, 400 75 | 76 | @containers_bp.route("/api/stop", methods=["POST"]) 77 | @authed_only 78 | @during_ctf_time_only 79 | @require_verified_emails 80 | @ratelimit(method="POST", limit=10, interval=60) 81 | def route_stop_container(): 82 | try: 83 | validate_request(request.json, ["chal_id"]) 84 | xid = get_current_user_or_team() 85 | running_container = ContainerInfoModel.query.filter_by( 86 | challenge_id=request.json.get("chal_id"), 87 | team_id=xid if is_team_mode() else None, 88 | user_id=None if is_team_mode() else xid 89 | ).first() 90 | 91 | if running_container: 92 | return kill_container(container_manager, running_container.container_id) 93 | return {"error": "No container found"}, 400 94 | except ValueError as err: 95 | return {"error": str(err)}, 400 --------------------------------------------------------------------------------