├── .github ├── CODEOWNERS └── workflows │ └── ci.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CODE_OF_CONDUCT.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── pyproject.toml ├── setup.cfg ├── src └── ffpuppet │ ├── __init__.py │ ├── __main__.py │ ├── bootstrapper.py │ ├── checks.py │ ├── cmds.gdb │ ├── core.py │ ├── display.py │ ├── exceptions.py │ ├── helpers.py │ ├── job_object.py │ ├── lsof.py │ ├── main.py │ ├── minidump_parser.py │ ├── process_tree.py │ ├── profile.py │ ├── puppet_logger.py │ ├── py.typed │ ├── resources │ ├── testff.py │ └── tree.py │ ├── sanitizer_util.py │ ├── test_bootstrapper.py │ ├── test_checks.py │ ├── test_display.py │ ├── test_ffpuppet.py │ ├── test_helpers.py │ ├── test_job_object.py │ ├── test_main.py │ ├── test_minidump_parser.py │ ├── test_process_tree.py │ ├── test_profile.py │ ├── test_puppet_logger.py │ └── test_sanitizer_util.py └── tox.ini /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @MozillaSecurity/fuzzing-team-reviewers 2 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Python CI 2 | 3 | on: 4 | pull_request: 5 | branches: [master] 6 | push: 7 | branches: [master] 8 | release: 9 | types: [released] 10 | 11 | jobs: 12 | test: 13 | name: Python ${{ matrix.python-version }} (${{ matrix.platform }}) 14 | runs-on: ${{ matrix.platform }} 15 | 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | include: 20 | - python-version: "3.9" 21 | platform: ubuntu-latest 22 | toxenv: py39 23 | - python-version: "3.10" 24 | platform: ubuntu-latest 25 | toxenv: py310 26 | - python-version: "3.11" 27 | platform: ubuntu-latest 28 | toxenv: py311 29 | - python-version: "3.12" 30 | platform: ubuntu-latest 31 | toxenv: py312 32 | - python-version: "3.13" 33 | platform: ubuntu-latest 34 | toxenv: py313 35 | - python-version: 
"3.12" 36 | platform: macos-latest 37 | toxenv: py312 38 | - python-version: "3.12" 39 | platform: windows-latest 40 | toxenv: py312 41 | 42 | steps: 43 | - uses: actions/checkout@v4 44 | 45 | - name: Set up Python ${{ matrix.python-version }} 46 | uses: actions/setup-python@v5 47 | with: 48 | python-version: ${{ matrix.python-version }} 49 | 50 | - name: Install tox 51 | run: python -m pip install --upgrade tox 52 | 53 | - name: Run lint 54 | run: tox -e lint 55 | 56 | - name: Run tests 57 | run: tox -e ${{ matrix.toxenv }} 58 | 59 | - name: Run Codecov 60 | env: 61 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 62 | run: tox -e codecov 63 | 64 | publish: 65 | name: Build & Publish to PyPI 66 | if: github.event_name == 'release' 67 | needs: test 68 | runs-on: ubuntu-latest 69 | 70 | steps: 71 | - uses: actions/checkout@v4 72 | 73 | - name: Set up Python 74 | uses: actions/setup-python@v5 75 | with: 76 | python-version: "3.12" 77 | 78 | - name: Install tox 79 | run: python -m pip install --upgrade tox 80 | 81 | - name: Publish to PyPI 82 | env: 83 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 84 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 85 | run: tox -e pypi 86 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject 90 | 91 | # VSCode settings 92 | .vscode/ 93 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/astral-sh/ruff-pre-commit 3 | rev: v0.9.3 4 | hooks: 5 | - id: ruff 6 | args: [--fix] 7 | - id: ruff-format 8 | - repo: https://github.com/pre-commit/pre-commit-hooks 9 | rev: v5.0.0 10 | hooks: 11 | - id: check-added-large-files 12 | - id: check-ast 13 | - id: check-case-conflict 14 | - id: check-docstring-first 15 | - id: check-executables-have-shebangs 16 | - id: check-merge-conflict 17 | - id: check-symlinks 18 | - id: check-json 19 | - id: check-toml 20 | - id: check-yaml 21 | - id: debug-statements 22 | - id: end-of-file-fixer 23 | - id: mixed-line-ending 24 | - id: name-tests-test 25 | args: ["--django"] 26 | - id: requirements-txt-fixer 27 | - id: trailing-whitespace 28 | - repo: https://github.com/codespell-project/codespell 29 | rev: v2.4.0 30 | hooks: 31 | - id: codespell 32 | 
exclude_types: [json] 33 | - repo: meta 34 | hooks: 35 | - id: check-useless-excludes 36 | - repo: https://github.com/jorisroovers/gitlint 37 | rev: v0.19.1 38 | hooks: 39 | - id: gitlint 40 | args: [--contrib=contrib-title-conventional-commits, --ignore=body-is-missing, --msg-filename] 41 | stages: [ commit-msg ] 42 | - repo: local 43 | hooks: 44 | - id: mypy 45 | name: mypy 46 | entry: tox -e mypy -- 47 | language: system 48 | require_serial: true 49 | exclude: /test_.*\.py$ 50 | types: [python] 51 | - id: pylint 52 | name: pylint 53 | entry: tox -e pylint -- 54 | language: system 55 | require_serial: true 56 | types: [python] 57 | 58 | default_language_version: 59 | python: python3 60 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Community Participation Guidelines 2 | 3 | This repository is governed by Mozilla's code of conduct and etiquette guidelines. 4 | For more details, please read the 5 | [Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/). 6 | 7 | ## How to Report 8 | For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page. 
9 | 10 | 16 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include src/ffpuppet/cmds.gdb 2 | include src/ffpuppet/py.typed 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | FFPuppet 2 | ======== 3 | 4 | [![CI](https://github.com/MozillaSecurity/ffpuppet/actions/workflows/ci.yml/badge.svg)](https://github.com/MozillaSecurity/ffpuppet/actions/workflows/ci.yml) 5 | [![codecov](https://codecov.io/gh/MozillaSecurity/ffpuppet/branch/master/graph/badge.svg)](https://codecov.io/gh/MozillaSecurity/ffpuppet) 6 | [![Matrix](https://img.shields.io/badge/chat-%23fuzzing-green?logo=matrix)](https://matrix.to/#/#fuzzing:mozilla.org) 7 | [![PyPI](https://img.shields.io/pypi/v/ffpuppet)](https://pypi.org/project/ffpuppet) 8 | 9 | FFPuppet is a Python module that automates browser process related tasks to aid in fuzzing. Happy bug hunting! 10 | 11 | Are you [fuzzing](https://firefox-source-docs.mozilla.org/tools/fuzzing/index.html) the browser? [Grizzly](https://github.com/MozillaSecurity/grizzly) can help. 12 | 13 | Installation 14 | ------------ 15 | 16 | ##### To install the latest version from PyPI 17 | 18 | pip install ffpuppet 19 | 20 | ##### Xvfb on Linux 21 | 22 | On Linux `xvfb` can be used in order to run headless (this is not the same as Firefox's `-headless` mode). 23 | 24 | To install `xvfb` on Ubuntu run: 25 | 26 | apt-get install xvfb 27 | 28 | ##### Install minidump-stackwalk 29 | 30 | `minidump-stackwalk` is used to collect crash reports from minidump files. More 31 | information can be found [here](https://lib.rs/crates/minidump-stackwalk). 32 | 33 | Browser Builds 34 | -------------- 35 | 36 | If you are looking for builds to use with FFPuppet there are a few options. 
37 | 38 | ##### Download a build 39 | 40 | [fuzzfetch](https://github.com/MozillaSecurity/fuzzfetch) is the recommended method for obtaining builds and is also very helpful in automation. 41 | 42 | Taskcluster has a collection of many different build types for multiple platforms and branches. 43 | An index of the latest mozilla-central builds can be found [here](https://firefox-ci-tc.services.mozilla.com/tasks/index/gecko.v2.mozilla-central.latest.firefox/). 44 | 45 | ##### Create your own build 46 | 47 | If you would like to compile your own, build instructions can be found [here](https://firefox-source-docs.mozilla.org/setup/index.html). When using `minidump-stackwalk` 48 | breakpad [symbols](https://firefox-source-docs.mozilla.org/setup/building_with_debug_symbols.html#building-with-debug-symbols) are required for symbolized stacks. 49 | 50 | Usage 51 | ----- 52 | 53 | Once installed FFPuppet can be run using the following command: 54 | 55 | ffpuppet 56 | 57 | ##### Replaying a test case 58 | 59 | ffpuppet -p -d -u 60 | 61 | This will open the provided test case file in Firefox using the provided prefs.js file. Any log data (stderr, stdout, ASan logs... etc) will be dumped to the console if a failure is detected. [Grizzly Replay](https://github.com/MozillaSecurity/grizzly/wiki/Grizzly-Replay) is recommended for replaying test cases. 62 | 63 | ##### Prefs.js files 64 | 65 | prefs.js files that can be used for fuzzing or other automated testing can be generated with [PrefPicker](https://github.com/MozillaSecurity/prefpicker). 
66 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools >= 43", "wheel", "setuptools_scm[toml] >= 3.4"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.coverage.run] 6 | omit = [ 7 | "*/setup.py", 8 | "*/__main__.py", 9 | "*/test_*", 10 | "*/build/*", 11 | "*/dist/*", 12 | "*/resources/*", 13 | "*/.tox/*", 14 | "*/.egg/*", 15 | ] 16 | 17 | [tool.coverage.report] 18 | exclude_lines = [ 19 | "except ImportError:", 20 | "if __name__ == .__main__.:", 21 | "if TYPE_CHECKING:", 22 | "pragma: no cover", 23 | ] 24 | 25 | [tool.mypy] 26 | ignore_missing_imports = true 27 | strict = true 28 | show_error_codes = true 29 | 30 | [tool.pylint.format] 31 | max-line-length = 88 32 | 33 | [tool.pylint.messages_control] 34 | disable = [ 35 | "duplicate-code", 36 | "fixme", 37 | "too-few-public-methods", 38 | "too-many-arguments", 39 | "too-many-branches", 40 | "too-many-instance-attributes", 41 | "too-many-lines", 42 | "too-many-locals", 43 | "too-many-nested-blocks", 44 | "too-many-positional-arguments", 45 | "too-many-statements", 46 | ] 47 | 48 | [tool.pylint.typecheck] 49 | ignored-modules = ["pytest"] 50 | 51 | [tool.pytest.ini_options] 52 | log_level = "DEBUG" 53 | 54 | [tool.ruff] 55 | fix = true 56 | target-version = "py39" 57 | 58 | [tool.ruff.lint] 59 | select = [ 60 | # flake8-comprehensions 61 | "C4", 62 | # pycodestyle 63 | "E", 64 | # Pyflakes 65 | "F", 66 | # Flynt 67 | "FLY", 68 | # isort 69 | "I", 70 | # Perflint 71 | "PERF", 72 | # Ruff-specific rules 73 | "RUF", 74 | # flake8-simplify 75 | "SIM", 76 | # flake8-type-checking 77 | "TCH", 78 | # pyupgrade 79 | "UP", 80 | # pycodestyle 81 | "W", 82 | ] 83 | 84 | [tool.setuptools_scm] 85 | -------------------------------------------------------------------------------- /setup.cfg: 
-------------------------------------------------------------------------------- 1 | [metadata] 2 | author = Tyson Smith 3 | author_email = twsmith@mozilla.com 4 | classifiers = 5 | Intended Audience :: Developers 6 | License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) 7 | Programming Language :: Python :: 3 8 | Topic :: Software Development :: Testing 9 | description = A Python module that aids in the automation of Firefox at the process level 10 | keywords = automation firefox fuzz fuzzing security test testing 11 | license = MPL 2.0 12 | long_description = file: README.md 13 | long_description_content_type = text/markdown 14 | maintainer = Mozilla Fuzzing Team 15 | maintainer_email = fuzzing@mozilla.com 16 | name = ffpuppet 17 | url = https://github.com/MozillaSecurity/ffpuppet 18 | 19 | [options] 20 | include_package_data = True 21 | install_requires = 22 | psutil >= 5.9.0 23 | xvfbwrapper >= 0.2.9; sys_platform == "linux" 24 | package_dir = 25 | = src 26 | packages = 27 | ffpuppet 28 | python_requires = >=3.9 29 | zip_safe = False 30 | 31 | [options.entry_points] 32 | console_scripts = 33 | ffpuppet = ffpuppet.main:main 34 | 35 | [options.extras_require] 36 | dev = 37 | pre-commit 38 | tox 39 | -------------------------------------------------------------------------------- /src/ffpuppet/__init__.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this 3 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """FFPuppet module""" 5 | 6 | from .core import Debugger, FFPuppet, Reason 7 | from .display import DisplayMode 8 | from .exceptions import ( 9 | BrowserExecutionError, 10 | BrowserTerminatedError, 11 | BrowserTimeoutError, 12 | LaunchError, 13 | ) 14 | from .sanitizer_util import SanitizerOptions 15 | 16 | __all__ = ( 17 | "BrowserExecutionError", 18 | "BrowserTerminatedError", 19 | "BrowserTimeoutError", 20 | "Debugger", 21 | "DisplayMode", 22 | "FFPuppet", 23 | "LaunchError", 24 | "Reason", 25 | "SanitizerOptions", 26 | ) 27 | __author__ = "Tyson Smith" 28 | -------------------------------------------------------------------------------- /src/ffpuppet/__main__.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this 3 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 4 | """FFPuppet module main""" 5 | 6 | from .main import main 7 | 8 | main() 9 | -------------------------------------------------------------------------------- /src/ffpuppet/bootstrapper.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this 3 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """ffpuppet bootstrapper module""" 5 | 6 | from __future__ import annotations 7 | 8 | from logging import getLogger 9 | from select import select 10 | from socket import SO_REUSEADDR, SOL_SOCKET, socket 11 | from time import perf_counter, sleep 12 | from typing import TYPE_CHECKING, Callable 13 | 14 | # as of python 3.10 socket.timeout was made an alias of TimeoutError 15 | # pylint: disable=ungrouped-imports,wrong-import-order 16 | from socket import timeout as socket_timeout # isort: skip 17 | 18 | from .exceptions import BrowserTerminatedError, BrowserTimeoutError, LaunchError 19 | 20 | if TYPE_CHECKING: 21 | from collections.abc import Iterable 22 | 23 | LOG = getLogger(__name__) 24 | 25 | __author__ = "Tyson Smith" 26 | 27 | 28 | class Bootstrapper: # pylint: disable=missing-docstring 29 | # see: searchfox.org/mozilla-central/source/netwerk/base/nsIOService.cpp 30 | # include ports above 1023 31 | BLOCKED_PORTS = frozenset( 32 | ( 33 | 1719, 34 | 1720, 35 | 1723, 36 | 2049, 37 | 3659, 38 | 4045, 39 | 5060, 40 | 5061, 41 | 6000, 42 | 6566, 43 | 6665, 44 | 6666, 45 | 6667, 46 | 6668, 47 | 6669, 48 | 6697, 49 | 10080, 50 | ) 51 | ) 52 | # receive buffer size 53 | BUF_SIZE = 4096 54 | # duration of initial blocking socket operations 55 | POLL_WAIT = 1.0 56 | 57 | __slots__ = ("_socket",) 58 | 59 | def __init__(self, sock: socket) -> None: 60 | self._socket = sock 61 | 62 | def __enter__(self) -> Bootstrapper: 63 | return self 64 | 65 | def __exit__(self, *exc: object) -> None: 66 | self.close() 67 | 68 | @classmethod 69 | def check_port(cls, value: int) -> bool: 70 | """Verify port value is in valid range. 71 | 72 | Args: 73 | None 74 | 75 | Returns: 76 | bool 77 | """ 78 | return value == 0 or 1024 <= value <= 65535 79 | 80 | def close(self) -> None: 81 | """Close listening socket. 
82 | 83 | Args: 84 | None 85 | 86 | Returns: 87 | None 88 | """ 89 | self._socket.close() 90 | 91 | @classmethod 92 | def create(cls, attempts: int = 50, port: int = 0) -> Bootstrapper: 93 | """Create a Bootstrapper. 94 | 95 | Args: 96 | attempts: Number of times to attempt to bind. 97 | port: Port to use. Use 0 for system select. 98 | 99 | Returns: 100 | Bootstrapper. 101 | """ 102 | sock = cls.create_socket(attempts=attempts, port=port) 103 | if sock is None: 104 | raise LaunchError("Could not find available port") 105 | return cls(sock) 106 | 107 | @classmethod 108 | def create_socket( 109 | cls, 110 | attempts: int = 50, 111 | blocked: Iterable[int] | None = BLOCKED_PORTS, 112 | port: int = 0, 113 | ) -> socket | None: 114 | """Create a listening socket. 115 | 116 | Args: 117 | attempts: Number of times to attempt to bind. 118 | blocked: Ports that cannot be used. 119 | port: Port to use. Use 0 for system select. 120 | 121 | Returns: 122 | A listening socket. 123 | """ 124 | assert attempts > 0 125 | if not cls.check_port(port): 126 | LOG.debug("requested invalid port: %d", port) 127 | return None 128 | if blocked and port in blocked: 129 | LOG.debug("requested blocked port: %d", port) 130 | return None 131 | for _ in range(attempts): 132 | sock = socket() 133 | sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) 134 | try: 135 | sock.bind(("127.0.0.1", port)) 136 | sock.listen() 137 | except (OSError, PermissionError) as exc: 138 | LOG.debug("%s: %s", type(exc).__name__, exc) 139 | sock.close() 140 | sleep(0.1) 141 | continue 142 | # avoid blocked ports 143 | if blocked and sock.getsockname()[1] in blocked: 144 | LOG.debug("bound to blocked port, retrying...") 145 | sock.close() 146 | continue 147 | break 148 | else: 149 | return None 150 | return sock 151 | 152 | @property 153 | def location(self) -> str: 154 | """Location in the format of 'http://127.0.0.1:#'. 155 | 156 | Args: 157 | None 158 | 159 | Returns: 160 | Location. 
161 | """ 162 | return f"http://127.0.0.1:{self.port}" 163 | 164 | @property 165 | def port(self) -> int: 166 | """Listening socket port number. 167 | 168 | Args: 169 | None 170 | 171 | Returns: 172 | Port number. 173 | """ 174 | return int(self._socket.getsockname()[1]) 175 | 176 | def wait( 177 | self, 178 | cb_continue: Callable[[], bool], 179 | timeout: float = 60, 180 | url: str | None = None, 181 | ) -> None: 182 | """Wait for browser connection, read request and send response. 183 | 184 | Args: 185 | cb_continue: Callback that communicates browser process health. 186 | timeout: Amount of time wait before raising BrowserTimeoutError. 187 | url: Location to redirect to. 188 | 189 | Returns: 190 | None 191 | """ 192 | assert timeout >= 0 193 | start_time = perf_counter() 194 | time_limit = start_time + timeout 195 | conn: socket | None = None 196 | try: 197 | LOG.debug("waiting for browser connection...") 198 | while conn is None: 199 | readable, _, _ = select([self._socket], (), (), self.POLL_WAIT) 200 | if self._socket not in readable: 201 | # no connections ready for reading 202 | if not cb_continue(): 203 | raise BrowserTerminatedError( 204 | "Failure waiting for browser connection" 205 | ) 206 | if perf_counter() >= time_limit: 207 | raise BrowserTimeoutError( 208 | "Timeout waiting for browser connection" 209 | ) 210 | continue 211 | conn, _ = self._socket.accept() 212 | conn.settimeout(1) 213 | count_recv = 0 214 | total_recv = 0 215 | LOG.debug("waiting for browser request...") 216 | while True: 217 | try: 218 | count_recv = len(conn.recv(self.BUF_SIZE)) 219 | total_recv += count_recv 220 | except socket_timeout: 221 | # use -1 to indicate timeout 222 | count_recv = -1 223 | if count_recv == self.BUF_SIZE: 224 | # check if there is more to read 225 | continue 226 | if total_recv: 227 | LOG.debug("request size: %d bytes(s)", total_recv) 228 | break 229 | if not cb_continue(): 230 | raise BrowserTerminatedError("Failure waiting for request") 231 | if 
perf_counter() >= time_limit: 232 | raise BrowserTimeoutError("Timeout waiting for request") 233 | if count_recv == 0: 234 | LOG.debug("connection failed, waiting for next connection...") 235 | conn.close() 236 | conn = None 237 | break 238 | 239 | # build response 240 | if url is None: 241 | resp = "HTTP/1.1 204 No Content\r\nConnection: close\r\n\r\n" 242 | else: 243 | resp = ( 244 | "HTTP/1.1 301 Moved Permanently\r\n" 245 | f"Location: {url}\r\n" 246 | "Connection: close\r\n\r\n" 247 | ) 248 | # set timeout to match remaining time 249 | conn.settimeout(max(time_limit - perf_counter(), 1)) 250 | LOG.debug("sending response (redirect: %s)", url) 251 | try: 252 | conn.sendall(resp.encode("ascii")) 253 | except socket_timeout: 254 | resp_timeout = True 255 | else: 256 | resp_timeout = False 257 | if not cb_continue(): 258 | raise BrowserTerminatedError("Failure during browser startup") 259 | if resp_timeout: 260 | raise BrowserTimeoutError("Timeout sending response") 261 | LOG.debug("bootstrap complete (%0.1fs)", perf_counter() - start_time) 262 | except OSError as exc: # pragma: no cover 263 | raise LaunchError(f"Error attempting to launch browser: {exc}") from exc 264 | finally: 265 | if conn is not None: 266 | conn.close() 267 | -------------------------------------------------------------------------------- /src/ffpuppet/checks.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this 3 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """ffpuppet checks module""" 5 | 6 | from __future__ import annotations 7 | 8 | from abc import ABC, abstractmethod 9 | from os import SEEK_SET, stat 10 | from platform import system 11 | from typing import IO, TYPE_CHECKING, Callable 12 | 13 | from psutil import AccessDenied, NoSuchProcess, Process 14 | 15 | if TYPE_CHECKING: 16 | from collections.abc import Iterable 17 | from re import Pattern 18 | 19 | __author__ = "Tyson Smith" 20 | __credits__ = ["Tyson Smith"] 21 | 22 | 23 | class _LogContentsCheckState: 24 | __slots__ = ("buffer", "fname", "offset") 25 | 26 | def __init__(self, fname: str) -> None: 27 | self.fname: str = fname 28 | self.buffer: bytes = b"" 29 | self.offset: int = 0 30 | 31 | 32 | class Check(ABC): 33 | """ 34 | Check base class 35 | """ 36 | 37 | name: str 38 | 39 | __slots__ = ("message", "name") 40 | 41 | def __init__(self) -> None: 42 | self.message: str | None = None 43 | 44 | @abstractmethod 45 | def check(self) -> bool: 46 | """ 47 | Implement a check that returns True when the abort conditions are met. 48 | """ 49 | 50 | def dump_log(self, dst_fp: IO[bytes]) -> None: 51 | """Write log contents to file. 52 | 53 | Args: 54 | dst_fp: Open file object to write logs to. 55 | 56 | Returns: 57 | None 58 | """ 59 | if self.message is not None: 60 | dst_fp.write(self.message.encode(errors="ignore")) 61 | 62 | 63 | class CheckLogContents(Check): 64 | """ 65 | CheckLogContents will search through the browser logs for a token. 
66 | """ 67 | 68 | buf_limit = 1024 # 1KB 69 | chunk_size = 0x20000 # 128KB 70 | name = "log_contents" 71 | 72 | __slots__ = ("logs", "tokens") 73 | 74 | def __init__( 75 | self, log_files: Iterable[str], search_tokens: Iterable[Pattern[str]] 76 | ) -> None: 77 | assert log_files, "log_files is empty" 78 | assert search_tokens, "search_tokens is empty" 79 | super().__init__() 80 | self.logs: list[_LogContentsCheckState] = [] 81 | for log_file in log_files: 82 | self.logs.append(_LogContentsCheckState(log_file)) 83 | self.tokens = search_tokens 84 | 85 | def check(self) -> bool: 86 | """Collect log contents for tokens. 87 | 88 | Args: 89 | None 90 | 91 | Returns: 92 | True if a token is located otherwise False. 93 | """ 94 | for log in self.logs: 95 | try: 96 | # check if file has new data 97 | if stat(log.fname).st_size <= log.offset: 98 | continue 99 | with open(log.fname, "rb") as scan_fp: 100 | # only collect new data 101 | scan_fp.seek(log.offset, SEEK_SET) 102 | # read and prepend chunk of previously read data 103 | data = b"".join((log.buffer, scan_fp.read(self.chunk_size))) 104 | log.offset = scan_fp.tell() 105 | except OSError: 106 | # log does not exist 107 | continue 108 | for token in self.tokens: 109 | match = token.search(data.decode(errors="replace")) 110 | if match: 111 | self.message = f"TOKEN_LOCATED: {match.group()}\n" 112 | return True 113 | log.buffer = data[-1 * self.buf_limit :] 114 | return False 115 | 116 | 117 | class CheckLogSize(Check): 118 | """ 119 | CheckLogSize will check the total file size of the browser logs. 120 | """ 121 | 122 | name = "log_size" 123 | 124 | __slots__ = ("limit", "stderr_file", "stdout_file") 125 | 126 | def __init__(self, limit: int, stderr_file: str, stdout_file: str) -> None: 127 | super().__init__() 128 | self.limit = limit 129 | self.stderr_file = stderr_file 130 | self.stdout_file = stdout_file 131 | 132 | def check(self) -> bool: 133 | """Collect log disk usage info and compare with limit. 
134 | 135 | Args: 136 | None 137 | 138 | Returns: 139 | True if the total usage is greater than or equal to 140 | self.limit otherwise False. 141 | """ 142 | err_size = stat(self.stderr_file).st_size 143 | out_size = stat(self.stdout_file).st_size 144 | total_size = err_size + out_size 145 | if total_size > self.limit: 146 | self.message = ( 147 | f"LOG_SIZE_LIMIT_EXCEEDED: {total_size:,}\n" 148 | f"Limit: {self.limit:,} ({self.limit / 1_048_576}MB)\n" 149 | f"stderr log: {err_size:,} ({err_size / 1_048_576}MB)\n" 150 | f"stdout log: {out_size:,} ({out_size / 1_048_576}MB)\n" 151 | ) 152 | return self.message is not None 153 | 154 | 155 | class CheckMemoryUsage(Check): 156 | """ 157 | CheckMemoryUsage is used to check the amount of memory used by the browser 158 | process and its descendants against a defined limit. 159 | """ 160 | 161 | name = "memory_usage" 162 | 163 | __slots__ = ("_get_procs", "_is_linux", "limit", "pid") 164 | 165 | def __init__( 166 | self, pid: int, limit: int, get_procs_cb: Callable[[], list[Process]] 167 | ) -> None: 168 | super().__init__() 169 | self._get_procs = get_procs_cb 170 | self._is_linux = system() == "Linux" 171 | self.limit = limit 172 | self.pid = pid 173 | 174 | def check(self) -> bool: 175 | """Use psutil to collect memory usage info and compare with limit. 176 | 177 | Args: 178 | None 179 | 180 | Returns: 181 | True if the total usage is greater than or equal to 182 | self.limit otherwise False. 
183 | """ 184 | largest_shared = 0 185 | proc_info: list[tuple[int, int]] = [] 186 | total_usage = 0 187 | for proc in self._get_procs(): 188 | try: 189 | mem_info = proc.memory_info() 190 | except (AccessDenied, NoSuchProcess): # pragma: no cover 191 | continue 192 | cur_usage: int = mem_info.rss 193 | if self._is_linux: 194 | # on Linux use "rss - shared" as the current usage 195 | cur_usage -= mem_info.shared 196 | # track largest shared amount to be appended to the grand total 197 | # this is not perfect but it is close enough for this 198 | largest_shared = max(largest_shared, mem_info.shared) 199 | total_usage += cur_usage 200 | proc_info.append((proc.pid, cur_usage)) 201 | total_usage += largest_shared 202 | if total_usage >= self.limit: 203 | msg = [ 204 | f"MEMORY_LIMIT_EXCEEDED: {total_usage:,}\n", 205 | f"Limit: {self.limit:,} ({self.limit / 1_048_576}MB)\n", 206 | f"Parent PID: {self.pid}\n", 207 | ] 208 | for pid, usage in proc_info: 209 | msg.append(f"-> PID {pid: 6}: {usage: 14,}\n") 210 | self.message = "".join(msg) 211 | return self.message is not None 212 | -------------------------------------------------------------------------------- /src/ffpuppet/cmds.gdb: -------------------------------------------------------------------------------- 1 | define quit_with_code 2 | if $_siginfo 3 | quit 128+$_siginfo.si_signo 4 | else 5 | quit $_exitcode 6 | end 7 | end 8 | 9 | handle SIG38 nostop noprint pass 10 | set breakpoint pending on 11 | set confirm off 12 | set prompt 13 | maint set internal-error quit yes 14 | maint set internal-error corefile no 15 | set backtrace limit 25 16 | set print elements 10 17 | set python print-stack full 18 | set trace-commands on 19 | -------------------------------------------------------------------------------- /src/ffpuppet/display.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. 
"""ffpuppet display module"""

from __future__ import annotations

from contextlib import suppress
from enum import Enum, auto, unique
from logging import getLogger
from platform import system
from types import MappingProxyType
from typing import TYPE_CHECKING

# xvfbwrapper is an optional (Linux only) dependency, a missing module is
# reported when XvfbDisplay is used
with suppress(ImportError):
    from xvfbwrapper import Xvfb

if TYPE_CHECKING:
    from collections.abc import Mapping, Sequence


LOG = getLogger(__name__)


@unique
class DisplayMode(Enum):
    """Supported display modes."""

    DEFAULT = auto()
    HEADLESS = auto()
    # Xvfb mode is only available on Linux
    if system() == "Linux":
        XVFB = auto()


class Display:
    """Default display mode.

    Attributes:
        args: Extra command line arguments to pass to Firefox.
        env: Extra environment variables to set.
    """

    __slots__ = ("args", "env")

    def __init__(self) -> None:
        self.args: Sequence[str] = ()
        self.env: Mapping[str, str] = MappingProxyType({})

    def close(self) -> None:
        """Perform any required operations to shutdown and cleanup.

        Args:
            None

        Returns:
            None
        """


class HeadlessDisplay(Display):
    """Headless display mode."""

    def __init__(self) -> None:
        super().__init__()
        self.args = ("-headless",)


class XvfbDisplay(Display):
    """Xvfb display mode."""

    __slots__ = ("_xvfb",)

    def __init__(self) -> None:
        """Start a virtual framebuffer via xvfbwrapper.

        Raises:
            NameError: xvfbwrapper is not installed.
        """
        super().__init__()
        # MOZ_ENABLE_WAYLAND=0 disables Wayland support in the browser
        self.env = MappingProxyType({"MOZ_ENABLE_WAYLAND": "0"})
        try:
            self._xvfb: Xvfb | None = Xvfb(width=1280, height=1024)
        except NameError:
            LOG.error("Missing xvfbwrapper")
            raise
        self._xvfb.start()

    def close(self) -> None:
        """Stop the virtual framebuffer.

        Args:
            None

        Returns:
            None
        """
        if self._xvfb is not None:
            self._xvfb.stop()
            self._xvfb = None


_displays: dict[DisplayMode, type[Display]] = {
    DisplayMode.DEFAULT: Display,
    DisplayMode.HEADLESS: HeadlessDisplay,
}
if system() == "Linux":
    _displays[DisplayMode.XVFB] = XvfbDisplay

# mapping of DisplayMode to Display implementation
DISPLAYS = MappingProxyType(_displays)
23 | """ 24 | 25 | 26 | class BrowserTimeoutError(LaunchError): 27 | """ 28 | Raised when the browser process appears to hang during launch. 29 | """ 30 | 31 | 32 | class InvalidPrefs(LaunchError): 33 | """ 34 | Raised when an invalid prefs.js file is used. 35 | """ 36 | 37 | 38 | class TerminateError(Exception): 39 | """ 40 | Raised when attempts to terminate the browser fail. 41 | """ 42 | -------------------------------------------------------------------------------- /src/ffpuppet/helpers.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this 3 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 4 | """ffpuppet helper utilities""" 5 | 6 | from __future__ import annotations 7 | 8 | import sys 9 | from contextlib import suppress 10 | from logging import getLogger 11 | from os import environ 12 | from pathlib import Path 13 | from subprocess import STDOUT, CalledProcessError, check_output 14 | from time import perf_counter, sleep 15 | from typing import TYPE_CHECKING 16 | 17 | from psutil import Process, process_iter 18 | 19 | from .sanitizer_util import SanitizerOptions 20 | 21 | if TYPE_CHECKING: 22 | from collections.abc import Generator, Iterable, Mapping 23 | 24 | if sys.platform == "win32": 25 | from .lsof import pids_by_file 26 | 27 | IS_WINDOWS = True 28 | else: 29 | IS_WINDOWS = False 30 | 31 | CERTUTIL = "certutil.exe" if IS_WINDOWS else "certutil" 32 | LLVM_SYMBOLIZER = "llvm-symbolizer.exe" if IS_WINDOWS else "llvm-symbolizer" 33 | LOG = getLogger(__name__) 34 | 35 | __author__ = "Tyson Smith" 36 | 37 | 38 | def _configure_sanitizers( 39 | orig_env: Mapping[str, str], target_path: Path, log_path: Path 40 | ) -> dict[str, str]: 41 | """Copy environment and update default values in *SAN_OPTIONS entries. 
42 | These values are only updated if they are not provided, with the exception of 43 | 'log_path'. 'log_path' is used by FFPuppet to detect results. 44 | 45 | Args: 46 | env: Current environment. 47 | target_path: Directory containing browser binary. 48 | log_path: Location to write sanitizer logs to. 49 | 50 | Returns: 51 | Environment with *SAN_OPTIONS defaults set. 52 | """ 53 | env = dict(orig_env) 54 | # https://github.com/google/sanitizers/wiki/SanitizerCommonFlags 55 | common_flags = [ 56 | ("abort_on_error", "false"), 57 | ("allocator_may_return_null", "true"), 58 | ("disable_coredump", "true"), 59 | ("exitcode", "77"), # use unique exitcode to help identify missed reports 60 | ("handle_abort", "true"), # if true, abort_on_error=false to prevent hangs 61 | ("handle_sigbus", "true"), # set to be safe 62 | ("handle_sigfpe", "true"), # set to be safe 63 | ("handle_sigill", "true"), # set to be safe 64 | ("symbolize", "true"), 65 | ] 66 | # set llvm-symbolizer path 67 | # *SAN_OPTIONS=external_symbolizer_path takes priority if it is defined in env 68 | llvm_sym = Path(env.get("ASAN_SYMBOLIZER_PATH") or target_path / LLVM_SYMBOLIZER) 69 | if llvm_sym.is_file(): 70 | # add *SAN_OPTIONS=external_symbolizer_path 71 | common_flags.append(("external_symbolizer_path", f"'{llvm_sym}'")) 72 | else: 73 | # assume system llvm-symbolizer will be used 74 | LOG.debug("external llvm-symbolizer not found (%s)", llvm_sym) 75 | 76 | # setup Address Sanitizer options ONLY if not set manually in environment 77 | # https://github.com/google/sanitizers/wiki/AddressSanitizerFlags 78 | asan_config = SanitizerOptions(env.get("ASAN_OPTIONS")) 79 | assert asan_config.check_path("suppressions"), "missing suppressions file" 80 | for flag in common_flags: 81 | asan_config.add(*flag) 82 | # different defaults per OS 83 | # asan_config.add("alloc_dealloc_mismatch", "false") 84 | asan_config.add("check_initialization_order", "true") 85 | # stack UAR detection works as of clang 18 86 | 
asan_config.add("detect_stack_use_after_return", "true") 87 | # asan_config.add("detect_stack_use_after_scope", "true") 88 | asan_config.add("detect_invalid_pointer_pairs", "1") 89 | asan_config.add("detect_leaks", "false") 90 | # hard_rss_limit_mb requires background thread so only works on Linux for now... 91 | # see https://github.com/llvm/llvm-project/blob/main/compiler-rt/lib/ 92 | # sanitizer_common/sanitizer_common_libcdep.cpp#L116 93 | asan_config.add("hard_rss_limit_mb", "12288") 94 | # log_path is required for FFPuppet logging to function properly 95 | if "log_path" in asan_config: 96 | LOG.warning( 97 | "ASAN_OPTIONS=log_path is used internally and cannot be set externally" 98 | ) 99 | asan_config.add("log_path", f"'{log_path}'", overwrite=True) 100 | # This is an experimental feature added in Bug 1792757 101 | asan_config.add("rss_limit_heap_profile", "true") 102 | asan_config.add("sleep_before_dying", "0") 103 | asan_config.add("strict_init_order", "true") 104 | # temporarily revert to default (false) until https://bugzil.la/1767068 is fixed 105 | # asan_config.add("strict_string_checks", "true") 106 | env["ASAN_OPTIONS"] = str(asan_config) 107 | 108 | # setup Leak Sanitizer options ONLY if not set manually in environment 109 | # https://github.com/google/sanitizers/wiki/AddressSanitizerLeakSanitizer 110 | lsan_config = SanitizerOptions(env.get("LSAN_OPTIONS")) 111 | assert lsan_config.check_path("suppressions"), "missing suppressions file" 112 | lsan_config.add("max_leaks", "1") 113 | lsan_config.add("print_suppressions", "false") 114 | # helpful with rr/Pernosco sessions 115 | lsan_config.add("report_objects", "1") 116 | env["LSAN_OPTIONS"] = str(lsan_config) 117 | 118 | # setup Thread Sanitizer options ONLY if not set manually in environment 119 | tsan_config = SanitizerOptions(env.get("TSAN_OPTIONS")) 120 | assert tsan_config.check_path("suppressions"), "missing suppressions file" 121 | tsan_config.add("halt_on_error", "1") 122 | # 
hard_rss_limit_mb requires background thread so only works on Linux for now... 123 | # see https://github.com/llvm/llvm-project/blob/main/compiler-rt/lib/ 124 | # sanitizer_common/sanitizer_common_libcdep.cpp#L116 125 | tsan_config.add("hard_rss_limit_mb", "12288") 126 | if "log_path" in tsan_config: 127 | LOG.warning( 128 | "TSAN_OPTIONS=log_path is used internally and cannot be set externally" 129 | ) 130 | tsan_config.add("log_path", f"'{log_path}'", overwrite=True) 131 | # This is an experimental feature added in Bug 1792757 132 | tsan_config.add("rss_limit_heap_profile", "true") 133 | env["TSAN_OPTIONS"] = str(tsan_config) 134 | 135 | # setup Undefined Behavior Sanitizer options ONLY if not set manually in environment 136 | ubsan_config = SanitizerOptions(env.get("UBSAN_OPTIONS")) 137 | assert ubsan_config.check_path("suppressions"), "missing suppressions file" 138 | for flag in common_flags: 139 | ubsan_config.add(*flag) 140 | if "log_path" in ubsan_config: 141 | LOG.warning( 142 | "UBSAN_OPTIONS=log_path is used internally and cannot be set externally" 143 | ) 144 | ubsan_config.add("log_path", f"'{log_path}'", overwrite=True) 145 | ubsan_config.add("print_stacktrace", "1") 146 | ubsan_config.add("report_error_type", "1") 147 | env["UBSAN_OPTIONS"] = str(ubsan_config) 148 | 149 | return env 150 | 151 | 152 | def certutil_available(certutil: str) -> bool: 153 | """Check if NSS certutil is available. 154 | 155 | Args: 156 | certutil: certutil location. 157 | 158 | Returns: 159 | True if certutil is available for use otherwise False. 
160 | """ 161 | try: 162 | check_output([certutil], stderr=STDOUT, timeout=60) 163 | except CalledProcessError as exc: 164 | # there are multiple "certutil" tools and one is installed on Windows by default 165 | # check the help output to make sure we have the correct tool 166 | if ( 167 | exc.output 168 | and b"Utility to manipulate NSS certificate databases" in exc.output 169 | ): 170 | return True 171 | except OSError as exc: 172 | LOG.debug(str(exc)) 173 | LOG.debug("'%s' is not suitable for use", certutil) 174 | return False 175 | 176 | 177 | def certutil_find(browser_bin: Path | None = None) -> str: 178 | """Look for NSS certutil in known location or fallback to built-in tool. 179 | 180 | Args: 181 | browser_bin: Location of browser binary. 182 | 183 | Returns: 184 | Path to certutil tool to use. 185 | """ 186 | if browser_bin: 187 | path = browser_bin.parent / "bin" / CERTUTIL 188 | if path.is_file(): 189 | return str(path.resolve()) 190 | return CERTUTIL 191 | 192 | 193 | def files_in_use(files: Iterable[Path]) -> Generator[tuple[Path, int, str]]: 194 | """Check if any of the given files are open. 195 | WARNING: This can be slow on Windows. 196 | 197 | Args: 198 | files: Files to check. 199 | 200 | Yields: 201 | Path of file, process ID and process name. 202 | """ 203 | # only check existing file 204 | files = tuple(x for x in files if x.exists()) 205 | if files: 206 | # WARNING: Process.open_files() has issues on Windows! 
207 | # https://psutil.readthedocs.io/en/latest/#psutil.Process.open_files 208 | # use an alternative implementation instead 209 | if sys.platform == "win32": 210 | for open_file, pids in pids_by_file().items(): 211 | for check_file in files: 212 | # samefile() can raise if either file cannot be accessed 213 | # this is triggered on Windows if a file is missing 214 | with suppress(OSError): 215 | if check_file.samefile(open_file): 216 | for pid in pids: 217 | yield open_file, pid, Process(pid).name() 218 | else: 219 | for proc in process_iter(["pid", "name", "open_files"]): 220 | if not proc.info["open_files"]: 221 | continue 222 | for open_file in (Path(x.path) for x in proc.info["open_files"]): 223 | for check_file in files: 224 | # samefile() can raise if either file cannot be accessed 225 | with suppress(OSError): 226 | if check_file.samefile(open_file): 227 | yield open_file, proc.info["pid"], proc.info["name"] 228 | 229 | 230 | def prepare_environment( 231 | target_path: Path, 232 | sanitizer_log: Path, 233 | env_mod: Mapping[str, str | None] | None = None, 234 | ) -> dict[str, str]: 235 | """Create environment that can be used when launching the browser. 236 | 237 | Args: 238 | target_path: Directory containing the Firefox binary. 239 | sanitizer_log: Location to write sanitizer logs. Log prefix set 240 | with ASAN_OPTIONS=log_path=. 241 | env_mod: Environment modifier. Add, remove and update entries 242 | in the prepared environment. Add/update by setting 243 | value or remove entry by setting value to None. 244 | 245 | Returns: 246 | Environment to use when launching browser. 247 | """ 248 | base: dict[str, str | None] = {} 249 | env = dict(environ) 250 | 251 | # https://developer.gimp.org/api/2.0/glib/glib-running.html#G_SLICE 252 | base["G_SLICE"] = "always-malloc" 253 | base["MOZ_AUTOMATION"] = "1" 254 | base["MOZ_CC_RUN_DURING_SHUTDOWN"] = "1" 255 | # https://firefox-source-docs.mozilla.org/toolkit/crashreporter/crashreporter/ ... 
256 | # index.html#environment-variables-affecting-crash-reporting 257 | base["MOZ_CRASHREPORTER"] = "1" 258 | base["MOZ_CRASHREPORTER_NO_DELETE_DUMP"] = "1" 259 | base["MOZ_CRASHREPORTER_NO_REPORT"] = "1" 260 | # shutdown all processes when a crash is detected 261 | base["MOZ_CRASHREPORTER_SHUTDOWN"] = "1" 262 | base["MOZ_DISABLE_CONTENT_SANDBOX"] = "1" 263 | base["MOZ_DISABLE_GMP_SANDBOX"] = "1" 264 | base["MOZ_DISABLE_GPU_SANDBOX"] = "1" 265 | base["MOZ_DISABLE_RDD_SANDBOX"] = "1" 266 | base["MOZ_DISABLE_SOCKET_PROCESS_SANDBOX"] = "1" 267 | base["MOZ_DISABLE_UTILITY_SANDBOX"] = "1" 268 | base["MOZ_DISABLE_VR_SANDBOX"] = "1" 269 | base["MOZ_GDB_SLEEP"] = "0" 270 | # https://bugzilla.mozilla.org/show_bug.cgi?id=1305151 271 | # skia assertions are easily hit and mostly due to precision, disable them. 272 | base["MOZ_SKIA_DISABLE_ASSERTS"] = "1" 273 | base["RUST_BACKTRACE"] = "full" 274 | # https://developer.mozilla.org/en-US/docs/Mozilla/Debugging/XPCOM_DEBUG_BREAK 275 | base["XPCOM_DEBUG_BREAK"] = "warn" 276 | base["XRE_NO_WINDOWS_CRASH_DIALOG"] = "1" 277 | # apply environment modifications 278 | if env_mod is not None: 279 | base.update(env_mod) 280 | # environment variables to skip if previously set in environ 281 | optional = frozenset( 282 | ( 283 | "_RR_TRACE_DIR", 284 | "MOZ_CRASHREPORTER", 285 | "MOZ_CRASHREPORTER_NO_DELETE_DUMP", 286 | "MOZ_CRASHREPORTER_NO_REPORT", 287 | "MOZ_CRASHREPORTER_SHUTDOWN", 288 | "MOZ_SKIA_DISABLE_ASSERTS", 289 | "RUST_BACKTRACE", 290 | "XPCOM_DEBUG_BREAK", 291 | ) 292 | ) 293 | # merge presets and modifications 294 | for env_name, env_value in base.items(): 295 | if env_value is None: 296 | if env_name in env: 297 | LOG.debug("removing env var '%s'", env_name) 298 | del env[env_name] 299 | continue 300 | if env_name in optional and env_name in env: 301 | LOG.debug("skipping optional env var '%s'", env_name) 302 | continue 303 | env[env_name] = env_value 304 | 305 | if env.get("MOZ_CRASHREPORTER_DISABLE") == "1": 306 | 
env.pop("MOZ_CRASHREPORTER", None) 307 | env.pop("MOZ_CRASHREPORTER_NO_DELETE_DUMP", None) 308 | env.pop("MOZ_CRASHREPORTER_NO_REPORT", None) 309 | env.pop("MOZ_CRASHREPORTER_SHUTDOWN", None) 310 | 311 | env = _configure_sanitizers(env, target_path, sanitizer_log) 312 | # filter environment to avoid leaking sensitive information 313 | return {k: v for k, v in env.items() if "_SECRET" not in k} 314 | 315 | 316 | def wait_on_files( 317 | wait_files: Iterable[Path], 318 | poll_rate: float = 1.0, 319 | timeout: float = 60, 320 | ) -> bool: 321 | """Wait while specified files are in use. 322 | 323 | Args: 324 | wait_files: Files that must no longer be open by a process. 325 | poll_rate: Time in seconds to wait between checks. 326 | timeout: Maximum number of seconds to wait. 327 | 328 | Returns: 329 | True if all files were closed within given time otherwise False. 330 | """ 331 | assert poll_rate >= 0 332 | assert timeout >= 0 333 | all_closed = False 334 | poll_rate = min(poll_rate, timeout) 335 | deadline = perf_counter() + timeout 336 | while True: 337 | open_iter = files_in_use(wait_files) 338 | if deadline <= perf_counter(): 339 | LOG.debug("wait_on_files() timeout (%ds)", timeout) 340 | for path, pid, name in open_iter: 341 | LOG.debug("'%s' open by '%s' (%d)", path, name, pid) 342 | break 343 | if not any(open_iter): 344 | all_closed = True 345 | break 346 | sleep(poll_rate) 347 | return all_closed 348 | 349 | 350 | def warn_open(path: Path) -> None: 351 | """Output a message via `LOG.warning` for each file found to be open by a Process. 352 | On Windows open files cannot be removed. This can be used to help debug issues. 353 | 354 | Args: 355 | path: Directory to scan for initial files. 
356 | 357 | Returns: 358 | None 359 | """ 360 | for file_path, pid, name in files_in_use(path.iterdir()): 361 | LOG.warning("'%s' open by '%s' (%d)", file_path, name, pid) 362 | -------------------------------------------------------------------------------- /src/ffpuppet/job_object.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this 3 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 4 | """Windows Job Object management""" 5 | 6 | import ctypes 7 | import ctypes.wintypes 8 | import sys 9 | from logging import getLogger 10 | from subprocess import Handle # type: ignore[attr-defined] 11 | 12 | from psutil import Process 13 | 14 | assert sys.platform == "win32" 15 | 16 | JOB_OBJECT_EXTENDED_LIMIT_INFORMATION = 9 17 | JOB_OBJECT_LIMIT_JOB_MEMORY = 0x200 18 | JOB_OBJECT_LIMIT_PROCESS_MEMORY = 0x100 19 | 20 | THREAD_SUSPEND_RESUME = 0x0002 21 | 22 | __author__ = "Jesse Schwartzentruber" 23 | 24 | LOG = getLogger(__name__) 25 | 26 | 27 | class IOCounters(ctypes.Structure): 28 | """IOCounters""" 29 | 30 | _fields_ = ( 31 | ("read_operation_count", ctypes.c_ulonglong), 32 | ("write_operation_count", ctypes.c_ulonglong), 33 | ("other_operation_count", ctypes.c_ulonglong), 34 | ("read_transfer_count", ctypes.c_ulonglong), 35 | ("write_transfer_count", ctypes.c_ulonglong), 36 | ("other_transfer_count", ctypes.c_ulonglong), 37 | ) 38 | 39 | 40 | class JobObjectBasicLimitInformation(ctypes.Structure): 41 | """JobObjectBasicLimitInformation""" 42 | 43 | _fields_ = ( 44 | ("per_process_user_time_limit", ctypes.wintypes.LARGE_INTEGER), 45 | ("per_job_user_time_limit", ctypes.wintypes.LARGE_INTEGER), 46 | ("limit_flags", ctypes.wintypes.DWORD), 47 | ("minimum_working_set_size", ctypes.c_size_t), 48 | ("maximum_working_set_size", ctypes.c_size_t), 49 | ("active_process_limit", 
ctypes.wintypes.DWORD), 50 | ("affinity", ctypes.wintypes.PULONG), 51 | ("priority_class", ctypes.wintypes.DWORD), 52 | ("scheduling_class", ctypes.wintypes.DWORD), 53 | ) 54 | 55 | 56 | class JobObjectExtendedLimitInformation(ctypes.Structure): 57 | """JobObjectExtendedLimitInformation""" 58 | 59 | _fields_ = ( 60 | ("basic_limit_information", JobObjectBasicLimitInformation), 61 | ("io_info", IOCounters), 62 | ("process_memory_limit", ctypes.c_size_t), 63 | ("job_memory_limit", ctypes.c_size_t), 64 | ("peak_process_memory_used", ctypes.c_size_t), 65 | ("peak_job_memory_used", ctypes.c_size_t), 66 | ) 67 | 68 | 69 | def config_job_object(handle: Handle, limit: int) -> None: 70 | """Configure Windows Job object. 71 | 72 | Args: 73 | handle: Process handle to assigned to the job object. 74 | limit: Total memory limit for the job. 75 | 76 | Returns: 77 | None 78 | """ 79 | assert limit > 0 80 | kernel32 = ctypes.windll.kernel32 81 | job = Handle(kernel32.CreateJobObjectA(None, None)) 82 | try: 83 | assert kernel32.AssignProcessToJobObject(job, handle) 84 | info = JobObjectExtendedLimitInformation() 85 | info.basic_limit_information.limit_flags = JOB_OBJECT_LIMIT_JOB_MEMORY 86 | # pylint: disable=attribute-defined-outside-init 87 | info.job_memory_limit = limit 88 | assert kernel32.SetInformationJobObject( 89 | job, 90 | JOB_OBJECT_EXTENDED_LIMIT_INFORMATION, 91 | ctypes.byref(info), 92 | ctypes.sizeof(info), 93 | ) 94 | finally: 95 | job.Close() 96 | 97 | 98 | def resume_suspended_process(pid: int) -> None: 99 | """Resume a possibly suspended Windows Process. 100 | 101 | Args: 102 | pid: Process ID. 
103 | 104 | Returns: 105 | None 106 | """ 107 | kernel32 = ctypes.windll.kernel32 108 | for tinfo in Process(pid).threads(): 109 | thnd = Handle(kernel32.OpenThread(THREAD_SUSPEND_RESUME, False, tinfo.id)) 110 | try: 111 | result = kernel32.ResumeThread(thnd) 112 | LOG.debug("resuming thread %d returned %d", tinfo.id, result) 113 | assert result >= 0, f"ResumeThread for tid={tinfo.id} returned {result}" 114 | finally: 115 | thnd.Close() 116 | -------------------------------------------------------------------------------- /src/ffpuppet/lsof.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this 3 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 4 | """Windows utility to map all open files on the system to process""" 5 | 6 | from __future__ import annotations 7 | 8 | import ctypes 9 | import ctypes.wintypes 10 | import sys 11 | from pathlib import Path 12 | 13 | DUPLICATE_SAME_ACCESS = 2 14 | FILE_TYPE_DISK = 1 15 | FILE_TYPE_UNKNOWN = 0 16 | MAX_PATH = 260 17 | NO_ERROR = 0 18 | PROCESS_DUP_HANDLE = 0x40 19 | STATUS_INFO_LENGTH_MISMATCH = 0xC0000004 20 | SYSTEM_EXTENDED_HANDLE_INFORMATION_CLASS = 0x40 21 | SYSTEM_HANDLE_INFORMATION_CLASS = 0x10 22 | 23 | __author__ = "Jesse Schwartzentruber" 24 | 25 | 26 | assert sys.platform == "win32" 27 | 28 | 29 | def nt_status(status: int) -> int: 30 | """Cast a signed integer to 32-bit unsigned. 31 | 32 | Args: 33 | status: an NTSTATUS result 34 | 35 | Returns: 36 | status cast to uint32 37 | """ 38 | return status & 0xFFFFFFFF 39 | 40 | 41 | def create_winerror(function: str) -> OSError: # pragma: no cover 42 | """Create a WinError exception. 43 | 44 | Args: 45 | function: Windows API function name that generated the error. 46 | 47 | Returns: 48 | OSError representing a windows error from fall to a given function. 
49 | """ 50 | errno = ctypes.GetLastError() 51 | desc = f"{ctypes.FormatError()} ({function})" 52 | return OSError(errno, desc, None, errno) 53 | 54 | 55 | class SystemHandleTableEntryInfoEx(ctypes.Structure): 56 | """NT API Handle table entry structure""" 57 | 58 | _fields_ = ( 59 | ("Object", ctypes.c_void_p), 60 | ("UniqueProcessId", ctypes.wintypes.HANDLE), 61 | ("HandleValue", ctypes.wintypes.HANDLE), 62 | ("GrantedAccess", ctypes.c_ulong), 63 | ("CreatorBackTraceIndex", ctypes.c_ushort), 64 | ("ObjectTypeIndex", ctypes.c_ushort), 65 | ("HandleAttributes", ctypes.c_ulong), 66 | ("Reserved", ctypes.c_ulong), 67 | ) 68 | 69 | 70 | def nt_query_system_handle_information_ex() -> ctypes.Structure: 71 | """List all open handles in the system. 72 | 73 | Args: 74 | None 75 | 76 | Returns: 77 | A ctypes Structure with fields: 78 | NumberOfHandles (int) 79 | Handles (list[SystemHandleTableEntryInfoEx]) 80 | """ 81 | buf_size = 64 * 1024 82 | buf = ctypes.create_string_buffer(buf_size) 83 | ntdll = ctypes.windll.ntdll 84 | while True: 85 | status = ntdll.NtQuerySystemInformation( 86 | SYSTEM_EXTENDED_HANDLE_INFORMATION_CLASS, 87 | buf, 88 | buf_size, 89 | None, 90 | ) 91 | if nt_status(status) != STATUS_INFO_LENGTH_MISMATCH: 92 | break 93 | buf_size *= 2 94 | buf = ctypes.create_string_buffer(buf_size) 95 | assert status >= 0, f"NtQuerySystemInformation returned 0x{nt_status(status):08X}" 96 | num_handles = ctypes.c_void_p.from_buffer(buf).value 97 | 98 | class SystemHandleInformationEx(ctypes.Structure): 99 | """NT API Handle table structure""" 100 | 101 | _fields_ = ( 102 | ("NumberOfHandles", ctypes.c_void_p), 103 | ("Reserved", ctypes.c_void_p), 104 | ("Handles", SystemHandleTableEntryInfoEx * (num_handles or 0)), 105 | ) 106 | 107 | return SystemHandleInformationEx.from_buffer(buf) 108 | 109 | 110 | def pid_handle_to_filename( 111 | pid: int, hnd: int, raise_for_error: bool = False 112 | ) -> Path | None: 113 | """Resolve a PID/Handle pair to a filesystem Path. 
114 | 115 | Args: 116 | pid: The Process ID the Handle belongs to 117 | hnd: The Handle belonging to the Process 118 | raise_for_error: if True, raise OSError when any error occurs 119 | 120 | Returns: 121 | Path the handle represents 122 | or None if error occurred and `raise_for_error` is False 123 | """ 124 | kernel32 = ctypes.windll.kernel32 125 | buf_size = MAX_PATH * 2 + 1 126 | buf = ctypes.create_string_buffer(buf_size) 127 | process_handle = kernel32.OpenProcess(PROCESS_DUP_HANDLE, False, pid) 128 | close_hnd = False 129 | try: 130 | if process_handle: 131 | handle_out = ctypes.wintypes.HANDLE() 132 | if kernel32.DuplicateHandle( 133 | process_handle, 134 | hnd, 135 | ctypes.wintypes.HANDLE(kernel32.GetCurrentProcess()), 136 | ctypes.byref(handle_out), 137 | 0, 138 | False, 139 | DUPLICATE_SAME_ACCESS, 140 | ): 141 | assert handle_out.value is not None 142 | hnd = int(handle_out.value) 143 | close_hnd = True 144 | else: 145 | if not raise_for_error: 146 | kernel32.SetLastError(0) 147 | return None 148 | raise create_winerror("DuplicateHandle") # pragma: no cover 149 | else: 150 | if not raise_for_error: 151 | kernel32.SetLastError(0) 152 | return None 153 | raise create_winerror("OpenProcess") # pragma: no cover 154 | ftype = kernel32.GetFileType(hnd) 155 | if ftype == FILE_TYPE_UNKNOWN: 156 | code = ctypes.GetLastError() 157 | if code != NO_ERROR: 158 | if not raise_for_error: 159 | kernel32.SetLastError(0) 160 | return None 161 | raise create_winerror("GetFileType") # pragma: no cover 162 | if ftype != FILE_TYPE_DISK: 163 | if not raise_for_error: 164 | return None 165 | raise OSError("Given handle is not a file") # pragma: no cover 166 | status = kernel32.GetFinalPathNameByHandleW(hnd, buf, buf_size, 0) 167 | finally: 168 | if process_handle: 169 | kernel32.CloseHandle(process_handle) 170 | if close_hnd: 171 | kernel32.CloseHandle(hnd) 172 | if not status: 173 | if not raise_for_error: 174 | kernel32.SetLastError(0) 175 | return None 176 | raise 
create_winerror("GetFinalPathnameByHandle") # pragma: no cover 177 | return Path(ctypes.wstring_at(buf)[4:]) # always prefixed with \\?\ 178 | 179 | 180 | def pids_by_file() -> dict[Path, set[int]]: 181 | """Create a mapping of open paths to the Processes that own the open file handles. 182 | 183 | Args: 184 | None 185 | 186 | Returns: 187 | dict mapping Path (the path of the open file) to a set of PIDs which have 188 | that path open. 189 | """ 190 | result: dict[Path, set[int]] = {} 191 | for hnd in nt_query_system_handle_information_ex().Handles: 192 | fname = pid_handle_to_filename(hnd.UniqueProcessId, hnd.HandleValue) 193 | if fname is not None: 194 | proc_pids = result.setdefault(fname, set()) 195 | proc_pids.add(hnd.UniqueProcessId) 196 | return result 197 | 198 | 199 | if __name__ == "__main__": # pragma: no cover 200 | 201 | def main() -> None: 202 | """test main""" 203 | printed = False 204 | for path, pids in sorted(pids_by_file().items()): 205 | print(f"{path}") 206 | for pid in sorted(pids): 207 | print(f"\t{pid}") 208 | printed = True 209 | if not printed: 210 | print("no open files?", file=sys.stderr) 211 | sys.exit(1) 212 | 213 | main() 214 | -------------------------------------------------------------------------------- /src/ffpuppet/main.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this 3 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """ffpuppet main.py""" 5 | 6 | from __future__ import annotations 7 | 8 | from argparse import ArgumentParser, Namespace 9 | from importlib.metadata import PackageNotFoundError, version 10 | from logging import DEBUG, ERROR, INFO, WARNING, basicConfig, getLogger 11 | from pathlib import Path 12 | from platform import system 13 | from shutil import rmtree, which 14 | from tempfile import mkdtemp 15 | from time import sleep, strftime 16 | 17 | from .bootstrapper import Bootstrapper 18 | from .core import Debugger, FFPuppet, Reason 19 | from .display import DisplayMode 20 | from .exceptions import LaunchError 21 | from .helpers import certutil_available, certutil_find 22 | from .profile import Profile 23 | 24 | LOG = getLogger(__name__) 25 | 26 | __author__ = "Tyson Smith" 27 | try: 28 | __version__ = version("ffpuppet") 29 | except PackageNotFoundError: # pragma: no cover 30 | # package is not installed 31 | __version__ = "unknown" 32 | 33 | 34 | def dump_to_console(log_dir: Path, log_quota: int = 0x8000) -> str: 35 | """Read and merge log files and format for output on the console. 36 | 37 | Args: 38 | log_dir: Directory to scan for logs. 39 | log_quota: Maximum number of bytes to read per log. 40 | 41 | Returns: 42 | Merged log data to be displayed on the console. 
43 | """ 44 | 45 | logs = [x for x in log_dir.iterdir() if x.is_file()] 46 | if not logs: 47 | return "" 48 | # display stdout and stderr last to avoid the need to scroll back 49 | # this assumes stderr contains the most relevant information 50 | for l_order in ("log_stdout", "log_stderr"): 51 | found = None 52 | for log in logs: 53 | if log.name.startswith(l_order): 54 | found = log 55 | break 56 | # move to the end of the print list 57 | if found and logs[-1] != found: 58 | logs.remove(found) 59 | logs.append(found) 60 | # merge logs 61 | lines = [] 62 | for log in logs: 63 | fsize = log.stat().st_size 64 | lines.append("\n===\n") 65 | lines.append(f"=== Dumping {log.name!r} ({fsize / 1024.0:0.2f}KB)") 66 | with log.open("rb") as log_fp: 67 | # tail log if needed 68 | log_fp.seek(max(fsize - log_quota, 0)) 69 | if log_fp.tell() > 0: 70 | lines.append(f" - tailed ({log_quota / 1024.0:0.2f}KB)") 71 | lines.append("\n===\n") 72 | lines.append(log_fp.read().decode("ascii", errors="ignore")) 73 | return "".join(lines) 74 | 75 | 76 | def parse_args(argv: list[str] | None = None) -> Namespace: 77 | """Handle argument parsing. 78 | 79 | Args: 80 | argv: Arguments from the user. 81 | 82 | Returns: 83 | Parsed and sanitized arguments. 84 | """ 85 | 86 | log_level_map = {"ERROR": ERROR, "WARN": WARNING, "INFO": INFO, "DEBUG": DEBUG} 87 | 88 | parser = ArgumentParser( 89 | prog="ffpuppet", 90 | description="FFPuppet - Firefox process launcher and log collector. 
" 91 | "Happy bug hunting!", 92 | ) 93 | parser.add_argument("binary", type=Path, help="Firefox binary to launch") 94 | parser.add_argument( 95 | "-d", 96 | "--display-logs", 97 | action="store_true", 98 | help="Display summary of browser logs on process exit.", 99 | ) 100 | parser.add_argument( 101 | "--log-level", 102 | choices=sorted(log_level_map), 103 | default="INFO", 104 | help="Configure console logging (default: %(default)s)", 105 | ) 106 | parser.add_argument( 107 | "--version", 108 | "-V", 109 | action="version", 110 | version=f"%(prog)s {__version__}", 111 | help="Show version number", 112 | ) 113 | 114 | cfg_group = parser.add_argument_group("Browser Configuration") 115 | cfg_group.add_argument( 116 | "--certs", 117 | nargs="+", 118 | type=Path, 119 | help="Install trusted certificates.", 120 | ) 121 | cfg_group.add_argument( 122 | "--display", 123 | choices=sorted(x.name.lower() for x in DisplayMode), 124 | default=DisplayMode.DEFAULT.name, 125 | help="Display mode.", 126 | ) 127 | cfg_group.add_argument( 128 | "-e", 129 | "--extension", 130 | action="append", 131 | type=Path, 132 | help="Install extensions. Specify the path to the xpi or the directory " 133 | "containing the unpacked extension.", 134 | ) 135 | cfg_group.add_argument( 136 | "--marionette", 137 | const=0, 138 | default=None, 139 | nargs="?", 140 | type=int, 141 | help="Enable marionette. If a port is provided it is used otherwise " 142 | "a random port is selected. (default: disabled)", 143 | ) 144 | cfg_group.add_argument( 145 | "-p", 146 | "--prefs", 147 | type=Path, 148 | help="Custom prefs.js file to use (default: profile default)", 149 | ) 150 | cfg_group.add_argument( 151 | "-P", 152 | "--profile", 153 | type=Path, 154 | help="Profile to use. This is non-destructive. A copy of the target profile " 155 | "will be used. (default: temporary profile)", 156 | ) 157 | cfg_group.add_argument( 158 | "-u", "--url", help="Server URL or path to local file to load." 
    )

    report_group = parser.add_argument_group("Issue Detection & Reporting")
    report_group.add_argument(
        "-a",
        "--abort-token",
        action="append",
        default=[],
        help="Scan the browser logs for the given value and close browser if detected. "
        "For example '-a ###!!! ASSERTION:' would be used to detect soft assertions.",
    )
    report_group.add_argument(
        "--launch-timeout",
        type=int,
        default=300,
        help="Number of seconds to wait for the browser to become "
        "responsive after launching. (default: %(default)s)",
    )
    report_group.add_argument(
        "-l",
        "--logs",
        default=Path.cwd(),
        type=Path,
        help="Location to save browser logs. "
        "A sub-directory containing the browser logs will be created.",
    )
    report_group.add_argument(
        "--log-limit",
        type=int,
        default=0,
        help="Browser log file size limit in MBs (default: %(default)s, no limit)",
    )
    report_group.add_argument(
        "-m",
        "--memory",
        type=int,
        default=0,
        help="Browser memory limit in MBs (default: %(default)s, no limit)",
    )
    report_group.add_argument(
        "--poll-interval",
        type=float,
        default=0.5,
        help="Delay between checks for results (default: %(default)s)",
    )
    report_group.add_argument(
        "--save-all",
        action="store_true",
        help="Always save logs."
        " By default logs are saved only when an issue is detected.",
    )

    # debugger flags are mutually exclusive and default to no debugger
    parser.set_defaults(debugger=Debugger.NONE)
    if system() == "Linux":
        dbg_group = parser.add_argument_group("Available Debuggers")
        # Add the mutually exclusive group to a regular group
        # because mutually exclusive groups don't accept a title
        dbg_group = dbg_group.add_mutually_exclusive_group()
        dbg_group.add_argument(
            "--gdb",
            action="store_const",
            const=Debugger.GDB,
            dest="debugger",
            help="Use GDB.",
        )
        dbg_group.add_argument(
            "--pernosco",
            action="store_const",
            const=Debugger.PERNOSCO,
            dest="debugger",
            help="Use rr. Trace intended to be submitted to Pernosco.",
        )
        dbg_group.add_argument(
            "--rr",
            action="store_const",
            const=Debugger.RR,
            dest="debugger",
            help="Use rr.",
        )
        dbg_group.add_argument(
            "--valgrind",
            action="store_const",
            const=Debugger.VALGRIND,
            dest="debugger",
            help="Use Valgrind.",
        )

    args = parser.parse_args(argv)

    # sanity checks
    if not args.binary.is_file():
        parser.error(f"Invalid browser binary '{args.binary}'")
    if args.certs:
        if not certutil_available(certutil_find(args.binary)):
            parser.error("'--certs' requires NSS certutil")
        for cert in args.certs:
            if not cert.is_file():
                parser.error(f"Invalid certificate file '{cert}'")
    if args.extension:
        for ext in args.extension:
            if not ext.exists():
                parser.error(f"Extension '{ext}' does not exist")
    if args.debugger in (Debugger.PERNOSCO, Debugger.RR):
        # rr is only supported on Linux
        if not which("rr"):
            parser.error("rr is not installed")
        # rr requires relaxed kernel perf event restrictions
        settings = "/proc/sys/kernel/perf_event_paranoid"
        value = int(Path(settings).read_bytes())
        if value > 1:
            parser.error(f"rr needs {settings} <= 1, but it is {value}")
    # NOTE(review): assumes Bootstrapper.check_port() accepts 0 (pick a free port)
    # or a value in 1025-65535 — confirm against Bootstrapper implementation
    if args.marionette is not None and not Bootstrapper.check_port(args.marionette):
        parser.error("--marionette must be 0 or > 1024 and < 65536")
    if not args.logs.is_dir():
        parser.error(f"Log output directory is invalid '{args.logs}'")
    args.log_level = log_level_map[args.log_level]
    if args.log_limit < 0:
        parser.error("--log-limit must be >= 0")
    # convert limit from MBs to bytes (1_048_576 = 1024 * 1024)
    args.log_limit *= 1_048_576
    if args.memory < 0:
        parser.error("--memory must be >= 0")
    # convert limit from MBs to bytes
    args.memory *= 1_048_576
    if args.prefs is not None and not args.prefs.is_file():
        parser.error(f"Invalid prefs.js file '{args.prefs}'")

    return args


def main(argv: list[str] | None = None) -> None:
    """FFPuppet main entry point.

    Parses arguments, launches the browser, polls it until it exits (or an
    issue is detected) and then collects logs and cleans up.

    Args:
        argv: Command line arguments (sys.argv is used when None).

    Returns:
        None
    """
    args = parse_args(argv)
    # set output verbosity
    if args.log_level == DEBUG:
        date_fmt = None
        log_fmt = "%(asctime)s %(levelname).1s %(name)s | %(message)s"
    else:
        date_fmt = "%Y-%m-%d %H:%M:%S"
        log_fmt = "[%(asctime)s] %(message)s"
    basicConfig(format=log_fmt, datefmt=date_fmt, level=args.log_level)

    ffp = FFPuppet(
        debugger=args.debugger,
        display_mode=DisplayMode[args.display.upper()],
        use_profile=args.profile,
    )
    for a_token in args.abort_token:
        ffp.add_abort_token(a_token)

    user_exit = False
    try:
        LOG.info("Launching Firefox...")
        ffp.launch(
            args.binary,
            location=args.url,
            launch_timeout=args.launch_timeout,
            log_limit=args.log_limit,
            marionette=args.marionette,
            memory_limit=args.memory,
            prefs_js=args.prefs,
            extension=args.extension,
            cert_files=args.certs,
        )
        # warn (via debug logging in check_prefs) if requested prefs were not applied
        if args.prefs and args.prefs.is_file():
            assert ffp.profile is not None
            assert ffp.profile.path is not None
            Profile.check_prefs(ffp.profile.path / "prefs.js", args.prefs)
        if ffp.marionette is not None:
            LOG.info("Marionette listening on port: %d", ffp.marionette)
        LOG.info("Running Firefox (pid: %d)...", ffp.get_pid())
        # poll until the browser exits or an issue (abort token, crash...) is found
        while ffp.is_healthy():
            sleep(args.poll_interval)
    except KeyboardInterrupt:
        user_exit = True
        LOG.info("Ctrl+C detected.")
    except LaunchError as exc:
        LOG.error("Launch failed: %s", exc)
    finally:
        LOG.info("Shutting down...")
        ffp.close()
        if ffp.reason is not None:
            LOG.info("Firefox process is closed. (Reason: %s)", ffp.reason.name)
        else:
            LOG.error("FFPuppet.close() failed")
        logs = Path(mkdtemp(prefix=strftime("%Y%m%d-%H%M%S_ffp_logs_"), dir=args.logs))
        # NOTE(review): logs_only appears to skip extra result processing when the
        # user interrupted the run — confirm against FFPuppet.save_logs()
        ffp.save_logs(logs, logs_only=user_exit)
        if args.display_logs:
            LOG.info("Displaying logs...%s", dump_to_console(logs))
        # only keep the collected logs when an issue was detected or --save-all is set
        if ffp.reason == Reason.ALERT or args.save_all:
            LOG.info("Browser logs available here '%s'", logs.resolve())
        else:
            rmtree(logs, ignore_errors=True)
        ffp.clean_up()
--------------------------------------------------------------------------------
/src/ffpuppet/minidump_parser.py:
--------------------------------------------------------------------------------
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""ffpuppet minidump parsing module"""

from __future__ import annotations

from json import JSONDecodeError, load
from logging import DEBUG, INFO, basicConfig, getLogger
from pathlib import Path
from shutil import rmtree, which
from subprocess import CalledProcessError, TimeoutExpired, run
from tempfile import TemporaryFile, mkdtemp
from typing import IO, Any

LOG = getLogger(__name__)
MDSW_URL = "https://lib.rs/crates/minidump-stackwalk"
SYMS_URL = "https://symbols.mozilla.org/"

__author__ = "Tyson Smith"


class MinidumpParser:
    """Parse minidump files via minidump-stackwalk.

    Attributes:
        symbols: Path containing debug symbols.
    """

    # resolved once at class creation time, None if the tool is not installed
    MDSW_BIN = which("minidump-stackwalk")

    __slots__ = ("_storage", "_symbols")

    def __init__(self, symbols: Path | None = None) -> None:
        # scratch directory for minidump-stackwalk output and generated logs
        self._storage = Path(mkdtemp(prefix="md-parser-"))
        self._symbols = symbols

    def __enter__(self) -> MinidumpParser:
        return self

    def __exit__(self, *exc: object) -> None:
        self.close()

    def _cmd(self, src: Path) -> list[str]:
        """Generate minidump-stackwalk command line.

        Args:
            src: minidump to load.

        Returns:
            Command line.
        """
        assert self.MDSW_BIN
        cmd = [self.MDSW_BIN, "--no-color", "--no-interactive", "--json"]
        if self._symbols:
            cmd.extend(["--symbols-path", str(self._symbols.resolve(strict=True))])
        else:
            # no local symbols provided, fetch them from the Mozilla symbol server
            cmd.extend(["--symbols-url", SYMS_URL])
        cmd.append(str(src.resolve(strict=True)))
        return cmd

    @staticmethod
    def _fmt_output(data: dict[str, Any], out_fp: IO[bytes], limit: int = 150) -> None:
        """Write summarized contents of a minidump to a file in a format that is
        consumable by FuzzManager.

        Args:
            data: Minidump contents (parsed minidump-stackwalk JSON output).
            out_fp: Formatted content destination.
            limit: Maximum number of stack frames to include.

        Returns:
            None
        """
        assert limit > 0
        # generate register information lines
        try:
            frames = data["crashing_thread"]["frames"]
        except KeyError:
            LOG.warning("No frames available for 'crashing thread'")
            frames = []
        if frames:
            reg_lines: list[str] = []
            for reg, value in frames[0]["registers"].items():
                # display three registers per line
                sep = "\t" if (len(reg_lines) + 1) % 3 else "\n"
                reg_lines.append(f"{reg:>3} = {value}{sep}")
            out_fp.write("".join(reg_lines).rstrip().encode())
            out_fp.write(b"\n")

        # generate OS information line
        line = "|".join(
            ("OS", data["system_info"]["os"], data["system_info"]["os_ver"])
        )
        out_fp.write(line.encode())
        out_fp.write(b"\n")

        # generate CPU information line
        line = "|".join(
            (
                "CPU",
                data["system_info"]["cpu_arch"] or "unknown",
                data["system_info"]["cpu_info"] or "",
                str(data["system_info"]["cpu_count"]),
            )
        )
        out_fp.write(line.encode())
        out_fp.write(b"\n")

        # generate Crash information line
        crashing_thread = str(data["crash_info"].get("crashing_thread", "?"))
        line = "|".join(
            (
                "Crash",
                data["crash_info"]["type"],
                data["crash_info"]["address"],
                crashing_thread,
            )
        )
        out_fp.write(line.encode())
        out_fp.write(b"\n")

        # generate Frame information lines
        for frame in frames[:limit]:
            if frame["function_offset"]:
                # remove the padding zeros
                func_offset = hex(int(frame["function_offset"], 16))
            else:
                func_offset = ""
            line = "|".join(
                (
                    crashing_thread,
                    str(frame["frame"]),
                    frame["module"] or "",
                    frame["function"] or "",
                    frame["file"] or "",
                    str(frame["line"] or ""),
                    func_offset,
                )
            )
            out_fp.write(line.encode())
            out_fp.write(b"\n")

        if limit < len(frames):
            out_fp.write(b"WARNING: Hit stack size output limit!\n")

    def close(self) -> None:
        """Remove working data.

        Args:
            None

        Returns:
            None
        """
        if self._storage.is_dir():
            rmtree(self._storage)

    def create_log(self, src: Path, filename: str, timeout: int = 300) -> Path:
        """Create a human readable log from a minidump file.

        Args:
            src: Minidump file.
            filename: Name to use for output file.
            timeout: Maximum runtime of minidump-stackwalk. NOTE: Symbols may be
                downloaded if not provided which can add overhead.

        Returns:
            Log file.
        """
        assert filename
        assert timeout >= 0
        cmd = self._cmd(src)
        dst = self._storage / filename
        with (
            TemporaryFile(dir=self._storage, prefix="mdsw_out_") as out_fp,
            TemporaryFile(dir=self._storage, prefix="mdsw_err_") as err_fp,
        ):
            LOG.debug("running '%s'", " ".join(cmd))
            try:
                run(cmd, check=True, stderr=err_fp, stdout=out_fp, timeout=timeout)
                out_fp.seek(0)
                # load json, format data and write log
                with dst.open("wb") as log_fp:
                    self._fmt_output(load(out_fp), log_fp)
            except (CalledProcessError, JSONDecodeError, TimeoutExpired) as exc:
                if isinstance(exc, CalledProcessError):
                    msg = f"minidump-stackwalk failed ({exc.returncode})"
                elif isinstance(exc, JSONDecodeError):
                    msg = "json decode error"
                else:
                    msg = "minidump-stackwalk timeout"
                LOG.warning("Failed to parse minidump: %s", msg)
                err_fp.seek(0)
                out_fp.seek(0)
                # write log containing the failure reason and raw tool output
                with dst.open("wb") as log_fp:
                    log_fp.write(f"Failed to parse minidump: {msg}".encode())
                    log_fp.write(b"\n\nminidump-stackwalk stderr:\n")
                    log_fp.write(err_fp.read())
                    log_fp.write(b"\n\nminidump-stackwalk stdout:\n")
                    log_fp.write(out_fp.read())
        return dst

    @staticmethod
    def dmp_files(src_dir: Path) -> list[Path]:
        """Scan a directory for minidump (.dmp) files. Prioritize files that also have
        a MozCrashReason entry in the supporting .extra file.

        Args:
            src_dir: Directory containing minidump files.

        Returns:
            Dump files.
        """
        dmps: list[Path] = []
        for dmp in sorted(src_dir.glob("*.dmp"), key=lambda x: x.stat().st_mtime):
            try:
                # check .extra file for MozCrashReason entry
                with dmp.with_suffix(".extra").open() as out_fp:
                    has_reason = load(out_fp).get("MozCrashReason") is not None
            except (FileNotFoundError, JSONDecodeError):
                has_reason = False
            # prioritize dmp with MozCrashReason
            if has_reason:
                dmps.insert(0, dmp)
            else:
                dmps.append(dmp)
        return dmps

    @classmethod
    def mdsw_available(cls, min_version: str = "0.15.2") -> bool:
        """Check if minidump-stackwalk binary is available.

        Args:
            min_version: Minimum supported minidump-stackwalk version.

        Returns:
            True if binary is available otherwise False.
        """
        assert min_version.count(".") == 2

        if not cls.MDSW_BIN:
            LOG.debug("minidump-stackwalk not found")
            return False
        try:
            result = run([cls.MDSW_BIN, "--version"], check=False, capture_output=True)
        except OSError:
            LOG.debug("minidump-stackwalk not available (%s)", cls.MDSW_BIN)
            return False
        LOG.debug("using minidump-stackwalk (%s)", cls.MDSW_BIN)
        # expected output is 'minidump-stackwalk #.#.#'
        # guard against empty output (avoid IndexError on split()[-1])
        tokens = result.stdout.strip().split()
        if not tokens:
            LOG.error("Unable to detect minidump-stackwalk version (no output)")
            return False
        current_version = tokens[-1].decode()
        if current_version.count(".") != 2:
            LOG.error(
                "Unknown minidump-stackwalk version: '%s'",
                result.stdout.decode(errors="ignore"),
            )
            return False
        # version check (compare components left to right)
        for cver, mver in zip(current_version.split("."), min_version.split(".")):
            if int(cver) > int(mver):
                break
            if int(cver) < int(mver):
                LOG.error(
                    "minidump-stackwalk '%s' is unsupported (minimum '%s')",
                    current_version,
                    min_version,
                )
                return False
        LOG.debug("detected minidump-stackwalk version '%s'", current_version)
        return True


if __name__ == "__main__":
    from argparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument("minidump", type=Path, help="Minidump to process.")
    parser.add_argument("--debug", action="store_true", help="Display debug output.")
    parser.add_argument(
        "--symbols",
        type=Path,
        help="Local symbols directory. "
        f"If not provided attempt to download symbols from {SYMS_URL}",
    )
    args = parser.parse_args()

    # set output verbosity
    if args.debug:
        basicConfig(format="[%(levelname).1s] %(message)s", level=DEBUG)
    else:
        basicConfig(format="%(message)s", level=INFO)

    if MinidumpParser.mdsw_available():
        with MinidumpParser(symbols=args.symbols) as md_parser:
            log = md_parser.create_log(args.minidump, "minidump_tmp.txt")
            LOG.info("Parsed %s\n%s", args.minidump.resolve(), log.read_text())
    else:
        LOG.error(
            "Unable to process minidump, minidump-stackwalk is required. %s", MDSW_URL
        )
# --------------------------------------------------------------------------------
# /src/ffpuppet/process_tree.py:
# --------------------------------------------------------------------------------
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""ffpuppet process tree module"""

from __future__ import annotations

import sys
from contextlib import suppress
from logging import getLogger
from os import getenv
from pathlib import Path
from time import perf_counter, sleep
from typing import TYPE_CHECKING, Callable, cast

from psutil import (
    STATUS_ZOMBIE,
    AccessDenied,
    NoSuchProcess,
    Process,
    TimeoutExpired,
    wait_procs,
)

from .exceptions import TerminateError

if TYPE_CHECKING:
    from collections.abc import Generator, Iterable
    from subprocess import Popen

if sys.platform != "win32":
    from signal import SIGUSR1, Signals  # pylint: disable=no-name-in-module

    # SIGUSR1 triggers coverage (gcda) dumps, not available on Windows
    COVERAGE_SIGNAL: Signals | None = SIGUSR1
    IS_WINDOWS = False
else:
    COVERAGE_SIGNAL = None
    IS_WINDOWS = True


LOG = getLogger(__name__)


def _filter_zombies(procs: Iterable[Process]) -> Generator[Process]:
    """Filter out zombie processes from a collection of processes.

    Args:
        procs: Processes to check.

    Yields:
        Processes that are not zombies.
    """
    for proc in procs:
        with suppress(AccessDenied, NoSuchProcess):
            if proc.status() == STATUS_ZOMBIE:
                LOG.debug("filtering zombie: %d - %s", proc.pid, proc.name())
                continue
            yield proc


def _last_modified(scan_dir: Path) -> float | None:
    """Scan directory recursively and find the latest modified date of all .gcda files.

    Args:
        scan_dir: Directory to scan.

    Returns:
        Last modified date or None if no files are found.
    """
    # max() raises ValueError on an empty sequence (no .gcda files found)
    with suppress(ValueError):
        return max(x.stat().st_mtime for x in scan_dir.glob("**/*.gcda"))
    return None


def _safe_wait_procs(
    procs: Iterable[Process],
    timeout: float | None = 0,
    callback: Callable[[Process], object] | None = None,
) -> tuple[list[Process], list[Process]]:
    """Wrapper for psutil.wait_procs() to avoid AccessDenied.
    This can be an issue on Windows.

    Args:
        See psutil.wait_procs().

    Returns:
        See psutil.wait_procs().
    """
    assert timeout is None or timeout >= 0

    deadline = None if timeout is None else perf_counter() + timeout
    while True:
        remaining = None if deadline is None else max(deadline - perf_counter(), 0)
        # retry wait_procs() until it succeeds or the deadline is hit
        with suppress(AccessDenied):
            return cast(
                tuple[list[Process], list[Process]],
                wait_procs(procs, timeout=remaining, callback=callback),
            )
        if deadline is not None and deadline <= perf_counter():
            break
        sleep(0.25)

    # deadline hit while wait_procs() kept raising AccessDenied,
    # manually check processes
    alive: list[Process] = []
    gone: list[Process] = []
    for proc in procs:
        try:
            if not proc.is_running():
                gone.append(proc)
            else:
                alive.append(proc)
        except AccessDenied:  # noqa: PERF203
            # cannot inspect the process, conservatively treat it as alive
            alive.append(proc)
        except NoSuchProcess:
            gone.append(proc)
    return (gone, alive)


def _writing_coverage(procs: Iterable[Process]) -> bool:
    """Check if any processes have open .gcda files.

    Args:
        procs: Processes to check.

    Returns:
        True if processes with open .gcda files are found.
    """
    for proc in procs:
        with suppress(AccessDenied, NoSuchProcess):
            if any(x for x in proc.open_files() if x.path.endswith(".gcda")):
                return True
    return False


class ProcessTree:
    """Manage the Firefox process tree. The process tree layout depends on the platform.

    Windows:
        python -> firefox (launcher) -> firefox (parent) -> firefox (content procs)

    Linux and others:
        python -> firefox (parent) -> firefox (content procs)
    """

    __slots__ = ("_launcher", "_launcher_check", "_proc", "parent")

    def __init__(self, proc: Popen[bytes]) -> None:
        # launcher process, identified lazily by the launcher property
        self._launcher: Process | None = None
        # only perform the launcher check on Windows
        self._launcher_check = IS_WINDOWS
        self._proc = proc
        self.parent: Process = Process(proc.pid)

    def cpu_usage(self) -> Generator[tuple[int, float]]:
        """Collect percentage of CPU usage per process.

        Note: the returned value can be > 100.0 in case of a process running multiple
        threads on different CPU cores.
        See: https://psutil.readthedocs.io/en/latest/#psutil.Process.cpu_percent

        This value is not divided by CPU count because we are typically more concerned
        with the low end for detecting idle processes.

        Args:
            None

        Yields:
            PID and the CPU usage as a percentage.
        """
        procs = self.processes()
        # first cpu_percent() call primes the measurement interval
        for proc in procs:
            with suppress(AccessDenied, NoSuchProcess):
                proc.cpu_percent()
        # psutil recommends at least '0.1'.
        sleep(0.1)
        for proc in procs:
            with suppress(AccessDenied, NoSuchProcess):
                yield proc.pid, proc.cpu_percent()

    def dump_coverage(self, timeout: int = 15, idle_wait: int = 2) -> bool:
        """Signal processes to write coverage data to disk. Running coverage builds in
        parallel that are writing to the same location on disk is not recommended.
        NOTE: Coverage data is also written when launching and closing the browser.

        Args:
            timeout: Number of seconds to wait for data to be written to disk.
            idle_wait: Number of seconds to wait to determine if update is complete.

        Returns:
            True if coverage is written to disk or processes exit otherwise False.
        """
        assert COVERAGE_SIGNAL is not None
        assert getenv("GCOV_PREFIX_STRIP"), "GCOV_PREFIX_STRIP not set"
        assert getenv("GCOV_PREFIX"), "GCOV_PREFIX not set"
        # coverage output can take a few seconds to start and complete
        assert timeout > 5
        cov_path = Path(getenv("GCOV_PREFIX", ""))
        last_mdate = _last_modified(cov_path) or 0
        signaled = 0
        # send COVERAGE_SIGNAL (SIGUSR1) to browser processes
        for proc in self.processes():
            with suppress(AccessDenied, NoSuchProcess):
                proc.send_signal(COVERAGE_SIGNAL)
                signaled += 1
        # no processes signaled
        if signaled == 0:
            LOG.debug("coverage signal not sent, no browser processes found")
            return True
        # wait for processes to write .gcda files (typically takes ~2 seconds)
        start_time = perf_counter()
        last_change = None
        while True:
            if not self.is_running():
                LOG.debug("not running waiting for coverage dump")
                return True
            # collect latest last modified dates
            mdate = _last_modified(cov_path) or 0
            # check if gcda files have been updated
            now = perf_counter()
            elapsed = now - start_time
            if mdate > last_mdate:
                last_change = now
                last_mdate = mdate
            # check if gcda write is complete (wait)
            if (
                last_change is not None
                and now - last_change > idle_wait
                and not _writing_coverage(self.processes())
            ):
                LOG.debug("coverage (gcda) dump took %0.2fs", elapsed)
                return True
            # check if max duration has been exceeded
            if elapsed >= timeout:
                if last_change is None:
                    LOG.warning("Coverage files not modified after %0.2fs", elapsed)
                else:
                    LOG.warning("Coverage file open after %0.2fs", elapsed)
                break
            sleep(0.25)
        return False

    def is_running(self) -> bool:
        """Check if parent process is running.

        Args:
            None

        Returns:
            True if the parent process is running otherwise False
        """
        return self._poll(self.parent) is None

    @property
    def launcher(self) -> Process | None:
        """Inspect process tree and identity the browser launcher and parent processes.

        Args:
            None

        Returns:
            The launcher Process if one is detected otherwise None.
        """
        if self._launcher_check and self._launcher is None:
            try:
                cmd = self.parent.cmdline()
            except (AccessDenied, NoSuchProcess):  # pragma: no cover
                LOG.debug("call to self.parent.cmdline() failed")
                cmd = []
            # check if launcher process is in use
            if "-no-deelevate" in cmd:
                launcher_children = self.parent.children(recursive=False)
                # launcher should only have one child process
                if len(launcher_children) == 1:
                    LOG.debug("launcher process detected")
                    self._launcher = self.parent
                    self.parent = launcher_children[0]
                else:
                    # this is expected behaviour when setting:
                    # - `browser.launcherProcess.enabled=false`
                    # it can also happen for unknown reasons...
                    LOG.debug(
                        "using launcher as parent, %d child proc(s) detected",
                        len(launcher_children),
                    )
            self._launcher_check = False
        return self._launcher

    @staticmethod
    def _poll(proc: Process) -> int | None:
        """Poll a given process.

        Args:
            proc: Process to poll.

        Returns:
            None if the process is running otherwise the exit code is returned.
        """
        try:
            # 'or 0' normalizes a None result from psutil wait() to an exit code of 0
            return proc.wait(timeout=0) or 0
        except NoSuchProcess:
            LOG.debug("called poll() on process that does not exist")
            return 0
        except TimeoutExpired:
            return None

    def processes(self, recursive: bool = False) -> list[Process]:
        """Processes in the process tree.

        Args:
            recursive: If False only the parent and child processes are returned.

        Returns:
            Processes in the process tree.
        """
        procs: list[Process] = []
        if self.launcher is not None and self._poll(self.launcher) is None:
            procs.append(self.launcher)
        if self._poll(self.parent) is None:
            procs.append(self.parent)
        with suppress(AccessDenied, NoSuchProcess):
            procs.extend(self.parent.children(recursive=recursive))
        return procs

    def terminate(self) -> None:
        """Call terminate() on browser processes. If terminate() fails try kill().

        Args:
            None

        Returns:
            None

        Raises:
            TerminateError: Processes are still running after kill() was attempted.
        """
        procs = self.processes(recursive=True)
        if not procs:
            LOG.debug("no processes to terminate")
            return

        # try terminating the parent process first, this should be all that is needed
        if self._poll(self.parent) is None:
            with suppress(AccessDenied, NoSuchProcess, TimeoutExpired):
                LOG.debug("attempting to terminate parent (%d)", self.parent.pid)
                self.parent.terminate()
                self.parent.wait(timeout=10)
            # remaining processes should exit if parent process is gone
            procs = list(_filter_zombies(_safe_wait_procs(procs, timeout=1)[1]))

        use_kill = False
        while procs:
            LOG.debug(
                "calling %s on %d running process(es)",
                "kill()" if use_kill else "terminate()",
                len(procs),
            )
            # iterate over processes and call terminate()/kill()
            for proc in procs:
                with suppress(AccessDenied, NoSuchProcess):
                    if use_kill:
                        proc.kill()
                    else:
                        proc.terminate()
            # wait for processes to terminate
            procs = list(_filter_zombies(_safe_wait_procs(procs, timeout=30)[1]))
            if use_kill:
                break
            # escalate to kill() for the second (final) pass
            use_kill = True

        if procs:
            LOG.warning("Processes still running: %d", len(procs))
            for proc in procs:
                with suppress(AccessDenied, NoSuchProcess):
                    LOG.warning("-> %d: %s (%s)", proc.pid, proc.name(), proc.status())
            raise TerminateError("Failed to terminate processes")

    def wait(self, timeout: int = 300) -> int:
        """Wait for parent process to exit.

        Args:
            timeout: Maximum time to wait before raising TimeoutExpired.

        Returns:
            Process exit code.
        """
        with suppress(AccessDenied, NoSuchProcess):
            return self.parent.wait(timeout=timeout) or 0
        return 0  # pragma: no cover

    def wait_procs(self, timeout: float | None = 0) -> int:
        """Wait for process tree to exit.

        Args:
            timeout: Maximum time to wait.

        Returns:
            Number of processes still alive.
        """
        return len(_safe_wait_procs(self.processes(), timeout=timeout)[1])
--------------------------------------------------------------------------------
/src/ffpuppet/profile.py:
--------------------------------------------------------------------------------
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""ffpuppet profile manager"""

from __future__ import annotations

from json import load as json_load
from logging import getLogger
from pathlib import Path
from shutil import copyfile, copytree, rmtree
from subprocess import STDOUT, CalledProcessError, TimeoutExpired, check_output
from tempfile import mkdtemp
from time import time
from typing import TYPE_CHECKING
from xml.etree import ElementTree

from .helpers import certutil_available, certutil_find

if TYPE_CHECKING:
    from collections.abc import Iterable

LOG = getLogger(__name__)

__author__ = "Tyson Smith"


class Profile:
    """
    Browser profile management object.
31 | """ 32 | 33 | __slots__ = ("path",) 34 | 35 | def __init__( 36 | self, 37 | browser_bin: Path | None = None, 38 | cert_files: Iterable[Path] | None = None, 39 | extensions: Iterable[Path] | None = None, 40 | prefs_file: Path | None = None, 41 | template: Path | None = None, 42 | working_path: str | None = None, 43 | ) -> None: 44 | if cert_files and not certutil_available(certutil_find(browser_bin)): 45 | raise OSError("NSS certutil not found") 46 | 47 | self.path: Path | None = Path(mkdtemp(dir=working_path, prefix="ffprofile_")) 48 | try: 49 | if template is not None: 50 | self._copy_template(template) 51 | if prefs_file is not None: 52 | self._copy_prefs_file(prefs_file) 53 | if extensions is not None: 54 | self._copy_extensions(extensions) 55 | if cert_files: 56 | certutil_bin = certutil_find(browser_bin) 57 | self.init_cert_db(self.path, certutil_bin) 58 | for cert in cert_files: 59 | self.install_cert(self.path, cert, certutil_bin) 60 | except Exception: 61 | if self.path.exists(): 62 | rmtree(self.path, ignore_errors=True) 63 | raise 64 | 65 | def __enter__(self) -> Profile: 66 | return self 67 | 68 | def __exit__(self, *exc: object) -> None: 69 | self.remove() 70 | 71 | def __str__(self) -> str: 72 | return str(self.path) 73 | 74 | def _add_times_json(self) -> None: 75 | assert self.path 76 | # times.json only needs to be created when using a custom prefs.js 77 | times_json = self.path / "times.json" 78 | if not times_json.is_file(): 79 | times_json.write_text(f'{{"created":{int(time()) * 1000}}}') 80 | 81 | def _copy_extensions(self, extensions: Iterable[Path]) -> None: 82 | assert self.path 83 | ext_path = self.path / "extensions" 84 | ext_path.mkdir(exist_ok=True) 85 | for ext in extensions: 86 | if ext.is_file() and ext.name.endswith(".xpi"): 87 | copyfile(ext, ext_path / ext.name) 88 | elif ext.is_dir(): 89 | # read manifest to see what the folder should be named 90 | ext_name = None 91 | if (ext / "manifest.json").is_file(): 92 | try: 93 | with 
(ext / "manifest.json").open("r") as manifest: 94 | manifest_loaded_json = json_load(manifest) 95 | ext_name = manifest_loaded_json["applications"]["gecko"]["id"] 96 | except (OSError, KeyError, ValueError) as exc: 97 | LOG.debug("Failed to parse manifest.json: %s", exc) 98 | elif (ext / "install.rdf").is_file(): 99 | try: 100 | xmlns = { 101 | "x": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", 102 | "em": "http://www.mozilla.org/2004/em-rdf#", 103 | } 104 | tree = ElementTree.parse(str(ext / "install.rdf")) 105 | assert tree.getroot().tag == f"{{{xmlns['x']}}}RDF" 106 | ids = tree.findall("./x:Description/em:id", namespaces=xmlns) 107 | assert len(ids) == 1 108 | ext_name = ids[0].text 109 | except (AssertionError, OSError, ElementTree.ParseError) as exc: 110 | LOG.debug("Failed to parse install.rdf: %s", exc) 111 | if ext_name is None: 112 | raise RuntimeError( 113 | f"Failed to find extension id in manifest: '{ext}'" 114 | ) 115 | copytree(ext, self.path / "extensions" / ext_name) 116 | else: 117 | raise RuntimeError(f"Unknown extension: '{ext}'") 118 | 119 | def _copy_prefs_file(self, prefs_file: Path) -> None: 120 | assert self.path 121 | LOG.debug("using prefs.js: '%s'", prefs_file) 122 | copyfile(prefs_file, self.path / "prefs.js") 123 | self._add_times_json() 124 | 125 | def _copy_template(self, template: Path) -> None: 126 | assert self.path 127 | LOG.debug("using profile template: '%s'", template) 128 | rmtree(self.path) 129 | copytree(template, self.path) 130 | invalid_prefs = self.path / "Invalidprefs.js" 131 | # if Invalidprefs.js was copied from the template profile remove it 132 | if invalid_prefs.is_file(): 133 | invalid_prefs.unlink() 134 | 135 | def add_prefs(self, prefs: dict[str, str]) -> None: 136 | """Write or append preferences from prefs to prefs.js file in profile_path. 137 | 138 | Args: 139 | prefs: preferences to add. 
140 | 141 | Returns: 142 | None 143 | """ 144 | assert self.path 145 | if not (self.path / "prefs.js").is_file(): 146 | self._add_times_json() 147 | with (self.path / "prefs.js").open("a") as prefs_fp: 148 | # make sure there is a newline before appending to prefs.js 149 | prefs_fp.write("\n") 150 | for name, value in prefs.items(): 151 | prefs_fp.write(f"user_pref('{name}', {value});\n") 152 | 153 | @staticmethod 154 | def check_prefs(prof_prefs: Path, input_prefs: Path) -> bool: 155 | """Check that the given prefs.js file in use by the browser contains all 156 | the requested preferences. 157 | NOTE: There will be false positives if input_prefs does not adhere to the 158 | formatting that is used in prefs.js file generated by the browser. 159 | 160 | Args: 161 | prof_prefs: Profile prefs.js file. 162 | input_prefs: Prefs.js file that contains prefs that should be merged into 163 | the prefs.js file generated by the browser. 164 | 165 | Returns: 166 | True if all expected preferences are found otherwise False. 167 | """ 168 | with prof_prefs.open() as p_fp, input_prefs.open() as i_fp: 169 | p_prefs = {p.split(",")[0] for p in p_fp if p.startswith("user_pref(")} 170 | i_prefs = {p.split(",")[0] for p in i_fp if p.startswith("user_pref(")} 171 | missing_prefs = i_prefs - p_prefs 172 | for missing in missing_prefs: 173 | LOG.debug("pref not set '%s'", missing) 174 | return not missing_prefs 175 | 176 | @staticmethod 177 | def init_cert_db(dst: Path, certutil: str) -> None: 178 | """Create required certificate database files. 179 | 180 | Args: 181 | dst: Path of directory to initialize. 182 | certutil: certutil binary. 
183 | 184 | Returns: 185 | None 186 | """ 187 | # remove any existing db files to avoid any compatibility issues 188 | (dst / "cert9.db").unlink(missing_ok=True) 189 | (dst / "key4.db").unlink(missing_ok=True) 190 | (dst / "pkcs11.txt").unlink(missing_ok=True) 191 | try: 192 | check_output( 193 | (certutil, "-N", "-d", str(dst), "--empty-password"), 194 | stderr=STDOUT, 195 | timeout=60, 196 | ) 197 | except (CalledProcessError, TimeoutExpired) as exc: 198 | LOG.error(str(exc)) 199 | if exc.output: 200 | LOG.error(exc.output.decode().strip()) 201 | raise RuntimeError("Init cert db: certutil error") from None 202 | 203 | @staticmethod 204 | def install_cert(dst: Path, cert_file: Path, certutil: str) -> None: 205 | """Install certificate in the database. 206 | 207 | Args: 208 | dst: Directory containing database. 209 | cert_file: Certificate file to install. 210 | certutil: certutil binary. 211 | 212 | Returns: 213 | None 214 | """ 215 | LOG.debug("installing certificate '%s' with '%s'", cert_file, certutil) 216 | try: 217 | check_output( 218 | ( 219 | certutil, 220 | "-A", 221 | "-d", 222 | str(dst), 223 | "-t", 224 | "CT,,", 225 | "-n", 226 | "test cert", 227 | "-i", 228 | str(cert_file), 229 | ), 230 | stderr=STDOUT, 231 | timeout=60, 232 | ) 233 | except (CalledProcessError, TimeoutExpired) as exc: 234 | LOG.error(str(exc)) 235 | if exc.output: 236 | LOG.error(exc.output.decode().strip()) 237 | raise RuntimeError("Install cert: certutil error") from None 238 | 239 | @property 240 | def invalid_prefs(self) -> Path | None: 241 | """Path to Invalidprefs.js if it exists. 242 | 243 | Args: 244 | None 245 | 246 | Returns: 247 | Invalidprefs.js or None if it does not exist. 248 | 249 | """ 250 | if self.path and (self.path / "Invalidprefs.js").is_file(): 251 | return self.path / "Invalidprefs.js" 252 | return None 253 | 254 | def remove(self) -> None: 255 | """Remove the profile from the filesystem. 
class PuppetLogger:  # pylint: disable=missing-docstring
    # chunk size (in bytes) used when duplicating log files
    BUF_SIZE = 0x10000
    # directory name used to store rr traces
    PATH_RR = "rr-traces"
    PREFIX_SAN = f"ffp_asan_{getpid()}.log"
    PREFIX_VALGRIND = f"valgrind.{getpid()}"

    __slots__ = ("_base", "_logs", "_rr_packed", "closed", "path", "watching")

    def __init__(self, base_path: str | None = None) -> None:
        self.closed = True
        self.path: Path | None = None
        self.watching: dict[str, int] = {}
        self._base = base_path
        self._logs: dict[str, IO[bytes]] = {}
        self._rr_packed = False
        self.reset()

    def __enter__(self) -> PuppetLogger:
        return self

    def __exit__(self, *exc: object) -> None:
        self.clean_up()

    def add_log(self, log_id: str, logfp: IO[bytes] | None = None) -> IO[bytes]:
        """Register a log file with the log manager.

        Args:
            log_id: ID of the log to add.
            logfp: File object to use. If None a new log file will be created.

        Returns:
            Newly added log file.
        """
        assert not self.closed
        assert log_id not in self._logs
        if logfp is None:
            logfp = PuppetLogger.open_unique(
                base_dir=str(self.path) if self.path else None
            )
        self._logs[log_id] = logfp
        return logfp

    def add_path(self, name: str) -> Path:
        """Create a directory that can be used as temporary storage for
        miscellaneous items such as additional debugger output.

        Args:
            name: Name of directory to create.

        Returns:
            Path of newly created directory.
        """
        assert not self.closed
        assert self.path is not None
        dir_path = self.path / name
        LOG.debug("adding path '%s' as '%s'", name, dir_path)
        dir_path.mkdir()
        return dir_path

    def available_logs(self) -> frozenset[str]:
        """IDs for the available logs.

        Args:
            None

        Returns:
            All available log IDs.
        """
        return frozenset(self._logs)

    def clean_up(self, ignore_errors: bool = False) -> None:
        """Remove all managed log files from disk and reset state.

        Args:
            ignore_errors: Ignore errors triggered by removing files and directories.

        Returns:
            None
        """
        if not self.closed:
            self.close()
        if self.path is not None:
            try:
                if self.path.exists():
                    rmtree(self.path, ignore_errors=ignore_errors)
            except OSError:
                # report which files are still in use before propagating
                warn_open(self.path)
                raise
        self._logs.clear()
        self.path = None

    def clone_log(
        self,
        log_id: str,
        offset: int = 0,
        target_file: str | None = None,
    ) -> Path | None:
        """Create a copy of the specified log.

        Args:
            log_id: ID of the log to clone.
            offset: Where to begin reading the log from.
            target_file: The log contents will be saved to target_file.

        Returns:
            Name of the file containing the cloned log or None on failure.
        """
        log_fp = self.get_fp(log_id)
        if log_fp is None:
            return None
        if not log_fp.closed:
            log_fp.flush()
        if target_file is None:
            # create a uniquely named destination file
            with PuppetLogger.open_unique(base_dir=self._base) as tmp_fp:
                target_file = tmp_fp.name
        with open(log_fp.name, "rb") as src, open(target_file, "wb") as dst:
            if offset:
                src.seek(offset)
            copyfileobj(src, dst, self.BUF_SIZE)
        return Path(target_file)

    def close(self) -> None:
        """Close all open file objects.

        Args:
            None

        Returns:
            None
        """
        for log_file in self._logs.values():
            if not log_file.closed:
                log_file.close()
        self.closed = True

    @property
    def files(self) -> Generator[str]:
        """File names of log files.

        Args:
            None

        Yields:
            File names of log files.
        """
        yield from (x.name for x in self._logs.values() if x.name is not None)

    def get_fp(self, log_id: str) -> IO[bytes] | None:
        """Lookup log file object by ID.

        Args:
            log_id: ID of the log (stderr, stdout... etc).

        Returns:
            The file matching given ID otherwise None.
        """
        log_fp = self._logs.get(log_id)
        if log_fp is None:
            LOG.warning("log_id '%s' does not exist", log_id)
            return None
        if log_fp.name is None or not isfile(log_fp.name):
            raise FileNotFoundError(f"Log file not found: {log_fp.name}")
        return log_fp

    def log_length(self, log_id: str) -> int | None:
        """Get the length of the specified log.

        Args:
            log_id: ID of the log to measure.

        Returns:
            Length of the specified log in bytes or None if the log does not exist.
        """
        log_fp = self.get_fp(log_id)
        if log_fp is None:
            return None
        if not log_fp.closed:
            # make sure buffered data is on disk before measuring
            log_fp.flush()
        return stat(log_fp.name).st_size

    @staticmethod
    def open_unique(base_dir: str | None = None, mode: str = "wb") -> IO[bytes]:
        """Create and open a unique file.

        Args:
            base_dir: This is where the file will be created. If None is
                      passed the system default will be used.
            mode: File mode. See documentation for open().

        Returns:
            An open file object.
        """
        # delete=False: the file must outlive this file object
        return NamedTemporaryFile(
            mode, delete=False, dir=base_dir, prefix="ffp_log_", suffix=".txt"
        )

    def reset(self) -> None:
        """Reset logger for reuse.

        Args:
            None

        Returns:
            None
        """
        self.clean_up()
        self.closed = False
        self._rr_packed = False
        self.path = Path(mkdtemp(prefix="ffplogs_", dir=self._base))

    def save_logs(
        self,
        dest: Path,
        logs_only: bool = False,
        bin_path: Path | None = None,
        rr_pack: bool = False,
    ) -> None:
        """The browser logs will be saved to dest. This can only be called
        after close() has been called.

        Args:
            dest: Destination path for log data. Existing files will be overwritten.
            logs_only: Do not include other data, including debugger output files.
            bin_path: Firefox binary.
            rr_pack: Pack rr trace if required.

        Returns:
            None
        """
        assert self.closed, "save_logs() cannot be called before calling close()"
        assert self.path is not None

        # copy log to location specified by dest
        dest.mkdir(parents=True, exist_ok=True)
        for log_id, log_file in self._logs.items():
            copy2(log_file.name, dest / f"log_{log_id}.txt")

        if not logs_only:
            rr_trace = self.path / self.PATH_RR / "latest-trace"
            if rr_trace.is_dir():
                # check logs for rr related issues
                # OSError: in case the file does not exist
                # ValueError: cannot mmap an empty file on Windows
                with (
                    suppress(OSError, ValueError),
                    (dest / "log_stderr.txt").open("rb") as stderr_log,
                    mmap(stderr_log.fileno(), 0, access=ACCESS_READ) as stderr_mm,
                ):
                    if stderr_mm.find(b"=== Start rr backtrace:") != -1:
                        LOG.warning("rr traceback detected in stderr log")
                if rr_pack and not self._rr_packed:
                    LOG.debug("packing rr trace")
                    try:
                        check_output(["rr", "pack", str(rr_trace)], stderr=STDOUT)
                        self._rr_packed = True
                    except (OSError, CalledProcessError):
                        LOG.warning("Error calling 'rr pack %s'", rr_trace)
                # copy `taskcluster-build-task` for use with Pernosco if available
                if bin_path is not None:
                    task_info = bin_path / "taskcluster-build-task"
                    if task_info.is_file():
                        moz_rr = rr_trace / "files.mozilla"
                        moz_rr.mkdir(parents=True, exist_ok=True)
                        copy2(task_info, moz_rr)
                        LOG.debug("Copied 'taskcluster-build-task' to trace")

            # copy all directories (debugger output, etc.) into dest
            for entry in self.path.iterdir():
                if entry.is_dir():
                    copytree(entry, dest / entry.name, symlinks=True)
#!/usr/bin/env python
"""fake firefox"""

import sys
from argparse import ArgumentParser
from enum import IntEnum, auto, unique
from pathlib import Path
from time import sleep
from urllib.error import URLError
from urllib.request import urlopen

# seconds to idle before a normal exit (the harness terminates us first)
EXIT_DELAY = 45


@unique
class Mode(IntEnum):
    """Available testing modes"""

    BIG_LOG = auto()
    EXIT_CODE = auto()
    INVALID_JS = auto()
    MEMORY = auto()
    NONE = auto()
    SOFT_ASSERT = auto()


def main() -> int:
    """Fake Firefox for testing"""
    parser = ArgumentParser(prog="testff", description="Fake Firefox for testing")
    parser.add_argument("url")
    parser.add_argument("-headless", action="store_true", help="ignored")
    parser.add_argument("-marionette", nargs="?", type=int, help="ignored")
    parser.add_argument("-new-instance", action="store_true", help="ignored")
    parser.add_argument("-no-deelevate", action="store_true", help="ignored")
    parser.add_argument("-wait-for-browser", action="store_true", help="ignored")
    parser.add_argument("-profile", type=Path, required=True)
    args = parser.parse_args()

    # scan prefs.js for fftest_* directives that select the test mode
    directives = {
        "fftest_memory": Mode.MEMORY,
        "fftest_soft_assert": Mode.SOFT_ASSERT,
        "fftest_invalid_js": Mode.INVALID_JS,
        "fftest_big_log": Mode.BIG_LOG,
    }
    exit_code = 0
    mode = Mode.NONE
    with (args.profile / "prefs.js").open() as prefs_js:
        for entry in prefs_js:
            # real prefs and comments are ignored
            if entry.startswith("user_pref") or entry.startswith("#"):
                continue
            if entry.startswith("/"):
                token = entry.lstrip("/").strip()
                if token in directives:
                    mode = directives[token]
                elif token.startswith("fftest_exit_code_"):
                    mode = Mode.EXIT_CODE
                    exit_code = int(token.rpartition("fftest_exit_code_")[-1])
                # don't worry about unknown values
            elif entry.strip():
                raise RuntimeError(f"unknown value in prefs.js: {entry}")

    if mode == Mode.INVALID_JS:
        (args.profile / "Invalidprefs.js").write_text("bad!")

    target_url = None
    try:
        # pylint: disable=consider-using-with
        conn = urlopen(args.url)
    except URLError as req_err:
        # can't redirect to file:// from http://
        # pylint: disable=consider-using-with
        conn = urlopen(str(req_err.reason).split("'")[1])
    try:
        target_url = conn.geturl()
        if target_url == args.url:
            target_url = None
        sys.stdout.write(conn.read().decode())
        sys.stdout.write("\n")
        sys.stdout.flush()
    finally:
        conn.close()

    sys.stdout.write(f"url: {target_url!r}\n")
    sys.stdout.flush()

    if mode == Mode.MEMORY:
        print("simulating high memory usage", flush=True)
        _ = ["A" * 1024 * 1024 for _ in range(200)]
    elif mode == Mode.SOFT_ASSERT:
        print("simulating soft assertion", flush=True)
        sys.stderr.write("A" * 512 * 1024)
        sys.stderr.write("\n###!!! ASSERTION: test\n\nblah...\n")
        sys.stderr.flush()
    elif mode == Mode.BIG_LOG:
        print("simulating big logs")
        filler = "A" * (512 * 1024)  # 512KB
        for _ in range(25):
            sys.stdout.write(filler)
            sys.stderr.write(filler)
        sys.stdout.flush()
        sys.stderr.flush()
    elif mode == Mode.EXIT_CODE:
        print(f"exit code test ({exit_code})", flush=True)
        return exit_code

    print(f"running... (sleep {EXIT_DELAY})", flush=True)
    # wait before closing (should be terminated before elapse)
    sleep(EXIT_DELAY)
    print("exiting normally", flush=True)
    return 0


if __name__ == "__main__":
    sys.exit(main())
| None = None 34 | start = perf_counter() 35 | try: 36 | pid = getpid() 37 | signal.signal(signal.SIGINT, handle_signal) 38 | signal.signal(signal.SIGTERM, handle_signal) 39 | assert args.procs >= 1, f"procs must be >= 1 ({pid})" 40 | assert args.duration >= 1, f"duration must be >= 1 ({pid})" 41 | assert not args.sync.exists(), f"sync file should not exist ({pid})" 42 | 43 | cmd = [ 44 | executable, 45 | __file__, 46 | str(args.procs), 47 | str(args.sync), 48 | "--parent-pid", 49 | str(pid), 50 | "--duration", 51 | str(args.duration), 52 | ] 53 | if args.no_deelevate and not args.launcher_is_parent: 54 | assert not args.contentproc, f"-contentproc not expected! ({pid})" 55 | LOG.info("Launcher process") 56 | # pylint: disable=consider-using-with 57 | child_procs = (Popen(cmd),) 58 | elif args.contentproc: 59 | LOG.info("Content process (ppid: %r)", args.parent_pid) 60 | with socket(AF_INET, SOCK_STREAM) as conn: 61 | conn.connect(("127.0.0.1", args.port)) 62 | # don't hang forever 63 | conn.settimeout(SOCKET_TIMEOUT) 64 | conn.sendall(str(pid).encode()) 65 | else: 66 | assert not args.no_deelevate or args.launcher_is_parent 67 | LOG.info("Parent process (ppid: %r)", args.parent_pid) 68 | with socket(AF_INET, SOCK_STREAM) as srv: 69 | srv.settimeout(SOCKET_TIMEOUT) 70 | srv.bind(("127.0.0.1", 0)) 71 | srv.listen() 72 | cmd.append("--port") 73 | cmd.append(str(srv.getsockname()[1])) 74 | cmd.append("-contentproc") 75 | # pylint: disable=consider-using-with 76 | child_procs = tuple(Popen(cmd) for _ in range(args.procs)) 77 | # wait for processes to launch 78 | for _ in range(args.procs): 79 | conn, _ = srv.accept() 80 | # don't hang forever 81 | conn.settimeout(SOCKET_TIMEOUT) 82 | with conn: 83 | conn.recv(64) 84 | LOG.info("Tree running (%0.03f)", perf_counter() - start) 85 | args.sync.touch() 86 | 87 | # wait loop 88 | while not SHUTDOWN and perf_counter() - start < args.duration: 89 | if child_procs and all(x.poll() is not None for x in child_procs): 90 | break 
91 | sleep(0.1) 92 | 93 | except KeyboardInterrupt: 94 | pass 95 | 96 | finally: 97 | if not args.contentproc: 98 | args.sync.unlink(missing_ok=True) 99 | if child_procs: 100 | for proc in child_procs: 101 | if proc.poll() is None: 102 | proc.terminate() 103 | for proc in child_procs: 104 | proc.wait(timeout=10) 105 | LOG.info("Exiting, runtime %0.3fs", perf_counter() - start) 106 | 107 | return 0 108 | 109 | 110 | if __name__ == "__main__": 111 | parser = ArgumentParser() 112 | parser.add_argument("procs", type=int, help="number of content processes") 113 | parser.add_argument("sync", type=Path, help="used to indicate tree readiness") 114 | parser.add_argument("--duration", type=int, default=60) 115 | parser.add_argument("--launcher-is-parent", action="store_true") 116 | parser.add_argument("--parent-pid", type=int) 117 | parser.add_argument("--port", type=int) 118 | parser.add_argument("-contentproc", action="store_true", help="fake browser arg") 119 | parser.add_argument("-no-deelevate", action="store_true", help="fake browser arg") 120 | 121 | basicConfig( 122 | datefmt="%H:%M:%S", 123 | format="[%(asctime)s.%(msecs)03d][%(process)d] %(message)s", 124 | level=DEBUG, 125 | ) 126 | 127 | raise SystemExit(main(parser.parse_args())) 128 | -------------------------------------------------------------------------------- /src/ffpuppet/sanitizer_util.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this 3 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """ffpuppet sanitizer utilities""" 5 | 6 | from __future__ import annotations 7 | 8 | from logging import getLogger 9 | from os.path import exists 10 | from re import compile as re_compile 11 | from typing import TYPE_CHECKING 12 | 13 | if TYPE_CHECKING: 14 | from collections.abc import Generator, Sequence 15 | 16 | LOG = getLogger(__name__) 17 | 18 | __author__ = "Tyson Smith" 19 | 20 | 21 | class SanitizerOptions: 22 | """Used to parse, load and manage sanitizer options.""" 23 | 24 | re_delim = re_compile(r":(?![\\|/])") 25 | 26 | __slots__ = ("_options",) 27 | 28 | def __init__(self, options: str | None = None) -> None: 29 | """ 30 | Args: 31 | options: Sanitizer options string to load. 32 | """ 33 | self._options: dict[str, str] = {} 34 | if options is not None: 35 | self.load_options(options) 36 | 37 | def __bool__(self) -> bool: 38 | return any(self._options) 39 | 40 | def __contains__(self, item: str) -> bool: 41 | return item in self._options 42 | 43 | def __iter__(self) -> Generator[Sequence[str]]: 44 | yield from self._options.items() 45 | 46 | def __len__(self) -> int: 47 | return len(self._options) 48 | 49 | def __str__(self) -> str: 50 | return ":".join(f"{k}={v}" for k, v in self) 51 | 52 | def add(self, flag: str, value: str, overwrite: bool = False) -> None: 53 | """Add sanitizer option flag. 54 | 55 | Args: 56 | flag: Sanitizer option flag to set. 57 | value: Value to use. Values containing ':' or ' ' must be quoted. 58 | overwrite: Overwrite existing value. 59 | 60 | Returns: 61 | None 62 | """ 63 | if not flag: 64 | raise ValueError("Flag name cannot be empty") 65 | if (":" in value or " " in value) and not self.is_quoted(value): 66 | raise ValueError(f"'{value}' ({flag}) must be quoted") 67 | if flag not in self._options or overwrite: 68 | self._options[flag] = value 69 | 70 | def check_path(self, flag: str) -> bool: 71 | """Check path exists on disk. 72 | Only indicate failure if flag exists and path does not. 
73 | 74 | Args: 75 | flag: Flags to set. 76 | 77 | Returns: 78 | False if the flag exists and the path does not otherwise True. 79 | """ 80 | if flag in self._options: 81 | value = self._options[flag] 82 | if self.is_quoted(value): 83 | value = value[1:-1] 84 | return exists(value) 85 | return True 86 | 87 | def get(self, flag: str) -> str | None: 88 | """Get sanitizer flag. 89 | 90 | Args: 91 | flag: Flags to retrieve. 92 | 93 | Returns: 94 | Value of given flag or None 95 | """ 96 | return self._options.get(flag) 97 | 98 | @staticmethod 99 | def is_quoted(token: str) -> bool: 100 | """Check if token is quoted. 101 | 102 | Args: 103 | token: Value to check. 104 | 105 | Returns: 106 | True if token is quoted otherwise False. 107 | """ 108 | return len(token) > 1 and token[0] == token[-1] and token[0] in ('"', "'") 109 | 110 | def load_options(self, options: str | None) -> None: 111 | """Load flags from *SAN_OPTIONS in env. 112 | 113 | Args: 114 | options: Colon separated list of `flag=value` pairs. 115 | 116 | Returns: 117 | None 118 | """ 119 | self._options.clear() 120 | if options: 121 | for option in self.re_delim.split(options): 122 | try: 123 | self.add(*option.split("=", maxsplit=1)) 124 | except TypeError: # noqa: PERF203 125 | LOG.warning("Malformed sanitizer option %r", option) 126 | 127 | def pop(self, flag: str) -> str | None: 128 | """Pop sanitizer flag. 129 | 130 | Args: 131 | flag: Flags to retrieve. 132 | 133 | Returns: 134 | Value of given flag or None 135 | """ 136 | return self._options.pop(flag, None) 137 | -------------------------------------------------------------------------------- /src/ffpuppet/test_bootstrapper.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """ffpuppet bootstrapper tests""" 5 | # pylint: disable=protected-access 6 | 7 | # as of python 3.10 socket.timeout was made an alias of TimeoutError 8 | # pylint: disable=ungrouped-imports 9 | from socket import timeout as socket_timeout # isort: skip 10 | 11 | from itertools import repeat 12 | from socket import socket 13 | from threading import Thread 14 | 15 | from pytest import mark, raises 16 | 17 | from .bootstrapper import Bootstrapper 18 | from .exceptions import BrowserTerminatedError, BrowserTimeoutError, LaunchError 19 | 20 | 21 | def test_bootstrapper_01(): 22 | """test Bootstrapper.create()""" 23 | with Bootstrapper.create() as bts: 24 | assert bts._socket is not None 25 | assert bts.location.startswith("http://127.0.0.1:") 26 | assert int(bts.location.rsplit(":", maxsplit=1)[-1]) >= 1024 27 | assert bts.port >= 1024 28 | assert bts.port not in Bootstrapper.BLOCKED_PORTS 29 | bts.close() 30 | 31 | 32 | @mark.parametrize( 33 | "exc, msg, continue_cb", 34 | [ 35 | # test failure 36 | ( 37 | BrowserTerminatedError, 38 | "Failure waiting for browser connection", 39 | lambda: False, 40 | ), 41 | # test timeout 42 | ( 43 | BrowserTimeoutError, 44 | "Timeout waiting for browser connection", 45 | lambda: True, 46 | ), 47 | ], 48 | ) 49 | def test_bootstrapper_02(mocker, exc, msg, continue_cb): 50 | """test Bootstrapper.wait() failure waiting for initial connection""" 51 | mocker.patch("ffpuppet.bootstrapper.select", return_value=([], None, None)) 52 | mocker.patch("ffpuppet.bootstrapper.perf_counter", side_effect=(1, 1, 2, 3)) 53 | fake_sock = mocker.MagicMock(spec_set=socket) 54 | with Bootstrapper(fake_sock) as bts: 55 | with raises(exc, match=msg): 56 | bts.wait(continue_cb, timeout=2) 57 | assert fake_sock.accept.call_count == 0 58 | 59 | 60 | def test_bootstrapper_03(mocker): 61 | """test Bootstrapper.wait() failure waiting for request""" 62 | fake_sock = mocker.MagicMock(spec_set=socket) 63 | fake_conn = mocker.Mock(spec_set=socket) 64 | 
fake_conn.recv.side_effect = socket_timeout 65 | fake_sock.accept.return_value = (fake_conn, None) 66 | mocker.patch("ffpuppet.bootstrapper.select", return_value=([fake_sock], None, None)) 67 | with Bootstrapper(fake_sock) as bts: 68 | # test failure 69 | with raises(BrowserTerminatedError, match="Failure waiting for request"): 70 | bts.wait(lambda: False) 71 | assert fake_conn.recv.call_count == 1 72 | assert fake_conn.close.call_count == 1 73 | fake_conn.reset_mock() 74 | # test timeout 75 | mocker.patch("ffpuppet.bootstrapper.perf_counter", side_effect=(1, 1, 1, 1, 2)) 76 | with raises(BrowserTimeoutError, match="Timeout waiting for request"): 77 | bts.wait(lambda: True, timeout=0.1) 78 | # should call recv() at least 2x for positive and negative timeout check 79 | assert fake_conn.recv.call_count > 1 80 | assert fake_conn.close.call_count == 1 81 | 82 | 83 | def test_bootstrapper_04(mocker): 84 | """test Bootstrapper.wait() failure sending response""" 85 | fake_sock = mocker.MagicMock(spec_set=socket) 86 | fake_conn = mocker.Mock(spec_set=socket) 87 | fake_conn.recv.return_value = "A" 88 | fake_conn.sendall.side_effect = socket_timeout 89 | fake_sock.accept.return_value = (fake_conn, None) 90 | mocker.patch("ffpuppet.bootstrapper.select", return_value=([fake_sock], None, None)) 91 | with Bootstrapper(fake_sock) as bts: 92 | # test timeout 93 | with raises(BrowserTimeoutError, match="Timeout sending response"): 94 | bts.wait(lambda: True) 95 | assert fake_conn.recv.call_count == 1 96 | assert fake_conn.sendall.call_count == 1 97 | assert fake_conn.close.call_count == 1 98 | fake_conn.reset_mock() 99 | # test failure 100 | with raises(BrowserTerminatedError, match="Failure during browser startup"): 101 | bts.wait(lambda: False) 102 | assert fake_conn.recv.call_count == 1 103 | assert fake_conn.sendall.call_count == 1 104 | assert fake_conn.close.call_count == 1 105 | 106 | 107 | def test_bootstrapper_05(mocker): 108 | """test Bootstrapper.wait() target crashed""" 
109 | fake_sock = mocker.MagicMock(spec_set=socket) 110 | fake_conn = mocker.Mock(spec_set=socket) 111 | fake_conn.recv.return_value = "foo" 112 | fake_sock.accept.return_value = (fake_conn, None) 113 | mocker.patch("ffpuppet.bootstrapper.select", return_value=([fake_sock], None, None)) 114 | with ( 115 | Bootstrapper(fake_sock) as bts, 116 | raises(BrowserTerminatedError, match="Failure during browser startup"), 117 | ): 118 | bts.wait(lambda: False) 119 | assert fake_conn.close.call_count == 1 120 | 121 | 122 | @mark.parametrize( 123 | "redirect, recv, closed", 124 | [ 125 | # normal startup 126 | (None, ("foo",), 1), 127 | # with a redirect url 128 | ("http://127.0.0.1:9999/test.html", ("foo",), 1), 129 | # request size matches buffer size 130 | (None, ("A" * Bootstrapper.BUF_SIZE, socket_timeout), 1), 131 | # large request 132 | (None, ("A" * Bootstrapper.BUF_SIZE, "foo"), 1), 133 | # slow startup 134 | (None, (socket_timeout, socket_timeout, "foo"), 1), 135 | # slow failed startup with retry 136 | (None, (socket_timeout, "", "foo"), 2), 137 | ], 138 | ) 139 | def test_bootstrapper_06(mocker, redirect, recv, closed): 140 | """test Bootstrapper.wait()""" 141 | fake_sock = mocker.MagicMock(spec_set=socket) 142 | fake_conn = mocker.Mock(spec_set=socket) 143 | fake_conn.recv.side_effect = recv 144 | fake_sock.accept.return_value = (fake_conn, None) 145 | mocker.patch("ffpuppet.bootstrapper.select", return_value=([fake_sock], None, None)) 146 | with Bootstrapper(fake_sock) as bts: 147 | bts.wait(lambda: True, url=redirect) 148 | assert fake_conn.close.call_count == closed 149 | assert fake_conn.recv.call_count == len(recv) 150 | assert fake_conn.sendall.call_count == 1 151 | 152 | 153 | def test_bootstrapper_07(): 154 | """test Bootstrapper.wait() with a fake browser""" 155 | 156 | def _fake_browser(port, payload_size=5120): 157 | conn = socket() 158 | # 50 x 0.1 = 5 seconds 159 | conn.settimeout(0.1) 160 | # open connection 161 | for attempt in reversed(range(50)): 
162 | try: 163 | conn.connect(("127.0.0.1", port)) 164 | break 165 | except socket_timeout: 166 | if not attempt: 167 | raise 168 | # send request and receive response 169 | try: 170 | conn.settimeout(10) 171 | conn.sendall(b"A" * payload_size) 172 | conn.send(b"") 173 | conn.recv(8192) 174 | finally: 175 | conn.close() 176 | 177 | with Bootstrapper.create() as bts: 178 | browser_thread = Thread(target=_fake_browser, args=(bts.port,)) 179 | try: 180 | browser_thread.start() 181 | bts.wait(lambda: True, timeout=10) 182 | finally: 183 | browser_thread.join() 184 | 185 | 186 | @mark.parametrize( 187 | "bind, attempts", 188 | [ 189 | # failed to bind (OSError) 190 | ((OSError(0, "foo1"),), 1), 191 | # failed to bind (PermissionError) - multiple attempts 192 | (repeat(PermissionError(10013, "foo2"), 4), 4), 193 | ], 194 | ) 195 | def test_bootstrapper_08(mocker, bind, attempts): 196 | """test Bootstrapper.create_socket() - failures""" 197 | mocker.patch("ffpuppet.bootstrapper.sleep", autospec=True) 198 | fake_sock = mocker.MagicMock(spec_set=socket) 199 | fake_sock.bind.side_effect = bind 200 | mocker.patch("ffpuppet.bootstrapper.select", return_value=([fake_sock], None, None)) 201 | mocker.patch("ffpuppet.bootstrapper.socket", return_value=fake_sock) 202 | assert Bootstrapper.create_socket(attempts=attempts) is None 203 | assert fake_sock.bind.call_count == attempts 204 | assert fake_sock.close.call_count == attempts 205 | 206 | 207 | def test_bootstrapper_09(mocker): 208 | """test Bootstrapper() - blocked ports""" 209 | fake_sock = mocker.MagicMock(spec_set=socket) 210 | fake_sock.getsockname.side_effect = ( 211 | (None, next(iter(Bootstrapper.BLOCKED_PORTS))), 212 | (None, 12345), 213 | ) 214 | mocker.patch("ffpuppet.bootstrapper.socket", return_value=fake_sock) 215 | with Bootstrapper.create(attempts=2): 216 | pass 217 | assert fake_sock.close.call_count == 2 218 | 219 | 220 | def test_bootstrapper_10(mocker): 221 | """test Bootstrapper.create() - failure""" 222 | 
mocker.patch("ffpuppet.bootstrapper.Bootstrapper.create_socket", return_value=None) 223 | with raises(LaunchError), Bootstrapper.create(): 224 | pass 225 | 226 | 227 | @mark.parametrize("value", [123, 5555]) 228 | def test_bootstrapper_11(value): 229 | """test Bootstrapper.create_socket() - unusable ports""" 230 | assert Bootstrapper.create_socket(blocked=[5555], port=value) is None 231 | 232 | 233 | @mark.parametrize( 234 | "value, result", 235 | [ 236 | (0, True), 237 | (1337, True), 238 | (32768, True), 239 | (-1, False), 240 | (1, False), 241 | (1023, False), 242 | (65536, False), 243 | ], 244 | ) 245 | def test_bootstrapper_12(value, result): 246 | """test Bootstrapper.check_port()""" 247 | assert Bootstrapper.check_port(value) == result 248 | -------------------------------------------------------------------------------- /src/ffpuppet/test_checks.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """checks.py tests""" 5 | 6 | from os import getpid 7 | from re import compile as re_compile 8 | 9 | from psutil import Process 10 | 11 | from .checks import CheckLogContents, CheckLogSize, CheckMemoryUsage 12 | 13 | 14 | def test_check_01(mocker, tmp_path): 15 | """test CheckLogContents()""" 16 | test_log = tmp_path / "test.log" 17 | # input contains token 18 | test_log.write_bytes(b"\xf0\x9f\x91\x8dblah\nfoo\ntest\n123") 19 | checker = CheckLogContents([str(test_log)], [re_compile("test")]) 20 | assert checker.check() 21 | with test_log.open("wb") as lfp: 22 | checker.dump_log(lfp) 23 | assert lfp.tell() 24 | # input does not contains token 25 | checker = CheckLogContents([str(test_log)], [re_compile("no_token")]) 26 | assert not checker.check() 27 | # check a 2nd time 28 | assert not checker.check() 29 | with test_log.open("wb") as lfp: 30 | checker.dump_log(lfp) 31 | assert not lfp.tell() 32 | # log does not exist 33 | checker = CheckLogContents(["missing_log"], [re_compile("no_token")]) 34 | assert not checker.check() 35 | with test_log.open("wb") as lfp: 36 | checker.dump_log(lfp) 37 | assert not lfp.tell() 38 | # input exceeds chunk_size 39 | with test_log.open("w") as lfp_txt: 40 | lfp_txt.write("A" * (CheckLogContents.buf_limit - 2)) 41 | lfp_txt.write("test123") 42 | lfp_txt.write("A" * 20) 43 | checker = CheckLogContents([str(test_log)], [re_compile("test123")]) 44 | mocker.patch( 45 | "ffpuppet.checks.CheckLogContents.chunk_size", CheckLogContents.buf_limit 46 | ) 47 | assert not checker.check() 48 | assert checker.check() 49 | with test_log.open("wb") as lfp: 50 | checker.dump_log(lfp) 51 | assert lfp.tell() 52 | 53 | 54 | def test_check_02(tmp_path): 55 | """test CheckLogSize()""" 56 | stde = tmp_path / "stderr" 57 | stde.write_text("test\n") 58 | stdo = tmp_path / "stdout" 59 | stdo.write_text("test\n") 60 | # exceed limit 61 | checker = CheckLogSize(1, str(stde), str(stdo)) 62 | assert checker.check() 63 | with (tmp_path / "log").open("wb") as 
lfp: 64 | checker.dump_log(lfp) 65 | assert lfp.tell() 66 | # don't exceed limit 67 | checker = CheckLogSize(12, str(stde), str(stdo)) 68 | assert not checker.check() 69 | with (tmp_path / "log").open("wb") as lfp: 70 | checker.dump_log(lfp) 71 | assert not lfp.tell() 72 | 73 | 74 | def test_check_03(tmp_path): 75 | """test CheckMemoryUsage()""" 76 | 77 | def get_procs(): 78 | yield Process(getpid()) 79 | 80 | checker = CheckMemoryUsage(getpid(), 300 * 1024 * 1024, get_procs) 81 | # don't exceed limit 82 | assert not checker.check() 83 | with (tmp_path / "log").open("wb") as lfp: 84 | checker.dump_log(lfp) 85 | assert not lfp.tell() 86 | checker = CheckMemoryUsage(getpid(), 10, get_procs) 87 | # exceed limit 88 | assert checker.check() 89 | with (tmp_path / "log").open("wb") as lfp: 90 | checker.dump_log(lfp) 91 | assert lfp.tell() 92 | -------------------------------------------------------------------------------- /src/ffpuppet/test_display.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """display.py tests""" 5 | 6 | from platform import system 7 | 8 | from pytest import mark, raises 9 | 10 | from .display import DISPLAYS, Display, DisplayMode, HeadlessDisplay, XvfbDisplay 11 | 12 | 13 | @mark.parametrize("mode", tuple(x for x in DisplayMode)) 14 | def test_displays(mocker, mode): 15 | """test Displays()""" 16 | if system() == "Linux": 17 | mocker.patch("ffpuppet.display.Xvfb", autospec=True) 18 | display = DISPLAYS[mode]() 19 | assert display 20 | try: 21 | if mode.name == "DEFAULT": 22 | assert isinstance(display, Display) 23 | elif mode.name == "HEADLESS": 24 | assert isinstance(display, HeadlessDisplay) 25 | elif mode.name == "XVFB": 26 | assert isinstance(display, XvfbDisplay) 27 | else: 28 | raise AssertionError(f"Unknown DisplayMode: {mode.name}") 29 | finally: 30 | display.close() 31 | 32 | 33 | @mark.skipif(system() != "Linux", reason="Only supported on Linux") 34 | def test_xvfb_missing_deps(mocker): 35 | """test XvfbDisplay() missing deps""" 36 | mocker.patch("ffpuppet.display.Xvfb", side_effect=NameError("test")) 37 | with raises(NameError): 38 | XvfbDisplay() 39 | -------------------------------------------------------------------------------- /src/ffpuppet/test_helpers.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """ffpuppet helpers tests""" 5 | 6 | from os import getpid 7 | from pathlib import Path 8 | from subprocess import CalledProcessError 9 | 10 | from pytest import mark, raises 11 | 12 | from .helpers import ( 13 | CERTUTIL, 14 | LLVM_SYMBOLIZER, 15 | _configure_sanitizers, 16 | certutil_available, 17 | certutil_find, 18 | files_in_use, 19 | prepare_environment, 20 | wait_on_files, 21 | warn_open, 22 | ) 23 | from .sanitizer_util import SanitizerOptions 24 | 25 | 26 | def test_helpers_01(tmp_path): 27 | """test _configure_sanitizers()""" 28 | # test with empty environment 29 | env = _configure_sanitizers({}, tmp_path, "blah") 30 | assert "ASAN_OPTIONS" in env 31 | opts = SanitizerOptions(env["ASAN_OPTIONS"]) 32 | assert opts.get("external_symbolizer_path") is None 33 | assert opts.get("detect_leaks") == "false" 34 | assert opts.get("log_path") == "'blah'" 35 | assert "LSAN_OPTIONS" in env 36 | assert "UBSAN_OPTIONS" in env 37 | # test with presets environment 38 | env = _configure_sanitizers( 39 | { 40 | "ASAN_OPTIONS": "detect_leaks=true", 41 | "LSAN_OPTIONS": "a=1", 42 | "UBSAN_OPTIONS": "", 43 | }, 44 | tmp_path, 45 | "blah", 46 | ) 47 | assert "ASAN_OPTIONS" in env 48 | opts = SanitizerOptions(env["ASAN_OPTIONS"]) 49 | assert opts.get("detect_leaks") == "true" 50 | assert "LSAN_OPTIONS" in env 51 | assert "UBSAN_OPTIONS" in env 52 | opts = SanitizerOptions(env["UBSAN_OPTIONS"]) 53 | assert opts.get("print_stacktrace") is not None 54 | # test suppression file 55 | sup = tmp_path / "test.sup" 56 | sup.touch() 57 | env = _configure_sanitizers( 58 | {"ASAN_OPTIONS": f"suppressions='{sup}'"}, tmp_path, "blah" 59 | ) 60 | opts = SanitizerOptions(env["ASAN_OPTIONS"]) 61 | assert opts.get("suppressions") is not None 62 | # test overwrite log_path 63 | env = _configure_sanitizers( 64 | { 65 | "ASAN_OPTIONS": "log_path='overwrite'", 66 | "TSAN_OPTIONS": "log_path='overwrite'", 67 | "UBSAN_OPTIONS": "log_path='overwrite'", 68 | }, 69 | tmp_path, 70 | "blah", 71 | ) 72 | 
assert "ASAN_OPTIONS" in env 73 | opts = SanitizerOptions(env["ASAN_OPTIONS"]) 74 | assert opts.get("log_path") == "'blah'" 75 | assert "UBSAN_OPTIONS" in env 76 | opts = SanitizerOptions(env["UBSAN_OPTIONS"]) 77 | assert opts.get("log_path") == "'blah'" 78 | # test missing suppression file 79 | with raises(AssertionError, match="missing suppressions file"): 80 | _configure_sanitizers( 81 | {"ASAN_OPTIONS": "suppressions=not_a_file"}, tmp_path, "blah" 82 | ) 83 | # unquoted path containing ':' 84 | with raises(ValueError, match=r"\(strip_path_prefix\) must be quoted"): 85 | _configure_sanitizers( 86 | {"ASAN_OPTIONS": "strip_path_prefix=x:\\foo\\bar"}, tmp_path, "blah" 87 | ) 88 | # multiple options 89 | options = ( 90 | "opt1=1", 91 | "opt2=", 92 | "opt3=test", 93 | "opt4='x:\\foo'", 94 | 'opt5="z:/bar"', 95 | "opt6=''", 96 | "opt7='/with space/'", 97 | "opt8='x:\\with a space\\or two'", 98 | ) 99 | env = _configure_sanitizers({"ASAN_OPTIONS": ":".join(options)}, tmp_path, "blah") 100 | opts = SanitizerOptions(env["ASAN_OPTIONS"]) 101 | for key, value in (x.split(sep="=", maxsplit=1) for x in options): 102 | assert opts.get(key) == value 103 | # test using packaged llvm-symbolizer 104 | llvm_sym_packed = tmp_path / LLVM_SYMBOLIZER 105 | llvm_sym_packed.touch() 106 | env = _configure_sanitizers({"ASAN_OPTIONS": ":".join(options)}, tmp_path, "blah") 107 | opts = SanitizerOptions(env["ASAN_OPTIONS"]) 108 | assert opts.get("external_symbolizer_path").strip("'") == str(llvm_sym_packed) 109 | # test malformed option pair 110 | env = _configure_sanitizers({"ASAN_OPTIONS": "a=b=c:malformed"}, tmp_path, "blah") 111 | opts = SanitizerOptions(env["ASAN_OPTIONS"]) 112 | assert opts.get("a") == "b=c" 113 | assert "malformed" not in str(opts) 114 | # test ASAN_SYMBOLIZER_PATH 115 | (tmp_path / "a").mkdir() 116 | llvm_sym_a = tmp_path / "a" / "llvm-symbolizer" 117 | llvm_sym_a.touch() 118 | env = {"ASAN_SYMBOLIZER_PATH": str(llvm_sym_a)} 119 | env = _configure_sanitizers(env, 
tmp_path, "blah") 120 | opts = SanitizerOptions(env["ASAN_OPTIONS"]) 121 | assert opts.get("external_symbolizer_path").strip("'") == str(llvm_sym_a) 122 | # test ASAN_SYMBOLIZER_PATH override by ASAN_OPTIONS=external_symbolizer_path 123 | (tmp_path / "b").mkdir() 124 | llvm_sym_b = tmp_path / "b" / "llvm-symbolizer" 125 | llvm_sym_b.touch() 126 | env = { 127 | "ASAN_SYMBOLIZER_PATH": str(llvm_sym_a), 128 | "ASAN_OPTIONS": f"external_symbolizer_path='{llvm_sym_b}'", 129 | } 130 | env = _configure_sanitizers(env, tmp_path, "blah") 131 | opts = SanitizerOptions(env["ASAN_OPTIONS"]) 132 | assert opts.get("external_symbolizer_path").strip("'") == str(llvm_sym_b) 133 | 134 | 135 | def test_helpers_02(tmp_path): 136 | """test prepare_environment()""" 137 | env = prepare_environment(tmp_path, "blah") 138 | assert "ASAN_OPTIONS" in env 139 | assert "LSAN_OPTIONS" in env 140 | assert "UBSAN_OPTIONS" in env 141 | assert "RUST_BACKTRACE" in env 142 | assert "MOZ_CRASHREPORTER" in env 143 | 144 | 145 | def test_helpers_03(mocker, tmp_path): 146 | """test prepare_environment() using some predefined environment variables""" 147 | mocker.patch.dict( 148 | "ffpuppet.helpers.environ", 149 | { 150 | "MOZ_SKIA_DISABLE_ASSERTS": "0", 151 | "TEST_EXISTING_OVERWRITE": "0", 152 | "TEST_EXISTING_REMOVE": "1", 153 | "TEST_SECRET_TO_REMOVE": "1", 154 | }, 155 | ) 156 | pre = { 157 | "LSAN_OPTIONS": "lopt=newopt", 158 | "MOZ_GDB_SLEEP": "2", # update default 159 | "MOZ_SKIA_DISABLE_ASSERTS": "1", # existing optional 160 | "RUST_BACKTRACE": None, # remove default 161 | "TEST_FAKE": None, # remove non existing entry 162 | "TEST_VAR": "123", # add non existing entry 163 | "TEST_EXISTING_OVERWRITE": "1", 164 | "TEST_EXISTING_REMOVE": None, 165 | } 166 | env = prepare_environment(tmp_path, "blah", pre) 167 | assert "ASAN_OPTIONS" in env 168 | assert "LSAN_OPTIONS" in env 169 | assert "lopt=newopt" in env["LSAN_OPTIONS"].split(":") 170 | assert "max_leaks=1" in env["LSAN_OPTIONS"].split(":") 171 | 
assert "UBSAN_OPTIONS" in env 172 | assert env["TEST_VAR"] == "123" 173 | assert "MOZ_CRASHREPORTER" in env 174 | assert env["MOZ_GDB_SLEEP"] == "2" 175 | assert "RUST_BACKTRACE" not in env 176 | assert "TEST_FAKE" not in env 177 | assert "TEST_EXISTING_REMOVE" not in env 178 | assert env["MOZ_SKIA_DISABLE_ASSERTS"] == "0" 179 | assert env["TEST_EXISTING_OVERWRITE"] == "1" 180 | assert "TEST_SECRET_TO_REMOVE" not in env 181 | # MOZ_CRASHREPORTER should not be added if MOZ_CRASHREPORTER_DISABLE is set 182 | pre = {"MOZ_CRASHREPORTER_DISABLE": "1"} 183 | env = prepare_environment(tmp_path, "blah", pre) 184 | assert "MOZ_CRASHREPORTER" not in env 185 | 186 | 187 | def test_helpers_04(mocker, tmp_path): 188 | """test wait_on_files()""" 189 | fake_sleep = mocker.patch("ffpuppet.helpers.sleep", autospec=True) 190 | fake_time = mocker.patch("ffpuppet.helpers.perf_counter", autospec=True) 191 | t_file = tmp_path / "file.bin" 192 | t_file.touch() 193 | # test with open file (timeout) 194 | fake_time.side_effect = (1, 1, 2) 195 | with (tmp_path / "open.bin").open("w") as wait_fp: 196 | assert not wait_on_files([Path(wait_fp.name), t_file], timeout=0.1) 197 | assert fake_sleep.call_count == 1 198 | fake_sleep.reset_mock() 199 | # existing but closed file 200 | fake_time.side_effect = (1, 1) 201 | assert wait_on_files([t_file]) 202 | assert fake_sleep.call_count == 0 203 | # file that does not exist 204 | fake_time.side_effect = (1, 1) 205 | assert wait_on_files([Path("missing")]) 206 | assert fake_sleep.call_count == 0 207 | # empty file list 208 | fake_time.side_effect = (1, 1) 209 | assert wait_on_files([]) 210 | assert fake_sleep.call_count == 0 211 | 212 | 213 | def test_helpers_06(tmp_path): 214 | """test files_in_use()""" 215 | t_file = tmp_path / "file.bin" 216 | t_file.touch() 217 | # test with open file 218 | with (tmp_path / "file").open("w") as wait_fp: 219 | in_use = next(files_in_use([t_file, Path(wait_fp.name)])) 220 | assert in_use 221 | assert len(in_use) == 3 
222 | assert Path(wait_fp.name).samefile(in_use[0]) 223 | assert in_use[1] == getpid() 224 | assert isinstance(in_use[2], str) 225 | # existing but closed file 226 | assert not any(files_in_use([t_file])) 227 | # missing file 228 | assert not any(files_in_use([tmp_path / "missing_file"])) 229 | # no files 230 | assert not any(files_in_use([])) 231 | 232 | 233 | def test_helpers_07(tmp_path): 234 | """test warn_open()""" 235 | with (tmp_path / "file.bin").open("w") as _: 236 | warn_open(tmp_path) 237 | 238 | 239 | @mark.parametrize( 240 | "raised, result", 241 | [ 242 | (None, False), 243 | (OSError("test"), False), 244 | (CalledProcessError(1, "test"), False), 245 | ( 246 | CalledProcessError( 247 | 1, 248 | "test", 249 | output=b"certutil - Utility to manipulate NSS certificate databases", 250 | ), 251 | True, 252 | ), 253 | ], 254 | ) 255 | def test_certutil_available_01(mocker, raised, result): 256 | """test certutil_available()""" 257 | mocker.patch("ffpuppet.helpers.check_output", autospec=True, side_effect=raised) 258 | assert certutil_available(CERTUTIL) == result 259 | 260 | 261 | def test_certutil_find_01(tmp_path): 262 | """test certutil_find()""" 263 | # default 264 | assert certutil_find() == CERTUTIL 265 | # missing bundled certutil 266 | browser_bin = tmp_path / "browser" 267 | browser_bin.touch() 268 | assert certutil_find(browser_bin) == CERTUTIL 269 | # found bundled certutil 270 | certutil_bin = tmp_path / "bin" / CERTUTIL 271 | certutil_bin.parent.mkdir() 272 | certutil_bin.touch() 273 | assert certutil_find(browser_bin) == str(certutil_bin) 274 | -------------------------------------------------------------------------------- /src/ffpuppet/test_job_object.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """ffpuppet job object tests""" 5 | 6 | from platform import system 7 | from subprocess import PIPE, Popen 8 | from sys import executable 9 | from time import sleep 10 | 11 | from pytest import skip 12 | 13 | if system() == "Windows": 14 | from .core import CREATE_SUSPENDED 15 | from .job_object import config_job_object, resume_suspended_process 16 | else: 17 | skip("skipping windows-only tests", allow_module_level=True) 18 | 19 | 20 | def test_job_object_01(): 21 | """test config_job_object() set limit higher than usage""" 22 | with Popen([executable, "-c", "input()"], stdin=PIPE, stderr=PIPE) as proc: 23 | # pylint: disable=no-member,protected-access,possibly-used-before-assignment 24 | config_job_object(proc._handle, 1024 * 1024 * 1024) 25 | proc.communicate(input=b"a", timeout=10) 26 | assert proc.wait(10) == 0 27 | 28 | 29 | def test_job_object_02(): 30 | """test config_job_object() enforce limit""" 31 | with Popen( 32 | [executable, "-c", "input(); a = ['A' * 1024 * 1024 for _ in range(50)]"], 33 | stdin=PIPE, 34 | stderr=PIPE, 35 | ) as proc: 36 | # pylint: disable=no-member,protected-access,possibly-used-before-assignment 37 | config_job_object(proc._handle, 32 * 1024 * 1024) 38 | _, err = proc.communicate(input=b"a", timeout=10) 39 | assert proc.wait(10) == 1 40 | assert b"MemoryError" in err 41 | 42 | 43 | def test_thread_resume(): 44 | """test that suspended process is created in job""" 45 | # the test function creates a subprocess to show that the parent process 46 | # is suspended on launch. 
if creationflags=CREATE_SUSPENDED is omitted, 47 | # the test should fail (no MemoryError) 48 | with Popen( 49 | [ 50 | executable, 51 | "-c", 52 | "from subprocess import run; import sys;" 53 | "run([sys.executable, '-c', " 54 | "\"input(); a = ['A' * 1024 * 1024 for _ in range(50)]\"], check=True)", 55 | ], 56 | # pylint: disable=possibly-used-before-assignment 57 | creationflags=CREATE_SUSPENDED, 58 | stdin=PIPE, 59 | stderr=PIPE, 60 | ) as proc: 61 | sleep(0.1) 62 | # pylint: disable=no-member,protected-access,possibly-used-before-assignment 63 | config_job_object(proc._handle, 32 * 1024 * 1024) 64 | resume_suspended_process(proc.pid) 65 | _, err = proc.communicate(input=b"a", timeout=10) 66 | assert proc.wait(10) == 1 67 | assert b"MemoryError" in err 68 | -------------------------------------------------------------------------------- /src/ffpuppet/test_main.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | """ffpuppet main.py tests""" 5 | 6 | from platform import system 7 | 8 | from pytest import mark, raises 9 | 10 | from .core import Reason 11 | from .exceptions import BrowserExecutionError 12 | from .main import dump_to_console, main, parse_args 13 | from .profile import Profile 14 | 15 | 16 | @mark.parametrize( 17 | "reason, launch, is_healthy, extra_args", 18 | [ 19 | # browser exit 20 | (Reason.EXITED, None, (False,), ["-d", "--save-all"]), 21 | # browser exit - more flags 22 | (Reason.EXITED, None, (False,), ["-a", "token", "--log-level", "DEBUG"]), 23 | # cannot launch browser binary 24 | (Reason.CLOSED, (BrowserExecutionError(),), None, []), 25 | # browser crash 26 | (Reason.ALERT, None, (False,), []), 27 | # user exit 28 | (Reason.CLOSED, None, (True, KeyboardInterrupt()), []), 29 | # exception 30 | (None, None, (False,), []), 31 | ], 32 | ) 33 | def test_main_01(mocker, tmp_path, reason, launch, is_healthy, extra_args): 34 | """test main()""" 35 | mocker.patch("ffpuppet.main.sleep", autospec=True) 36 | fake_ffp = mocker.patch("ffpuppet.main.FFPuppet", autospec=True) 37 | fake_ffp.return_value.get_pid.return_value = 12345 38 | fake_ffp.return_value.is_healthy.side_effect = is_healthy 39 | fake_ffp.return_value.launch.side_effect = launch 40 | fake_ffp.return_value.profile = mocker.Mock(spec_set=Profile, path=tmp_path) 41 | fake_ffp.return_value.reason = reason 42 | out_logs = tmp_path / "logs" 43 | out_logs.mkdir() 44 | prefs = tmp_path / "prefs.js" 45 | prefs.touch() 46 | fake_bin = tmp_path / "fake.bin" 47 | fake_bin.touch() 48 | args = [str(fake_bin), "-l", str(out_logs), "-p", str(prefs)] 49 | main(args + extra_args) 50 | if "-a" in extra_args: 51 | assert fake_ffp.return_value.add_abort_token.call_count == 1 52 | else: 53 | assert fake_ffp.return_value.add_abort_token.call_count == 0 54 | assert fake_ffp.return_value.close.call_count == 1 55 | assert fake_ffp.return_value.save_logs.call_count == 1 56 | assert 
fake_ffp.return_value.clean_up.call_count == 1 57 | 58 | 59 | def test_parse_args_01(capsys, mocker, tmp_path): 60 | """test parse_args()""" 61 | mocker.patch("ffpuppet.main.Path.read_bytes", autospec=True, return_value=b"99") 62 | certutil_avail = mocker.patch("ffpuppet.main.certutil_available", autospec=True) 63 | fake_which = mocker.patch("ffpuppet.main.which", autospec=True) 64 | with raises(SystemExit): 65 | parse_args(["-h"]) 66 | # invalid/missing binary 67 | with raises(SystemExit): 68 | parse_args(["missing_bin"]) 69 | assert "error: Invalid browser binary 'missing_bin'" in capsys.readouterr()[-1] 70 | fake_bin = tmp_path / "fake.bin" 71 | fake_bin.touch() 72 | # invalid log-limit 73 | with raises(SystemExit): 74 | parse_args([str(fake_bin), "--log-limit", "-1"]) 75 | assert "error: --log-limit must be >= 0" in capsys.readouterr()[-1] 76 | # invalid marionette port 77 | with raises(SystemExit): 78 | parse_args([str(fake_bin), "--marionette", "123"]) 79 | assert ( 80 | "error: --marionette must be 0 or > 1024 and < 65536" in capsys.readouterr()[-1] 81 | ) 82 | # invalid memory limit 83 | with raises(SystemExit): 84 | parse_args([str(fake_bin), "--memory", "-1"]) 85 | assert "error: --memory must be >= 0" in capsys.readouterr()[-1] 86 | # missing prefs 87 | with raises(SystemExit): 88 | parse_args([str(fake_bin), "-p", "missing_prefs"]) 89 | assert "error: Invalid prefs.js file 'missing_prefs'" in capsys.readouterr()[-1] 90 | # missing extension 91 | with raises(SystemExit): 92 | parse_args([str(fake_bin), "-e", "missing_ext"]) 93 | assert "error: Extension 'missing_ext' does not exist" in capsys.readouterr()[-1] 94 | # missing certificate 95 | certutil_avail.return_value = True 96 | with raises(SystemExit): 97 | parse_args([str(fake_bin), "--cert", "missing_cert"]) 98 | assert "error: Invalid certificate file 'missing_cert'" in capsys.readouterr()[-1] 99 | # missing certutil 100 | certutil_avail.return_value = False 101 | with raises(SystemExit): 102 | 
parse_args([str(fake_bin), "--cert", str(fake_bin)]) 103 | assert "error: '--certs' requires NSS certutil" in capsys.readouterr()[-1] 104 | # invalid log path 105 | (tmp_path / "junk.log").touch() 106 | missing = tmp_path / "missing" 107 | with raises(SystemExit): 108 | parse_args([str(fake_bin), "--logs", str(missing)]) 109 | assert f"Log output directory is invalid '{missing}'" in capsys.readouterr()[-1] 110 | # rr is Linux only 111 | if system() == "Linux": 112 | # missing rr 113 | fake_which.return_value = None 114 | with raises(SystemExit): 115 | parse_args([str(fake_bin), "--rr"]) 116 | assert "error: rr is not installed" in capsys.readouterr()[-1] 117 | # rr - perf_event_paranoid > 1 118 | fake_which.return_value = "rr" 119 | with raises(SystemExit): 120 | parse_args([str(fake_bin), "--rr"]) 121 | assert "/proc/sys/kernel/perf_event_paranoid <= 1" in capsys.readouterr()[-1] 122 | # success 123 | assert parse_args([str(fake_bin)]) 124 | 125 | 126 | def test_dump_to_console_01(tmp_path): 127 | """test dump_to_console()""" 128 | # call with no logs 129 | assert not dump_to_console(tmp_path) 130 | # call with dummy logs 131 | (tmp_path / "log_stderr.txt").write_bytes(b"dummy-stderr") 132 | (tmp_path / "log_stdout.txt").write_bytes(b"dummy-stdout") 133 | output = dump_to_console(tmp_path) 134 | assert "Dumping 'log_stderr.txt'" in output 135 | assert "dummy-stderr" in output 136 | assert "Dumping 'log_stdout.txt'" in output 137 | assert "dummy-stdout" in output 138 | # truncate log 139 | with (tmp_path / "log_stdout.txt").open("wb") as log_fp: 140 | log_fp.write(b"dummy-stdout") 141 | for _ in range(1024): 142 | log_fp.write(b"test") 143 | output = dump_to_console(tmp_path, log_quota=100) 144 | assert "Dumping 'log_stderr.txt'" in output 145 | assert "dummy-stderr" in output 146 | assert "Dumping 'log_stdout.txt'" in output 147 | assert "dummy-stdout" not in output 148 | -------------------------------------------------------------------------------- 
/src/ffpuppet/test_minidump_parser.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | """ffpuppet minidump parser tests""" 5 | 6 | from copy import deepcopy 7 | from json import dumps 8 | from pathlib import Path 9 | from subprocess import CompletedProcess 10 | from sys import executable 11 | 12 | from pytest import mark 13 | 14 | from .minidump_parser import MinidumpParser 15 | 16 | MD_BASE_AMD64_WIN = { 17 | "crash_info": { 18 | "address": "0x00007ffe4e09af8d", 19 | "type": "EXCEPTION_BREAKPOINT", 20 | }, 21 | "system_info": { 22 | "cpu_arch": "amd64", 23 | "cpu_count": 8, 24 | "cpu_info": "family 6 model 70 stepping 1", 25 | "os": "Windows NT", 26 | "os_ver": "10.0.19044", 27 | }, 28 | } 29 | 30 | MD_UNSYMBOLIZED_AMD64_WIN = deepcopy(MD_BASE_AMD64_WIN) 31 | MD_UNSYMBOLIZED_AMD64_WIN["crash_info"]["crashing_thread"] = 0 32 | MD_UNSYMBOLIZED_AMD64_WIN["crashing_thread"] = { 33 | "frame_count": 49, 34 | "frames": [ 35 | { 36 | "file": None, 37 | "frame": 0, 38 | "function": None, 39 | "function_offset": None, 40 | "line": None, 41 | "module": "xul.dll", 42 | "registers": {"r10": "0x0"}, 43 | }, 44 | ], 45 | } 46 | 47 | MD_UNSYMBOLIZED_ARM64_MAC = { 48 | "crash_info": { 49 | "address": "0x0000000000000000", 50 | "crashing_thread": 0, 51 | "type": "EXC_BAD_ACCESS / KERN_INVALID_ADDRESS", 52 | }, 53 | "crashing_thread": { 54 | "frame_count": 32, 55 | "frames": [ 56 | { 57 | "file": None, 58 | "frame": 0, 59 | "function": None, 60 | "function_offset": None, 61 | "line": None, 62 | "module": "XUL", 63 | "registers": { 64 | "x1": "0x0000000000000001", 65 | "x2": "0x0000000000000002", 66 | }, 67 | }, 68 | ], 69 | }, 70 | "system_info": { 71 | "cpu_arch": "arm64", 72 | "cpu_count": 8, 73 | "cpu_info": None, 74 | "os": "Mac OS 
X", 75 | "os_ver": "13.0.1 22A400", 76 | }, 77 | } 78 | 79 | 80 | @mark.parametrize( 81 | "symbols", 82 | [ 83 | # use local path 84 | True, 85 | # use url 86 | False, 87 | ], 88 | ) 89 | def test_minidump_parser_01(mocker, tmp_path, symbols): 90 | """test MinidumpParser._cmd()""" 91 | mocker.patch.object(MinidumpParser, "MDSW_BIN", "minidump-stackwalk") 92 | with MinidumpParser(symbols=tmp_path if symbols else None) as parser: 93 | assert parser 94 | # pylint: disable=protected-access 95 | cmd = parser._cmd(tmp_path) 96 | assert cmd 97 | assert "minidump-stackwalk" in cmd 98 | if symbols: 99 | assert "--symbols-path" in cmd 100 | else: 101 | assert "--symbols-url" in cmd 102 | 103 | 104 | @mark.parametrize( 105 | "code, token, timeout", 106 | [ 107 | # success 108 | (f"print('{dumps(MD_UNSYMBOLIZED_AMD64_WIN)}')", "xul.dll", 60), 109 | # mdsw failed 110 | ("exit(1)", "minidump-stackwalk failed", 60), 111 | # invalid json 112 | ("print('bad,json')", "json decode error", 60), 113 | # mdsw hang 114 | ("import time;time.sleep(10)", "minidump-stackwalk timeout", 0), 115 | ], 116 | ) 117 | def test_minidump_parser_02(mocker, code, token, timeout): 118 | """test MinidumpParser.create_log()""" 119 | mocker.patch.object(MinidumpParser, "_cmd", return_value=[executable, "-c", code]) 120 | with MinidumpParser() as parser: 121 | # pylint: disable=protected-access 122 | assert parser._storage.is_dir() 123 | output = parser.create_log(Path("foo.dmp"), "minidump_00.txt", timeout=timeout) 124 | assert output 125 | assert output.name == "minidump_00.txt" 126 | assert output.is_file() 127 | assert token in output.read_text() 128 | assert not output.is_file() 129 | 130 | 131 | @mark.parametrize( 132 | "data, reg, operating_system, cpu, crash, frame", 133 | [ 134 | # Windows - x86_64 / AMD64 135 | ( 136 | MD_UNSYMBOLIZED_AMD64_WIN, 137 | "r10 = 0x0", 138 | "OS|Windows NT|10.0.19044", 139 | "CPU|amd64|family 6 model 70 stepping 1|8", 140 | 
"Crash|EXCEPTION_BREAKPOINT|0x00007ffe4e09af8d|0", 141 | "0|0|xul.dll||||", 142 | ), 143 | # MacOS - ARM64 144 | ( 145 | MD_UNSYMBOLIZED_ARM64_MAC, 146 | " x1 = 0x0000000000000001\t x2 = 0x0000000000000002", 147 | "OS|Mac OS X|13.0.1 22A400", 148 | "CPU|arm64||8", 149 | "Crash|EXC_BAD_ACCESS / KERN_INVALID_ADDRESS|0x0000000000000000|0", 150 | "0|0|XUL||||", 151 | ), 152 | ], 153 | ) 154 | def test_minidump_parser_03(tmp_path, data, reg, operating_system, cpu, crash, frame): 155 | """test MinidumpParser._fmt_output() - un-symbolized""" 156 | with (tmp_path / "out.txt").open("w+b") as ofp: 157 | # pylint: disable=protected-access 158 | MinidumpParser._fmt_output(data, ofp, limit=2) 159 | ofp.seek(0) 160 | formatted = ofp.read().rstrip().decode().split("\n") 161 | assert len(formatted) == 5 162 | assert formatted[0] == reg 163 | assert formatted[1] == operating_system 164 | assert formatted[2] == cpu 165 | assert formatted[3] == crash 166 | assert formatted[4] == frame 167 | 168 | 169 | def test_minidump_parser_04(tmp_path): 170 | """test MinidumpParser._fmt_output() - symbolized""" 171 | data = deepcopy(MD_BASE_AMD64_WIN) 172 | data["crash_info"]["crashing_thread"] = 0 173 | data["crashing_thread"] = { 174 | "frames": [ 175 | { 176 | "file": "file0.cpp", 177 | "frame": 0, 178 | "function": "function00()", 179 | "function_offset": "0x00000000000001ed", 180 | "line": 47, 181 | "module": "xul.dll", 182 | "registers": { 183 | "r10": "0x12345678", 184 | "r11": "0x0badf00d", 185 | "r12": "0x00000000", 186 | "r13": "0x000000dceebfc2e8", 187 | }, 188 | }, 189 | { 190 | "file": "file1.cpp", 191 | "frame": 1, 192 | "function": "function01()", 193 | "function_offset": "0x00000000000001bb", 194 | "line": 210, 195 | "module": "xul.dll", 196 | }, 197 | { 198 | "file": "file2.cpp", 199 | "frame": 2, 200 | "function": "function02()", 201 | "function_offset": "0x0000000000000123", 202 | "line": 123, 203 | "module": "xul.dll", 204 | }, 205 | ], 206 | } 207 | 208 | with (tmp_path / 
"out.txt").open("w+b") as ofp: 209 | # pylint: disable=protected-access 210 | MinidumpParser._fmt_output(data, ofp, limit=2) 211 | ofp.seek(0) 212 | formatted = ofp.read().rstrip().decode().split("\n") 213 | assert len(formatted) == 8 214 | assert formatted[0] == "r10 = 0x12345678\tr11 = 0x0badf00d\tr12 = 0x00000000" 215 | assert formatted[1] == "r13 = 0x000000dceebfc2e8" 216 | assert formatted[2] == "OS|Windows NT|10.0.19044" 217 | assert formatted[3] == "CPU|amd64|family 6 model 70 stepping 1|8" 218 | assert formatted[4] == "Crash|EXCEPTION_BREAKPOINT|0x00007ffe4e09af8d|0" 219 | assert formatted[5] == "0|0|xul.dll|function00()|file0.cpp|47|0x1ed" 220 | assert formatted[6] == "0|1|xul.dll|function01()|file1.cpp|210|0x1bb" 221 | assert formatted[7] == "WARNING: Hit stack size output limit!" 222 | 223 | 224 | @mark.parametrize( 225 | "call_result, mdsw_bin, result", 226 | [ 227 | # minidump-stackwalk is available 228 | ( 229 | (CompletedProcess([], 0, stdout=b"minidump-stackwalk 0.17.0\n"),), 230 | "minidump-stackwalk", 231 | True, 232 | ), 233 | # minidump-stackwalk is matches minimum version 234 | ( 235 | (CompletedProcess([], 0, stdout=b"minidump-stackwalk 0.15.2\n"),), 236 | "minidump-stackwalk", 237 | True, 238 | ), 239 | # minidump-stackwalk is out-of-date 240 | ( 241 | (CompletedProcess([], 0, stdout=b"minidump-stackwalk 0.10.0\n"),), 242 | "minidump-stackwalk", 243 | False, 244 | ), 245 | # minidump-stackwalk is out-of-date 246 | ( 247 | (CompletedProcess([], 0, stdout=b"minidump-stackwalk 0.15.1\n"),), 248 | "minidump-stackwalk", 249 | False, 250 | ), 251 | # minidump-stackwalk is bad version 252 | ( 253 | (CompletedProcess([], 0, stdout=b"minidump-stackwalk badversion\n"),), 254 | "minidump-stackwalk", 255 | False, 256 | ), 257 | # minidump-stackwalk is not available 258 | (OSError("test"), "minidump-stackwalk", False), 259 | # minidump-stackwalk not installed 260 | (None, None, False), 261 | ], 262 | ) 263 | def test_minidump_parser_05(mocker, 
call_result, mdsw_bin, result): 264 | """test MinidumpParser.mdsw_available()""" 265 | mocker.patch("ffpuppet.minidump_parser.run", side_effect=call_result) 266 | mocker.patch.object(MinidumpParser, "MDSW_BIN", mdsw_bin) 267 | assert MinidumpParser.mdsw_available(min_version="0.15.2") == result 268 | 269 | 270 | def test_minidump_parser_06(tmp_path): 271 | """test MinidumpParser.dmp_files()""" 272 | # empty minidump path 273 | assert not MinidumpParser.dmp_files(tmp_path) 274 | # find single dump file 275 | (tmp_path / "a.dmp").write_text("a") 276 | assert tmp_path / "a.dmp" in MinidumpParser.dmp_files(tmp_path) 277 | # find multiple dump files 278 | (tmp_path / "b.dmp").write_text("b") 279 | (tmp_path / "c.dmp").write_text("c") 280 | assert len(MinidumpParser.dmp_files(tmp_path)) == 3 281 | # add .extra file to prioritize .dmp file 282 | (tmp_path / "b.extra").write_text('{"MozCrashReason":"foo"}') 283 | assert MinidumpParser.dmp_files(tmp_path)[0] == (tmp_path / "b.dmp") 284 | 285 | 286 | def test_minidump_parser_missing_crashing_thread(tmp_path): 287 | """test MinidumpParser._fmt_output() - missing crashing thread""" 288 | with (tmp_path / "out.txt").open("w+b") as ofp: 289 | # pylint: disable=protected-access 290 | MinidumpParser._fmt_output(MD_BASE_AMD64_WIN, ofp) 291 | ofp.seek(0) 292 | formatted = ofp.read().rstrip().decode().split("\n") 293 | assert len(formatted) == 3 294 | assert formatted[0] == "OS|Windows NT|10.0.19044" 295 | assert formatted[1] == "CPU|amd64|family 6 model 70 stepping 1|8" 296 | assert formatted[2] == "Crash|EXCEPTION_BREAKPOINT|0x00007ffe4e09af8d|?" 297 | -------------------------------------------------------------------------------- /src/ffpuppet/test_process_tree.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. 
# If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
"""process_tree.py tests"""

from collections import namedtuple
from itertools import chain, count, repeat
from pathlib import Path
from subprocess import Popen
from time import sleep
from unittest import mock

from psutil import STATUS_ZOMBIE, AccessDenied, NoSuchProcess, Process, TimeoutExpired
from pytest import mark, raises

from .exceptions import TerminateError
from .process_tree import (
    ProcessTree,
    _filter_zombies,
    _last_modified,
    _safe_wait_procs,
    _writing_coverage,
)

# helper script that spawns a small tree of child processes for live tests
TREE = Path(__file__).parent / "resources" / "tree.py"


@mark.parametrize(
    "enable_launcher, launcher_is_parent",
    [
        # no launcher
        (False, False),
        # use launcher
        (True, False),
        # launcher disabled (browser.launcherProcess.enabled=false)
        (True, True),
    ],
)
def test_process_tree_01(tmp_path, enable_launcher, launcher_is_parent):
    """test ProcessTree() with actual processes"""
    content_procs = 3
    # tree.py touches this flag file once all children are running
    flag = tmp_path / "running"
    # don't use sys.executable it is not always correct (incompatible with tox)
    cmd = [Process().exe(), str(TREE), str(content_procs), str(flag)]
    # parent + content + launcher
    expected_procs = 1 + content_procs
    if enable_launcher:
        if launcher_is_parent:
            cmd.append("--launcher-is-parent")
        else:
            expected_procs += 1
            cmd.append("-no-deelevate")
    else:
        # make sure the test is not broken
        assert not launcher_is_parent, "launcher_is_parent requires launcher!"
    # pylint: disable=consider-using-with
    proc = Popen(cmd)
    tree = None
    try:
        # wait (30 seconds) for tree to launch all processes
        # for/else: raise if the flag never appears within 300 * 0.1s polls
        for _ in range(300):
            if flag.exists():
                break
            assert proc.poll() is None
            sleep(0.1)
        else:
            raise AssertionError(f"Process tree ({expected_procs}) failed to launch")
        tree = ProcessTree(proc)
        # pylint: disable=protected-access
        tree._launcher_check = enable_launcher
        assert tree.parent
        if enable_launcher and not launcher_is_parent:
            # launcher mode: the Popen pid is the launcher, not the parent
            assert tree.launcher is not None
            assert tree.launcher.pid == proc.pid
        else:
            assert tree.launcher is None
            assert tree.parent.pid == proc.pid
        assert ProcessTree._poll(tree.parent) is None
        assert tree.is_running()
        assert len(tree.processes()) == expected_procs
        assert tree.wait_procs() == expected_procs
        usage = tuple(tree.cpu_usage())
        assert len(usage) == expected_procs
        tree.terminate()
    finally:
        # this should cause everything to close gracefully if it is still running
        flag.unlink(missing_ok=True)
        if tree and tree.parent.is_running():
            tree.parent.terminate()
        if proc.poll() is None:
            proc.terminate()
        proc.wait(timeout=30)
    # after terminate() nothing should remain running
    assert not tree.is_running()
    assert not tree.processes()
    assert tree.wait() is not None
    assert tree.wait_procs() == 0


@mark.parametrize(
    "side_effect, expected_result",
    [
        # process exited
        ((0,), 0),
        # process exited - exit code not available
        ((None,), 0),
        # can't find process
        (NoSuchProcess(1), 0),
        # process is running
        (TimeoutExpired(1), None),
    ],
)
def test_process_tree_02(mocker, side_effect, expected_result):
    """test ProcessTree._poll()"""
    proc = mocker.Mock(spec_set=Process)
    # drive Process.wait() through each exit/error scenario
    proc.wait.side_effect = side_effect
    # pylint: disable=protected-access
    assert ProcessTree._poll(proc) == expected_result


def test_process_tree_03(mocker):
    """test ProcessTree.terminate()"""
    mocker.patch("ffpuppet.process_tree.Process", autospec=True)
    wait_procs = mocker.patch("ffpuppet.process_tree.wait_procs", autospec=True)

    # no processes to terminate
    mocker.patch.object(ProcessTree, "processes", side_effect=([],))
    tree = ProcessTree(mocker.Mock())
    tree.parent = mocker.Mock(spec_set=Process)
    tree.terminate()
    # pylint: disable=no-member
    assert tree.processes.call_count == 1
    assert tree.parent.wait.call_count == 0
    assert tree.parent.terminate.call_count == 0

    # this should be the "normal" code path
    proc = mocker.Mock(spec_set=Process, pid=1337)
    # wait_procs() reports the process as gone
    wait_procs.return_value = ([proc], [])
    # first wait() times out, second succeeds
    proc.wait.side_effect = (TimeoutExpired(1), None)
    mocker.patch.object(ProcessTree, "processes", side_effect=([proc],))
    tree = ProcessTree(mocker.Mock())
    tree.parent = proc
    tree.terminate()
    # pylint: disable=no-member
    assert tree.processes.call_count == 1
    assert tree.parent.wait.call_count == 2
    assert tree.parent.terminate.call_count == 1
    assert wait_procs.call_count == 1
    wait_procs.reset_mock()

    # this is the stubborn code path that should not happen
    proc = mocker.Mock(spec_set=Process, pid=1337)
    # wait_procs() keeps reporting the process as alive -> escalate to kill()
    wait_procs.return_value = ([], [proc])
    proc.wait.side_effect = (TimeoutExpired(1), None)
    mocker.patch.object(ProcessTree, "processes", side_effect=([proc],))
    tree = ProcessTree(mocker.Mock())
    tree.parent = proc
    with raises(TerminateError, match="Failed to terminate processes"):
        tree.terminate()
    # pylint: disable=no-member
    assert tree.processes.call_count == 1
    assert tree.parent.wait.call_count == 2
    assert tree.parent.terminate.call_count == 2
    assert tree.parent.kill.call_count == 1
    assert wait_procs.call_count == 3


def test_process_tree_04(mocker):
    """test ProcessTree.cpu_usage()"""
    mocker.patch("ffpuppet.process_tree.Process", autospec=True)
    proc = mocker.Mock(spec_set=Process, pid=1234)
    proc.cpu_percent.return_value = 2.3
    mocker.patch.object(ProcessTree, "processes", side_effect=([proc],))
    tree = ProcessTree(mocker.Mock())
    stats = tuple(tree.cpu_usage())
    assert stats
    # cpu_usage() yields (pid, percent) pairs
    assert stats[0][0] == 1234
    assert stats[0][1] == 2.3


@mark.parametrize(
    "procs, last_mod, writing, is_running, success",
    [
        # no processes
        (False, repeat(0), False, True, True),
        # data written successfully
        (True, chain([0], repeat(2)), False, True, True),
        # data not updated
        (True, repeat(0), False, True, False),
        # data write timeout
        (True, chain([0], repeat(2)), True, True, False),
        # process exits
        (True, repeat(0), False, False, True),
    ],
)
def test_process_tree_05(mocker, procs, last_mod, writing, is_running, success):
    """test ProcessTree.dump_coverage()"""
    mocker.patch("ffpuppet.process_tree.COVERAGE_SIGNAL", return_value="foo")
    mocker.patch("ffpuppet.process_tree.getenv", return_value="foo")
    # advance time 0.25s per perf_counter() call so timeouts fire quickly
    mocker.patch("ffpuppet.process_tree.perf_counter", side_effect=count(step=0.25))
    mocker.patch("ffpuppet.process_tree.sleep", autospec=True)
    mocker.patch("ffpuppet.process_tree._last_modified", side_effect=last_mod)
    mocker.patch("ffpuppet.process_tree._writing_coverage", return_value=writing)

    # pylint: disable=missing-class-docstring,super-init-not-called
    class CovProcessTree(ProcessTree):
        def __init__(self):
            pass

        def is_running(self) -> bool:
            return is_running

        def processes(self, recursive=False):
            return [] if not procs else [mocker.Mock(spec_set=Process)]

    tree = CovProcessTree()
    assert tree.dump_coverage() == success


def test_last_modified_01(tmp_path):
    """test _last_modified()"""
    # scan missing path
    assert _last_modified(tmp_path / "missing") is None
    # scan empty path
    assert _last_modified(tmp_path) is None
    # scan path without gcda files
    (tmp_path / "somefile.txt").touch()
    assert _last_modified(tmp_path) is None
    # scan nested path with gcda files
    (tmp_path / "a").mkdir()
    (tmp_path / "a" / "file.gcda").touch()
    assert _last_modified(tmp_path) > 0


def test_writing_coverage_01(mocker):
    """test _writing_coverage()"""
    # minimal stand-in for psutil's popenfile record
    openfile = namedtuple("openfile", ["path", "fd"])
    # empty list
    assert not _writing_coverage([])
    # no open files
    proc = mocker.Mock(spec_set=Process, pid=1337)
    proc.open_files.return_value = ()
    assert not _writing_coverage([proc])
    assert proc.open_files.call_count == 1
    # open test
    proc.reset_mock()
    proc.open_files.return_value = (openfile("file.txt", None),)
    assert not _writing_coverage([proc])
    assert proc.open_files.call_count == 1
    # open gcda
    proc.reset_mock()
    proc.open_files.return_value = (openfile("file.gcda", None),)
    assert _writing_coverage([proc])
    assert proc.open_files.call_count == 1


@mark.parametrize(
    "wait_side_effect, procs, alive_count, gone_count",
    [
        # no processes - passthrough
        ((([], []),), [], 0, 0),
        # AccessDenied - no procs
        (AccessDenied(), [], 0, 0),
        # AccessDenied - alive (is_running check)
        (
            AccessDenied(),
            [mock.Mock(spec_set=Process, is_running=mock.Mock(return_value=True))],
            1,
            0,
        ),
        # AccessDenied - gone (is_running check)
        (
            AccessDenied(),
            [mock.Mock(spec_set=Process, is_running=mock.Mock(return_value=False))],
            0,
            1,
        ),
        # AccessDenied - alive
        (
            AccessDenied(),
            [
                mock.Mock(
                    spec_set=Process, is_running=mock.Mock(side_effect=AccessDenied())
                )
            ],
            1,
            0,
        ),
        # AccessDenied - gone
        (
            AccessDenied(),
            [
                mock.Mock(
                    spec_set=Process,
                    is_running=mock.Mock(side_effect=NoSuchProcess(pid=1)),
                )
            ],
            0,
            1,
        ),
    ],
)
def test_safe_wait_procs_01(mocker, wait_side_effect, procs, alive_count, gone_count):
    """test _safe_wait_procs()"""
    mocker.patch("ffpuppet.process_tree.perf_counter", side_effect=count(step=0.25))
    mocker.patch("ffpuppet.process_tree.sleep", autospec=True)
    mocker.patch("ffpuppet.process_tree.wait_procs", side_effect=wait_side_effect)

    # result is a (gone, alive) pair mirroring psutil.wait_procs()
    result = _safe_wait_procs(procs, timeout=1)
    assert len(result[0]) == gone_count
    assert len(result[1]) == alive_count


def test_filter_zombies_01(mocker):
    """test _filter_zombies()"""
    zombie = mocker.Mock(spec_set=Process, pid=123)
    zombie.status.return_value = STATUS_ZOMBIE
    procs = tuple(_filter_zombies([zombie, mocker.Mock(spec_set=Process)]))
    # the zombie is dropped, the live process remains
    assert len(procs) == 1
    assert not any(x for x in procs if x.status() == STATUS_ZOMBIE)
--------------------------------------------------------------------------------
/src/ffpuppet/test_profile.py:
--------------------------------------------------------------------------------
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
4 | """ffpuppet profile tests""" 5 | 6 | from shutil import rmtree 7 | from subprocess import CalledProcessError 8 | 9 | from pytest import mark, raises 10 | 11 | from .profile import Profile 12 | 13 | 14 | def test_profile_basic(tmp_path): 15 | """test basic Profile""" 16 | with Profile(working_path=str(tmp_path)) as profile: 17 | assert profile 18 | assert str(profile) 19 | assert profile.path.parent == tmp_path 20 | assert not (profile.path / "times.json").is_file() 21 | assert profile.invalid_prefs is None 22 | (profile.path / "Invalidprefs.js").touch() 23 | assert profile.invalid_prefs is not None 24 | profile.remove() 25 | assert profile.path is None 26 | 27 | 28 | def test_profile_use_template(tmp_path): 29 | """test Profile with template""" 30 | template = tmp_path / "template" 31 | template.mkdir() 32 | (template / "a.txt").touch() 33 | (template / "Invalidprefs.js").touch() 34 | working = tmp_path / "working" 35 | working.mkdir() 36 | with Profile(template=template, working_path=str(working)) as profile: 37 | assert profile 38 | assert profile.path.parent == working 39 | assert (profile.path / "a.txt").is_file() 40 | assert not (profile.path / "Invalidprefs.js").is_file() 41 | 42 | 43 | @mark.parametrize( 44 | "existing, additional", 45 | [ 46 | ({}, {}), 47 | ({"pre.existing": "1"}, {}), 48 | ({"pre.existing": "1"}, {"foo": "'a1b1c1'", "test.enabled": "true"}), 49 | ({}, {"foo": "'a1b1c1'", "test.enabled": "true"}), 50 | ], 51 | ) 52 | def test_profile_prefs_js(tmp_path, existing, additional): 53 | """test Profile with prefs.js""" 54 | prefs = None 55 | if existing: 56 | prefs = tmp_path / "prefs.js" 57 | for name, value in existing.items(): 58 | prefs.write_text(f"user_pref('{name}', {value});\n") 59 | working = tmp_path / "working" 60 | working.mkdir() 61 | with Profile(prefs_file=prefs, working_path=str(working)) as profile: 62 | assert profile 63 | assert profile.path.parent == working 64 | profile.add_prefs(additional) 65 | if additional or 
existing: 66 | assert (profile.path / "prefs.js").is_file() 67 | assert (profile.path / "times.json").is_file() 68 | data = (profile.path / "prefs.js").read_text() 69 | for name, value in existing.items(): 70 | assert f"user_pref('{name}', {value});\n" in data 71 | for name, value in additional.items(): 72 | assert f"user_pref('{name}', {value});\n" in data 73 | lines = [x for x in data.splitlines() if x.startswith("user_pref(")] 74 | assert len(lines) == len(existing) + len(additional) 75 | 76 | 77 | def test_profile_extensions(mocker, tmp_path): 78 | """test create_profile() extension support""" 79 | mocker.patch( 80 | "ffpuppet.profile.mkdtemp", autospec=True, return_value=str(tmp_path / "dst") 81 | ) 82 | # create a profile with a non-existent ext 83 | (tmp_path / "dst").mkdir() 84 | with raises(RuntimeError, match=r"Unknown extension: '.+?fake_ext'"): 85 | Profile(extensions=[tmp_path / "fake_ext"]) 86 | assert not (tmp_path / "dst").is_dir() 87 | # create a profile with an xpi ext 88 | (tmp_path / "dst").mkdir() 89 | xpi = tmp_path / "xpi-ext.xpi" 90 | xpi.touch() 91 | with Profile(extensions=[xpi]) as prof: 92 | assert any(prof.path.glob("extensions")) 93 | assert (prof.path / "extensions" / "xpi-ext.xpi").is_file() 94 | rmtree(tmp_path / "dst") 95 | # create a profile with an unknown ext 96 | (tmp_path / "dst").mkdir() 97 | dummy_ext = tmp_path / "dummy_ext" 98 | dummy_ext.mkdir() 99 | with raises( 100 | RuntimeError, match=r"Failed to find extension id in manifest: '.+?dummy_ext'" 101 | ): 102 | Profile(extensions=[dummy_ext]) 103 | assert not (tmp_path / "dst").is_dir() 104 | # create a profile with a bad legacy ext 105 | (tmp_path / "dst").mkdir() 106 | bad_legacy = tmp_path / "bad_legacy" 107 | bad_legacy.mkdir() 108 | (bad_legacy / "install.rdf").touch() 109 | with raises( 110 | RuntimeError, match=r"Failed to find extension id in manifest: '.+?bad_legacy'" 111 | ): 112 | Profile(extensions=[bad_legacy]) 113 | assert not (tmp_path / "dst").is_dir() 114 
| # create a profile with a good legacy ext 115 | (tmp_path / "dst").mkdir() 116 | good_legacy = tmp_path / "good_legacy" 117 | good_legacy.mkdir() 118 | (good_legacy / "install.rdf").write_text( 119 | '' 120 | '\n' 122 | ' \n' 123 | " good-ext-id\n" 124 | " \n" 125 | "" 126 | ) 127 | (good_legacy / "example.js").touch() 128 | with Profile(extensions=[good_legacy]) as prof: 129 | assert any(prof.path.glob("extensions")) 130 | ext_path = prof.path / "extensions" / "good-ext-id" 131 | assert (ext_path / "install.rdf").is_file() 132 | assert (ext_path / "example.js").is_file() 133 | rmtree(tmp_path / "dst") 134 | # create a profile with a bad webext 135 | (tmp_path / "dst").mkdir() 136 | bad_webext = tmp_path / "bad_webext" 137 | bad_webext.mkdir() 138 | (bad_webext / "manifest.json").touch() 139 | with raises( 140 | RuntimeError, match=r"Failed to find extension id in manifest: '.+?bad_webext'" 141 | ): 142 | Profile(extensions=[bad_webext]) 143 | assert not (tmp_path / "dst").is_dir() 144 | # create a profile with a good webext 145 | (tmp_path / "dst").mkdir() 146 | good_webext = tmp_path / "good_webext" 147 | good_webext.mkdir() 148 | (good_webext / "manifest.json").write_bytes( 149 | b"""{"applications": {"gecko": {"id": "good-webext-id"}}}""" 150 | ) 151 | (good_webext / "example.js").touch() 152 | with Profile(extensions=[good_webext]) as prof: 153 | assert any(prof.path.glob("extensions")) 154 | ext_path = prof.path / "extensions" / "good-webext-id" 155 | assert ext_path.is_dir() 156 | assert (ext_path / "manifest.json").is_file() 157 | assert (ext_path / "example.js").is_file() 158 | rmtree(tmp_path / "dst") 159 | # create a profile with multiple extensions 160 | (tmp_path / "dst").mkdir() 161 | with Profile(extensions=[good_webext, good_legacy]) as prof: 162 | assert any(prof.path.glob("extensions")) 163 | ext_path = prof.path / "extensions" 164 | assert ext_path.is_dir() 165 | ext_path = prof.path / "extensions" / "good-webext-id" 166 | assert 
ext_path.is_dir() 167 | assert (ext_path / "manifest.json").is_file() 168 | assert (ext_path / "example.js").is_file() 169 | ext_path = prof.path / "extensions" / "good-ext-id" 170 | assert ext_path.is_dir() 171 | assert (ext_path / "install.rdf").is_file() 172 | assert (ext_path / "example.js").is_file() 173 | 174 | 175 | def test_profile_check_prefs(tmp_path): 176 | """test check_prefs()""" 177 | dummy_prefs = tmp_path / "dummy.js" 178 | dummy_prefs.write_text( 179 | "// comment line\n" 180 | "# comment line\n" 181 | " \n\n" 182 | 'user_pref("a.a", 0);\n' 183 | 'user_pref("a.b", "test");\n' 184 | 'user_pref("a.c", true);\n' 185 | ) 186 | custom_prefs = tmp_path / "custom.js" 187 | custom_prefs.write_text( 188 | "// comment line\n" 189 | "# comment line\n" 190 | "/* comment block.\n" 191 | "*\n" 192 | " \n\n" 193 | 'user_pref("a.a", 0); // test comment\n' 194 | 'user_pref("a.c", true);\n' 195 | ) 196 | assert Profile.check_prefs(dummy_prefs, custom_prefs) 197 | # test detecting missing prefs 198 | custom_prefs.write_text('user_pref("a.a", 0);\nuser_pref("b.a", false);\n') 199 | assert not Profile.check_prefs(dummy_prefs, custom_prefs) 200 | 201 | 202 | def test_profile_remove(mocker, tmp_path): 203 | """test Profile.remove() fail to remove data directory""" 204 | mocker.patch("ffpuppet.profile.rmtree", autospec=True) 205 | with Profile(working_path=str(tmp_path)) as profile: 206 | path = profile.path 207 | profile.remove() 208 | assert profile.path is None 209 | assert path.exists() 210 | 211 | 212 | def test_profile_install_certs(mocker, tmp_path): 213 | """test Profile with certs""" 214 | mocker.patch("ffpuppet.profile.certutil_available", autospec=True) 215 | fake_check = mocker.patch("ffpuppet.profile.check_output", autospec=True) 216 | working = tmp_path / "working" 217 | working.mkdir() 218 | cert = tmp_path / "cert" 219 | cert.touch() 220 | with Profile(cert_files=[cert], working_path=str(working)): 221 | assert fake_check.call_count == 2 222 | 223 | 224 | 
def test_profile_certutil_missing(mocker, tmp_path): 225 | """test Profile missing certutil binary""" 226 | mocker.patch("ffpuppet.profile.certutil_available", return_value=False) 227 | mocker.patch("ffpuppet.profile.certutil_find", autospec=True) 228 | cert = tmp_path / "cert" 229 | cert.touch() 230 | with raises(OSError, match="certutil not found"): 231 | Profile(cert_files=[cert], working_path=str(tmp_path)) 232 | 233 | 234 | def test_profile_install_cert(mocker, tmp_path): 235 | """test Profile.install_cert() certutil""" 236 | mocker.patch("ffpuppet.profile.certutil_available", autospec=True) 237 | fake_check = mocker.patch("ffpuppet.profile.check_output", autospec=True) 238 | 239 | cert = tmp_path / "cert" 240 | cert.touch() 241 | 242 | Profile.install_cert(tmp_path, cert, "fake_certutil") 243 | assert fake_check.call_count == 1 244 | 245 | fake_check.side_effect = CalledProcessError(1, "test", output=b"error msg") 246 | with raises(RuntimeError, match="Install cert: certutil error"): 247 | Profile.install_cert(tmp_path, cert, "fake_certutil") 248 | 249 | 250 | def test_profile_init_cert_db(mocker, tmp_path): 251 | """test Profile.init_cert_db() certutil""" 252 | mocker.patch("ffpuppet.profile.certutil_available", autospec=True) 253 | fake_check = mocker.patch("ffpuppet.profile.check_output", autospec=True) 254 | 255 | Profile.init_cert_db(tmp_path, "fake_certutil") 256 | assert fake_check.call_count == 1 257 | 258 | fake_check.side_effect = CalledProcessError(1, "test", output=b"error msg") 259 | with raises(RuntimeError, match="Init cert db: certutil error"): 260 | Profile.init_cert_db(tmp_path, "fake_certutil") 261 | -------------------------------------------------------------------------------- /src/ffpuppet/test_puppet_logger.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. 
# If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
"""ffpuppet puppet logger tests"""
# pylint: disable=protected-access

from os import stat
from os.path import isfile
from tempfile import SpooledTemporaryFile
from time import sleep

from pytest import raises

from .puppet_logger import PuppetLogger


def test_puppet_logger_01(tmp_path):
    """test simple PuppetLogger()"""
    plog = PuppetLogger(base_path=str(tmp_path))
    assert not plog.closed
    assert not plog._logs
    assert plog.path is not None
    assert plog.path.is_dir()
    assert plog._base == str(tmp_path)
    assert any(tmp_path.iterdir())
    plog.close()
    # close() leaves the log directory on disk
    assert any(tmp_path.iterdir())
    assert plog.closed
    # adding a log after close() is a usage error
    with raises(AssertionError):
        plog.add_log("test")
    assert plog.log_length("missing") is None


def test_puppet_logger_02(tmp_path):
    """test PuppetLogger.add_log() and PuppetLogger.available_logs()"""
    with PuppetLogger(base_path=str(tmp_path)) as plog:
        assert not plog._logs
        assert not plog.available_logs()
        assert not any(plog.files)
        plog.add_log("test_new")  # non-existing log
        assert "test_new" in plog.available_logs()
        plog_fp_test_new = plog.get_fp("test_new")
        assert plog_fp_test_new is not None
        assert isfile(plog_fp_test_new.name)
        # register an already-open file object as a log
        with (tmp_path / "test_existing.txt").open("w+b") as in_fp:
            in_fp.write(b"blah")
            plog.add_log("test_existing", logfp=in_fp)
        assert len(plog.available_logs()) == 2
        assert len(tuple(plog.files)) == 2
        plog_fp_test_existing = plog.get_fp("test_existing")
        assert plog_fp_test_existing is not None
        assert isfile(plog_fp_test_existing.name)
        assert plog.log_length("test_new") == 0
        assert plog.log_length("test_existing") == 4


def test_puppet_logger_03(tmp_path):
    """test PuppetLogger.clean_up()"""
    with PuppetLogger(base_path=str(tmp_path)) as plog:
        assert not plog.closed
        assert not plog._logs
        assert plog.path is not None
        assert plog.path.is_dir()
        assert plog._base == str(tmp_path)
        assert any(tmp_path.iterdir())
        plog.add_log("test_new")
        plog.clean_up()
        # clean_up() closes the logger and removes everything from disk
        assert plog.closed
        assert not any(tmp_path.iterdir())
        assert plog.path is None
        assert plog.closed
        assert not plog._logs


def test_puppet_logger_04(tmp_path):
    """test PuppetLogger.reset()"""
    with PuppetLogger(base_path=str(tmp_path)) as plog:
        plog.add_log("test_new")
        plog.clean_up()
        plog.reset()
        # reset() reopens the logger with a fresh empty working directory
        assert not plog.closed
        assert not plog._logs
        assert plog.path is not None
        assert plog.path.is_dir()
        assert plog._base == str(tmp_path)
        assert len(tuple(tmp_path.iterdir())) == 1


def test_puppet_logger_05(tmp_path):
    """test PuppetLogger.clone_log()"""
    with PuppetLogger(base_path=str(tmp_path)) as plog:
        plog.add_log("test_empty")
        plog.add_log("test_extra")
        plog_fp_test_extra = plog.get_fp("test_extra")
        assert plog_fp_test_extra is not None
        plog_fp_test_extra.write(b"stuff")
        plog_fp_test_extra.flush()
        # test clone
        plog.add_log("test_new")
        pl_fp = plog.get_fp("test_new")
        assert pl_fp is not None
        pl_fp.write(b"test1")
        cloned = plog.clone_log("test_new")
        assert cloned is not None
        assert cloned.is_file()
        assert cloned.read_bytes() == b"test1"
        cloned.unlink()
        # test target exists
        target = tmp_path / "target.txt"
        target.touch()
        pl_fp.write(b"test2")
        pl_fp.flush()
        cloned = plog.clone_log("test_new", target_file=str(target))
        assert cloned is not None
        assert cloned.is_file()
        assert cloned.read_bytes() == b"test1test2"
        cloned.unlink()
        # test target does not exist with offset
        assert not target.is_file()
        pl_fp.write(b"test3")
        pl_fp.flush()
        # offset=4 skips the first four bytes ("test") of the log
        cloned = plog.clone_log("test_new", target_file=str(target), offset=4)
        assert cloned is not None
        assert cloned.is_file()
        assert cloned.read_bytes() == b"1test2test3"
        assert plog.log_length("test_new") == 15
        cloned.unlink()
        # test non existent log
        assert plog.clone_log("no_log") is None
        # test empty log
        assert plog.log_length("test_empty") == 0
        cloned = plog.clone_log("test_empty")
        assert cloned is not None
        assert cloned.is_file()
        assert not cloned.stat().st_size
        cloned.unlink()


def test_puppet_logger_06(tmp_path):
    """test PuppetLogger.save_logs()"""
    with PuppetLogger(base_path=str(tmp_path)) as plog:
        plog.close()
        # save when there are no logs
        dest = tmp_path / "dest"
        plog.save_logs(dest)
        assert not any(dest.iterdir())
        plog.reset()
        dest.rmdir()
        # add small log
        plog.add_log("test_1")
        plog_fp_test_1 = plog.get_fp("test_1")
        assert plog_fp_test_1 is not None
        plog_fp_test_1.write(b"test1\ntest1\n")
        # add binary data in log
        plog.add_log("test_2")
        plog_fp_test_2 = plog.get_fp("test_2")
        assert plog_fp_test_2 is not None
        plog_fp_test_2.write(b"\x00TEST\xff\xef")
        # add empty log
        plog.add_log("test_empty")
        # add larger log (not a power of 2 to help catch buffer issues)
        plog.add_log("test_3")
        data = b"A" * 1234
        plog_fp_test_3 = plog.get_fp("test_3")
        assert plog_fp_test_3 is not None
        for _ in range(500):
            plog_fp_test_3.write(data)
        # delay to check if creation time was copied when save_logs is called
        sleep(0.1)
        plog.close()
        dest.mkdir()
        plog.save_logs(dest)
        # check saved file count
        assert len(plog.available_logs()) == 4
        assert len(tuple(dest.iterdir())) == 4
        # verify all data was copied
        assert stat(plog_fp_test_1.name).st_size == 12
        assert stat(plog_fp_test_2.name).st_size == 7
        assert stat(plog_fp_test_3.name).st_size == 500 * 1234


def test_puppet_logger_07(mocker, tmp_path):
    """test PuppetLogger.save_logs() rr trace directory"""
    fake_ck = mocker.patch("ffpuppet.puppet_logger.check_output", autospec=True)
    with PuppetLogger(base_path=str(tmp_path)) as plog:
        assert plog.path is not None
        # add log data to test rr backtrace detection
        with (tmp_path / "test_stderr.txt").open("w+b") as in_fp:
            in_fp.write(b"foo\n")
            in_fp.write(b"=== Start rr backtrace:\n")
            in_fp.write(b"foo\n")
            plog.add_log("stderr", logfp=in_fp)
        (plog.path / plog.PATH_RR / "latest-trace").mkdir(parents=True)
        plog.close()
        # test call to rr failing
        fake_ck.side_effect = OSError
        plog.save_logs(tmp_path / "dest1", rr_pack=True)
        assert fake_ck.call_count == 1
        assert not plog._rr_packed
        # test call to rr passing
        fake_ck.side_effect = None
        plog.save_logs(tmp_path / "dest2", rr_pack=True)
        assert fake_ck.call_count == 2
        assert plog._rr_packed
        # test 'taskcluster-build-task' copied
        bin_path = tmp_path / "bin_path"
        bin_path.mkdir()
        (bin_path / "taskcluster-build-task").write_text("task-info\n")
        plog.save_logs(tmp_path / "dest3", bin_path=bin_path)
        assert (
            tmp_path
            / "dest3"
            / "rr-traces"
            / "latest-trace"
            / "files.mozilla"
            / "taskcluster-build-task"
        ).is_file()
        # an already packed trace is not packed again
        assert fake_ck.call_count == 2
        assert plog._rr_packed


def test_puppet_logger_08(tmp_path):
    """test PuppetLogger.add_log() with file not on disk"""
    with (
        PuppetLogger(base_path=str(tmp_path)) as plog,
        SpooledTemporaryFile(max_size=2048) as log_fp,
    ):
        # a spooled (in-memory) file has no name on disk
        plog.add_log("test", logfp=log_fp)
        with raises(FileNotFoundError, match="Log file not found: None"):
            plog.get_fp("test")


def test_puppet_logger_09(mocker, tmp_path):
    """test PuppetLogger.clean_up() with in-use file or inaccessible directory"""
    fake_rmtree = mocker.patch("ffpuppet.puppet_logger.rmtree", autospec=True)
    with PuppetLogger(base_path=str(tmp_path)) as plog:
        plog.add_log("test")
        path = plog.path
        # test with ignore_errors=False
        fake_rmtree.side_effect = OSError("test")
        with raises(OSError):
            plog.clean_up()
        assert fake_rmtree.call_count == 1
        fake_rmtree.assert_called_with(path, ignore_errors=False)
        # failed clean up keeps the path set
        assert plog.path is not None
        fake_rmtree.reset_mock()
        # test with ignore_errors=True
        fake_rmtree.side_effect = None
        plog.clean_up(ignore_errors=True)
        assert fake_rmtree.call_count == 1
        fake_rmtree.assert_called_with(path, ignore_errors=True)
        assert plog.path is None


def test_puppet_logger_10(tmp_path):
    """test PuppetLogger.add_path()"""
    with PuppetLogger(base_path=str(tmp_path)) as plog:
        path = plog.add_path("test")
        assert path.is_dir()
        (path / "simple.txt").write_text("test")
        plog.close()
        dest = tmp_path / "dest"
        plog.save_logs(dest)
        # added directories are copied recursively by save_logs()
        assert (dest / "test").is_dir()
        assert (dest / "test" / "simple.txt").is_file()
--------------------------------------------------------------------------------
/src/ffpuppet/test_sanitizer_util.py:
--------------------------------------------------------------------------------
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
4 | """ffpuppet sanitizer_util tests""" 5 | 6 | from pytest import mark, raises 7 | 8 | from .sanitizer_util import SanitizerOptions 9 | 10 | 11 | @mark.parametrize( 12 | "init, add, result, overwrite", 13 | [ 14 | # do nothing 15 | ("", {}, [""], False), 16 | # add single option 17 | ("", {"a": "1"}, ["a=1"], False), 18 | # add multiple options 19 | ("", {"b": "2", "a": "1"}, ["a=1", "b=2"], False), 20 | # existing 21 | ("a=1", {}, ["a=1"], False), 22 | # add to existing 23 | ("a=1", {"b": "2"}, ["a=1", "b=2"], False), 24 | # no overwrite existing 25 | ("a=1", {"a": "2"}, ["a=1"], False), 26 | # overwrite existing 27 | ("a=1", {"a": "2"}, ["a=2"], True), 28 | # parse quoted 29 | ( 30 | "a='C:\\test\\':b=\"/dev/null\"", 31 | {}, 32 | ["a='C:\\test\\'", 'b="/dev/null"'], 33 | False, 34 | ), 35 | ], 36 | ) 37 | def test_sanitizer_options_parsing_adding(init, add, result, overwrite): 38 | """test SanitizerOptions() - parsing and adding""" 39 | opts = SanitizerOptions(init) 40 | for key, value in add.items(): 41 | opts.add(key, value, overwrite=overwrite) 42 | # test __str__ 43 | split_opts = SanitizerOptions.re_delim.split(str(opts)) 44 | assert len(split_opts) == len(result) 45 | if opts: 46 | # test __len___ 47 | assert len(opts) == len(result) 48 | for opt in split_opts: 49 | assert opt in result 50 | # test __iter__ 51 | for opt, value in opts: 52 | assert f"{opt}={value}" in result 53 | # test __contains___ 54 | for opt in result: 55 | assert opt.split("=")[0] in opts 56 | else: 57 | assert not result[-1] 58 | 59 | 60 | def test_sanitizer_load_options(): 61 | """test SanitizerOptions.load_options -""" 62 | opts = SanitizerOptions() 63 | # empty 64 | assert not opts 65 | assert len(opts) == 0 66 | # single options 67 | opts.load_options("a=1") 68 | assert opts 69 | assert len(opts) == 1 70 | assert opts.pop("a") == "1" 71 | # multiple options 72 | opts.load_options("a=1:b=2") 73 | assert len(opts) == 2 74 | assert opts.pop("a") == "1" 75 | assert opts.pop("b") == 
"2" 76 | # malformed option 77 | opts.load_options("foo") 78 | assert len(opts) == 0 79 | # malformed option with valid option 80 | opts.load_options("a=1:foo") 81 | assert len(opts) == 1 82 | assert opts.pop("a") == "1" 83 | 84 | 85 | @mark.parametrize( 86 | "flag, value, msg", 87 | [ 88 | # empty flag name 89 | ("", "test", r"Flag name cannot be empty"), 90 | # missing quotes with ':' 91 | ("test", "a:b", r"'a:b' \(test\) must be quoted"), 92 | # missing quotes with ' ' 93 | ("test", "a b", r"'a b' \(test\) must be quoted"), 94 | ], 95 | ) 96 | def test_sanitizer_options_invalid_add(flag, value, msg): 97 | """test SanitizerOptions() -""" 98 | with raises(ValueError, match=msg): 99 | SanitizerOptions().add(flag, value) 100 | 101 | 102 | def test_sanitizer_options_get_pop(): 103 | """test SanitizerOptions() - get() and pop()""" 104 | opts = SanitizerOptions() 105 | assert opts.get("missing") is None 106 | assert opts.pop("missing") is None 107 | opts.add("exists", "1") 108 | assert opts.pop("exists") == "1" 109 | assert opts.get("exists") is None 110 | 111 | 112 | def test_sanitizer_options_check_path(tmp_path): 113 | """test SanitizerOptions() - check_path()""" 114 | opts = SanitizerOptions() 115 | # test missing key 116 | assert opts.check_path("file") 117 | # test exists 118 | file = tmp_path / "file.bin" 119 | file.touch() 120 | opts.add("file", f"'{file}'") 121 | assert opts.check_path("file") 122 | # test missing file 123 | file.unlink() 124 | assert not opts.check_path("file") 125 | 126 | 127 | def test_sanitizer_options_is_quoted(): 128 | """test SanitizerOptions.is_quoted()""" 129 | assert SanitizerOptions.is_quoted("'quoted'") 130 | assert SanitizerOptions.is_quoted('"quoted"') 131 | assert not SanitizerOptions.is_quoted("not'quoted") 132 | assert not SanitizerOptions.is_quoted("'not'quoted") 133 | assert not SanitizerOptions.is_quoted("not'quoted'") 134 | assert not SanitizerOptions.is_quoted("'test\"") 135 | assert not SanitizerOptions.is_quoted("'") 
136 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{39,310,311,312,313},lint 3 | skip_missing_interpreters = true 4 | tox_pip_extensions_ext_venv_update = true 5 | 6 | [testenv] 7 | commands = pytest -v --cache-clear --cov={toxinidir} --cov-config={toxinidir}/pyproject.toml --cov-report=term-missing --basetemp={envtmpdir} {posargs} --disable-pytest-warnings 8 | deps = 9 | pytest 10 | pytest-cov 11 | pytest-mock 12 | passenv = 13 | BUILD_CACHE 14 | CI 15 | CI_* 16 | CODECOV_* 17 | TOXENV 18 | TRAVIS 19 | TRAVIS_* 20 | TWINE_* 21 | VCS_* 22 | usedevelop = true 23 | 24 | [testenv:codecov] 25 | commands = 26 | codecov upload-process 27 | deps = 28 | codecov-cli 29 | coverage[toml] 30 | skip_install = true 31 | 32 | [testenv:lint] 33 | commands = 34 | pre-commit run -a {posargs} 35 | deps = 36 | pre-commit 37 | skip_install = true 38 | 39 | [testenv:mypy] 40 | commands = 41 | mypy --install-types --non-interactive {posargs} 42 | deps = 43 | mypy==v1.14.1 44 | usedevelop = true 45 | 46 | [testenv:pylint] 47 | commands = 48 | pylint -j 0 {posargs} 49 | deps = 50 | pylint==3.3.3 51 | usedevelop = true 52 | 53 | [testenv:pypi] 54 | commands = 55 | python -m build 56 | twine upload --skip-existing dist/* 57 | deps = 58 | build 59 | twine 60 | skip_install = true 61 | --------------------------------------------------------------------------------