├── .gitignore ├── CODE_OF_CONDUCT.md ├── Dockerfile ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── bootstrap ├── arch_bootstrap.sh ├── bootstrap.sh ├── linux_bootstrap.sh └── osx_bootstrap.sh ├── doc ├── commands.txt ├── devnotes.txt └── runxpcshell.sh ├── hooks └── pre-commit ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── cache_test.py ├── cert_test.py ├── conftest.py ├── files │ ├── firefox-nightly_linux-dummy.tar.bz2 │ ├── firefox-nightly_osx-dummy.dmg │ ├── firefox-nightly_win-dummy.exe │ ├── mozilla.org.der │ ├── mozilla.org.pem │ └── revocations_nodigicert.txt ├── firefox_app_test.py ├── firefox_downloader_test.py ├── firefox_extractor_test.py ├── progress_test.py ├── runlog_test.py ├── sources_db_test.py ├── tags_db_test.py ├── xpcshell_worker_test.py ├── zz_tlscanary_integration_test.py └── zz_tlscanary_revocations.py └── tlscanary ├── __init__.py ├── default_profile ├── SecurityPreloadState.txt ├── SiteSecurityServiceState.txt ├── cert8.db ├── cert9.db ├── key3.db ├── key4.db ├── pkcs11.txt └── revocations.txt ├── js ├── scan_worker.js └── worker_common.js ├── loader.py ├── main.py ├── modes ├── __init__.py ├── basemode.py ├── log.py ├── performance.py ├── regression.py ├── scan.py └── sourceupdate.py ├── report.py ├── runlog.py ├── scheduler ├── __init__.py ├── main.py └── matplotlib_agg.py ├── sources ├── debug.csv ├── debug2.csv ├── digicert.csv ├── revoked.csv ├── smoke_list.csv ├── test_url_list.csv └── top_sites.csv ├── sources_db.py ├── template ├── css │ ├── index_styles.css │ └── ui_style.css ├── img │ ├── favicon.svg │ └── logo.svg ├── index.htm ├── js │ ├── index_page.js │ ├── index_transform.json │ ├── report_page.js │ └── transform.json └── report_template.htm ├── tools ├── __init__.py ├── cache.py ├── cert.py ├── cleanup.py ├── firefox_app.py ├── firefox_downloader.py ├── firefox_extractor.py ├── one_crl_downloader.py ├── progress.py ├── tags_db.py └── xpcshell_worker.py └── worker_pool.py /.gitignore: 
-------------------------------------------------------------------------------- 1 | .Python 2 | bin/ 3 | lib/ 4 | share/ 5 | man/ 6 | include/ 7 | Scripts/ 8 | tcl/ 9 | venv/ 10 | venv3/ 11 | *.pyc 12 | *.pyo 13 | *.un~ 14 | *.py~ 15 | .idea/ 16 | pip-selfcheck.json 17 | *.egg-info/ 18 | dist/ 19 | .eggs/ 20 | doc/ 21 | .coverage 22 | Pipfile 23 | Pipfile.lock 24 | .vscode/ 25 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Community Participation Guidelines 2 | 3 | This repository is governed by Mozilla's code of conduct and etiquette guidelines. 4 | For more details, please read the 5 | [Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/). 6 | 7 | ## How to Report 8 | For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page. 
9 | 10 | 16 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:20.04 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | # pkgs from bootstrap/linux_bootstrap.sh + curl + git 6 | RUN apt-get update && \ 7 | apt-get -y install \ 8 | curl \ 9 | gcc \ 10 | git \ 11 | golang-go \ 12 | libasound2 \ 13 | libdbus-glib-1-2 \ 14 | libffi-dev \ 15 | libgtk-3-0 \ 16 | libssl-dev \ 17 | libxt6 \ 18 | p7zip-full \ 19 | python3 \ 20 | python3-dev \ 21 | python3-pip \ 22 | python3-virtualenv \ 23 | libx11-xcb-dev 24 | 25 | RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y 26 | ENV PATH="/root/.cargo/bin:$PATH" 27 | 28 | RUN pip3 install --upgrade git+git://github.com/mozilla/tls-canary.git 29 | 30 | ENTRYPOINT [ "tlscanary" ] 31 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | global-exclude *.py[co] 2 | include LICENSE.txt 3 | include README.rst 4 | recursive-include tlscanary/default_profile * 5 | recursive-include tlscanary/js * 6 | recursive-include tlscanary/sources *.csv 7 | recursive-include tlscanary/template * 8 | exclude README.md 9 | exclude tests/* 10 | -------------------------------------------------------------------------------- /bootstrap/arch_bootstrap.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Arch linux bootstrap 3 | sudo pacman -Sy \ 4 | gcc \ 5 | go \ 6 | p7zip \ 7 | python \ 8 | python-pip \ 9 | python-virtualenv 10 | 11 | -------------------------------------------------------------------------------- /bootstrap/bootstrap.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if which apt-get &>/dev/null 3 | then 4 | echo "Bootstrapping for Linux / 
apt-get" 5 | echo "sudo required" 6 | sudo $(dirname "$0")/linux_bootstrap.sh 7 | elif which pacman &>/dev/null 8 | then 9 | echo "Bootstrapping for Arch Linux / pacman" 10 | echo "sudo required" 11 | sudo $(dirname "$0")/arch_bootstrap.sh 12 | elif which brew &>/dev/null 13 | then 14 | echo "Bootstrapping for Mac OS X / Homebrew" 15 | $(dirname "$0")/osx_bootstrap.sh 16 | else 17 | echo "ERROR: can't provide automatic bootstrapping" 18 | exit 5 19 | fi 20 | -------------------------------------------------------------------------------- /bootstrap/linux_bootstrap.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # AWS Ubuntu 20.4 linux bootstrap 3 | sudo apt-get update 4 | sudo apt-get -y install \ 5 | gcc \ 6 | golang-go \ 7 | libasound2 \ 8 | libdbus-glib-1-2 \ 9 | libffi-dev \ 10 | libgtk-3-0 \ 11 | libssl-dev \ 12 | libxt6 \ 13 | p7zip-full \ 14 | python3 \ 15 | python3-dev \ 16 | python3-pip \ 17 | libx11-xcb-dev 18 | 19 | # The virtualenv package is not consistently named across distros 20 | sudo apt-get -y install virtualenv \ 21 | || sudo apt-get -y install python3-virtualenv 22 | 23 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh 24 | 25 | -------------------------------------------------------------------------------- /bootstrap/osx_bootstrap.sh: -------------------------------------------------------------------------------- 1 | #/bin/bash 2 | 3 | brew ls --versions openssl || brew install openssl 4 | brew ls --versions libffi || brew install libffi 5 | brew ls --versions python || brew install python 6 | brew ls --versions p7zip || brew install p7zip 7 | brew ls --versions go || brew install go 8 | -------------------------------------------------------------------------------- /doc/commands.txt: -------------------------------------------------------------------------------- 1 | {"mode":"useprofile","path":"/tmp/test_profile"} 2 | {"mode":"updateprofile"} 3 | {"mode":"wakeup"} 4 | 
{"mode":"quit"} -------------------------------------------------------------------------------- /doc/devnotes.txt: -------------------------------------------------------------------------------- 1 | How to create a dummy DMG for testing: 2 | 3 | hdiutil create -srcfolder /tmp/Nightly -format UDBZ -nospotlight -layout SPUD -ov -scrub /tmp/firefox-nightly_osx-dummy.dmg 4 | 5 | (-ov and -scrub are undocumented. One prevent creation of .Trashes folder which can't be copied and results in an error during extraction) 6 | 7 | 8 | Linux issue: 9 | 10 | Passing the `-a path/browser` flag to xpcshell causes a segfaul on mys linux system. Potential ld problem? 11 | -------------------------------------------------------------------------------- /doc/runxpcshell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | APP=/Applications/Firefox.app 3 | FIREFOX=$APP/Contents/MacOS/firefox 4 | GREDIR=$APP/Contents/Resources 5 | BROWSERDIR=$APP/Contents/Resources/browser 6 | $FIREFOX -xpcshell -g "$GREDIR" -a "$BROWSERDIR" "$@" 7 | -------------------------------------------------------------------------------- /hooks/pre-commit: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Originally forked from ``https://gist.github.com/810399`` 4 | 5 | Altered by @cr for the TLS Canary project 6 | """ 7 | import os 8 | import re 9 | import shutil 10 | import subprocess 11 | import sys 12 | import tempfile 13 | 14 | 15 | def system(*args, **kwargs): 16 | kwargs.setdefault('stdout', subprocess.PIPE) 17 | proc = subprocess.Popen(args, **kwargs) 18 | out, err = proc.communicate() 19 | return out 20 | 21 | 22 | def ignore_file(filename): 23 | ignored = [] 24 | match = False 25 | for i in ignored: 26 | if i in filename: 27 | match = True 28 | return match 29 | 30 | 31 | def main(): 32 | modified = re.compile('''^[AM]+\s+(?P.*\.py)''', re.MULTILINE) 33 | files = system('git', 
'status', '--porcelain').decode("utf-8") 34 | files = modified.findall(files) 35 | 36 | tempdir = tempfile.mkdtemp() 37 | for name in files: 38 | filename = os.path.join(tempdir, name) 39 | 40 | if not ignore_file(filename): 41 | filepath = os.path.dirname(filename) 42 | if not os.path.exists(filepath): 43 | os.makedirs(filepath) 44 | with open(filename, 'w') as f: 45 | system('git', 'show', ':' + name, stdout=f) 46 | try: 47 | output = system('pycodestyle', '--show-source', '--max-line-length=120', '.', cwd=tempdir).decode("utf-8") 48 | except OSError: 49 | print("The `pycodestyle` checker is required for commits.") 50 | print("You probably haven't activated TLS Canary's Python dev environment.") 51 | sys.exit(1) 52 | 53 | shutil.rmtree(tempdir) 54 | if output: 55 | sys.stdout.write(output) 56 | sys.exit(1) 57 | 58 | 59 | if __name__ == '__main__': 60 | main() 61 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.rst 3 | 4 | [aliases] 5 | test = pytest 6 | 7 | [tool:pytest] 8 | testpaths = tests/ tlscanary/ 9 | addopts = --codestyle 10 | codestyle_max_line_length = 120 11 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | 5 | from os import path 6 | from setuptools import setup, find_packages 7 | 8 | PACKAGE_NAME = 'tlscanary' 9 | PACKAGE_VERSION = '4.0.2' 10 | 11 | INSTALL_REQUIRES = [ 12 | 'coloredlogs', 13 | 'cryptography', 14 | 'hashfs', 15 | 'python-dateutil', 16 | 'worq' 17 | ] 18 | 19 | SCHEDULER_REQUIRES = [ 20 | 'matplotlib', 21 | 'schedule' 22 | ] 23 | 24 | TESTS_REQUIRE = [ 25 | 'coverage', 26 | 'pycodestyle', 27 | 'pytest', 28 | 'pytest-pycodestyle', 29 | 'pytest-runner' 30 | ] 31 | 32 | DEV_REQUIRES = TESTS_REQUIRE + SCHEDULER_REQUIRES 33 | 34 | with open(path.join(path.abspath(path.dirname(__file__)), 'README.md'), encoding='utf-8') as f: 35 | long_description = f.read() 36 | 37 | setup( 38 | name=PACKAGE_NAME, 39 | version=PACKAGE_VERSION, 40 | description='TLS/SSL Test Suite for Mozilla Firefox', 41 | long_description=long_description, 42 | long_description_content_type='text/markdown', 43 | classifiers=[ 44 | 'Environment :: Console', 45 | 'Development Status :: 7 - Inactive', 46 | 'Intended Audience :: Developers', 47 | 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)', 48 | 'Natural Language :: English', 49 | 'Operating System :: MacOS :: MacOS X', 50 | 'Operating System :: Microsoft :: Windows :: Windows 10', 51 | 'Operating System :: Microsoft :: Windows :: Windows 7', 52 | 'Operating System :: Microsoft :: Windows :: Windows 8', 53 | 'Operating System :: Microsoft :: Windows :: Windows 8.1', 54 | 'Operating System :: POSIX :: Linux', 55 | 'Programming Language :: Python :: 3 :: Only', 56 | 'Programming Language :: Python :: 3.6', 57 | 'Programming Language :: Python :: 3.7', 58 | 'Topic :: Software Development :: Quality Assurance', 59 | 'Topic :: Software Development :: Testing' 60 | ], 61 | keywords=['mozilla', 'firefox', 'tls', 'regression-testing', 'testing'], 62 | author='Christiane Ruetten', 63 | author_email='cr@mozilla.com', 64 | url='https://github.com/mozilla/tls-canary', 65 | 
download_url='https://github.com/mozilla/tls-canary/archive/latest.tar.gz', 66 | license='MPL2', 67 | packages=find_packages(exclude=["tests"]), 68 | include_package_data=True, # See MANIFEST.in 69 | zip_safe=False, 70 | install_requires=INSTALL_REQUIRES, 71 | tests_require=TESTS_REQUIRE, 72 | extras_require={ 73 | 'dev': DEV_REQUIRES, # For `pip install -e .[dev]` 74 | 'scheduler': SCHEDULER_REQUIRES # For `pip install -e .[scheduler]` 75 | }, 76 | entry_points={ 77 | 'console_scripts': [ 78 | 'tlscanary = tlscanary.main:main', 79 | 'tlscscheduler = tlscanary.scheduler.main:main' 80 | ] 81 | } 82 | ) 83 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | 6 | class ArgsMock(object): 7 | """ 8 | Mock used for testing functionality that 9 | requires access to an args-style object. 10 | """ 11 | def __init__(self, **kwargs): 12 | self.kwargs = kwargs 13 | 14 | def __getattr__(self, attr): 15 | try: 16 | return self.kwargs[attr] 17 | except KeyError: 18 | return None 19 | -------------------------------------------------------------------------------- /tests/cache_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | from math import floor 6 | import os 7 | from time import sleep, time 8 | 9 | import tlscanary.tools.cache as cache 10 | 11 | 12 | def test_cache_instance(tmpdir): 13 | """Comprehensive cache test suite""" 14 | 15 | # CAVE: Some file systems (HFS+, ext3, ...) 
only provide timestamps with 1.0 second resolution. 16 | # This affects testing accuracy when working with `maximum_age` in the range of a second. 17 | # Wait until we're within 10ms past a full second to keep jitter low and this test stable. 18 | t = time() 19 | while t - floor(t) >= 0.01: 20 | t = time() 21 | 22 | cache_root_dir = os.path.join(tmpdir, "test_cache") 23 | dc = cache.DiskCache(cache_root_dir, maximum_age=1, purge=False) 24 | 25 | assert os.path.isdir(cache_root_dir), "cache creates directory" 26 | assert len(dc.list()) == 0, "cache is initially empty" 27 | 28 | # Create a test entry in the cache 29 | test_file = dc["foo"] 30 | with open(test_file, "w") as f: 31 | f.write("foo") 32 | 33 | assert test_file.startswith(cache_root_dir), "cache entries are located in cache directory" 34 | assert "foo" in dc, "cache accepts new file entries" 35 | assert "baz" not in dc, "cache does not obviously phantasize about its content" 36 | 37 | # Create a slightly newer cache entry 38 | # Ensure that it's regarded to be one second younger even with 1s mtime resolution 39 | sleep(1.01) 40 | newer_test_file = dc["bar"] 41 | with open(newer_test_file, "w") as f: 42 | f.write("bar") 43 | 44 | assert "foo" in dc and "bar" in dc and len(dc.list()) == 2, "cache accepts more new file entries" 45 | 46 | # At this point, "foo" is considered to be at least 1s old, "bar" just a few ms. 
47 | assert "foo" in dc and "bar" in dc, "purge only happens when explicitly called" 48 | dc.purge(maximum_age=10) 49 | assert "foo" in dc and "bar" in dc, "purge only affects stale files" 50 | dc.purge() # uses `maximum_age` value from init, 1 51 | assert "foo" not in dc, "purge actually purges" 52 | assert "bar" in dc, "purge does not overly purge" 53 | 54 | dc.delete() 55 | assert "bar" not in dc and len(dc.list()) == 0, "cache can be fully emptied" 56 | 57 | # Deleting unknown cache entries should not lead to an error 58 | dc.delete("foofoo") 59 | -------------------------------------------------------------------------------- /tests/cert_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | import os 6 | import pkg_resources as pkgr 7 | 8 | import tlscanary.tools.cert as cert 9 | 10 | 11 | def test_cert_instance_with_pem(): 12 | """Cert instances can process PEM and DER certificates""" 13 | 14 | # Create an instance from PEM data 15 | pem_cert_file = pkgr.resource_filename(__name__, "files/mozilla.org.pem") 16 | assert os.path.isfile(pem_cert_file) 17 | with open(pem_cert_file, "rb") as f: 18 | # Test from bytes object 19 | pem = cert.Cert(f.read()) 20 | assert type(pem) is cert.Cert, "can open PEM content" 21 | 22 | # Create an instance from DER data 23 | der_cert_file = pkgr.resource_filename(__name__, "files/mozilla.org.der") 24 | assert os.path.isfile(der_cert_file) 25 | with open(pem_cert_file, "rb") as f: 26 | # Test from list of int 27 | der_data = list(f.read()) 28 | der = cert.Cert(der_data) 29 | assert type(der) is cert.Cert, "can open DER content" 30 | 31 | assert pem.as_pem() == der.as_pem(), "PEM conversions are identical" 32 | assert pem.as_der() == der.as_der(), "DER conversions are identical" 
33 | 34 | # Now assuming that all instances have identical content 35 | assert der.signature_hash_algorithm() == "sha256", "SIGNATURE_HASH_ALGORITHM extracts fine" 36 | assert der.subject_alt_name() == "mozilla.org,www.mozilla.org", "SUBJECT_ALTERNATIVE_NAME OID extracts fine" 37 | assert der.ext_key_usage() == "serverAuth,clientAuth", "EXTENDED_KEY_USAGE OID extracts fine" 38 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | import pytest 6 | from subprocess import check_output 7 | 8 | import tlscanary.tools.firefox_downloader as fd 9 | import tlscanary.tools.firefox_extractor as fe 10 | 11 | 12 | @pytest.fixture(scope="session") 13 | def caching_firefox_downloader(tmpdir_factory): 14 | return fd.FirefoxDownloader(tmpdir_factory.mktemp("caching_downloader")) 15 | 16 | 17 | @pytest.fixture(scope="session") 18 | def nightly_archive(caching_firefox_downloader): 19 | """A Firefox Nightly archive downloaded from the Web""" 20 | return caching_firefox_downloader.download("nightly", use_cache=True) 21 | 22 | 23 | @pytest.fixture(scope="session") 24 | def nightly_win_archive(caching_firefox_downloader): 25 | """A Firefox Nightly archive for Windows downloaded from the Web""" 26 | return caching_firefox_downloader.download("nightly", platform="win", use_cache=True) 27 | 28 | 29 | @pytest.fixture(scope="session") 30 | def nightly_osx_archive(caching_firefox_downloader): 31 | """A Firefox Nightly archive for Mac OS X downloaded from the Web""" 32 | return caching_firefox_downloader.download("nightly", platform="osx", use_cache=True) 33 | 34 | 35 | @pytest.fixture(scope="session") 36 | def 
nightly_linux_archive(caching_firefox_downloader): 37 | """A Firefox Nightly archive for Linux downloaded from the Web""" 38 | return caching_firefox_downloader.download("nightly", platform="linux", use_cache=True) 39 | 40 | 41 | @pytest.fixture(scope="session") 42 | def beta_archive(caching_firefox_downloader): 43 | """A Firefox Beta archive downloaded from the Web""" 44 | return caching_firefox_downloader.download("beta", use_cache=True) 45 | 46 | 47 | @pytest.fixture(scope="session") 48 | def beta_win_archive(caching_firefox_downloader): 49 | """A Firefox Beta archive for Windows downloaded from the Web""" 50 | return caching_firefox_downloader.download("beta", platform="win", use_cache=True) 51 | 52 | 53 | @pytest.fixture(scope="session") 54 | def beta_osx_archive(caching_firefox_downloader): 55 | """A Firefox Beta archive for Mac OS X downloaded from the Web""" 56 | return caching_firefox_downloader.download("beta", platform="osx", use_cache=True) 57 | 58 | 59 | @pytest.fixture(scope="session") 60 | def beta_linux_archive(caching_firefox_downloader): 61 | """A Firefox Beta archive for Linux downloaded from the Web""" 62 | return caching_firefox_downloader.download("beta", platform="linux", use_cache=True) 63 | 64 | 65 | @pytest.fixture(scope="session") 66 | def release_archive(caching_firefox_downloader): 67 | """A Firefox Release archive downloaded from the Web""" 68 | return caching_firefox_downloader.download("release", use_cache=True) 69 | 70 | 71 | @pytest.fixture(scope="session") 72 | def release_win_archive(caching_firefox_downloader): 73 | """A Firefox Release archive for Windows downloaded from the Web""" 74 | return caching_firefox_downloader.download("release", platform="win", use_cache=True) 75 | 76 | 77 | @pytest.fixture(scope="session") 78 | def release_osx_archive(caching_firefox_downloader): 79 | """A Firefox Release archive for Mac OS X downloaded from the Web""" 80 | return caching_firefox_downloader.download("release", platform="osx", 
use_cache=True) 81 | 82 | 83 | @pytest.fixture(scope="session") 84 | def release_linux_archive(caching_firefox_downloader): 85 | """A Firefox Release archive for Linux downloaded from the Web""" 86 | return caching_firefox_downloader.download("release", platform="linux", use_cache=True) 87 | 88 | 89 | @pytest.fixture(scope="session") 90 | def esr_archive(caching_firefox_downloader): 91 | """A Firefox ESR archive downloaded from the Web""" 92 | return caching_firefox_downloader.download("esr", use_cache=True) 93 | 94 | 95 | @pytest.fixture(scope="session") 96 | def esr_win_archive(caching_firefox_downloader): 97 | """A Firefox ESR archive for Windows downloaded from the Web""" 98 | return caching_firefox_downloader.download("esr", platform="win", use_cache=True) 99 | 100 | 101 | @pytest.fixture(scope="session") 102 | def esr_osx_archive(caching_firefox_downloader): 103 | """A Firefox ESR archive for Mac OS X downloaded from the Web""" 104 | return caching_firefox_downloader.download("esr", platform="osx", use_cache=True) 105 | 106 | 107 | @pytest.fixture(scope="session") 108 | def esr_linux_archive(caching_firefox_downloader): 109 | """A Firefox ESR archive for Linux downloaded from the Web""" 110 | return caching_firefox_downloader.download("esr", platform="linux", use_cache=True) 111 | 112 | 113 | @pytest.fixture(scope="session") 114 | def nightly_app(tmpdir_factory, nightly_archive): 115 | """A Firefox Nightly app fixture""" 116 | return fe.extract(nightly_archive, tmpdir_factory.mktemp("nightly_app")) 117 | 118 | 119 | @pytest.fixture(scope="session") 120 | def beta_app(tmpdir_factory, beta_archive): 121 | """A Firefox Beta app fixture""" 122 | return fe.extract(beta_archive, tmpdir_factory.mktemp("beta_app")) 123 | 124 | 125 | @pytest.fixture(scope="session") 126 | def release_app(tmpdir_factory, release_archive): 127 | """A Firefox Release app fixture""" 128 | return fe.extract(release_archive, tmpdir_factory.mktemp("release_app")) 129 | 130 | 131 | 
@pytest.fixture(scope="session") 132 | def esr_app(tmpdir_factory, esr_archive): 133 | """A Firefox ESR app fixture""" 134 | return fe.extract(esr_archive, tmpdir_factory.mktemp("esr_app")) 135 | 136 | 137 | @pytest.fixture(scope="session") 138 | def nightly_win_app(tmpdir_factory, nightly_win_archive): 139 | """A Firefox Nightly app for Windows fixture""" 140 | return fe.extract(nightly_win_archive, tmpdir_factory.mktemp("nightly_win_app")) 141 | 142 | 143 | @pytest.fixture(scope="session") 144 | def beta_win_app(tmpdir_factory, beta_win_archive): 145 | """A Firefox Beta app for Windows fixture""" 146 | return fe.extract(beta_win_archive, tmpdir_factory.mktemp("beta_win_app")) 147 | 148 | 149 | @pytest.fixture(scope="session") 150 | def release_win_app(tmpdir_factory, release_win_archive): 151 | """A Firefox Release app for Windows fixture""" 152 | return fe.extract(release_win_archive, tmpdir_factory.mktemp("release_win_app")) 153 | 154 | 155 | @pytest.fixture(scope="session") 156 | def esr_win_app(tmpdir_factory, esr_win_archive): 157 | """A Firefox ESR app for Windows fixture""" 158 | return fe.extract(esr_win_archive, tmpdir_factory.mktemp("esr_win_app")) 159 | 160 | 161 | def __check_7z_version(): 162 | sz_out = check_output("7z") 163 | assert sz_out is not None 164 | sz_version = float(sz_out.splitlines()[1].split()[2]) 165 | if sz_version < 16: 166 | pytest.skip("7-zip version 16+ required to extract DMG images for Mac OS X") 167 | 168 | 169 | @pytest.fixture(scope="session") 170 | def nightly_osx_app(tmpdir_factory, nightly_osx_archive): 171 | """A Firefox Nightly app for Mac OS X fixture""" 172 | __check_7z_version() 173 | return fe.extract(nightly_osx_archive, tmpdir_factory.mktemp("nightly_osx_app")) 174 | 175 | 176 | @pytest.fixture(scope="session") 177 | def beta_osx_app(tmpdir_factory, beta_osx_archive): 178 | """A Firefox Beta app for Mac OS X fixture""" 179 | __check_7z_version() 180 | return fe.extract(beta_osx_archive, 
tmpdir_factory.mktemp("beta_osx_app")) 181 | 182 | 183 | @pytest.fixture(scope="session") 184 | def release_osx_app(tmpdir_factory, release_osx_archive): 185 | """A Firefox Release app for Mac OS X fixture""" 186 | __check_7z_version() 187 | return fe.extract(release_osx_archive, tmpdir_factory.mktemp("release_osx_app")) 188 | 189 | 190 | @pytest.fixture(scope="session") 191 | def esr_osx_app(tmpdir_factory, esr_osx_archive): 192 | """A Firefox ESR app for Mac OS X fixture""" 193 | __check_7z_version() 194 | return fe.extract(esr_osx_archive, tmpdir_factory.mktemp("esr_osx_app")) 195 | 196 | 197 | @pytest.fixture(scope="session") 198 | def nightly_linux_app(tmpdir_factory, nightly_linux_archive): 199 | """A Firefox Nightly app for Linux fixture""" 200 | return fe.extract(nightly_linux_archive, tmpdir_factory.mktemp("nightly_linux_app")) 201 | 202 | 203 | @pytest.fixture(scope="session") 204 | def beta_linux_app(tmpdir_factory, beta_linux_archive): 205 | """A Firefox Beta app for Linux fixture""" 206 | return fe.extract(beta_linux_archive, tmpdir_factory.mktemp("beta_linux_app")) 207 | 208 | 209 | @pytest.fixture(scope="session") 210 | def release_linux_app(tmpdir_factory, release_linux_archive): 211 | """A Firefox Release app for Linux fixture""" 212 | return fe.extract(release_linux_archive, tmpdir_factory.mktemp("release_linux_app")) 213 | 214 | 215 | @pytest.fixture(scope="session") 216 | def esr_linux_app(tmpdir_factory, esr_linux_archive): 217 | """A Firefox ESR app for Linux fixture""" 218 | return fe.extract(esr_linux_archive, tmpdir_factory.mktemp("esr_linux_app")) 219 | -------------------------------------------------------------------------------- /tests/files/firefox-nightly_linux-dummy.tar.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/tls-canary/dce7d04e0f8c1234787ab85cfefb31e4cc2493ec/tests/files/firefox-nightly_linux-dummy.tar.bz2 
-------------------------------------------------------------------------------- /tests/files/firefox-nightly_osx-dummy.dmg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/tls-canary/dce7d04e0f8c1234787ab85cfefb31e4cc2493ec/tests/files/firefox-nightly_osx-dummy.dmg -------------------------------------------------------------------------------- /tests/files/firefox-nightly_win-dummy.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/tls-canary/dce7d04e0f8c1234787ab85cfefb31e4cc2493ec/tests/files/firefox-nightly_win-dummy.exe -------------------------------------------------------------------------------- /tests/files/mozilla.org.der: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/tls-canary/dce7d04e0f8c1234787ab85cfefb31e4cc2493ec/tests/files/mozilla.org.der -------------------------------------------------------------------------------- /tests/files/mozilla.org.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIH+TCCBuGgAwIBAgIQCLTVuQqUkB24PV+Clbic+TANBgkqhkiG9w0BAQsFADB1 3 | MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 4 | d3cuZGlnaWNlcnQuY29tMTQwMgYDVQQDEytEaWdpQ2VydCBTSEEyIEV4dGVuZGVk 5 | IFZhbGlkYXRpb24gU2VydmVyIENBMB4XDTE2MTEwOTAwMDAwMFoXDTE4MTExNDEy 6 | MDAwMFowggECMR0wGwYDVQQPDBRQcml2YXRlIE9yZ2FuaXphdGlvbjETMBEGCysG 7 | AQQBgjc8AgEDEwJVUzEbMBkGCysGAQQBgjc8AgECEwpDYWxpZm9ybmlhMREwDwYD 8 | VQQFEwhDMjc1OTIwODEeMBwGA1UECRMVNjUwIENhc3RybyBTdCBTdGUgMzAwMQ4w 9 | DAYDVQQREwU5NDA0MTELMAkGA1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWEx 10 | FjAUBgNVBAcTDU1vdW50YWluIFZpZXcxHDAaBgNVBAoTE01vemlsbGEgQ29ycG9y 11 | YXRpb24xFDASBgNVBAMTC21vemlsbGEub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOC 12 | AQ8AMIIBCgKCAQEAojTWgYqURVDP291yrYhgqecAn++PQjzOesgJkZoPQdd4JuJM 13 | 
xg4Cxbfp00qpjJv9DNWCoQsYGGRNyXL6Xmxy7PVwD5m+2C27ogOa0xSBP5hBr07C 14 | aqQ1yAJw0RKwGilLbiiSzJoy21AUyY/uKxKogfMM9nnTfF4TQaYkahyeOCjhoIXW 15 | 2/nhoRvmzQkKtCc3kdKE64j8PgmeImu3nV4uw6XNj48umyuhHG+6G5Svt7d/DhSo 16 | TNKoiyigf4lLZuVCY3fwLVQh2Rx1Xz+cMpXLU6OIqdnZbDa9hlEFgN2C6MoSmcfo 17 | 6BX8Ruvyzqc2f+FNDGwjCGiAlxes3ZhcSwFjsQIDAQABo4ID9DCCA/AwHwYDVR0j 18 | BBgwFoAUPdNQpdagre7zSmAKZdMh1Pj41g8wHQYDVR0OBBYEFCqAuKQ3S0o1NBRk 19 | MZ7we/y2w6CTMCcGA1UdEQQgMB6CC21vemlsbGEub3Jngg93d3cubW96aWxsYS5v 20 | cmcwDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD 21 | AjB1BgNVHR8EbjBsMDSgMqAwhi5odHRwOi8vY3JsMy5kaWdpY2VydC5jb20vc2hh 22 | Mi1ldi1zZXJ2ZXItZzEuY3JsMDSgMqAwhi5odHRwOi8vY3JsNC5kaWdpY2VydC5j 23 | b20vc2hhMi1ldi1zZXJ2ZXItZzEuY3JsMEsGA1UdIAREMEIwNwYJYIZIAYb9bAIB 24 | MCowKAYIKwYBBQUHAgEWHGh0dHBzOi8vd3d3LmRpZ2ljZXJ0LmNvbS9DUFMwBwYF 25 | Z4EMAQEwgYgGCCsGAQUFBwEBBHwwejAkBggrBgEFBQcwAYYYaHR0cDovL29jc3Au 26 | ZGlnaWNlcnQuY29tMFIGCCsGAQUFBzAChkZodHRwOi8vY2FjZXJ0cy5kaWdpY2Vy 27 | dC5jb20vRGlnaUNlcnRTSEEyRXh0ZW5kZWRWYWxpZGF0aW9uU2VydmVyQ0EuY3J0 28 | MAwGA1UdEwEB/wQCMAAwggH3BgorBgEEAdZ5AgQCBIIB5wSCAeMB4QB1AKS5CZC0 29 | GFgUh7sTosxncAo8NZgE+RvfuON3zQ7IDdwQAAABWEtou44AAAQDAEYwRAIgadQv 30 | Taz8vG28WPPQyoD32Nfvekz/WfdMdTjc88YcYC4CIBncp5Yqet7J3JSiNWJnbaeW 31 | tBvTjoO6vudBurSFaqWmAHYAaPaY+B9kgr46jO65KB1M/HFRXWeT1ETRCmesu09P 32 | +8QAAAFYS2i7XQAABAMARzBFAiEAxbQYHhg6zA3Q7Y1hdXNVtd2yaOg1g4iIV38V 33 | eG2WBoMCIAT2azrUy9ndA3hZPPk2hV8YTEItIocJB81F/J4wf2NMAHcAVhQGmi/X 34 | wuzT9eG9RLI+x0Z2ubyZEVzA75SYVdaJ0N0AAAFYS2i7+gAABAMASDBGAiEA4/5v 35 | iFhhenp07noR8YSkvuiHF+D+6fmDhqcH2shxiboCIQCyPAVUuxnyrWK3PPDbqGx4 36 | YPi5i5DIQUbyT/UiqaOVtwB3AO5Lvbd1zmC64UJpH6vhnmajD35fsHLYgwDEe4l6 37 | qP3LAAABWEtovaIAAAQDAEgwRgIhAJ1S5/U2TqjMRHM8WLpZ+SRroBRoFXqLLj0k 38 | EffVe9MtAiEAgxkb74Z6pNVKx898PkNtxICcVRDah4loTSFsftgMAjYwDQYJKoZI 39 | hvcNAQELBQADggEBAAAxqPv0YMMRvgwOIKr5s6DqE4fUdkntQZn2y8VkLyNPodUP 40 | 5s64ZbnNcceubSPwgqbrXAEVdn2YLyFde1j/npTRfvQH4nPjN8zmIdZFQ/7FYp9b 41 | 
TUFQbouea6gf8/pVUgIFa0945hikojNzIXATFynDFFarpx+h957jOGlfhkXqFEK0 42 | cPrikLhxDrI5LWDv8tcJSW971gDngSY8N3htFgvDkCMVhBuQM02UTW++WuCe5rzW 43 | k8vQ3NfJoqy3Vm3YhRpOTF3UFbTXCpmzAb1hgt+iz4k1soCJo9YwOewwd5aS8Bk2 44 | +vjtOafBNN6UdLa4WjRX+JnX5wIANzL9azCzLUc= 45 | -----END CERTIFICATE----- 46 | -------------------------------------------------------------------------------- /tests/files/revocations_nodigicert.txt: -------------------------------------------------------------------------------- 1 | MIGCMQswCQYDVQQGEwJERTErMCkGA1UECgwiVC1TeXN0ZW1zIEVudGVycHJpc2UgU2VydmljZXMgR21iSDEfMB0GA1UECwwWVC1TeXN0ZW1zIFRydXN0IENlbnRlcjElMCMGA1UEAwwcVC1UZWxlU2VjIEdsb2JhbFJvb3QgQ2xhc3MgMw== 2 | H0ZpvlsmQE2QLTqXSDrK4g== 3 | MGYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMTwwOgYDVQQDEzNHbG9iYWxTaWduIE9yZ2FuaXphdGlvbiBWYWxpZGF0aW9uIENBIC0gU0hBMjU2IC0gRzI= 4 | CpI/GtuuSFspBu4E 5 | MFoxCzAJBgNVBAYTAk5MMREwDwYDVQQKEwhLUE4gQi5WLjEfMB0GA1UECxMWU3ltYW50ZWMgVHJ1c3QgTmV0d29yazEXMBUGA1UEAxMOS1BOIENsYXNzIDIgQ0E= 6 | BzFJVBKUYLx5d6jqaoqnQA== 7 | MIGnMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNlcnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWRdGFuw7pzw610dsOhbnk= 8 | SUEs5ABI 9 | SUEs5ABG 10 | SUEs5ABThOepKci5Vzg= 11 | MFoxCzAJBgNVBAYTAklFMRIwEAYDVQQKEwlCYWx0aW1vcmUxEzARBgNVBAsTCkN5YmVyVHJ1c3QxIjAgBgNVBAMTGUJhbHRpbW9yZSBDeWJlclRydXN0IFJvb3Q= 12 | ByembA== 13 | Bye2Cg== 14 | Byemaw== 15 | ByembQ== 16 | ByemaA== 17 | BydWSw== 18 | DL0FAAzqeadFvWvsl9xaiA== 19 | CcaPWuZtcdneSnerYJH33A== 20 | ByfDrA== 21 | ByfNeQ== 22 | BwSvmwHgIISTeM4uX8FUyg== 23 | Byekbg== 24 | Byekbw== 25 | ByfDrQ== 26 | ByfFng== 27 | ByeYsg== 28 | ByfDtQ== 29 | Byc3Cw== 30 | MIGSMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01PRE8gQ0EgTGltaXRlZDE4MDYGA1UEAxMvQ09NT0RPIFJTQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFNlY3VyZSBTZXJ2ZXIgQ0E= 31 | TasC8Zd8BT8kXEE67cFQmA== 32 | 
MGUxCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xJDAiBgNVBAMTG0RpZ2lDZXJ0IEFzc3VyZWQgSUQgUm9vdCBHMg== 33 | BiVxMmicX7C3qkZW3rCNuQ== 34 | A8Ir2pM+N2ukJUbcpQ9bGA== 35 | Cx0MvOeigXRY2Z4Uxr0Ypw== 36 | BC05mGqcJqxN4m1fywzqdQ== 37 | MHMxCzAJBgNVBAYTAklUMRwwGgYDVQQKExNUcnVzdCBJdGFsaWEgUy5wLkEuMR8wHQYDVQQLExZTeW1hbnRlYyBUcnVzdCBOZXR3b3JrMSUwIwYDVQQDExxUcnVzdCBJdGFsaWEgQ2xhc3MgMiBDQSAtIEcz 38 | WuRlPlB5dcPHk+Ni2m98uQ== 39 | MIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1 40 | AZkNBFXrl1Zg 41 | Aay2vr4aoUeZ 42 | AUMyuCiycPJJ 43 | MGUxCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwx0aGF3dGUsIEluYy4xHTAbBgNVBAsTFERvbWFpbiBWYWxpZGF0ZWQgU1NMMSAwHgYDVQQDExd0aGF3dGUgRFYgU1NMIFNIQTI1NiBDQQ== 44 | dqN9ZZM/PfFCXStajJdbtQ== 45 | MIGCMQswCQYDVQQGEwJERTErMCkGA1UECgwiVC1TeXN0ZW1zIEVudGVycHJpc2UgU2VydmljZXMgR21iSDEfMB0GA1UECwwWVC1TeXN0ZW1zIFRydXN0IENlbnRlcjElMCMGA1UEAwwcVC1UZWxlU2VjIEdsb2JhbFJvb3QgQ2xhc3MgMg== 46 | JxLAYQXlXzf2wpMVxAUkPw== 47 | MFwxCzAJBgNVBAYTAkJFMRUwEwYDVQQLEwxUcnVzdGVkIFJvb3QxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExGzAZBgNVBAMTElRydXN0ZWQgUm9vdCBDQSBHMg== 48 | RsdOCxepZXHEs1ErwPc= 49 | MFoxCzAJBgNVBAYTAkZSMRMwEQYDVQQKEwpDZXJ0aW5vbWlzMRcwFQYDVQQLEw4wMDAyIDQzMzk5ODkwMzEdMBsGA1UEAxMUQ2VydGlub21pcyAtIFJvb3QgQ0E= 50 | Z7mwlz4NA2s+8dnwRzT/RvK9ZZQ= 51 | Wu0lOm5kylP5uOu6md4xmWC3AtQ= 52 | ME4xCzAJBgNVBAYTAk5PMR0wGwYDVQQKDBRCdXlwYXNzIEFTLTk4MzE2MzMyNzEgMB4GA1UEAwwXQnV5cGFzcyBDbGFzcyAzIFJvb3QgQ0E= 53 | Gg== 54 | MIGYMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjE5MDcGA1UECxMwKGMpIDIwMDcgR2VvVHJ1c3QgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTYwNAYDVQQDEy1HZW9UcnVzdCBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzI= 55 | D4IrRtQ6ByW09Im/6D1i3Q== 56 | 
MIGWMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01PRE8gQ0EgTGltaXRlZDE8MDoGA1UEAxMzQ09NT0RPIFJTQSBPcmdhbml6YXRpb24gVmFsaWRhdGlvbiBTZWN1cmUgU2VydmVyIENB 57 | AMN6iHtOgy68QBu3kXiaFc8= 58 | MEIxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJSYXBpZFNTTCBTSEEyNTYgQ0E= 59 | L41amoCH4B2agSUpD8Wd2A== 60 | MFwxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIgTmVkZXJsYW5kZW4xLTArBgNVBAMMJFN0YWF0IGRlciBOZWRlcmxhbmRlbiBCdXJnZXIgQ0EgLSBHMw== 61 | cguCxFXg1BY= 62 | MHMxCzAJBgNVBAYTAk1LMRcwFQYDVQQKEw5LSUJTIEFEIFNrb3BqZTEfMB0GA1UECxMWU3ltYW50ZWMgVHJ1c3QgTmV0d29yazEqMCgGA1UEAxMhS2lic1RydXN0IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 63 | XtiXZFzoWEohcuXO3Qakfg== 64 | MIGUMQswCQYDVQQGEwJVUzEdMBsGA1UEChMUU3ltYW50ZWMgQ29ycG9yYXRpb24xHzAdBgNVBAsTFlN5bWFudGVjIFRydXN0IE5ldHdvcmsxRTBDBgNVBAMTPFN5bWFudGVjIENsYXNzIDIgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHNg== 65 | ZGOlNeh1j5diJSijGSk5yw== 66 | MFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUgQ2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjg= 67 | QwCyx4wTlCQ= 68 | MGExCzAJBgNVBAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYDVQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0Ex 69 | AQAAOct/vFej 70 | MDQxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25h 71 | Hg== 72 | Gw== 73 | HQ== 74 | Hw== 75 | MDsxGDAWBgNVBAoTD0N5YmVydHJ1c3QsIEluYzEfMB0GA1UEAxMWQ3liZXJ0cnVzdCBHbG9iYWwgUm9vdA== 76 | BAAAAAABSOXEhcs= 77 | MEoxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1MZXQncyBFbmNyeXB0MSMwIQYDVQQDExpMZXQncyBFbmNyeXB0IEF1dGhvcml0eSBYMw== 78 | AxPlMqxkByCn3XNuYMhYNMcp 79 | AwBGo0Zmp6KRryAguuMvXATI 80 | AxPlMqxkByCn3XNuYMhYNMcp 81 | AxW0+uDsfyCSfhECdsGGpVD8 82 | Ax6Jm7ajV49tqHgf9nYnzRCI 83 | AyYMguSo1my449OZq51C3s3Z 84 | AyjNQ4dnGD3FD6WL5gYrYru7 85 | AzL4tLuklekJ8lSh6VnRMSrk 86 | A0BOaf9UbJxzqBudSyes/cEM 87 | A1V4dX0tTb1rdTZxdWcuZ7YR 88 | A3TWA5Aylxw0x8bVvrmUSNJd 89 | A3UNTBOHUkbq+k999nJeSJdF 90 | A3WVy2V+2VFkWtMvA6HFwnhq 91 | A3ZQibPGSZ8nPVbuccaCvUfa 
92 | A5oET6WBWx72ColKf0txoWyR 93 | A7GX+szdK8/7Kf0xUuarfyIN 94 | A7RCxMe1S9Hb7ENzRxl0mxGP 95 | A7T0V6o47rgCKl3oUb7jF2Ph 96 | A7uy+rmTav6tDH4dRrsnvXGH 97 | A8LV4zckxcwdttbQSk0EPnoA 98 | A8aDg1/IA4O8gjMPZHVqPI+w 99 | A8wZnhfuY6VIV1SwGsTGNR7L 100 | A9BRwOwbXRRhCe+kcmglgW3z 101 | A+RCQYwhofmXM+/hxdyoUzkI 102 | A+ly3y1rVP59k/MKfcE3DoEq 103 | A/7DHCczBnP5qUVh0jF2pvwB 104 | A/99bZCzSpexYL5y6dSryDn3 105 | BDV89QWZE9MJYlCpFQUv5Y2W 106 | BHT6CK6B569m/dd5dEluBOEd 107 | BJDHnthjoDRutxFRJPFnixbU 108 | BKobzjrOxa/6kCR0ImKoqaQW 109 | BKrxi2/1iFxHEFzyZvegxq5C 110 | BLlQHJ611eOZuedFrFgVAfAs 111 | BOIIipysxAz5xHIMmFRvYchY 112 | BONHqLIx/ibQE08IQIyoGaXg 113 | BOPwjyn5eqfeoxs7Z0y3vqNN 114 | BOc11keA9WJ9R20XQY8hO7yi 115 | BOncXh7IZp1SNydhtUdyh2O2 116 | BPVqx4UbKVAbJSFTKwrcFryU 117 | MGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENB 118 | CDvgVpBCRrGhdWrJWZHHSg== 119 | MHQxCzAJBgNVBAYTAkdCMScwJQYDVQQKEx5Ccml0aXNoIFRlbGVjb21tdW5pY2F0aW9ucyBwbGMxHzAdBgNVBAsTFlN5bWFudGVjIFRydXN0IE5ldHdvcmsxGzAZBgNVBAMTEkJUIENsYXNzIDIgQ0EgLSBHMw== 120 | SAOpMdpJPNmCofX5swZ5oQ== 121 | MGkxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIgTmVkZXJsYW5kZW4xOjA4BgNVBAMMMVN0YWF0IGRlciBOZWRlcmxhbmRlbiBPcmdhbmlzYXRpZSBQZXJzb29uIENBIC0gRzM= 122 | MpyrUqgKLj8= 123 | MHAxCzAJBgNVBAYTAk5MMRcwFQYDVQRhDA5OVFJOTC0zMDIzNzQ1OTEgMB4GA1UECgwXUXVvVmFkaXMgVHJ1c3RsaW5rIEIuVi4xJjAkBgNVBAMMHVF1b1ZhZGlzIFF1YWxpZmllZCBXZWIgSUNBIEcx 124 | DKKQ8bcYDwn4Pows6KhzW6JxMYw= 125 | MEUxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9WYWRpcyBSb290IENBIDI= 126 | duIdILCWeCm7g2bzVNjyc3z3hUM= 127 | TRRJlNTx8n7cQvNehM76B+mI/gI= 128 | MDsxCzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBGTk1ULVJDTQ== 129 | RV864VwhzbpUT4KqR1Hr2w== 130 | YcLU1PaprndVkma5ja/WIQ== 131 | MDMxCzAJBgNVBAYTAlBUMQ0wCwYDVQQKDARTQ0VFMRUwEwYDVQQDDAxFQ1JhaXpFc3RhZG8= 132 | ObszBuNYqt9If26rE5MLnA== 133 | a0zzyZD4OEdRpzTBCGWFnQ== 134 | 
MHMxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTEUMBIGA1UEBxMLU2FudGEgQ2xhcmExGjAYBgNVBAoTEUludGVsIENvcnBvcmF0aW9uMSUwIwYDVQQDExxJbnRlbCBFeHRlcm5hbCBJc3N1aW5nIENBIDZC 135 | HwAABsvzDP+DIzUG6QAAAAAGyw== 136 | MIGLMQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMRUwEwYDVQQLEwxNaWNyb3NvZnQgSVQxHjAcBgNVBAMTFU1pY3Jvc29mdCBJVCBTU0wgU0hBMg== 137 | WgAFElcDxFjoswSzjAABAAUSVw== 138 | WgAFElbyxxPA8BdM4gABAAUSVg== 139 | MHIxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJUWDEQMA4GA1UEBxMHSG91c3RvbjEVMBMGA1UEChMMY1BhbmVsLCBJbmMuMS0wKwYDVQQDEyRjUGFuZWwsIEluYy4gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHk= 140 | NlLRZJFLco/An3cLAGjGgQ== 141 | AJk3QFH13eHUHHVnsvwS0Vo= 142 | MH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 143 | IBhwLg4/l5vcFjA30Du20n1cSyI= 144 | MF0xCzAJBgNVBAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScwJQYDVQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTI= 145 | IrmxWpuP9VVz 146 | MF4xCzAJBgNVBAYTAlVTMTAwLgYDVQQKEydIeWRyYW50SUQgKEF2YWxhbmNoZSBDbG91ZCBDb3Jwb3JhdGlvbikxHTAbBgNVBAMTFEh5ZHJhbnRJRCBTU0wgSUNBIEcy 147 | ZhcM4uyLfYi04utzLnOP46Z89nI= 148 | ME0xCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxJzAlBgNVBAMTHkRpZ2lDZXJ0IFNIQTIgU2VjdXJlIFNlcnZlciBDQQ== 149 | CR8HWlsGr6Sdlw/mzOv8gA== 150 | MDwxHjAcBgNVBAMMFUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMCREU= 151 | b+PF5BvGM1c= 152 | esKlE8WJEhw= 153 | MFwxCzAJBgNVBAYTAlVTMRkwFwYDVQQKDBBWZXJpem9uIEJ1c2luZXNzMREwDwYDVQQLDAhPbW5pUm9vdDEfMB0GA1UEAwwWVmVyaXpvbiBHbG9iYWwgUm9vdCBDQQ== 154 | BkQ= 155 | MHAxCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xLzAtBgNVBAMTJkRpZ2lDZXJ0IFNIQTIgSGlnaCBBc3N1cmFuY2UgU2VydmVyIENB 156 | Cn+uUpLudsH09lYYIPTK5A== 157 | MEIxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9iYWwgQ0E= 158 | Ajp1 159 | 
ME4xCzAJBgNVBAYTAk5PMR0wGwYDVQQKDBRCdXlwYXNzIEFTLTk4MzE2MzMyNzEgMB4GA1UEAwwXQnV5cGFzcyBDbGFzcyAyIFJvb3QgQ0E= 160 | Gg== 161 | MF8xCzAJBgNVBAYTAk1LMRcwFQYDVQQKEw5LSUJTIEFEIFNrb3BqZTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazEWMBQGA1UEAxMNS0lCUyBWZXJiYSBDQQ== 162 | fafwGJHMSPO0K7nm6dSxiA== 163 | GWX2i+qVAVmIAm9D6fgusw== 164 | MGcxCzAJBgNVBAYTAktSMRMwEQYDVQQKEwpLRUNBLCBJbmMuMR8wHQYDVQQLExZTeW1hbnRlYyBUcnVzdCBOZXR3b3JrMSIwIAYDVQQDExlDcm9zc0NlcnQgQ2xhc3MgMiBDQSAtIEcz 165 | H4L5/FnWXnXdXs/8x+BDpw== 166 | MIG0MQswCQYDVQQGEwJVUzEQMA4GA1UECBMHQXJpem9uYTETMBEGA1UEBxMKU2NvdHRzZGFsZTEaMBgGA1UEChMRR29EYWRkeS5jb20sIEluYy4xLTArBgNVBAsTJGh0dHA6Ly9jZXJ0cy5nb2RhZGR5LmNvbS9yZXBvc2l0b3J5LzEzMDEGA1UEAxMqR28gRGFkZHkgU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcy 167 | AOfHzdPzlvw5 168 | MIHKMQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMw== 169 | APx4jVLURBZ4JDuYgssVtA== 170 | MIGYMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjE5MDcGA1UECxMwKGMpIDIwMDggR2VvVHJ1c3QgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTYwNAYDVQQDEy1HZW9UcnVzdCBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzM= 171 | Bl3KfQ+qIpjDCrBhKUDefQ== 172 | MEkxCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxIzAhBgNVBAMTGlN3aXNzU2lnbiBQbGF0aW51bSBDQSAtIEcy 173 | JD4RO0Oolos= 174 | AJHE7Dx8fWBV 175 | MEUxCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2lnbiBHb2xkIENBIC0gRzI= 176 | ALNpo1yEOMIuR5TLwIEiPg== 177 | MsKCw6ASAH4= 178 | APeSt8SBjARY 179 | MH4xCzAJBgNVBAYTAlVTMR0wGwYDVQQKExRTeW1hbnRlYyBDb3Jwb3JhdGlvbjEfMB0GA1UECxMWU3ltYW50ZWMgVHJ1c3QgTmV0d29yazEvMC0GA1UEAxMmU3ltYW50ZWMgQ2xhc3MgMyBTZWN1cmUgU2VydmVyIENBIC0gRzQ= 180 | UDE/uwr4z5V8eZI4+1gkAw== 181 | 
MIGFMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSkwJwYDVQQDEyBDZXJ0dW0gRG9tYWluIFZhbGlkYXRpb24gQ0EgU0hBMg== 182 | VEav0UR+l38TpKTRi7sS1g== 183 | -------------------------------------------------------------------------------- /tests/firefox_app_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | import os 6 | import pytest 7 | import subprocess 8 | 9 | import tlscanary.tools.firefox_app as fa 10 | 11 | 12 | def __check_app(app): 13 | assert type(app) is fa.FirefoxApp, "App has right type" 14 | assert os.path.isdir(app.app_dir), "App dir exists" 15 | assert os.path.isfile(app.exe) and os.access(app.exe, os.X_OK), "App binary is executable" 16 | 17 | 18 | def __run_app(app): 19 | cmd = [app.exe, '-xpcshell', "-g", app.gredir, "-a", app.browser] 20 | p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) 21 | p.communicate(b"quit();\n") 22 | ret = p.wait(timeout=1) # Throws TimeoutExpired on timeout 23 | assert ret == 0, "Firefox app runs XPCShell without error" 24 | 25 | 26 | def __check_and_run_app(app): 27 | """Check Firefox App and try to run it""" 28 | __check_app(app) 29 | __run_app(app) 30 | 31 | 32 | def test_nightly_app(nightly_app): 33 | """Test Firefox Nightly app on local platform""" 34 | __check_and_run_app(nightly_app) 35 | 36 | 37 | @pytest.mark.slow 38 | def test_beta_app(beta_app): 39 | """Test Firefox Beta app on local platform""" 40 | __check_and_run_app(beta_app) 41 | 42 | 43 | @pytest.mark.slow 44 | def test_release_app(release_app): 45 | """Test Firefox Release app on local platform""" 46 | __check_and_run_app(release_app) 47 | 48 | 49 | @pytest.mark.slow 50 | def 
test_esr_app(esr_app): 51 | """Test Firefox ESR app on local platform""" 52 | __check_and_run_app(esr_app) 53 | 54 | 55 | @pytest.mark.slow 56 | def test_nightly_win_app(nightly_win_app): 57 | """Test Firefox Nightly app for Windows""" 58 | __check_app(nightly_win_app) 59 | 60 | 61 | @pytest.mark.slow 62 | def test_nightly_osx_app(nightly_osx_app): 63 | """Test Firefox Nightly app for Mac OS X""" 64 | __check_app(nightly_osx_app) 65 | 66 | 67 | @pytest.mark.slow 68 | def test_nightly_linux_app(nightly_linux_app): 69 | """Test Firefox Nightly app for Mac Linux""" 70 | __check_app(nightly_linux_app) 71 | 72 | 73 | @pytest.mark.slow 74 | def test_beta_win_app(beta_win_app): 75 | """Test Firefox Beta app for Windows""" 76 | __check_app(beta_win_app) 77 | 78 | 79 | @pytest.mark.slow 80 | def test_beta_osx_app(beta_osx_app): 81 | """Test Firefox Beta app for Mac OS X""" 82 | __check_app(beta_osx_app) 83 | 84 | 85 | @pytest.mark.slow 86 | def test_beta_linux_app(beta_linux_app): 87 | """Test Firefox Beta app for Mac Linux""" 88 | __check_app(beta_linux_app) 89 | 90 | 91 | @pytest.mark.slow 92 | def test_release_win_app(release_win_app): 93 | """Test Firefox Release app for Windows""" 94 | __check_app(release_win_app) 95 | 96 | 97 | @pytest.mark.slow 98 | def test_release_osx_app(release_osx_app): 99 | """Test Firefox Release app for Mac OS X""" 100 | __check_app(release_osx_app) 101 | 102 | 103 | @pytest.mark.slow 104 | def test_release_linux_app(release_linux_app): 105 | """Test Firefox Release app for Mac Linux""" 106 | __check_app(release_linux_app) 107 | 108 | 109 | @pytest.mark.slow 110 | def test_esr_win_app(esr_win_app): 111 | """Test Firefox ESR app for Windows""" 112 | __check_app(esr_win_app) 113 | 114 | 115 | @pytest.mark.slow 116 | def test_esr_osx_app(esr_osx_app): 117 | """Test Firefox ESR app for Mac OS X""" 118 | __check_app(esr_osx_app) 119 | 120 | 121 | @pytest.mark.slow 122 | def test_esr_linux_app(esr_linux_app): 123 | """Test Firefox ESR app for Mac 
Linux""" 124 | __check_app(esr_linux_app) 125 | -------------------------------------------------------------------------------- /tests/firefox_downloader_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | import os 6 | import pytest 7 | from time import sleep 8 | import unittest.mock as mock 9 | 10 | import tlscanary.tools.firefox_downloader as fd 11 | 12 | 13 | def test_firefox_downloader_instance(tmpdir): 14 | """FirefoxDownloader instances sanity check""" 15 | 16 | fdl = fd.FirefoxDownloader(tmpdir) 17 | 18 | build_list, platform_list, test_default, base_default = fdl.list() 19 | assert "nightly" in build_list and "release" in build_list, "build list looks sane" 20 | assert "linux" in platform_list and "osx" in platform_list, "platform list looks sane" 21 | assert test_default in build_list and base_default in build_list, "defaults are valid builds" 22 | 23 | 24 | def test_firefox_downloader_exceptions(tmpdir): 25 | """Test handling of invalid parameters""" 26 | 27 | fdl = fd.FirefoxDownloader(tmpdir) 28 | build_list, platform_list, test_default, base_default = fdl.list() 29 | 30 | assert "foobar" not in build_list and "foobar" not in platform_list 31 | 32 | with pytest.raises(Exception): 33 | fdl.download("foobar", platform_list[0]) 34 | 35 | with pytest.raises(Exception): 36 | fdl.download(test_default, "foobar") 37 | 38 | 39 | @mock.patch('urllib.request.urlopen') 40 | @mock.patch('sys.stdout') # to silence progress bar 41 | def test_firefox_downloader_downloading(mock_stdout, mock_urlopen, tmpdir): 42 | """Test the download function""" 43 | del mock_stdout 44 | 45 | # This test is checking caching behavior, hence: 46 | # Using a test-specific test directory to not wipe regular cache. 
47 | test_tmp_dir = str(tmpdir.join("download_test")) 48 | 49 | fdl = fd.FirefoxDownloader(test_tmp_dir, cache_timeout=1) 50 | 51 | mock_req = mock.Mock() 52 | mock_read = mock.Mock(side_effect=(b"foo", b"bar", None)) 53 | mock_info = mock.Mock() 54 | mock_get = mock.Mock(return_value="6") 55 | mock_info.return_value = mock.Mock(get=mock_get) 56 | mock_req.info = mock_info 57 | mock_req.read = mock_read 58 | mock_urlopen.return_value = mock_req 59 | 60 | output_file_name = fdl.download("nightly", "linux", use_cache=True) 61 | assert mock_get.call_args_list == [(("Content-Length",),)],\ 62 | "only checks content length (assumed by test mock)" 63 | expected_url = """https://download.mozilla.org/?product=firefox-nightly-latest&os=linux64&lang=en-US""" 64 | assert mock_urlopen.call_args_list == [((expected_url,),)], "downloads the expected URL" 65 | assert len(mock_read.call_args_list) == 3, "properly calls read()" 66 | assert output_file_name.endswith("firefox-nightly_linux.tar.bz2"), "uses expected file name" 67 | assert output_file_name.startswith(test_tmp_dir), "writes file to expected directory" 68 | assert os.path.isfile(output_file_name), "creates proper file" 69 | with open(output_file_name, "rb") as f: 70 | content = f.read() 71 | assert content == b"foobar", "downloads expected content" 72 | 73 | # Test caching by re-downloading 74 | mock_read.reset_mock() 75 | mock_read.side_effect = (b"foo", b"bar", None) 76 | second_output_file_name = fdl.download("nightly", "linux", use_cache=True) 77 | assert not mock_read.called, "does not re-download" 78 | assert output_file_name == second_output_file_name, "uses cached file" 79 | 80 | # Test purging on obsolete cache. Cache is purged on fdl init. 
81 | sleep(1.1) 82 | mock_read.reset_mock() 83 | mock_read.side_effect = (b"foo", b"bar", None) 84 | fdl = fd.FirefoxDownloader(test_tmp_dir, cache_timeout=1) 85 | fdl.download("nightly", "linux", use_cache=True) 86 | assert mock_read.called, "re-downloads when cache is stale" 87 | 88 | # Test caching when file changes upstream (checks file size). 89 | mock_get.reset_mock() 90 | mock_get.return_value = "7" 91 | mock_read.reset_mock() 92 | mock_read.side_effect = (b"foo", b"barr", None) 93 | fdl.download("nightly", "linux", use_cache=True) 94 | assert mock_read.called, "re-downloads when upstream changes" 95 | -------------------------------------------------------------------------------- /tests/firefox_extractor_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | 5 | import os 6 | import pkg_resources as pkgr 7 | import pytest 8 | import subprocess 9 | 10 | 11 | import tlscanary.tools.firefox_extractor as fe 12 | import tlscanary.tools.firefox_app as fa 13 | 14 | 15 | def test_osx_extractor(tmpdir): 16 | """Extractor can extract a OS X Nightly archive""" 17 | 18 | sz_out = subprocess.check_output("7z") 19 | assert sz_out is not None 20 | sz_version = float(sz_out.splitlines()[1].split()[2]) 21 | if sz_version < 16: 22 | pytest.skip("7-zip version 16+ required to extract DMG images") 23 | 24 | test_archive = pkgr.resource_filename(__name__, "files/firefox-nightly_osx-dummy.dmg") 25 | assert os.path.isfile(test_archive) 26 | 27 | app = fe.extract(test_archive, tmpdir) 28 | 29 | assert type(app) is fa.FirefoxApp, "return value is correct" 30 | assert os.path.isdir(app.app_dir), "app dir is extracted" 31 | assert os.path.isfile(app.app_ini), "app ini is extracted" 32 | assert os.path.isdir(app.browser), "browser dir is extracted" 33 | assert os.path.isfile(app.exe), "exe file is extracted" 34 | assert app.exe.startswith(str(tmpdir)), "archive is extracted to specified directory" 35 | assert app.platform == "osx", "platform is detected correctly" 36 | assert app.release == "Nightly", "release branch is detected correctly" 37 | assert app.version == "53.0a1", "version is detected correctly" 38 | 39 | 40 | def test_linux_extractor(tmpdir): 41 | """Extractor can extract a Linux Nightly archive""" 42 | 43 | test_archive = pkgr.resource_filename(__name__, "files/firefox-nightly_linux-dummy.tar.bz2") 44 | assert os.path.isfile(test_archive) 45 | 46 | app = fe.extract(test_archive, tmpdir) 47 | 48 | assert type(app) is fa.FirefoxApp, "return value is correct" 49 | assert os.path.isdir(app.app_dir), "app dir is extracted" 50 | assert os.path.isfile(app.app_ini), "app ini is extracted" 51 | assert os.path.isdir(app.browser), "browser dir is extracted" 52 | assert os.path.isfile(app.exe), "exe file is extracted" 53 | assert 
app.exe.startswith(str(tmpdir)), "archive is extracted to specified directory" 54 | assert app.platform == "linux", "platform is detected correctly" 55 | assert app.release == "Nightly", "release branch is detected correctly" 56 | assert app.version == "53.0a1", "version is detected correctly" 57 | 58 | 59 | def test_win_extractor(tmpdir): 60 | """Extractor can extract a Windows Nightly archive""" 61 | 62 | test_archive = pkgr.resource_filename(__name__, "files/firefox-nightly_win-dummy.exe") 63 | assert os.path.isfile(test_archive) 64 | 65 | app = fe.extract(test_archive, tmpdir) 66 | 67 | assert type(app) is fa.FirefoxApp, "return value is correct" 68 | assert os.path.isdir(app.app_dir), "app dir is extracted" 69 | assert os.path.isfile(app.app_ini), "app ini is extracted" 70 | assert os.path.isdir(app.browser), "browser dir is extracted" 71 | assert os.path.isfile(app.exe), "exe file is extracted" 72 | assert app.exe.startswith(str(tmpdir)), "archive is extracted to specified directory" 73 | assert app.platform == "win32", "platform is detected correctly" 74 | assert app.release == "Nightly", "release branch is detected correctly" 75 | assert app.version == "55.0a1", "version is detected correctly" 76 | -------------------------------------------------------------------------------- /tests/progress_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | 5 | import time 6 | 7 | import tlscanary.tools.progress as pr 8 | 9 | 10 | def test_progress_logger_instance(): 11 | """ProgressTracker does its thing""" 12 | 13 | progress = pr.ProgressTracker(100, unit="bubbles") 14 | assert type(progress) is pr.ProgressTracker, "ProgressTracker can be instantiated" 15 | 16 | assert len(str(progress)) > 0, "can be turned into string (even when empty)" 17 | 18 | # Make progress 19 | progress.log_completed(1) 20 | time.sleep(0.01) 21 | progress.log_completed(9) 22 | time.sleep(0.01) 23 | progress.log_completed(10) 24 | time.sleep(0.01) 25 | progress.log_completed(30) 26 | time.sleep(0.01) 27 | progress.log_completed(0) 28 | progress.log_overhead(1) 29 | 30 | # Check results 31 | status = str(progress) 32 | assert "50/100" in status, "reports correct total progress" 33 | assert "50%" in status, "reports correct progress percentage" 34 | assert "2.0% overhead" in status, "reports correct overhead percentage" 35 | assert "bubbles" in status, "uses custom item unit for speed" 36 | 37 | # See if starting and stopping the monitor thread works 38 | progress.start_reporting(0.1, 0.1) 39 | thread = progress.logger_thread 40 | assert thread.is_alive(), "monitor thread can be started" 41 | progress.stop_reporting() 42 | time.sleep(1.1) 43 | assert not thread.is_alive(), "monitor thread can be terminated" 44 | -------------------------------------------------------------------------------- /tests/runlog_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | 5 | import datetime 6 | import os 7 | 8 | from tests import ArgsMock 9 | import tlscanary.runlog as rl 10 | 11 | 12 | def test_runlog_db_instance(tmpdir): 13 | """RunLogDB can list log handles""" 14 | 15 | db = rl.RunLogDB(ArgsMock(workdir=tmpdir)) 16 | handle_list = db.list() 17 | dir_list = db.list_logs() 18 | assert type(handle_list) is list, "handle listing is a python list" 19 | assert type(dir_list) is list, "dir listing is a python list" 20 | assert len(handle_list) == 0, "empty db yields empty handle list" 21 | assert len(dir_list) == 0, "empty db yields empty dir list" 22 | 23 | 24 | def test_runlog_db_file_handling(tmpdir): 25 | """RunLogDB associates handles and directories""" 26 | 27 | db = rl.RunLogDB(ArgsMock(workdir=tmpdir)) 28 | 29 | now = datetime.datetime.utcnow().strftime("%Y-%m-%dZ%H-%M-%S") 30 | dir_name = os.path.abspath(db.handle_to_dir_name(now)) 31 | assert dir_name.startswith(str(tmpdir)), "log files are stored in the right location" 32 | handle_name = db.dir_name_to_handle(dir_name) 33 | assert now == handle_name, "converting between handles and file names works" 34 | 35 | 36 | def test_runlog_db_rw(tmpdir): 37 | """RunLogDB can read and write""" 38 | 39 | db = rl.RunLogDB(ArgsMock(workdir=tmpdir)) 40 | 41 | in_db_before = db.list() 42 | now = datetime.datetime.utcnow().strftime("%Y-%m-%dZ%H-%M-%S") 43 | assert now not in in_db_before, "database is not cluttered" 44 | test_data = "hello,test" 45 | db.write(now, "test", test_data) 46 | in_db_after = db.list() 47 | assert now in in_db_after, "written log appears in db listing" 48 | read_test_data = db.read(now, "test") 49 | assert test_data == read_test_data, "read data is same as data" 50 | 51 | 52 | def test_runlog_rw(tmpdir): 53 | """RunLog objects can write and read""" 54 | 55 | db = rl.RunLogDB(ArgsMock(workdir=tmpdir)) 56 | now = datetime.datetime.utcnow().strftime("%Y-%m-%dZ%H-%M-%S") 57 | log = rl.RunLog(now, "w", db) 58 | 59 | # Write something to log using the with statement 
60 | with log: 61 | log.log({"foo": 5}) 62 | log.update_meta({"just": "testing"}) 63 | assert log.is_running, "log is running in with" 64 | assert not log.is_running, "log is not running after with" 65 | 66 | # Write something to log. Previous content will be overwritten 67 | log.start(meta={"first_meta": "one"}) 68 | log.log([{"foo": 1}, {"foo": 2}]) 69 | log.log({"foo": 3}) 70 | assert not log.has_finished(), "log is not marked `finished` while logging" 71 | log.stop(meta={"last_meta": "two"}) 72 | assert log.has_finished(), "log is marked `finished` after logging" 73 | 74 | test_log_dir = db.handle_to_dir_name(now) 75 | assert os.path.isdir(test_log_dir), "log directory is created" 76 | 77 | assert len(os.listdir(test_log_dir)) == 2, "log and meta files are written to disk" 78 | 79 | # Read from log 80 | log = rl.RunLog(now, "r", db) 81 | assert log.has_finished(), "completed log is marked as `finished`" 82 | meta = log.get_meta() 83 | # Metadata always has "format_revision", "log_lines" and "run_completed" keys 84 | assert len(list(meta.keys())) == 5, "log has correct number of meta data" 85 | log_lines = [line for line in log] 86 | assert len(log_lines) == 3, "log has correct number of lines" 87 | 88 | 89 | def test_runlog_integration(tmpdir): 90 | """RunLog system works as intended""" 91 | 92 | db = rl.RunLogDB(ArgsMock(workdir=tmpdir)) 93 | log_handles_before = db.list() 94 | 95 | start_meta = {"begin": True} 96 | stop_meta = {"end": True} 97 | combined_meta = start_meta 98 | combined_meta.update(stop_meta) 99 | log_lines = [{"foo": 1}, {"foo": 2}, {"foo": 3, "nolog": True}] 100 | log = db.new_log() 101 | log.start(meta=start_meta, log_filter=lambda x: None if "nolog" in x else x) 102 | log.log(log_lines) 103 | log.stop(meta=stop_meta) 104 | 105 | log_handles_after = db.list() 106 | assert len(log_handles_after) == len(log_handles_before) + 1, "new log is listed" 107 | log = db.read_log(log_handles_after[-1]) 108 | read_meta = log.get_meta() 109 | assert 
"log_lines" in read_meta and read_meta["log_lines"] == 2, "has right number of lines in metadata" 110 | has_meta = True 111 | for k in combined_meta: 112 | if k not in read_meta or read_meta[k] != combined_meta[k]: 113 | has_meta = False 114 | break 115 | assert has_meta, "log metadata is correct" 116 | read_lines = [line for line in log] 117 | assert len(read_lines) == 2, "log has correct number of lines" 118 | assert read_lines == log_lines[:2], "log lines have correct content" 119 | -------------------------------------------------------------------------------- /tests/sources_db_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | import os 6 | import pytest 7 | 8 | import tlscanary.sources_db as sdb 9 | from tests import ArgsMock 10 | 11 | 12 | @pytest.fixture 13 | def sources_db(tmpdir): 14 | """A SourcesDB fixture""" 15 | return sdb.SourcesDB(ArgsMock(workdir=tmpdir)) 16 | 17 | 18 | def test_sources_db_instance(sources_db): 19 | """SourcesDB can list database handles""" 20 | 21 | handle_list = sources_db.list() 22 | assert type(handle_list) is list, "handle listing is an actual list" 23 | assert len(handle_list) > 0, "handle listing is not empty" 24 | assert sources_db.default in handle_list, "default handle appears in listing" 25 | assert "list" not in handle_list, "`list` must not be an existing handle" 26 | assert "debug" in handle_list, "`debug` handle is required for testing" 27 | 28 | 29 | def test_sources_db_read(sources_db): 30 | """SourcesDB can read databases""" 31 | 32 | src = sources_db.read("debug") 33 | assert type(src) is sdb.Sources, "reading yields a Sources object" 34 | assert len(src) == len(src.rows), "length seems to be correct" 35 | assert "hostname" in list(src[0].keys()), "`hostname` 
is amongst keys" 36 | assert "rank" in list(src[0].keys()), "`rank` is amongst keys" 37 | rows = [row for row in src] 38 | assert len(rows) == len(src), "yields expected number of iterable rows" 39 | 40 | 41 | def test_sources_db_write_and_override(tmpdir): 42 | """SourcesDB databases can be written and overridden""" 43 | 44 | db = sdb.SourcesDB(ArgsMock(workdir=tmpdir)) 45 | old = db.read("debug") 46 | old_default = db.default 47 | override = sdb.Sources("debug", True) 48 | row_one = {"foo": "bar", "baz": "bang", "boom": "bang"} 49 | row_two = {"foo": "bar2", "baz": "bang2", "boom": "bang2"} 50 | override.append(row_one) 51 | override.append(row_two) 52 | db.write(override) 53 | 54 | # New SourcesDB instance required to detect overrides 55 | db = sdb.SourcesDB(ArgsMock(workdir=tmpdir)) 56 | assert os.path.exists(tmpdir.join("sources", "debug.csv")), "override file is written" 57 | assert db.default == "debug", "overriding the default works" 58 | assert old_default != db.default, "overridden default actually changes" 59 | new = db.read("debug") 60 | assert len(new) == 2, "number of overridden rows is correct" 61 | assert new[0] == row_one and new[1] == row_two, "new rows are written as expected" 62 | assert old[0] != new[0], "overridden rows actually change" 63 | 64 | 65 | def test_sources_set_interface(): 66 | """Sources object can be created from and yield sets""" 67 | 68 | # Sets are assumed to contain (rank, hostname) pairs 69 | src_set = {(1, "mozilla.org"), (2, "mozilla.com"), (3, "addons.mozilla.org")} 70 | src = sdb.Sources("foo") 71 | src.from_set(src_set) 72 | assert len(src) == 3, "database from set has correct length" 73 | assert src_set == src.as_set(), "yielded set is identical to the original" 74 | assert len(src.as_set(1, 2)) == 1, "yielded subset has expected length" 75 | 76 | 77 | def test_sources_sorting(): 78 | """Sources object can sort its rows by rank""" 79 | 80 | src_set = {(1, "mozilla.org"), (2, "mozilla.com"), (3, "addons.mozilla.org")} 
81 | src = sdb.Sources("foo") 82 | src.from_set(src_set) 83 | # Definitely "unsort" 84 | if int(src.rows[0]["rank"]) < int(src.rows[1]["rank"]): 85 | src.rows[0], src.rows[1] = src.rows[1], src.rows[0] 86 | assert not int(src.rows[0]["rank"]) < int(src.rows[1]["rank"]) < int(src.rows[2]["rank"]), "list is scrambled" 87 | src.sort() 88 | assert int(src.rows[0]["rank"]) < int(src.rows[1]["rank"]) < int(src.rows[2]["rank"]), "sorting works" 89 | 90 | 91 | def test_sources_chunking(): 92 | """Sources object can be iterated in chunks""" 93 | 94 | src_set = {(1, "mozilla.org"), (2, "mozilla.com"), (3, "addons.mozilla.org"), 95 | (4, "irc.mozilla.org"), (5, "firefox.com")} 96 | assert len(src_set) == 5, "hardcoded test set has expected length" 97 | src = sdb.Sources("foo") 98 | src.from_set(src_set) 99 | next_chunk = src.iter_chunks(chunk_start=1, chunk_stop=20, chunk_size=2, min_chunk_size=100) 100 | assert src.chunk_size == 100, "chunking respects minimum size setting" 101 | assert src.chunk_start == 1, "chunking respects chunk start setting" 102 | chunk = next_chunk(20) 103 | assert len(chunk) == 4, "chunks are not larger than remaining data" 104 | 105 | read_set = set() 106 | next_chunk = src.iter_chunks(chunk_size=2) 107 | lengths = list() 108 | for _ in range(10): 109 | chunk = next_chunk(as_set=True) 110 | if chunk is None: 111 | break 112 | lengths.append(len(chunk)) 113 | read_set.update(chunk) 114 | assert lengths == [2, 2, 1], "chunks have expected lengths" 115 | assert src_set == read_set, "chunks cover full set" 116 | 117 | next_chunk = src.iter_chunks(chunk_size=10) 118 | lengths = list() 119 | lengths.append(len(next_chunk(1))) 120 | lengths.append(len(next_chunk(2))) 121 | lengths.append(len(next_chunk(3))) 122 | assert next_chunk() is None, "after last chunk comes None" 123 | assert next_chunk() is None, "after last chunk comes None again" 124 | assert lengths == [1, 2, 2], "chunks size can be varied on-the-fly" 125 | 
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import pytest

import tlscanary.tools.tags_db as tdb
from tests import ArgsMock


@pytest.fixture
def tags_db(tmpdir):
    """A TagsDB fixture backed by a fresh per-test working directory"""
    return tdb.TagsDB(ArgsMock(workdir=tmpdir))


def test_tags_db_instance(tags_db):
    """TagsDB supports dict-style tag/handle association and removal"""

    assert type(tags_db) is tdb.TagsDB, "TagsDB object fixture has correct type"
    assert len(tags_db.list()) == 0, "Empty TagsDB has no tags"
    assert "notatag" not in tags_db, "TagsDB does not pretend to contain non-existent tag"

    # Test the dict-style interface
    assert type(tags_db["notatag"]) is set and len(tags_db["notatag"]) == 0, "non-existent tag yields empty set"

    assert "newhandleA" not in tags_db["newtag"], "unknown handle is not associated with unknown tag"
    tags_db["newtag"] = "newhandleA"
    assert "newtag" in tags_db, "new tag is added"
    assert "newhandleA" in tags_db["newtag"], "new handle is associated with new tag"
    assert "newhandleB" not in tags_db["newtag"], "unknown handle is not associated with new tag"
    # Assignment accumulates handles rather than replacing them
    tags_db["newtag"] = "newhandleB"
    assert "newhandleA" in tags_db["newtag"], "old handle is still associated with new tag"
    assert "newhandleB" in tags_db["newtag"], "second handle is associated with existing tag"

    tags_db.remove("newtag", "newhandleB")
    assert "newhandleB" not in tags_db["newtag"], "handle can be disassociated from tag"
    assert "newhandleA" in tags_db["newtag"], "other handles are not affected by disassociation"

    tags_db.remove("newtag", "newhandleA")
    assert "newhandleA" not in tags_db["newtag"], "first handle can be disassociated from tag as well"
    assert "newtag" not in tags_db, "tag that lost all handles is forgotten"

    tags_db["droptag"] = "drophandleA"
    tags_db["droptag"] = "drophandleB"
    tags_db.drop("droptag")
    assert "droptag" not in tags_db, "tags can be dropped entirely"
    assert "drophandleA" not in tags_db["droptag"] and "drophandleB" not in tags_db["droptag"], \
        "associated handles are dropped along tags"


def test_tags_db_persistence(tmpdir):
    """TagsDB databases are persistent on disk"""
    # Fixed: assert messages previously said "TaskDB"; the class under test
    # is TagsDB and no "TaskDB" exists in the code base.

    db = tdb.TagsDB(ArgsMock(workdir=tmpdir))
    db["newnewtag"] = "newnewhandleA"  # Any modification should save the DB to disk
    db["newnewtag"] = "newnewhandleB"
    db["newnewtag"] = "newnewhandleC"
    db.remove("newnewtag", "newnewhandleC")

    del db
    assert tmpdir.join("tags.json").exists(), "TagsDB is written to disk"

    db = tdb.TagsDB(ArgsMock(workdir=tmpdir))
    assert "newnewtag" in db, "a TagsDB does not forget about tags"
    assert "newnewhandleA" in db["newnewtag"] and "newnewhandleB" in db["newnewtag"], "a TagsDB does not forget handles"
    assert "newnewhandleC" not in db["newnewtag"], "a TagsDB does not dream about deleted handles"


def test_tags_db_dangling(tmpdir):
    """TagsDB discards dangling references"""

    db = tdb.TagsDB(ArgsMock(workdir=tmpdir))
    db["tag"] = "handleA"
    db["tag"] = "handleB"
    db["tag"] = "handleC"
    db["newtag"] = "handleB"
    db["newtag"] = "handleC"
    db["newtag"] = "handleD"

    # Assume B and C were deleted elsewhere, so all their references should be dropped
    db.remove_dangling(["handleA", "handleD"])
    # Fixed typo in message: "rememberd" -> "remembered"
    assert "handleA" in db["tag"] and "handleD" in db["newtag"], "handles remembered after un-dangling"
    assert "handleB" not in db["tag"] and "handleB" not in db["newtag"], "handle B forgotten after un-dangling"
    assert "handleC" not in db["tag"] and "handleC" not in db["newtag"], "handle C forgotten after un-dangling"
B forgotten after un-dangling" 84 | assert "handleC" not in db["tag"] and "handleC" not in db["newtag"], "handle C forgotten after un-dangling" 85 | -------------------------------------------------------------------------------- /tests/xpcshell_worker_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | import pytest 6 | from time import sleep 7 | 8 | import tlscanary.tools.xpcshell_worker as xw 9 | 10 | 11 | def test_xpcshell_worker(nightly_app): 12 | """XPCShell worker runs and is responsive""" 13 | 14 | # Skip test if there is no app for this platform 15 | if nightly_app is None: 16 | pytest.skip("XPCShell worker can not be tested on this platform") 17 | 18 | # Spawn a worker. 19 | w = xw.XPCShellWorker(nightly_app) 20 | w.spawn() 21 | assert w.is_running(), "XPCShell worker is starting" 22 | 23 | # Send commands 24 | w.send(xw.Command("info", id=1)) 25 | w.send(xw.Command("quit", id=2)) 26 | 27 | # We need to wait until the reader thread is guaranteed to have run. 28 | sleep(1) 29 | 30 | # Unfetched results should stay queued even after the worker terminated. 
31 | w.terminate() 32 | 33 | # Get the results 34 | responses = w.receive() 35 | 36 | assert len(responses) == 2, "XPCShell worker delivers expected number of responses" 37 | assert type(responses[0]) is xw.Response, "XPCShell worker delivers valid 1st response" 38 | assert type(responses[1]) is xw.Response, "XPCShell worker delivers valid 2nd response" 39 | 40 | info_response, quit_response = responses 41 | 42 | assert info_response.id == 1, "Info response has expected ID" 43 | assert info_response.success, "Info command was successful" 44 | assert "appConstants" in info_response.result, "Info response contains `appConstants`" 45 | assert "nssInfo" in info_response.result, "Info response contains `nssInfo`" 46 | assert info_response.result["appConstants"]["MOZ_UPDATE_CHANNEL"] == "nightly", "Info response has expected value" 47 | 48 | assert quit_response.id == 2, "Quit response has expected ID" 49 | assert info_response.success, "Quit command was successful" 50 | -------------------------------------------------------------------------------- /tests/zz_tlscanary_integration_test.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
import glob
import io
import json
import os
import pytest
import unittest.mock as mock

from tlscanary import main


@pytest.mark.slow
def test_tlscanary_regression_and_log(tmpdir, nightly_archive):
    """TLS Canary can make regression runs"""

    work_dir = tmpdir.join("workdir")

    # Run a quick regression scan, simulating error conditions by -p1
    # (a test-build-only pref that forces TLS failures on the test candidate).
    argv = [
        "--workdir", str(work_dir),
        "regression",
        "-t", nightly_archive,
        "-b", nightly_archive,
        "-l", "9",
        "-p1", "security.tls.version.min;4"
    ]
    ret = main.main(argv)
    assert ret == 0, "regression run finished without error"

    # Check log: dump the most recent run as JSON and capture stdout
    argv = [
        "--workdir", str(work_dir),
        "log",
        "-a", "json",
        "-i", "1"
    ]
    with mock.patch('sys.stdout', new=io.StringIO()) as mock_stdout:
        ret = main.main(argv)
        stdout = mock_stdout.getvalue()
    assert ret == 0, "regression log dump finished without error"
    assert len(stdout) > 0, "regression log dump is not empty"
    log = json.loads(stdout)
    assert type(log) is list, "regression JSON log is list"
    assert len(log) == 1, "there is one log in the dump"
    assert "meta" in log[0] and "data" in log[0], "log has meta and data"
    assert len(log[0]["data"]) > 3, "log has correct number of lines"

    # Write HTML report
    # TODO: Regression log was generated by the -p1 hack, thus does not contain certificate data.
    report_dir = tmpdir.join("report")
    argv = [
        "--workdir", str(work_dir),
        "log",
        "-a", "webreport",
        "-i", "1",
        "-o", str(report_dir)
    ]
    ret = main.main(argv)
    assert ret == 0, "regression HTML report finished without error"
    assert os.path.isdir(report_dir), "HTML report dir was created"
    assert os.path.isfile(report_dir.join("index.htm")), "HTML report index was written"
    runs_file = report_dir.join("runs", "runs.json")
    assert os.path.isfile(runs_file), "HTML `runs.json` file was written"
    with open(runs_file) as f:
        runs_lines = json.load(f)
    assert len(runs_lines) == 1, "one HTML run was written"
    run_dir = report_dir.join("runs", runs_lines[0]["data"][0]["run"])
    assert os.path.isdir(run_dir), "HTML run dir was created"
    zip_glob = glob.glob(str(run_dir.join("*.zip")))
    assert len(zip_glob) == 3, "three profile archives were written to HTML run dir"


@pytest.mark.slow
def test_tlscanary_srcupdate_and_scan_and_log(tmpdir, nightly_archive):
    """TLS Canary can update source DBs"""

    work_dir = tmpdir.join("workdir")

    # Compile a fresh `pytest` host db
    argv = [
        "--workdir", str(work_dir),
        "srcupdate",
        "-b", nightly_archive,
        "-l", "5",
        "-s", "pytest"
    ]
    ret = main.main(argv)
    assert ret == 0, "srcupdate run finished without error"

    # Run a scan against `pytest` host db
    argv = [
        "--workdir", str(work_dir),
        "scan",
        "-t", nightly_archive,
        "-s", "pytest",
    ]
    ret = main.main(argv)
    assert ret == 0, "scan run finished without error"

    # Check logs: dump the most recent run as JSON and capture stdout
    argv = [
        "--workdir", str(work_dir),
        "log",
        "-a", "json",
        "-i", "1"
    ]
    with mock.patch('sys.stdout', new=io.StringIO()) as mock_stdout:
        ret = main.main(argv)
        stdout = mock_stdout.getvalue()
    assert ret == 0, "scan log dump finished without error"
    log = json.loads(stdout)
    assert type(log) is list, "scan JSON log is list"
    assert len(log) == 1, "there is one log in the dump"
    assert "meta" in log[0] and "data" in log[0], "log has meta and data"
    # Matches the -l 5 limit used for srcupdate above
    assert len(log[0]["data"]) == 5, "log has correct number of lines"
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import glob
import io
import json
import os
import pkg_resources as pkgr
import pytest
import unittest.mock as mock

from tlscanary import main


@pytest.mark.slow
def test_tlscanary_revocations(tmpdir, nightly_archive):
    """TLS Canary detect OneCRL revocations"""

    work_dir = tmpdir.join("workdir")
    # Revocations list with DigiCert roots removed, shipped as test fixture
    revocations_file = pkgr.resource_filename(__name__, "files/revocations_nodigicert.txt")

    # Run a quick regression scan against a revoked DigiCert Root CA
    argv = [
        "--workdir", str(work_dir),
        "regression",
        "-t", nightly_archive,
        "-b", nightly_archive,
        "-x", "2",
        "-s", "digicert",
        "-o", revocations_file
    ]
    ret = main.main(argv)
    assert ret == 0, "regression run finished without error"

    # Check log: dump the most recent run as JSON and capture stdout
    argv = [
        "--workdir", str(work_dir),
        "log",
        "-a", "json",
        "-i", "1"
    ]
    with mock.patch('sys.stdout', new=io.StringIO()) as mock_stdout:
        ret = main.main(argv)
        stdout = mock_stdout.getvalue()
    assert ret == 0, "regression log dump finished without error"
    assert len(stdout) > 0, "regression log dump is not empty"
    log = json.loads(stdout)
    assert type(log) is list, "regression JSON log is list"
    assert len(log) == 1, "there is one log in the dump"
    assert "meta" in log[0] and "data" in log[0], "log has meta and data"
    assert len(log[0]["data"]) > 10, "log contains mostly regressions"
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

"use strict";

// Collect connection/TLS state from a finished (or failed) XMLHttpRequest
// into a plain, JSON-serializable object for the Python side.
// `report_certs` additionally extracts the server certificate chain.
function collect_request_info(xhr, report_certs) {
    // This function copies and parses various properties of the connection state object
    // and wraps them into an info object to be returned with the command response.
    // Much of this is documented in https://developer.mozilla.org/en-US/docs/Web/API/
    // XMLHttpRequest/How_to_check_the_secruity_state_of_an_XMLHTTPRequest_over_SSL,
    // but that source has gone out of date with Firefox 63.

    let info = {};
    info.status = xhr.channel.QueryInterface(Ci.nsIRequest).status;
    info.original_uri = xhr.channel.originalURI.asciiSpec;
    info.uri = xhr.channel.URI.asciiSpec;

    // NOTE(review): nssErrorsService is not defined in this file -- presumably
    // provided by worker_common.js or the xpcshell environment; confirm.
    try {
        info.error_class = nssErrorsService.getErrorClass(info.status);
    } catch (e) {
        info.error_class = null;
    }

    // Defaults for every field so the Python side always sees a full schema
    info.security_info_status = false;
    info.security_state_status = false;
    info.security_state = null;
    info.ssl_status_status = false;
    info.ssl_status_errors = null;
    info.certified_usages = null;
    info.certificate_chain_length = null;
    info.certificate_chain = null;
    info.error_code = null;
    info.raw_error = null;
    info.short_error_message = null;

    // Try to query security info
    let sec_info = xhr.channel.securityInfo;
    if (sec_info == null)
        return info;

    // If sec_info is not null, it contains SSL state info
    info.security_info_status = true;

    // Ci.nsISSLStatusProvider was removed in Firefox 63 and
    // SSLStatus moved to Ci.nsITransportSecurityInfo, so only
    // query the interface if it exists.
    if (Ci.hasOwnProperty("nsISSLStatusProvider"))
        if (sec_info instanceof Ci.nsISSLStatusProvider) {
            sec_info.QueryInterface(Ci.nsISSLStatusProvider);
        }
    if (sec_info instanceof Ci.nsITransportSecurityInfo) {
        sec_info.QueryInterface(Ci.nsITransportSecurityInfo);
    }

    // At this point, sec_info should be decorated with either one of the following property sets:
    //
    // Fx 63+
    //   securityState, errorCode, errorCodeString, failedCertList, SSLStatus
    // Fx 52-62
    //   securityState, errorCode, errorMessage, failedCertList, SSLStatus

    // Process available SSL state and transfer to info object
    if (sec_info.securityState != null) {
        info.security_state_status = true;
        info.security_state = sec_info.securityState;
    } else {
        print("WARNING: securityInfo.securityState is null");
    }

    if (sec_info.SSLStatus != null) {
        info.ssl_status_status = true;
        info.ssl_status = sec_info.SSLStatus;
        // TODO: Find way to extract this py-side.
        try {
            let usages = {};
            let usages_string = {};
            info.ssl_status.server_cert.getUsagesString(true, usages, usages_string);
            info.certified_usages = usages_string.value;
        } catch (e) {
            info.certified_usages = null;
        }
    } else {
        // Warning is too noisy
        // print("WARNING: securityInfo.SSLStatus is null");
    }

    // Process errorCodeString or errorMessage
    if (sec_info.hasOwnProperty("errorMessage")) {
        // Old message format (Fx 52-62) which needs to be parsed
        info.raw_error = sec_info.errorMessage;
        if (info.raw_error) {
            try {
                info.short_error_message = info.raw_error.split("Error code:")[1].split(">")[1].split("<")[0];
            } catch (e) {
                print("WARNING: unexpected errorMessage format: " + e.toString());
                info.short_error_message = info.raw_error;
            }
        } else {
            info.raw_error = null;
            info.short_error_message = null;
        }
    } else if (sec_info.hasOwnProperty("errorCodeString")) {
        info.raw_error = sec_info.errorCodeString;
        info.short_error_message = sec_info.errorCodeString;
    } else {
        print("WARNING: securityInfo has neither errorCodeString nor errorMessage");
    }
    if (sec_info.hasOwnProperty("errorCode")) {
        info.error_code = sec_info.errorCode;
    } else {
        print("WARNING: securityInfo has no errorCode");
    }

    // Extract certificate objects if requested
    if (info.ssl_status_status && report_certs) {
        let server_cert = info.ssl_status.serverCert;
        let cert_chain = [];
        if (server_cert.sha1Fingerprint) {
            // NOTE(review): first element is the server cert's raw DER bytes while
            // the loop below pushes nsIX509Cert objects -- confirm consumers
            // expect this mix of element types.
            cert_chain.push(server_cert.getRawDER({}));
            let chain = [];
            if (info.ssl_status.succeededCertChain != null) {
                chain = info.ssl_status.succeededCertChain;
            } else if (info.ssl_status.failedCertChain != null) {
                chain = info.ssl_status.failedCertChain;
            }
            let enumerator = chain.getEnumerator();

            // XPCOMUtils.IterSimpleEnumerator removed in Firefox 63 (bug 1484496)
            let cert_enumerator = XPCOMUtils.IterSimpleEnumerator ?
                XPCOMUtils.IterSimpleEnumerator(enumerator, Ci.nsIX509Cert) : enumerator;
            for (let cert of cert_enumerator) {
                cert_chain.push(cert);
            }
        }
        info.certificate_chain_length = cert_chain.length;
        info.certificate_chain = cert_chain;
    }

    // Some values might be missing from the connection state, for example due
    // to a broken SSL handshake. Try to catch exceptions before report_result's
    // JSON serializing does.
    if (info.ssl_status_status) {
        let sane_ssl_status = {};
        info.ssl_status_errors = [];
        for (let key in info.ssl_status) {
            if (!info.ssl_status.hasOwnProperty(key)) continue;
            try {
                sane_ssl_status[key] = JSON.parse(JSON.stringify(info.ssl_status[key]));
            } catch (e) {
                sane_ssl_status[key] = null;
                info.ssl_status_errors.push({key: e.toString()});
            }
        }
        info.ssl_status = sane_ssl_status;
    }

    return info;
}
// Issue an anonymous, cache-bypassing HEAD request to https://<host> and
// report the collected connection info through response_cb. Registered below
// as the "scan" command.
function scan_host(args, response_cb) {

    let host = args.host;
    let report_certs = args.include_certificates === true;

    function load_handler(msg) {
        // readyState 4 == DONE
        if (msg.target.readyState === 4) {
            response_cb(true, {origin: "load_handler", info: collect_request_info(msg.target, report_certs)});
        } else {
            response_cb(false, {origin: "load_handler", info: collect_request_info(msg.target, report_certs)});
        }
    }

    function error_handler(msg) {
        response_cb(false, {origin: "error_handler", info: collect_request_info(msg.target, report_certs)});
    }

    function abort_handler(msg) {
        response_cb(false, {origin: "abort_handler", info: collect_request_info(msg.target, report_certs)});
    }

    function timeout_handler(msg) {
        response_cb(false, {origin: "timeout_handler", info: collect_request_info(msg.target, report_certs)});
    }

    // This gets called when a redirect happens.
    function RedirectStopper() {}
    RedirectStopper.prototype = {
        asyncOnChannelRedirect: function (oldChannel, newChannel, flags, callback) {
            // This callback prevents redirects, and the request's error handler will be called.
            callback.onRedirectVerifyCallback(Cr.NS_ERROR_ABORT);
        },
        getInterface: function (iid) {
            return this.QueryInterface(iid);
        },
        QueryInterface: generateQI([Ci.nsIChannelEventSink])
    };

    let request = new XMLHttpRequest();
    try {
        request.mozBackgroundRequest = true;
        request.open("HEAD", "https://" + host, true);
        // args.timeout is in seconds; XHR timeout is in milliseconds
        request.timeout = args.timeout ? args.timeout * 1000 : DEFAULT_TIMEOUT;
        request.channel.loadFlags |= Ci.nsIRequest.LOAD_ANONYMOUS
            | Ci.nsIRequest.LOAD_BYPASS_CACHE
            | Ci.nsIRequest.INHIBIT_PERSISTENT_CACHING
            | Ci.nsIRequest.VALIDATE_NEVER;
        request.channel.notificationCallbacks = new RedirectStopper();
        request.addEventListener("load", load_handler, false);
        request.addEventListener("error", error_handler, false);
        request.addEventListener("abort", abort_handler, false);
        request.addEventListener("timeout", timeout_handler, false);
        request.send(null);
    } catch (error) {
        // This is supposed to catch malformed host names, but could
        // potentially mask other errors.
        response_cb(false, {origin: "request_error", error: error, info: collect_request_info(request, false)});
    }
}

register_command("scan", scan_host);

run_loop();
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

"use strict";

const { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components;

const DEFAULT_TIMEOUT = 10000;

// This is a global random ID that is sent with every message to the Python world
const worker_id = Math.floor(Math.random() * 2**64);


Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/NetUtil.jsm");
Cu.import("resource://gre/modules/AppConstants.jsm");
Cu.importGlobalProperties(["XMLHttpRequest"]);

// Command name -> handler map, filled by register_command().
// NOTE(review): declared as an array but only ever used with string keys.
let custom_commands = [];

// generateQI was moved from XPCOMUtils to ChromeUtils in Fx 63
let generateQI = ChromeUtils.generateQI ? ChromeUtils.generateQI : XPCOMUtils.generateQI;
if (!generateQI) {
    print("WARNING: no valid generateQI found");
}


// Return NSS version info and AppConstants, with API keys stripped
// so they never leak into logs or reports.
function get_runtime_info() {
    const ac = JSON.parse(JSON.stringify(AppConstants));
    for (let key in ac) {
        if (key.endsWith("API_KEY")) {
            ac[key] = "__STRIPPED__";
        }
    }
    return {
        nssInfo: Cc["@mozilla.org/security/nssversion;1"].getService(Ci.nsINSSVersion),
        appConstants: ac
    };
}


// Apply preferences passed from the Python side as "name;value" strings.
// The value type (bool/int/string) is inferred from the string content.
function set_prefs(prefs) {
    for (let key in prefs) {
        let prop = prefs[key].split(";")[0];
        let value = prefs[key].split(";")[1];

        // Pref values are passed in as strings and must be examined
        // to determine the intended types and values.
        let type = "string"; // default
        if (value === "true" || value === "false") type = "boolean";
        if (!isNaN(value)) type = "number";
        if (value == undefined) type = "undefined";

        switch (type) {
            case "boolean":
                // Fixed: pass an actual boolean; previously the numbers 1/0
                // were handed to setBoolPref and relied on implicit coercion.
                Services.prefs.setBoolPref(prop, value === "true");
                break;
            case "number":
                // Convert explicitly rather than relying on XPConnect
                // coercing the string to an integer.
                Services.prefs.setIntPref(prop, Number(value));
                break;
            case "string":
                Services.prefs.setStringPref(prop, value);
                break;
            default:
                throw "Unsupported pref type " + type;
        }
    }
}

// Redirect all profile-related directory service keys to profile_path,
// effectively making the worker use that directory as its profile.
function set_profile(profile_path) {
    let file = Cc["@mozilla.org/file/local;1"]
        .createInstance(Ci.nsIFile);
    file.initWithPath(profile_path);
    let dir_service = Cc["@mozilla.org/file/directory_service;1"]
        .getService(Ci.nsIProperties);
    let provider = {
        getFile: function(prop, persistent) {
            persistent.value = true;
            if (prop == "ProfD" || prop == "ProfLD" || prop == "ProfDS" ||
                prop == "ProfLDS" || prop == "PrefD" || prop == "TmpD") {
                return file.clone();
            }
            return null;
        },
        QueryInterface: function(iid) {
            if (iid.equals(Ci.nsIDirectoryServiceProvider) ||
                iid.equals(Ci.nsISupports)) {
                return this;
            }
            throw Cr.NS_ERROR_NO_INTERFACE;
        }
    };
    dir_service.QueryInterface(Ci.nsIDirectoryService)
        .registerProvider(provider);

    // The methods of 'provider' will retain this scope so null out
    // everything to avoid spurious leak reports.
    profile_path = null;
    dir_service = null;
    provider = null;

    return file.clone();
}

// Allow extended information (including preferences) to be obtained from the worker.
// Values match nsIPrefBranch pref type constants.
const PREF_INVALID = 0;
const PREF_STRING = 32;
const PREF_INT = 64;
const PREF_BOOL = 128;
// Create a command to obtain extended info.
// config - a javascript object with an element 'preference_keys' - an array of
// the names of the preferences to fetch
var create_extended_info_command = function(config) {
    return async function(args, response_cb) {
        try {
            var prefs = {};

            for (let key of config.preference_keys) {
                // Fixed: was `_Services`, which is not defined anywhere in this
                // module (Services.jsm exports `Services`), so every call threw
                // a ReferenceError that was silently converted into a failure
                // response by the catch below.
                let pref_type = Services.prefs.getPrefType(key);
                switch (pref_type) {
                    case PREF_STRING:
                        prefs[key] = Services.prefs.getCharPref(key);
                        break;
                    case PREF_INT:
                        prefs[key] = Services.prefs.getIntPref(key);
                        break;
                    case PREF_BOOL:
                        prefs[key] = Services.prefs.getBoolPref(key);
                        break;
                    default:
                        // PREF_INVALID or unknown type
                        prefs[key] = "pref invalid";
                }
            }

            response_cb(true,
                {
                    runtime_info: get_runtime_info(),
                    preferences: prefs
                });
        } catch (e) {
            response_cb(false, {origin: "get_worker_info", exception: e});
        }
    }
}

// register_command allows worker scripts to register their own commands. See use of custom_commands in _handle
function register_command(name, callback) {
    custom_commands[name] = callback;
}

// Command object definition. Must be in-sync with Python world.
// This is used for keeping state throughout async command handling.
function Command(json_string) {
    let parsed_command = JSON.parse(json_string);
    // Fall back to a random ID if the Python side did not provide one
    this.id = parsed_command.id ? parsed_command.id : Math.floor(Math.random() * 2**64);
    this.mode = parsed_command.mode;
    this.args = parsed_command.args;
    this.original_cmd = parsed_command;
    this.start_time = new Date();
}
// Even though it's a prototype method it will require bind when passed as callback.
Command.prototype.send_response = function _report_result(success, result) {
    // Send a response back to the python world
    print(JSON.stringify({
        "id": this.id,
        "worker_id": worker_id,
        "original_cmd": this.original_cmd,
        "success": success,
        "result": result,
        "command_time": this.start_time.getTime(),
        "response_time": new Date().getTime(),
    }));
};

// Dispatch the command to its built-in or registered custom handler.
Command.prototype.handle = function _handle() {
    switch (this.mode) {
        case "info":
            this.send_response(true, get_runtime_info());
            break;
        case "useprofile":
            set_profile(this.args.path);
            this.send_response(true, "ACK");
            break;
        case "setprefs":
            set_prefs(this.args.prefs);
            this.send_response(true, "ACK");
            break;
        case "quit":
            script_done = true;
            // Intentional fall-through
        case "wakeup":
            // Drain the main thread's event queue so pending async work
            // (e.g. XHR callbacks) runs before acknowledging.
            while (main_thread.hasPendingEvents()) main_thread.processNextEvent(true);
            this.send_response(true, "ACK");
            break;
        default:
            let custom_command = custom_commands[this.mode];
            if (undefined !== custom_command) {
                custom_command(this.args, this.send_response.bind(this));
                // NOTE(review): an immediate ACK is sent here in addition to the
                // async response the custom command delivers later via the bound
                // callback -- confirm the Python side expects two responses per
                // custom command.
                this.send_response(true, "ACK");
            } else {
                this.send_response(false, "Unknown command mode: " + this.mode);
            }
    }
};
// The main loop only reads and handles commands from stdin.
let script_done = false;
let thread_manager = Cc["@mozilla.org/thread-manager;1"].getService(Ci.nsIThreadManager);
let main_thread = thread_manager.mainThread;

// Blocking read-eval loop; exits when a "quit" command sets script_done.
// Parse/handler errors are printed and the loop continues.
function run_loop() {
    while (!script_done) {
        let cmd = null;
        try {
            cmd = new Command(readline());
            cmd.handle();
        } catch (error) {
            print(error);
        }
    }
}

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import logging
import sys

from tlscanary import modes


# Eventually import other future tests, like
# pin, performance


logger = logging.getLogger(__name__)


def run(args, module_dir, tmp_dir):
    """
    Look up the run mode selected on the command line and drive it through
    its setup/run/teardown life cycle.

    :param args: parsed command line arguments (must carry a `mode` attribute)
    :param module_dir: path of the tlscanary module directory
    :param tmp_dir: path of the session's temporary directory
    :return: None
    """
    # NOTE(review): `global logger` is redundant here; the module-level logger
    # is only read, never reassigned.
    global logger

    try:
        current_mode = modes.all_modes[args.mode](args, module_dir, tmp_dir)
    except KeyError:
        # Unknown mode name: report the valid choices and exit with status 5
        logger.critical("Unknown run mode `%s`. Choose one of: %s" % (args.mode, ", ".join(modes.all_mode_names)))
        sys.exit(5)

    logger.debug("Running mode .setup()")
    current_mode.setup()
    logger.debug("Running mode .run()")
    current_mode.run()
    logger.debug("Running mode .teardown()")
    current_mode.teardown()
    logger.debug("Mode finished")
4 | 5 | import argparse 6 | import logging 7 | import coloredlogs 8 | import os 9 | import pkg_resources 10 | import shutil 11 | import sys 12 | import tempfile 13 | import threading 14 | import time 15 | 16 | from tlscanary import loader 17 | from tlscanary import modes 18 | from tlscanary import sources_db as sdb 19 | from tlscanary.tools import cleanup 20 | from tlscanary.tools import firefox_downloader as fd 21 | 22 | 23 | # Initialize coloredlogs 24 | logging.Formatter.converter = time.gmtime 25 | logger = logging.getLogger(__name__) 26 | coloredlogs.DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)s %(threadName)s %(name)s %(message)s" 27 | coloredlogs.install(level="INFO") 28 | 29 | 30 | def parse_args(argv=None): 31 | """ 32 | Argument parsing. Parses from sys.argv if argv is None. 33 | :param argv: argument vector to parse 34 | :return: parsed arguments 35 | """ 36 | if argv is None: 37 | argv = sys.argv[1:] 38 | 39 | pkg_version = pkg_resources.require("tlscanary")[0].version 40 | home = os.path.expanduser("~") 41 | 42 | # Set up the parent parser with shared arguments 43 | parser = argparse.ArgumentParser(prog="tlscanary") 44 | parser.add_argument("--version", action="version", version="%(prog)s " + pkg_version) 45 | parser.add_argument("-d", "--debug", 46 | help="Enable debug", 47 | action="store_true") 48 | parser.add_argument("-w", "--workdir", 49 | help="Path to working directory", 50 | type=os.path.abspath, 51 | action="store", 52 | default="%s/.tlscanary" % home) 53 | 54 | # Set up subparsers, one for each mode 55 | subparsers = parser.add_subparsers(help="Run mode", dest="mode") 56 | for mode_name in modes.all_modes: 57 | mode_class = modes.all_modes[mode_name] 58 | sub_parser = subparsers.add_parser(mode_name, help=mode_class.help) 59 | mode_class.setup_args(sub_parser) 60 | 61 | return parser.parse_args(argv) 62 | 63 | 64 | tmp_dir = None 65 | module_dir = None 66 | 67 | 68 | def __create_tempdir(): 69 | """ 70 | Helper function for creating the 
temporary directory. 71 | Writes to the global variable tmp_dir 72 | :return: Path of temporary directory 73 | """ 74 | temp_dir = tempfile.mkdtemp(prefix='tlscanary_') 75 | logger.debug('Created temp dir `%s`' % temp_dir) 76 | return temp_dir 77 | 78 | 79 | class RemoveTempDir(cleanup.CleanUp): 80 | """ 81 | Class definition for cleanup helper responsible 82 | for deleting the temporary directory prior to exit. 83 | """ 84 | @staticmethod 85 | def at_exit(): 86 | global tmp_dir 87 | if tmp_dir is not None: 88 | logger.debug('Removing temp dir `%s`' % tmp_dir) 89 | shutil.rmtree(tmp_dir, ignore_errors=True) 90 | 91 | 92 | restore_terminal_encoding = None 93 | 94 | 95 | def get_terminal_encoding(): 96 | """ 97 | Helper function to get current terminal encoding 98 | """ 99 | global logger 100 | if sys.platform.startswith("win"): 101 | logger.debug("Running `chcp` shell command") 102 | chcp_output = os.popen("chcp").read().strip() 103 | logger.debug("chcp output: `%s`" % chcp_output) 104 | if chcp_output.startswith("Active code page:"): 105 | codepage = chcp_output.split(": ")[1] 106 | logger.debug("Active codepage is `%s`" % codepage) 107 | return codepage 108 | else: 109 | logger.warning("There was an error detecting the active codepage") 110 | return None 111 | else: 112 | logger.debug("Platform does not require switching terminal encoding") 113 | return None 114 | 115 | 116 | def set_terminal_encoding(encoding): 117 | """ 118 | Helper function to set terminal encoding. 
119 | """ 120 | global logger 121 | if sys.platform.startswith("win"): 122 | logger.debug("Running `chcp` shell command, setting codepage to `%s`", encoding) 123 | chcp_output = os.popen("chcp %s" % encoding).read().strip() 124 | logger.debug("chcp output: `%s`" % chcp_output) 125 | if chcp_output == "Active code page: %s" % encoding: 126 | logger.debug("Successfully set codepage to `%s`" % encoding) 127 | else: 128 | logger.warning("Can't set codepage for terminal") 129 | 130 | 131 | def fix_terminal_encoding(): 132 | """ 133 | Helper function to set terminal to platform-specific UTF encoding 134 | """ 135 | global restore_terminal_encoding 136 | restore_terminal_encoding = get_terminal_encoding() 137 | if restore_terminal_encoding is None: 138 | return 139 | if sys.platform.startswith("win"): 140 | platform_utf_encoding = "65001" 141 | else: 142 | platform_utf_encoding = None 143 | if restore_terminal_encoding != platform_utf_encoding: 144 | set_terminal_encoding(platform_utf_encoding) 145 | 146 | 147 | class ResetTerminalEncoding(cleanup.CleanUp): 148 | """ 149 | Class for restoring original terminal encoding at exit. 
150 | """ 151 | @staticmethod 152 | def at_exit(): 153 | global restore_terminal_encoding 154 | if restore_terminal_encoding is not None: 155 | set_terminal_encoding(restore_terminal_encoding) 156 | 157 | 158 | # This is the entry point used in setup.py 159 | def main(argv=None): 160 | global logger, tmp_dir, module_dir 161 | 162 | module_dir = os.path.split(__file__)[0] 163 | 164 | args = parse_args(argv) 165 | 166 | if args.debug: 167 | coloredlogs.install(level='DEBUG') 168 | 169 | logger.debug("Command arguments: %s" % args) 170 | 171 | cleanup.init() 172 | fix_terminal_encoding() 173 | tmp_dir = __create_tempdir() 174 | 175 | # If 'list' is specified as test, list available test sets, builds, and platforms 176 | if "source" in args and args.source == "list": 177 | coloredlogs.install(level='ERROR') 178 | db = sdb.SourcesDB(args) 179 | build_list, platform_list, _, _ = fd.FirefoxDownloader.list() 180 | print("Available builds: %s" % ' '.join(build_list)) 181 | print("Available platforms: %s" % ' '.join(platform_list)) 182 | print("Available test sets:") 183 | for handle in db.list(): 184 | test_set = db.read(handle) 185 | if handle == db.default: 186 | default = " (default)" 187 | else: 188 | default = "" 189 | print(" - %s [%d hosts]%s" % (handle, len(test_set), default)) 190 | return 0 191 | 192 | # Create workdir (usually ~/.tlscanary, used for caching etc.) 193 | # Assumes that no previous code must write to it. 194 | if not os.path.exists(args.workdir): 195 | logger.debug('Creating working directory %s' % args.workdir) 196 | os.makedirs(args.workdir) 197 | 198 | # Load the specified test mode 199 | try: 200 | loader.run(args, module_dir, tmp_dir) 201 | 202 | except KeyboardInterrupt: 203 | logger.critical("\nUser interrupt. 
Quitting...") 204 | return 10 205 | 206 | if len(threading.enumerate()) > 1: 207 | logger.info("Waiting for background threads to finish") 208 | while len(threading.enumerate()) > 1: 209 | logger.debug("Remaining threads: %s" % threading.enumerate()) 210 | time.sleep(2) 211 | 212 | return 0 213 | -------------------------------------------------------------------------------- /tlscanary/modes/__init__.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | from . import basemode 6 | from . import performance 7 | from . import regression 8 | from . import log 9 | from . import scan 10 | from . import sourceupdate 11 | 12 | __all__ = ["log", "performance", "regression", "scan", "sourceupdate"] 13 | 14 | 15 | def __subclasses_of(cls): 16 | sub_classes = cls.__subclasses__() 17 | sub_sub_classes = [] 18 | for sub_cls in sub_classes: 19 | sub_sub_classes += __subclasses_of(sub_cls) 20 | return sub_classes + sub_sub_classes 21 | 22 | 23 | # Keep a record of all BaseMode subclasses 24 | all_modes = dict([(mode.name, mode) for mode in __subclasses_of(basemode.BaseMode)]) 25 | all_mode_names = sorted(all_modes.keys()) 26 | -------------------------------------------------------------------------------- /tlscanary/modes/performance.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | 5 | import datetime 6 | import logging 7 | import pkg_resources as pkgr 8 | import sys 9 | 10 | from .regression import RegressionMode 11 | import tlscanary.runlog as rl 12 | 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | 17 | class PerformanceMode(RegressionMode): 18 | 19 | name = "performance" 20 | help = "Run a performance regression test on two Firefox versions" 21 | 22 | def __init__(self, args, module_dir, tmp_dir): 23 | global logger 24 | 25 | super(PerformanceMode, self).__init__(args, module_dir, tmp_dir) 26 | 27 | # Define instance attributes for later use 28 | self.start_time = None 29 | self.test_uri_set = None 30 | self.base_uri_set = None 31 | self.total_change = None 32 | 33 | def setup(self): 34 | # Additional argument validation for hard-coded limits, 35 | # because 1000 URIs x 20 scans per URI x 2 builds is a lot of data 36 | # will investigate upper limit later 37 | if self.args.limit > 1000: 38 | logger.warning("Limiting performance tests to 1000 hosts.") 39 | self.args.limit = 1000 40 | if self.args.scans > 20: 41 | logger.critical("Limiting performance test to 20 scans per URI list for now") 42 | sys.exit(1) 43 | super(PerformanceMode, self).setup() 44 | 45 | def run(self): 46 | # Perform the scan 47 | self.start_time = datetime.datetime.now() 48 | 49 | meta = { 50 | "tlscanary_version": pkgr.require("tlscanary")[0].version, 51 | "mode": self.name, 52 | "args": vars(self.args), 53 | "argv": sys.argv, 54 | "sources_size": len(self.sources), 55 | "test_metadata": self.test_metadata, 56 | "base_metadata": self.base_metadata, 57 | "run_start_time": datetime.datetime.utcnow().isoformat() 58 | } 59 | 60 | rldb = rl.RunLogDB(self.args) 61 | log = rldb.new_log() 62 | log.start(meta=meta) 63 | 64 | test_uri_sets = [] 65 | base_uri_sets = [] 66 | 67 | self.total_change = 0 68 | test_speed_aggregate = 0 69 | base_speed_aggregate = 0 70 | 71 | for i in range(0, self.args.scans): 72 | test_uri_sets.append(self.run_test(self.test_app, 
self.sources, profile=self.test_profile, 73 | prefs=self.args.prefs_test, get_info=True, 74 | get_certs=not self.args.remove_certs, return_only_errors=False)) 75 | 76 | base_uri_sets.append(self.run_test(self.base_app, self.sources, profile=self.base_profile, 77 | prefs=self.args.prefs_base, get_info=True, 78 | get_certs=not self.args.remove_certs, return_only_errors=False)) 79 | 80 | # extract connection speed from all scans 81 | test_connections_all = [] 82 | for uri_set in test_uri_sets: 83 | test_connections_all.append(self.extract_connection_speed(uri_set)) 84 | 85 | base_connections_all = [] 86 | for uri_set in base_uri_sets: 87 | base_connections_all.append(self.extract_connection_speed(uri_set)) 88 | 89 | # collapse all scan data into one URI set 90 | self.consolidate_connection_speed_info(test_uri_sets[0], test_connections_all) 91 | self.consolidate_connection_speed_info(base_uri_sets[0], base_connections_all) 92 | 93 | # the first URI set becomes our primary set 94 | self.test_uri_set = test_uri_sets[0] 95 | self.base_uri_set = base_uri_sets[0] 96 | 97 | # new values to be inserted into response 98 | for test_record in self.test_uri_set: 99 | base_record = [d for d in self.base_uri_set if d[1] == test_record[1]][0] 100 | test_response_time = float(test_record[2].response.connection_speed_average) 101 | base_response_time = float(base_record[2].response.connection_speed_average) 102 | test_speed_aggregate += test_response_time 103 | base_speed_aggregate += base_response_time 104 | pct_change = float((test_response_time - base_response_time) / base_response_time) * 100 105 | test_record[2].response.connection_speed_change = int(pct_change) 106 | # save the speed samples of the base record to the test record for now, 107 | # in case we decide we want to include this in the report later 108 | test_record[2].response.connection_speed_base_samples = base_record[2].response.connection_speed_samples 109 | 110 | self.total_change = float((test_speed_aggregate - 
base_speed_aggregate) / base_speed_aggregate) * 100 111 | 112 | for rank, host, result in self.test_uri_set: 113 | log.log(result.as_dict()) 114 | 115 | meta["run_finish_time"] = datetime.datetime.utcnow().isoformat() 116 | meta["total_change"] = self.total_change 117 | self.save_profile(self.test_profile, "test_profile", log) 118 | self.save_profile(self.base_profile, "base_profile", log) 119 | log.stop(meta=meta) 120 | 121 | @staticmethod 122 | def extract_connection_speed(uri_set): 123 | new_set = [] 124 | for record in uri_set: 125 | new_set.append([ 126 | record[0], # URI rank 127 | record[2].response.response_time - record[2].response.command_time # connection speed 128 | ]) 129 | return new_set 130 | 131 | def consolidate_connection_speed_info(self, uri_set, connection_sets): 132 | for record in uri_set: 133 | temp_speeds = [] 134 | speed_aggregate = 0 135 | for connection_set in connection_sets: 136 | speed = [d for d in connection_set if d[0] == record[0]][0][1] 137 | speed_aggregate += speed 138 | temp_speeds.append(speed) 139 | record[2].response.connection_speed_average = speed_aggregate / self.args.scans 140 | record[2].response.connection_speed_samples = str(temp_speeds).strip('[]') 141 | -------------------------------------------------------------------------------- /tlscanary/modes/scan.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
4 | 5 | import datetime 6 | import logging 7 | import pkg_resources as pkgr 8 | import sys 9 | 10 | from .basemode import BaseMode 11 | import tlscanary.runlog as rl 12 | import tlscanary.sources_db as sdb 13 | import tlscanary.tools.progress as pr 14 | 15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | 19 | class ScanMode(BaseMode): 20 | 21 | name = "scan" 22 | help = "Collect SSL connection state info on hosts" 23 | 24 | def __init__(self, args, module_dir, tmp_dir): 25 | global logger 26 | 27 | super(ScanMode, self).__init__(args, module_dir, tmp_dir) 28 | 29 | # Define instance attributes for later use 30 | self.log = None 31 | self.start_time = None 32 | self.test_profile = None 33 | self.test_app = None 34 | self.test_metadata = None 35 | self.sources = None 36 | 37 | def setup(self): 38 | global logger 39 | 40 | # Argument validation logic to make sure user has specified only test build 41 | if self.args.test is None: 42 | logger.critical("Must specify test build for scan") 43 | sys.exit(5) 44 | elif self.args.base is not None: 45 | logger.debug("Ignoring base build parameter") 46 | 47 | # Download app and extract metadata 48 | self.test_app = self.get_test_candidate(self.args.test) 49 | self.test_metadata = self.collect_worker_info(self.test_app) 50 | logger.info("Testing Firefox %s %s scan run" % 51 | (self.test_metadata["app_version"], self.test_metadata["branch"])) 52 | 53 | # Create custom profile 54 | self.test_profile = self.make_profile("test_profile") 55 | 56 | # Compile the set of hosts to test 57 | db = sdb.SourcesDB(self.args) 58 | logger.info("Reading `%s` host database" % self.args.source) 59 | self.sources = db.read(self.args.source) 60 | logger.info("%d hosts in test set" % len(self.sources)) 61 | 62 | def run(self): 63 | global logger 64 | 65 | logger.info("Testing Firefox %s %s" % 66 | (self.test_metadata["app_version"], self.test_metadata["branch"])) 67 | 68 | self.start_time = datetime.datetime.now() 69 | 70 | meta = { 71 | 
"tlscanary_version": pkgr.require("tlscanary")[0].version, 72 | "mode": self.name, 73 | "args": vars(self.args), 74 | "argv": sys.argv, 75 | "sources_size": len(self.sources), 76 | "test_metadata": self.test_metadata, 77 | "run_start_time": datetime.datetime.utcnow().isoformat() 78 | } 79 | 80 | rldb = rl.RunLogDB(self.args) 81 | log = rldb.new_log() 82 | log.start(meta=meta) 83 | progress = pr.ProgressTracker(total=len(self.sources), unit="hosts", average=30*60.0) 84 | 85 | limit = len(self.sources) if self.args.limit is None else self.args.limit 86 | 87 | # Split work into 50 chunks to conserve memory, but make no chunk smaller than 1000 hosts 88 | next_chunk = self.sources.iter_chunks(chunk_size=limit/50, min_chunk_size=1000) 89 | 90 | try: 91 | while True: 92 | host_set_chunk = next_chunk(as_set=True) 93 | if host_set_chunk is None: 94 | break 95 | 96 | logger.info("Starting scan of chunk of %d hosts" % len(host_set_chunk)) 97 | 98 | info_uri_set = self.run_test(self.test_app, host_set_chunk, profile=self.test_profile, 99 | prefs=self.args.prefs, get_info=True, 100 | get_certs=not self.args.remove_certs, return_only_errors=False, 101 | report_callback=progress.log_completed) 102 | # Log progress per chunk 103 | logger.info("Progress: %s" % str(progress)) 104 | 105 | # Commit results to log 106 | for rank, host, result in info_uri_set: 107 | log.log(result.as_dict()) 108 | 109 | except KeyboardInterrupt: 110 | logger.critical("Ctrl-C received") 111 | progress.stop_reporting() 112 | raise KeyboardInterrupt 113 | 114 | finally: 115 | progress.stop_reporting() 116 | 117 | meta["run_finish_time"] = datetime.datetime.utcnow().isoformat() 118 | self.save_profile(self.test_profile, "test_profile", log) 119 | log.stop(meta=meta) 120 | -------------------------------------------------------------------------------- /tlscanary/modes/sourceupdate.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to 
the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | import csv 6 | import datetime 7 | import logging 8 | import os 9 | import sys 10 | import zipfile 11 | 12 | from .basemode import BaseMode 13 | import tlscanary.sources_db as sdb 14 | from tlscanary.tools.firefox_downloader import get_to_file 15 | import tlscanary.tools.progress as pr 16 | 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | class SourceUpdateMode(BaseMode): 22 | """ 23 | Mode to update host databases from publicly available top sites data 24 | """ 25 | 26 | name = "srcupdate" 27 | help = "Update hosts databases used by other modes" 28 | 29 | # There are various top sites databases that might be considered for querying here. 30 | # The other notable database is the notorious `Alexa Top 1M` which is available at 31 | # "http://s3.amazonaws.com/alexa-static/top-1m.csv.zip". It is based on usage data 32 | # gathered from the equally notorious Alexa browser toolbar, while the `Umbrella top 1M` 33 | # used is DNS-based and its ranking is hence considered to be more representative. 34 | # It's available at "http://s3-us-west-1.amazonaws.com/umbrella-static/top-1m.csv.zip". 35 | # In February 2019 we decided to switch to the new Tranco database which is comprised of 36 | # a running 30-day average across Alexa, Umbrella, Majestic, and Quantcast, employing a 37 | # Dowdall ranking system. This approach solves our noise problem introduced by frequent 38 | # automatic database updates. 39 | # `Tranco`, `Umbrella`, and `Alexa` use precisely the same format and their links are thus 40 | # interchangeable. 41 | # For future reference, there is also Ulfr's database at 42 | # "https://ulfr.io/f/top1m_has_tls_sorted.csv". It requires a different parser but 43 | # has the advantage of clustering hosts by shared certificates. 
44 | 45 | top_sites_location = "https://tranco-list.eu/top-1m.csv.zip" 46 | 47 | def __init__(self, args, module_dir, tmp_dir): 48 | super(SourceUpdateMode, self).__init__(args, module_dir, tmp_dir) 49 | self.start_time = None 50 | self.db = None 51 | self.sources = None 52 | self.app = None 53 | self.profile = None 54 | 55 | def setup(self): 56 | global logger 57 | 58 | self.app = self.get_test_candidate(self.args.base) 59 | self.profile = self.make_profile("base_profile") 60 | 61 | tmp_zip_name = os.path.join(self.tmp_dir, "top.zip") 62 | logger.info("Fetching unfiltered top sites data from the `Tranco Top 1M` online database") 63 | get_to_file(self.top_sites_location, tmp_zip_name) 64 | 65 | try: 66 | zipped = zipfile.ZipFile(tmp_zip_name) 67 | if len(zipped.filelist) != 1 or not zipped.filelist[0].orig_filename.lower().endswith(".csv"): 68 | logger.critical("Top sites zip file has unexpected content") 69 | sys.exit(5) 70 | tmp_csv_name = zipped.extract(zipped.filelist[0], self.tmp_dir) 71 | except zipfile.BadZipfile: 72 | logger.critical("Error opening top sites zip archive") 73 | sys.exit(5) 74 | 75 | self.db = sdb.SourcesDB(self.args) 76 | is_default = self.args.source == self.db.default 77 | self.sources = sdb.Sources(self.args.source, is_default) 78 | 79 | with open(tmp_csv_name) as f: 80 | cr = csv.DictReader(f, fieldnames=["rank", "hostname"]) 81 | self.sources.rows = [row for row in cr] 82 | 83 | # A mild sanity check to see whether the downloaded data is valid. 84 | if len(self.sources) < 900000: 85 | logger.warning("Top sites is surprisingly small, just %d hosts" % len(self.sources)) 86 | 87 | if "hostname" not in self.sources.rows[0] or "rank" not in self.sources.rows[0] \ 88 | or self.sources.rows[0]["rank"] != 1: 89 | logger.warning("Top sites data looks weird. First line: `%s`" % self.sources.rows[0]) 90 | 91 | def run(self): 92 | """ 93 | Perform the filter run. 
The objective is to filter out permanent errors so 94 | we don't waste time on them during regular test runs. 95 | 96 | The concept is: 97 | Run top sites in chunks through Firefox and re-test all error URLs from that 98 | chunk a number of times to weed out spurious network errors. Stop the process 99 | once the required number of working hosts is collected. 100 | """ 101 | global logger 102 | 103 | self.start_time = datetime.datetime.now() 104 | 105 | limit = 1000000 106 | if self.args.limit is not None: 107 | limit = self.args.limit 108 | 109 | logger.info("There are %d hosts in the unfiltered host set" % len(self.sources)) 110 | logger.info("Compiling set of %d working hosts for `%s` database update" % (limit, self.sources.handle)) 111 | working_set = set() 112 | 113 | # Chop unfiltered sources data into chunks and iterate over each 114 | # .iter_chunks() returns a generator method to call for next chunk 115 | next_chunk = self.sources.iter_chunks(chunk_size=1000) 116 | chunk_size = self.sources.chunk_size 117 | 118 | progress = pr.ProgressTracker(total=limit, unit="hosts", average=60 * 60.0) 119 | 120 | try: 121 | while True: 122 | hosts_to_go = max(0, limit - len(working_set)) 123 | # Check if we're done 124 | if hosts_to_go == 0: 125 | break 126 | logger.info("%d hosts to go to complete the working set" % hosts_to_go) 127 | 128 | # Shrink chunk if it contains way more hosts than required to complete the working set 129 | # 130 | # CAVE: This assumes that this is the last chunk we require. The downsized chunk 131 | # is still 50% larger than required to complete the set to compensate for broken 132 | # hosts. If the error rate in the chunk is greater than 50%, another chunk will be 133 | # consumed, resulting in a gap of untested hosts between the end of this downsized 134 | # chunk and the beginning of the next. Not too bad, but important to be aware of. 
135 | if chunk_size > hosts_to_go * 2: 136 | chunk_size = min(chunk_size, hosts_to_go * 2) 137 | pass_chunk = next_chunk(chunk_size, as_set=True) 138 | 139 | # Check if we ran out of data for completing the set 140 | if pass_chunk is None: 141 | logger.warning("Ran out of hosts to complete the working set") 142 | break 143 | 144 | # Run chunk through multiple passes of Firefox, leaving only persistent errors in the 145 | # error set. 146 | pass_chunk_size = len(pass_chunk) 147 | chunk_end = self.sources.chunk_offset 148 | chunk_start = chunk_end - pass_chunk_size 149 | logger.info("Processing chunk of %d hosts from the unfiltered set (#%d to #%d)" 150 | % (chunk_end - chunk_start, chunk_start, chunk_end - 1)) 151 | pass_errors = pass_chunk 152 | 153 | for i in range(self.args.scans): 154 | 155 | logger.info("Pass %d with %d hosts" % (i + 1, len(pass_errors))) 156 | 157 | # First run is regular, every other run is overhead 158 | if i == 0: 159 | report_callback = None 160 | else: 161 | report_callback = progress.log_overhead 162 | 163 | pass_errors = self.run_test(self.app, pass_errors, profile=self.profile, get_info=False, 164 | get_certs=False, return_only_errors=True, 165 | report_callback=report_callback) 166 | len_pass_errors = len(pass_errors) 167 | 168 | # Log progress of first pass 169 | if i == 0: 170 | progress.log_completed(pass_chunk_size - len_pass_errors) 171 | progress.log_overhead(len_pass_errors) 172 | 173 | if len_pass_errors == 0: 174 | break 175 | 176 | logger.info("Error rate in chunk was %.1f%%" 177 | % (100.0 * float(len_pass_errors) / float(chunk_end - chunk_start))) 178 | 179 | # Add all non-errors to the working set 180 | working_set.update(pass_chunk.difference(pass_errors)) 181 | 182 | # Log progress after every chunk 183 | logger.info(str(progress)) 184 | 185 | except KeyboardInterrupt: 186 | logger.critical("Ctrl-C received") 187 | raise KeyboardInterrupt 188 | 189 | final_src = sdb.Sources(self.sources.handle, 
is_default=self.sources.is_default) 190 | final_src.from_set(working_set) 191 | final_src.sort() 192 | final_src.trim(limit) 193 | 194 | if len(final_src) < limit: 195 | logger.warning("Ran out of hosts to complete the working set") 196 | 197 | logger.info("Collected %d working hosts for the updated test set" % len(final_src)) 198 | logger.info("Writing updated `%s` host database" % final_src.handle) 199 | self.db.write(final_src) 200 | 201 | def teardown(self): 202 | # Free some memory 203 | self.db = None 204 | self.sources = None 205 | self.app = None 206 | self.profile = None 207 | -------------------------------------------------------------------------------- /tlscanary/scheduler/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/tls-canary/dce7d04e0f8c1234787ab85cfefb31e4cc2493ec/tlscanary/scheduler/__init__.py -------------------------------------------------------------------------------- /tlscanary/scheduler/matplotlib_agg.py: -------------------------------------------------------------------------------- 1 | # This is just a hack to avoid pycodestyle complaining about 2 | # E402 module level import not at top of file 3 | # when loading matplotlib with Agg backend, which has to be 4 | # procedurally configured after importing matplotlib and before 5 | # importing matplotlib.pyplot. 
6 | # Thanks to https://stackoverflow.com/questions/39305810 7 | 8 | import matplotlib 9 | matplotlib.use("Agg") 10 | -------------------------------------------------------------------------------- /tlscanary/sources/debug.csv: -------------------------------------------------------------------------------- 1 | rank,hostname 2 | 1,google.com 3 | 2,facebook.com 4 | 3,youtube.com 5 | 4,baidu.com 6 | 5,yahoo.com 7 | 6,amazon.com 8 | 7,wikipedia.org 9 | 9,twitter.com 10 | 10,taobao.com 11 | 11,google.co.in 12 | 12,www.live.com 13 | 14,linkedin.com 14 | 16,weibo.com 15 | 17,bing.com 16 | 18,ebay.com 17 | 19,google.co.jp 18 | 20,yandex.ru 19 | 21,vk.com 20 | 22,www.hao123.com 21 | 23,tmall.com 22 | 24,msn.com 23 | 25,instagram.com 24 | 26,google.de 25 | 27,amazon.co.jp 26 | 28,t.co 27 | 30,pinterest.com 28 | 31,onclickads.net 29 | 32,reddit.com 30 | 33,mail.ru 31 | 34,google.fr 32 | 35,paypal.com 33 | 36,google.ru 34 | 37,wordpress.com 35 | 38,google.com.br 36 | 40,blogspot.com 37 | 42,tumblr.com 38 | 44,imgur.com 39 | 45,microsoft.com 40 | 46,apple.com 41 | 48,google.it 42 | 49,amazon.de 43 | 50,google.es 44 | 51,1001pharmacies.com 45 | 52,101cookingfortwo.com 46 | 53,456.com 47 | 54,4over.com 48 | 55,800florals.com 49 | 49182,psarips.com 50 | 257666,www.obsidiana.com 51 | 120451,yugiohcardmarket.eu 52 | 9901,englishforums.com 53 | 43694,www.csajokespasik.hu 54 | 157298,futuramo.com 55 | 1377,www.onlinecreditcenter6.com 56 | 15752,www.jcpcreditcard.com 57 | 137890,my.jobs 58 | 31862,samsungcsportal.com 59 | 40034,uob.com.my 60 | 104674,incontrion.com 61 | 89913,hslda.org 62 | 64349,www.chevrontexacocards.com 63 | 69037,www.onlinecreditcenter4.com 64 | 3128,www.synchronycredit.com 65 | 84681,ruscreditcard.com 66 | 252820,quyiyuan.com 67 | 241254,www.steinmartcredit.com 68 | 123888,saveful.com 69 | 230374,sirdatainitiative.com 70 | 135435,gearheads.in 71 | 97220,gira.de 72 | 85697,magickartenmarkt.de 73 | 26598,tommybahama.com 74 | 29458,cxem.net 75 | 
62059,www.awardslinq.com 76 | 32146,www.onlinecreditcenter2.com 77 | 247769,gmprograminfo.com 78 | 265649,piratenpartei.de 79 | 23525,vesti.lv 80 | 124345,www.gpkafunda.com 81 | 192596,robots-and-dragons.de 82 | 212740,www.chs.hu 83 | 68345,bandzone.cz 84 | 255349,censusmapper.ca 85 | 174612,patschool.com 86 | 80121,27399.com 87 | 114128,universoracionalista.org 88 | 100333,toccata.ru 89 | 222646,futuramo.net 90 | 14624,reviewmyaccount.com 91 | 20099,portaportal.com 92 | 147865,sherdle.com 93 | 40192,www.belkcredit.com 94 | 47428,www.wangfujing.com 95 | 162199,hikvision.ru 96 | 74032,cardoverview.com 97 | 22279,prospero.ru 98 | 143928,www.e-financas.gov.pt 99 | 130883,favoritsport.com.ua 100 | -------------------------------------------------------------------------------- /tlscanary/sources/debug2.csv: -------------------------------------------------------------------------------- 1 | rank,hostname 2 | 1,google.com 3 | 2,facebook.com 4 | 3,youtube.com 5 | 4,baidu.com 6 | 5,yahoo.com 7 | 6,amazon.com 8 | 7,wikipedia.org 9 | 9,twitter.com 10 | 10,taobao.com 11 | 11,google.co.in 12 | 12,www.live.com 13 | 14,linkedin.com 14 | 16,weibo.com 15 | 17,bing.com 16 | 18,ebay.com 17 | 19,google.co.jp 18 | 20,yandex.ru 19 | 21,vk.com 20 | 22,www.hao123.com 21 | 23,tmall.com 22 | 24,msn.com 23 | 25,instagram.com 24 | 26,google.de 25 | 27,amazon.co.jp 26 | 28,t.co 27 | 30,pinterest.com 28 | 31,onclickads.net 29 | 32,reddit.com 30 | 33,mail.ru 31 | 34,google.fr 32 | 35,paypal.com 33 | 36,google.ru 34 | 37,wordpress.com 35 | 38,google.com.br 36 | 40,blogspot.com 37 | 42,tumblr.com 38 | 44,imgur.com 39 | 45,microsoft.com 40 | 46,apple.com 41 | 48,google.it 42 | 49,amazon.de 43 | 50,google.es 44 | 51,1001pharmacies.com 45 | 52,101cookingfortwo.com 46 | 53,456.com 47 | 54,4over.com 48 | 55,800florals.com 49 | 1377,www.onlinecreditcenter6.com 50 | 3128,www.synchronycredit.com 51 | 9901,englishforums.com 52 | 14624,reviewmyaccount.com 53 | 15752,www.jcpcreditcard.com 54 | 
20099,portaportal.com 55 | 22279,prospero.ru 56 | 23525,vesti.lv 57 | 26598,tommybahama.com 58 | 29458,cxem.net 59 | 31862,samsungcsportal.com 60 | 32146,www.onlinecreditcenter2.com 61 | 40034,uob.com.my 62 | 40192,www.belkcredit.com 63 | 43694,www.csajokespasik.hu 64 | 47428,www.wangfujing.com 65 | 49182,psarips.com 66 | 62059,www.awardslinq.com 67 | 64349,www.chevrontexacocards.com 68 | 68345,bandzone.cz 69 | 69037,www.onlinecreditcenter4.com 70 | 74032,cardoverview.com 71 | 80121,27399.com 72 | 84681,ruscreditcard.com 73 | 85697,magickartenmarkt.de 74 | 89913,hslda.org 75 | 97220,gira.de 76 | 100333,toccata.ru 77 | 104674,incontrion.com 78 | 114128,universoracionalista.org 79 | 120451,yugiohcardmarket.eu 80 | 123888,saveful.com 81 | 124345,www.gpkafunda.com 82 | 130883,favoritsport.com.ua 83 | 135435,gearheads.in 84 | 137890,my.jobs 85 | 143928,www.e-financas.gov.pt 86 | 147865,sherdle.com 87 | 157298,futuramo.com 88 | 162199,hikvision.ru 89 | 174612,patschool.com 90 | 192596,robots-and-dragons.de 91 | 212740,www.chs.hu 92 | 222646,futuramo.net 93 | 230374,sirdatainitiative.com 94 | 241254,www.steinmartcredit.com 95 | 247769,gmprograminfo.com 96 | 252820,quyiyuan.com 97 | 255349,censusmapper.ca 98 | 257666,www.obsidiana.com 99 | 265649,piratenpartei.de 100 | -------------------------------------------------------------------------------- /tlscanary/sources/digicert.csv: -------------------------------------------------------------------------------- 1 | #handle:digicert 2 | hostname 3 | adobe.com 4 | csdn.net 5 | ebay.com 6 | imgur.com 7 | linkedin.com 8 | live.com 9 | mozilla.org 10 | netflix.com 11 | quizlet.com 12 | reddit.com 13 | vimeo.com 14 | yelp.com 15 | -------------------------------------------------------------------------------- /tlscanary/sources/revoked.csv: -------------------------------------------------------------------------------- 1 | #handle:revoked 2 | hostname 3 | revoked.badssl.com 4 | 
-------------------------------------------------------------------------------- /tlscanary/sources/smoke_list.csv: -------------------------------------------------------------------------------- 1 | #handle:smoke 2 | hostname 3 | 1010.m2m.com 4 | 1800cpap.com 5 | 1800registry.com 6 | 3dexport.com 7 | 5linx.com 8 | a3.acteva.com 9 | absolwenci.amu.edu.pl 10 | accesd.acadie.com 11 | accesd.affaires.acadie.com 12 | access.dogproblems.com 13 | discovery.cor.gov 14 | dominodeveloper.net 15 | donate.wfp.org 16 | donkiegirl.valtaf.nl 17 | donor.navigators.org 18 | drc.ohiolink.edu 19 | driehonderdmiljoen.valtaf.nl 20 | duckweb.uoregon.edu 21 | e-mediador.fiatc.es 22 | e-museum.emu.dk 23 | e-profesional.fiatc.es 24 | ebank.khb.hu 25 | ebanking.ocb.com.vn 26 | ebanking.vietabank.com.vn 27 | ecom.morethangourmet.com 28 | ecommerce.saa.org 29 | ecpay.com.tw 30 | edu.ingang.go.kr 31 | edu.nonghyup.com 32 | elearning.kumc.edu 33 | entrepreneurship.asu.edu 34 | epay.regionalacceptance.com 35 | eps.hrdkorea.or.kr 36 | eqs.mohe.gov.sa 37 | eremit.sbising.com 38 | eremit.unionbankofindia.co.in 39 | erewards.regionalacceptance.com 40 | erhverv.tdc.dk 41 | es.flowershopnetwork.com 42 | es.kepco.co.kr 43 | eseries.ansi.org 44 | incontrion.com 45 | parents.ou.edu 46 | parking.leicester.gov.uk 47 | parkseed.com 48 | partners.hunterdouglas.com 49 | partners.warn.com 50 | parts.andersenstormdoors.com 51 | parts.andersenwindows.com 52 | payments.appliedbank.com 53 | reviewmyaccount.com 54 | vesti.lv 55 | www.aimp.ru 56 | www.air-watch.com 57 | www.aircraftspruce.com 58 | www.akmembers.com 59 | www.alaskacommunications.com 60 | www.alc.ca 61 | www.aliceandolivia.com 62 | www.alivemax.com 63 | www.all-battery.com 64 | www.all-spec.com 65 | www.allamericanswim.com 66 | www.allfoodequipment.com.au 67 | www.allistanbultours.com 68 | www.allnatura.de 69 | www.allthingsforsale.com 70 | www.alphashirt.com 71 | www.chs.hu 72 | www.csajokespasik.hu 73 | www.kumc.edu 74 | www.kunsthauswien.com 75 | 
www.kuponan.ph 76 | www.kyushu.npa.go.jp 77 | www.laboutiquedelhogar.es 78 | www.lacajaroja.com 79 | www.lacrossefootwear.com 80 | www.ladysoma.com 81 | www.lakecountyin.org 82 | www.laptoppartsexpert.com 83 | www.lasvegashotel.com 84 | www.latsportswear.com 85 | www.lauraingraham.com 86 | www.lawnet.com.sg 87 | www.lawnornamentsandfountains.com 88 | www.lax.com 89 | www.ldmountaincentre.com 90 | www.leaderoption.com 91 | www.leasepurchaselocators.com 92 | www.thehandbook.com 93 | www.theharmonycompany.com 94 | www.thehawnfoundation.org 95 | www.thestationerystudio.com 96 | www.thewinfieldcollection.com 97 | www.thingsyouneverknew.com 98 | www.thinkholisticactpersonal.com 99 | www.thistlegirldesigns.com 100 | www.tianet.org 101 | www.ticketline.co.uk 102 | www.ticketstogo.com 103 | www.ticketturtle.com 104 | www.ticwatches.co.uk 105 | www.tiecoon.com 106 | www.tienda-online-informatica.com 107 | www.tightstightstights.co.uk 108 | www.tikbok.com 109 | parts.andersenwindows.com 110 | payments.appliedbank.com 111 | -------------------------------------------------------------------------------- /tlscanary/template/css/index_styles.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: sans-serif; 3 | font-size: 14px; 4 | margin: 0; 5 | padding-left: 30px; 6 | padding-right: 30px; 7 | background-color: #fafafa; 8 | } 9 | 10 | .vertical-section-container { 11 | height: 600; 12 | display: flex; 13 | flex-direction: column; 14 | } 15 | 16 | .centered { 17 | max-width: 100%; 18 | height: 600; 19 | margin-left: auto; 20 | margin-right: auto; 21 | border: none; 22 | position: relative; 23 | left: 0px; 24 | } 25 | 26 | .chart { 27 | position: absolute; 28 | top: 40px; 29 | left: 0px; 30 | } 31 | 32 | .meta { 33 | position: absolute; 34 | top: 20px; 35 | left: 0px; 36 | } 37 | 38 | .bottom { 39 | border-bottom: 1px solid #006080; 40 | } 41 | 42 | .full { 43 | width: 100%; 44 | } 45 | 46 | .labels { 47 | font-family: 
"Arial"; 48 | font-weight: bold; 49 | font-size: 20px; 50 | } 51 | 52 | .labels_small { 53 | font-family: "Arial"; 54 | font-weight: bold; 55 | font-size: 16px; 56 | font-color: black; 57 | } 58 | 59 | .pagination>li>span { 60 | border: 1px #1f7a1f; 61 | } 62 | 63 | .pagination > li.active > a:focus, 64 | .pagination > li.active > a:hover, 65 | .pagination > li.active > span:focus, 66 | .pagination > li.active > span:hover { 67 | z-index: 3; 68 | color: white; 69 | background-color: #33cc00; 70 | border-color: #33cc00; 71 | } 72 | 73 | .pagination>li.active>a { 74 | background: #1f7a1f; 75 | } 76 | 77 | .pagination>li { 78 | color: black; 79 | } 80 | .pagination>li>a { 81 | color: black; 82 | } 83 | 84 | .pagination>li.active { 85 | color: white; 86 | } 87 | 88 | .header { 89 | font-family: "Arial"; 90 | font-weight: bold; 91 | font-size: 24px; 92 | padding-left: 80px; 93 | } 94 | 95 | h3 { 96 | font-family: "Arial"; 97 | font-weight: bold; 98 | font-size: 20px; 99 | } 100 | 101 | :root { 102 | --primary-color: #00B4F0; 103 | --light-primary-color: #7CD8F7; 104 | --dark-primary-color: #0078A0; 105 | --error-color: #E61E6E; 106 | } 107 | 108 | h3 .anchor { 109 | color: grey; 110 | text-decoration: none; 111 | opacity: 0; 112 | font-size: 14px; 113 | margin-left: -1.6em; 114 | width: 1.6em; 115 | display: inline-block; 116 | outline: none; 117 | } 118 | 119 | h3:hover .anchor { 120 | opacity: 1; 121 | } 122 | 123 | #tabs ul { 124 | font-size: 20px; 125 | font-weight: bold; 126 | list-style: none; 127 | margin: 0; 128 | padding: 0; 129 | text-align: left; 130 | } 131 | 132 | #tabs li { 133 | background: #40bf40; 134 | border-radius: 15px 15px 0 0; 135 | display: inline; 136 | margin: 1px 1px 2px 0; 137 | padding: .75em 1em; 138 | padding-bottom: 4px; 139 | padding-top: 4px; 140 | } 141 | 142 | #tabs #nav li:hover { 143 | background: #33cc00; 144 | } 145 | 146 | #tabs #nav #selected li:hover { 147 | background: #33cc00; 148 | } 149 | 150 | #tabs a { 151 | color: white; 
152 | padding: 0 1em; 153 | text-decoration: none; 154 | } 155 | 156 | #tabs a:hover { 157 | padding: 0 1em; 158 | } 159 | 160 | #tabs #nav a:hover { 161 | color: white; 162 | } 163 | 164 | #tabs #selected { 165 | border: none; 166 | background: #1f7a1f; 167 | } 168 | 169 | #tabs #selected a:hover { 170 | border: none; 171 | #background: #33cc00; 172 | } 173 | 174 | #tabs #selected a { 175 | color: white; 176 | } 177 | -------------------------------------------------------------------------------- /tlscanary/template/css/ui_style.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: sans-serif; 3 | font-size: 14px; 4 | margin: 0; 5 | padding-left: 30px; 6 | padding-right: 30px; 7 | background-color: #fafafa; 8 | } 9 | 10 | .vertical-section-container { 11 | height: 600; 12 | display: flex; 13 | flex-direction: column; 14 | } 15 | 16 | .centered { 17 | max-width: 100%; 18 | height: 600; 19 | margin-left: auto; 20 | margin-right: auto; 21 | border: none; 22 | position: relative; 23 | left: 0px; 24 | } 25 | 26 | .chart { 27 | position: absolute; 28 | top: 40px; 29 | left: 0px; 30 | } 31 | 32 | .meta { 33 | position: absolute; 34 | top: 20px; 35 | left: 0px; 36 | } 37 | 38 | .bottom { 39 | border-bottom: 1px solid #006080; 40 | } 41 | 42 | .full { 43 | width: 100%; 44 | } 45 | 46 | .labels { 47 | font-family: "Arial"; 48 | font-weight: bold; 49 | font-size: 20px; 50 | } 51 | 52 | .labels_small { 53 | font-family: "Arial"; 54 | font-weight: bold; 55 | font-size: 16px; 56 | } 57 | 58 | .header { 59 | font-family: "Arial"; 60 | font-weight: bold; 61 | font-size: 24px; 62 | padding-left: 80px; 63 | } 64 | 65 | h3 { 66 | font-family: "Arial"; 67 | font-weight: bold; 68 | font-size: 20px; 69 | } 70 | 71 | :root { 72 | --primary-color: #00B4F0; 73 | --light-primary-color: #7CD8F7; 74 | --dark-primary-color: #0078A0; 75 | --error-color: #E61E6E; 76 | } 77 | 78 | h3 .anchor { 79 | color: grey; 80 | text-decoration: 
none; 81 | opacity: 0; 82 | font-size: 14px; 83 | margin-left: -1.6em; 84 | width: 1.6em; 85 | display: inline-block; 86 | outline: none; 87 | } 88 | 89 | h3:hover .anchor { 90 | opacity: 1; 91 | } 92 | 93 | #tabs ul { 94 | font-size: 20px; 95 | font-weight: bold; 96 | list-style: none; 97 | margin: 0; 98 | padding: 0; 99 | text-align: left; 100 | } 101 | 102 | #tabs li { 103 | background: #00ace6; 104 | border-radius: 15px 15px 0 0; 105 | display: inline; 106 | margin: 1px 1px 2px 0; 107 | padding: .75em 1em; 108 | padding-bottom: 4px; 109 | padding-top: 4px; 110 | } 111 | 112 | #tabs #nav li:hover { 113 | background: #0086b3; 114 | } 115 | 116 | #tabs #nav #selected li:hover { 117 | background: #00ace6; 118 | } 119 | 120 | #tabs a { 121 | color: white; 122 | padding: 0 1em; 123 | text-decoration: none; 124 | } 125 | 126 | #tabs a:hover { 127 | padding: 0 1em; 128 | } 129 | 130 | #tabs #nav a:hover { 131 | color: white; 132 | } 133 | 134 | #tabs #selected { 135 | border: none; 136 | background: #006080; 137 | } 138 | 139 | #tabs #selected a:hover { 140 | border: none; 141 | #background: #006080; 142 | } 143 | 144 | #tabs #selected a { 145 | color: white; 146 | } 147 | -------------------------------------------------------------------------------- /tlscanary/template/img/favicon.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /tlscanary/template/img/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /tlscanary/template/index.htm: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | TLS Canary 8 | 9 | 10 | 11 | 12 | 13 |
14 |
15 | 16 | 19 | 22 |
17 | 18 | 20 |
23 |

24 |

25 |

26 |
27 | 31 |
32 |
33 |
34 |
35 |
36 |
37 | 38 |
39 |
40 |
41 |
42 |
43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /tlscanary/template/js/index_transform.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "prop": "run", 4 | "name": "run", 5 | "type": "str", 6 | "default": true, 7 | "width": "30%" 8 | }, 9 | { 10 | "prop": "description", 11 | "name": "description", 12 | "type": "str", 13 | "default": true, 14 | "width": "40%" 15 | }, 16 | { 17 | "prop": "errors", 18 | "name": "errors", 19 | "type": "int", 20 | "default": true, 21 | "width": "10%" 22 | }, 23 | { 24 | "prop": "branch", 25 | "name": "branch", 26 | "type": "str", 27 | "default": true, 28 | "width": "20%" 29 | } 30 | ] 31 | -------------------------------------------------------------------------------- /tlscanary/template/js/transform.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "prop": "rank", 4 | "name": "rank", 5 | "type": "int", 6 | "default": true, 7 | "width": "10%" 8 | }, 9 | { 10 | "prop": "host", 11 | "name": "host", 12 | "type": "str", 13 | "default": true, 14 | "width": "40%" 15 | }, 16 | { 17 | "prop": "response.result.info.short_error_message", 18 | "name": "error", 19 | "type": "str", 20 | "default": true, 21 | "width": "40%" 22 | }, 23 | { 24 | "prop": "response.result.info.raw_error", 25 | "name": "raw_error", 26 | "type": "str", 27 | "default": false, 28 | "width": "50%" 29 | }, 30 | { 31 | "prop": "success", 32 | "name": "success", 33 | "type": "bool", 34 | "default": false 35 | }, 36 | { 37 | "prop": "response.worker_id", 38 | "name": "worker_id", 39 | "type": "int", 40 | "default": false 41 | }, 42 | { 43 | "prop": "response.result.info.certificate_chain_length", 44 | "name": "certificate_chain_length", 45 | "type": "int", 46 | "default": false, 47 | "width": "10%" 48 | }, 49 | { 50 | "prop": "response.result.info.ssl_status.certificateTransparencyStatus", 51 | "name": 
"ct_status", 52 | "type": "int", 53 | "default": false 54 | }, 55 | { 56 | "prop": "response.result.info.ssl_status.cipherName", 57 | "name": "cipher_name", 58 | "type": "str", 59 | "default": false 60 | }, 61 | { 62 | "prop": "response.result.info.ssl_status.isExtendedValidation", 63 | "name": "is_ev", 64 | "type": "bool", 65 | "default": false, 66 | "width": "10%" 67 | }, 68 | { 69 | "prop": "response.result.info.ssl_status.keyLength", 70 | "name": "key_length", 71 | "type": "int", 72 | "default": false, 73 | "width": "10%" 74 | }, 75 | { 76 | "prop": "response.result.info.ssl_status.protocolVersion", 77 | "name": "protocol_version", 78 | "type": "int", 79 | "default": false, 80 | "width": "10%" 81 | }, 82 | { 83 | "prop": "response.result.info.ssl_status.secretKeyLength", 84 | "name": "secret_key_length", 85 | "type": "int", 86 | "default": false, 87 | "width": "10%" 88 | }, 89 | { 90 | "prop": "response.result.info.ssl_status.serverCert.commonName", 91 | "name": "common_name", 92 | "type": "str", 93 | "default": false 94 | }, 95 | { 96 | "prop": "response.result.info.ssl_status.serverCert.displayName", 97 | "name": "display_name", 98 | "type": "str", 99 | "default": false 100 | }, 101 | { 102 | "prop": "response.result.info.ssl_status.serverCert.emailAddress", 103 | "name": "email_address", 104 | "type": "str", 105 | "default": false 106 | }, 107 | { 108 | "prop": "response.result.info.ssl_status.serverCert.isSelfSigned", 109 | "name": "is_self_signed", 110 | "type": "bool", 111 | "default": false 112 | }, 113 | { 114 | "prop": "response.result.info.ssl_status.serverCert.issuerCommonName", 115 | "name": "issuer_common_name", 116 | "type": "str", 117 | "default": false 118 | }, 119 | { 120 | "prop": "response.result.info.ssl_status.serverCert.issuerDisplayName", 121 | "name": "issuer_display_name", 122 | "type": "str", 123 | "default": false 124 | }, 125 | { 126 | "prop": "response.result.info.ssl_status.serverCert.issuerName", 127 | "name": "issuer_name", 128 | 
"type": "str", 129 | "default": false 130 | }, 131 | { 132 | "prop": "response.result.info.ssl_status.serverCert.issuerOrganization", 133 | "name": "issuer_organization", 134 | "type": "str", 135 | "default": false, 136 | "width": "30%" 137 | }, 138 | { 139 | "prop": "response.result.info.ssl_status.serverCert.issuerOrganizationalUnit", 140 | "name": "issuer_organizational_unit", 141 | "type": "str", 142 | "default": false 143 | }, 144 | { 145 | "prop": "response.result.info.ssl_status.serverCert.keyUsages", 146 | "name": "key_usages", 147 | "type": "str", 148 | "default": false 149 | }, 150 | { 151 | "prop": "response.result.info.ssl_status.serverCert.organization", 152 | "name": "organization", 153 | "type": "str", 154 | "default": false 155 | }, 156 | { 157 | "prop": "response.result.info.ssl_status.serverCert.organizationalUnit", 158 | "name": "organizational_unit", 159 | "type": "str", 160 | "default": false 161 | }, 162 | { 163 | "prop": "response.result.info.ssl_status.serverCert.serialNumber", 164 | "name": "serial_number", 165 | "type": "str", 166 | "default": false, 167 | "width": "30%" 168 | }, 169 | { 170 | "prop": "response.result.info.ssl_status.serverCert.sha1Fingerprint", 171 | "name": "sha1_fingerprint", 172 | "type": "str", 173 | "default": false, 174 | "width": "30%" 175 | }, 176 | { 177 | "prop": "response.result.info.ssl_status.serverCert.sha256Fingerprint", 178 | "name": "sha256_fingerprint", 179 | "type": "str", 180 | "default": false, 181 | "width": "30%" 182 | }, 183 | { 184 | "prop": "response.result.info.ssl_status.serverCert.subjectName", 185 | "name": "subject_name", 186 | "type": "str", 187 | "default": false, 188 | "width": "40%" 189 | }, 190 | { 191 | "prop": "response.result.info.ssl_status.serverCert.tokenName", 192 | "name": "token_name", 193 | "type": "str", 194 | "default": false 195 | }, 196 | { 197 | "prop": "response.result.info.ssl_status.serverCert.validity.notAfter", 198 | "name": "not_after", 199 | "type": "int", 200 | 
"default": false, 201 | "width": "20%" 202 | }, 203 | { 204 | "prop": "response.result.info.ssl_status.serverCert.validity.notAfterGMT", 205 | "name": "not_after_gmt", 206 | "type": "str", 207 | "default": false, 208 | "width": "30%" 209 | }, 210 | { 211 | "prop": "response.result.info.ssl_status.serverCert.validity.notBefore", 212 | "name": "not_before", 213 | "type": "int", 214 | "default": false, 215 | "width": "20%" 216 | }, 217 | { 218 | "prop": "response.result.info.ssl_status.serverCert.validity.notBeforeGMT", 219 | "name": "not_before_gmt", 220 | "type": "str", 221 | "default": false, 222 | "width": "30%" 223 | } 224 | ] 225 | -------------------------------------------------------------------------------- /tlscanary/template/report_template.htm: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | TLS Canary report page 8 | 9 | 10 | 11 | 12 | 13 |
14 |
15 | 16 | 19 | 22 |
17 | 18 | 20 |
23 |

24 |

25 |

26 |
27 | 32 |
33 |
34 |
35 |
36 | 37 | 42 | 45 |
38 |
39 | 40 |
41 |
43 |
44 |
46 |
47 |
48 |
49 |
50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /tlscanary/tools/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/tls-canary/dce7d04e0f8c1234787ab85cfefb31e4cc2493ec/tlscanary/tools/__init__.py -------------------------------------------------------------------------------- /tlscanary/tools/cache.py: -------------------------------------------------------------------------------- 1 | # This Source Code Form is subject to the terms of the Mozilla Public 2 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | import logging 6 | import os 7 | from shutil import rmtree 8 | from time import time 9 | 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | class DiskCache(object): 15 | 16 | def __init__(self, root_directory, maximum_age=24*60*60, purge=False): 17 | global logger 18 | self.__root = os.path.abspath(root_directory) 19 | self.__maximum_age = maximum_age 20 | if not os.path.exists(self.__root): 21 | logger.debug('Creating cache directory %s' % self.__root) 22 | os.makedirs(self.__root) 23 | if purge: 24 | self.purge() 25 | 26 | def list(self): 27 | """Return list of IDs in cache""" 28 | return os.listdir(self.__root) 29 | 30 | def __clear(self): 31 | """Remove all entries from cache directory""" 32 | global logger 33 | logger.debug("Clearing cache directory `%s`" % self.__root) 34 | for cache_id in self: 35 | path = self[cache_id] 36 | if os.path.isdir(path): 37 | rmtree(path) 38 | else: 39 | os.remove(path) 40 | 41 | def delete(self, id_or_path=None): 42 | """ 43 | Delete cache entries by ID or full path. 44 | Delete everything if no ID or path given. 
45 | """ 46 | if id_or_path is None: 47 | self.__clear() 48 | return 49 | path = id_or_path if id_or_path.startswith(self.__root) else self[id_or_path] 50 | if os.path.isdir(path): 51 | rmtree(path) 52 | elif os.path.exists(path): 53 | os.remove(path) 54 | 55 | def purge(self, maximum_age=None): 56 | """Remove stale entries from cache directory""" 57 | global logger 58 | logger.debug("Purging stale cache entries from `%s`" % self.__root) 59 | if maximum_age is None: 60 | maximum_age = self.__maximum_age 61 | now = time() # Current time as epoch 62 | stale_limit = now - maximum_age 63 | for cache_id in self: 64 | path = self[cache_id] 65 | mtime = os.path.getmtime(path) # Modification time as epoch (might have just 1.0s resolution) 66 | if mtime < stale_limit: 67 | logger.debug('Purging stale cache entry `%s`' % cache_id) 68 | if os.path.isdir(path): 69 | rmtree(path) 70 | else: 71 | os.remove(path) 72 | 73 | def __iter__(self): 74 | """Iterate IDs of cache entries""" 75 | for name in self.list(): 76 | yield name 77 | 78 | def __contains__(self, cache_id): 79 | """Check for presence of ID in cache""" 80 | return cache_id in self.list() 81 | 82 | def __getitem__(self, cache_id): 83 | """Return full path of cache entry (existing or not)""" 84 | return os.path.join(self.__root, cache_id) 85 | -------------------------------------------------------------------------------- /tlscanary/tools/cert.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python2 2 | 3 | # This Source Code Form is subject to the terms of the Mozilla Public 4 | # License, v. 2.0. If a copy of the MPL was not distributed with this file, 5 | # You can obtain one at http://mozilla.org/MPL/2.0/. 
from cryptography import x509
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import serialization
from cryptography.x509.oid import ExtensionOID


class Cert(object):
    """Class for handling X509 certificates"""

    def __init__(self, data):
        """
        Cert constructor

        It can handle PEM- and DER-encoded data given as bytes or
        bytearray, and lists of int bytes.

        :param data: bytes, bytearray or list of int
        :raises TypeError: if data is of an unsupported type
        """
        if isinstance(data, (list, bytearray)):
            data = bytes(data)
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes or list of int bytes")
        self.__raw_data = data
        # PEM is recognized by its ASCII armor; anything else is assumed DER
        if b"-----BEGIN CERTIFICATE-----" in data:
            self.x509 = x509.load_pem_x509_certificate(data, backends.default_backend())
            self.__raw_type = "PEM"
        else:
            self.x509 = x509.load_der_x509_certificate(data, backends.default_backend())
            self.__raw_type = "DER"

    def as_pem(self):
        """
        Convert certificate to PEM encoding

        :return: bytes (public_bytes returns bytes, not str)
        """
        return self.x509.public_bytes(encoding=serialization.Encoding.PEM)

    def as_der(self):
        """
        Convert certificate to DER encoding

        :return: bytes (public_bytes returns bytes, not str)
        """
        return self.x509.public_bytes(encoding=serialization.Encoding.DER)

    def signature_hash_algorithm(self):
        """
        Extract certificate's hash algorithm name

        :return: str
        """
        return self.x509.signature_hash_algorithm.name

    def subject_alt_name(self):
        """
        Extract certificate's alt names as a comma-joined string

        :return: str
        """
        try:
            alt_names = self.x509.extensions.get_extension_for_oid(
                ExtensionOID.SUBJECT_ALTERNATIVE_NAME).value
            return ",".join(alt_name.value for alt_name in alt_names)
        except x509.ExtensionNotFound:
            return "(no subject alt name)"

    def ext_key_usage(self):
        """
        Extract certificate's permitted extended usages as a comma-joined string

        :return: str
        """
        try:
            usages = self.x509.extensions.get_extension_for_oid(
                ExtensionOID.EXTENDED_KEY_USAGE).value
            # NOTE(review): `_name` is a private attribute of cryptography's
            # ObjectIdentifier; there is no public accessor for the OID's
            # human-readable name — verify on library upgrades.
            return ",".join(usage._name for usage in usages)
        except x509.ExtensionNotFound:
            return "(no ext key usage)"
2.0. If a copy of the MPL was not distributed with this file, 3 | # You can obtain one at http://mozilla.org/MPL/2.0/. 4 | 5 | import configparser 6 | import glob 7 | import os 8 | import struct 9 | 10 | 11 | class FirefoxApp(object): 12 | """Class holding information about an extracted Firefox application directory""" 13 | 14 | __locations = { 15 | "osx": { 16 | "base": os.path.join("*", "*.app"), 17 | "exe": os.path.join("Contents", "MacOS", "firefox"), 18 | "browser": os.path.join("Contents", "Resources", "browser"), 19 | "gredir": os.path.join("Contents", "Resources"), 20 | "ini": os.path.join("Contents", "Resources", "application.ini"), 21 | }, 22 | "linux": { 23 | "base": "firefox", 24 | "exe": "firefox", 25 | "browser": "browser", 26 | "gredir": "", 27 | "ini": "application.ini", 28 | }, 29 | "win": { 30 | "base": "core", 31 | "exe": "firefox.exe", 32 | "browser": "browser", 33 | "gredir": "", # FIXME: this may be wrong 34 | "ini": "application.ini", 35 | }, 36 | } 37 | 38 | def __init__(self, directory): 39 | 40 | # Assuming that directory points to a directory 41 | # where a stock Firefox archive was extracted. 
42 | self.platform = None 43 | self.app_dir = None 44 | for platform in self.__locations: 45 | base = self.__locations[platform]["base"] 46 | matches = glob.glob(os.path.join(directory, base)) 47 | if len(matches) == 0: 48 | continue 49 | elif len(matches) >= 1: 50 | if os.path.isdir(matches[0]): 51 | self.platform = platform 52 | self.app_dir = matches[0] 53 | break 54 | raise Exception("Unsupported application package format (missing or ambiguous base folder)") 55 | 56 | if self.platform is None: 57 | raise Exception("Unsupported application package platform") 58 | 59 | # Fill in the rest of the package locations 60 | self.exe = os.path.join(self.app_dir, self.__locations[self.platform]["exe"]) 61 | self.browser = os.path.join(self.app_dir, self.__locations[self.platform]["browser"]) 62 | self.gredir = os.path.join(self.app_dir, self.__locations[self.platform]["gredir"]) 63 | self.app_ini = os.path.join(self.app_dir, self.__locations[self.platform]["ini"]) 64 | 65 | # Sanity checks 66 | if not os.path.isfile(self.exe) or not os.path.isdir(self.browser): 67 | raise Exception("Unsupported application package format (missing files)") 68 | 69 | # For `linux`: byte 4 in ELF header is 01/02 for 32/64 bit 70 | if self.platform == "linux": 71 | with open(self.exe, "rb") as f: 72 | head = f.read(5) 73 | if head[4] == 1: 74 | self.platform = "linux32" 75 | elif head[4] == 2: 76 | self.platform = "linux" 77 | else: 78 | print(type(head[4])) 79 | raise Exception("Unsupported ELF binary (%s)" % head[4]) 80 | # Windows PE header offset is at 0x3c. Bytes 4 and 5 there tell 32 from 64 bit 81 | elif self.platform == "win": 82 | with open(self.exe, "rb") as f: 83 | try: 84 | f.seek(0x3c) 85 | pe_header_offset = struct.unpack("/cache/_cert_storage/security/data.safe.bin. This database 23 | # should in turn be copied to /security_state/data.safe.bin. 
def populate_cert_storage(onecrl_env, workdir, commit="stable", use_cache=True, cache_timeout=60*60):
    """
    Build (or fetch from cache) a cert_storage database populated from OneCRL.

    Clones the OneCRL-Tools repository into the disk cache if required,
    pins it to a known-working commit, and runs the Rust
    `one_crl_to_cert_storage` tool via Cargo to write
    <workdir>/cache/<onecrl_env>_cert_storage/security_state/data.safe.bin.

    :param onecrl_env: str OneCRL environment to pull revocations from
    :param workdir: str working directory holding the disk cache
    :param commit: str git commit/ref of OneCRL-Tools to use
    :param use_cache: bool whether a previously built database may be reused
    :param cache_timeout: int seconds before cached entries go stale
    :return: str absolute path to the generated data.safe.bin
    """
    global logger

    dc = cache.DiskCache(os.path.join(workdir, "cache"), cache_timeout, purge=True)
    cache_id = "%s_cert_storage" % onecrl_env
    if not use_cache:
        # Enforce re-extraction even if cached
        dc.delete(cache_id)
    if cache_id in dc:
        logger.warning("Using cached OneCRL cert_storage data from `%s`" % dc[cache_id])
        return os.path.join(dc[cache_id], "security_state", "data.safe.bin")

    if _ONE_CRL_TOOLS_REPO not in dc:
        # Check the clone result explicitly; ignoring it would only surface
        # later as a confusing `git checkout` failure on a missing repo.
        if subprocess.call(["git", "clone", _ONE_CRL_TOOLS_GIT_URL, dc[_ONE_CRL_TOOLS_REPO]]) != 0:
            logger.critical("Cannot clone OneCRL-Tools from `%s`" % _ONE_CRL_TOOLS_GIT_URL)
            sys.exit(5)

    cached_security_state = dc[cache_id]
    os.makedirs(cached_security_state)

    repo_dir = dc[_ONE_CRL_TOOLS_REPO]

    cargo_bin = find_executable("cargo")
    if cargo_bin is None:
        logger.critical("Cannot find Cargo toolchain")
        sys.exit(5)
    logger.debug("Using Cargo toolchain at `%s`" % cargo_bin)

    # Checkout a known-working commit before running
    logger.debug("Checking out commit `%s` in `%s`" % (commit, repo_dir))
    if subprocess.call(["git", "checkout", "-q", commit], cwd=repo_dir) != 0:
        logger.critical("Cannot checkout OneCRL-Tools git commit `%s`" % commit)
        sys.exit(5)
    tool_dir = os.path.join(repo_dir, "one_crl_to_cert_storage")
    # The user may have their global toolchain set to nightly, but we would
    # like if the local use of the toolchain pointed to stable.
    if subprocess.call(["rustup", "override", "set", "--path", tool_dir, "stable"]) != 0:
        logger.critical("Cannot set the working toolchain for `%s` to stable" % tool_dir)
        sys.exit(5)
    # cargo run --manifest-path one_crl_to_cert_storage/Cargo.toml -- --env $onecrl_env --profile $profile_path
    manifest = os.path.join(tool_dir, "Cargo.toml")
    result = subprocess.call([
        # "--" delimits arguments given to Cargo from the arguments given to the built tool.
        "cargo", "run", "--manifest-path", manifest, "--",
        # arguments for the tool itself
        "--env", onecrl_env, "--profile", cached_security_state])
    if result != 0:
        logger.critical("Cannot populate cert_storage from OneCRL")
        sys.exit(5)
    return os.path.join(cached_security_state, "security_state", "data.safe.bin")
22 | 23 | :param total: int total calculations required for completion 24 | :param unit: str for unit used for speed 25 | :param percent: bool whether to show percent 26 | :param speed: bool whether or not to show speed 27 | :param eta: bool whether or not to show ETA 28 | :param average: float length of averaging window 29 | """ 30 | self.total = total 31 | self.completed = 0 32 | self.overhead = 0 33 | self.unit = "" if unit is None else " %s" % unit 34 | self.show_percent = percent 35 | self.show_speed = speed 36 | self.show_eta = eta 37 | self.average_window = average 38 | self.log = [] 39 | self.write_lock = threading.Lock() 40 | self.start_time = time.time() 41 | self.logger_thread = None 42 | 43 | def log_completed(self, completed): 44 | """ 45 | Log a number of completed calculations 46 | 47 | :param completed: Number of completed items 48 | :return: None 49 | """ 50 | self.write_lock.acquire() 51 | try: 52 | self.log.append((time.time(), completed, 0)) 53 | self.completed += completed 54 | finally: 55 | self.write_lock.release() 56 | 57 | def log_overhead(self, overhead): 58 | """ 59 | Log a number of overhead calculations 60 | :param overhead: 61 | :return: 62 | """ 63 | self.write_lock.acquire() 64 | try: 65 | self.log.append((time.time(), 0, overhead)) 66 | self.overhead += overhead 67 | finally: 68 | self.write_lock.release() 69 | 70 | def log_window(self, window): 71 | """ 72 | Return current averaging window 73 | 74 | :param window: float time span for window 75 | :return: list of (time, int completed, int overhead) 76 | """ 77 | earliest_time = time.time() - window 78 | latest_entry = len(self.log) 79 | earliest_entry = latest_entry 80 | for i in range(latest_entry - 1, -1, -1): 81 | if self.log[i][0] >= earliest_time: 82 | earliest_entry = i 83 | else: 84 | break 85 | return self.log[earliest_entry:latest_entry] 86 | 87 | @staticmethod 88 | def __window_parameters(log_window): 89 | """ 90 | Calculate timespan and sums of completed and overhead for 
    @staticmethod
    def __window_parameters(log_window):
        """
        Calculate timespan and sums of completed and overhead for log window

        :param log_window: list of (float time, int completed, int overhead)
        :return: float time span, int completed, int overhead
        """
        # Skip first log entry in window, because it logs values from before target timeframe
        earliest, _, _ = log_window[0]
        # Fold over the remaining entries: carry the latest timestamp forward
        # while summing up the completed and overhead counts.
        latest, completed, overhead = reduce(lambda x, y: (y[0], x[1] + y[1], x[2] + y[2]), log_window[1:])
        span = latest - earliest
        return span, completed, overhead

    def __str__(self):
        """
        Return string representation of current progress

        Depending on constructor flags this includes percentage, counts,
        overhead ratio, net/gross speeds and an ETA.

        :return: str
        """

        now = time.time()
        overall_time = now - self.start_time

        # Calculate overall progress and percentages
        net_total = self.total
        net_done = self.completed
        # net_todo = self.total - net_done
        net_percent = min(100.0 * net_done / net_total, 100.0)

        # Gross values take overhead into account
        gross_done = self.completed + self.overhead
        # Gross total is estimated by total overhead so far
        gross_total = net_total if net_done == 0 else net_total * gross_done / net_done
        gross_todo = gross_total - gross_done

        # Overhead percentage is measured relative to completed (net) work,
        # guarded against division by zero before anything completed.
        overhead_percent = 0.0 if net_done == 0 else 100.0 * self.overhead / net_done

        # Get current averaging window
        log_window = self.log_window(self.average_window)

        # Bail out if there is not enough data in the window
        if len(log_window) < 2:
            s = ""
            if self.show_percent:
                s += "%.0f%% " % net_percent
            s += "%d/%d" % (min(net_done, net_total), net_total)
            # NOTE(review): this branch formats overhead with %.0f while the
            # data-rich branch below uses %.1f — confirm the difference is intended.
            s += ", %.0f%% overhead" % overhead_percent
            if self.show_speed:
                s += ", --%s/s net" % self.unit
                s += ", --%s/s gross" % self.unit
            if self.show_eta:
                s += ", ETA --"
            return s

        # Get values for current averaging window
        win_span, win_completed, win_overhead = self.__window_parameters(log_window)

        # Calculate overall and current net and gross speeds, and ETA
        # net_speed = float(net_done) / overall_time
        gross_speed = float(gross_done) / overall_time
        net_win_speed = float(win_completed) / win_span
        gross_win_speed = float(win_completed + win_overhead) / win_span
        # Speed might have been zero
        if gross_win_speed != 0.0:
            gross_eta = now + gross_todo / gross_win_speed
        else:
            gross_eta = None

        # Build the string according to config
        s = ""
        if self.show_percent:
            s += "%.0f%% " % net_percent
        s += "%d/%d" % (min(net_done, net_total), net_total)
        s += ", %.1f%% overhead" % overhead_percent
        if self.show_speed:
            # Candidate time scales for rendering the speed in a readable unit
            match = [
                (0.001, "%s/ms" % self.unit),
                (1.0, "%s/s" % self.unit),
                (60.0, "%s/min" % self.unit),
                (60.0 * 60.0, "%s/h" % self.unit),
                (24.0 * 60.0 * 60.0, "%s/day" % self.unit)
            ]
            # Default to per-second, then pick the first scale at which the
            # overall gross speed exceeds 100 units per interval.
            scale, unit = match[1]
            for scale, unit in match:
                if gross_speed * scale > 100:
                    break
            s += ", %.0f%s net" % (scale * net_win_speed, unit)
            s += ", %.0f%s gross" % (scale * gross_win_speed, unit)
        if self.show_eta:
            if gross_eta is not None:
                s += ", ETA %s" % time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(gross_eta))
            else:
                s += ", ETA --"
        return s

    def start_reporting(self, interval, first_interval=None):
        """
        Spawn logger thread

        :param interval: float seconds between log update
        :param first_interval: optional float seconds before first log update
        :return: None
        """
        global logger

        # Only one logger thread at a time; repeated calls are no-ops
        if self.logger_thread is None:
            logger.debug("Starting progress logger thread")
            self.logger_thread = ProgressLogger(self, first_interval, interval)
            self.logger_thread.setName("Progress")
            self.logger_thread.daemon = True  # Thread dies with worker
            self.logger_thread.start()
    def stop_reporting(self):
        """
        Stop logger thread

        :return: None
        """
        global logger

        if self.logger_thread is not None:
            logger.debug("Terminating progress logger thread")
            try:
                self.logger_thread.quit()
            except AttributeError:
                # NOTE(review): presumably guards against the thread reference
                # being cleared concurrently during shutdown — confirm.
                pass
            finally:
                self.logger_thread = None


class ProgressLogger(threading.Thread):
    """Progress logger thread that logs progress updates"""

    def __init__(self, pr, first_interval, regular_interval):
        """
        Constructor

        :param pr: ProgressTracker instance to monitor
        :param first_interval: float seconds before first log update
        :param regular_interval: float seconds between log update
        """
        super(ProgressLogger, self).__init__()
        self.pr = pr
        self.first_interval = first_interval
        self.regular_interval = regular_interval
        self.updated_at = time.time()
        self.__quit = False

    def __update_time(self):
        """
        Iterator that returns time of next progress update.
        The first update may be different from the rest.

        :return: float iterator
        """
        # Yield an (optional) earlier first deadline, then regular deadlines
        # anchored at the time of the most recent update.
        if self.first_interval is not None:
            yield self.updated_at + self.first_interval
        while True:
            yield self.updated_at + self.regular_interval

    def run(self):
        """
        Start thread

        :return: None
        """
        global logger

        logger.debug("ProgressLogger thread starting")
        update_time = self.__update_time()
        next_update = next(update_time)
        while not self.__quit:
            # 1 s poll granularity bounds the reaction time to quit()
            time.sleep(1)
            now = time.time()
            if now >= next_update:
                # Log the tracker's current progress string
                logger.info(str(self.pr))
                self.updated_at = now
                next_update = next(update_time)
        logger.debug("ProgressLogger thread exiting")

    def quit(self):
        """
        Signal thread to terminate

        :return: None
        """
        self.__quit = True


# -*- coding: utf8 -*-

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import logging
import os

logger = logging.getLogger(__name__)


class TagsDB(object):
    """
    Class to manage snapshot tags

    On disk the database is a JSON object mapping each tag to a list of
    handles; in memory the lists are held as sets for fast membership tests.
    """

    def __init__(self, args):
        """
        Constructor

        :param args: argparse-style object; only `args.workdir` is used, as
            the directory containing the `tags.json` database file
        """
        self.__args = args
        self.__tags_file = os.path.join(self.__args.workdir, "tags.json")
        self.__tags = None  # Overwritten by TagsDB.load()
        self.load()

    def load(self) -> None:
        """
        Load TagDB from disk. A missing file yields an empty database.
        :return: None
        """
        try:
            with open(self.__tags_file, mode="r") as f:
                parsed = json.load(f)
        except FileNotFoundError:
            parsed = {}
        # Convert json arrays to sets
        self.__tags = {}
        for tag in parsed:
            assert type(parsed[tag]) is list
            self.__tags[tag] = set(parsed[tag])

    def remove_dangling(self, existing_refs: list, save: bool = False):
        """
        Remove all tag references to non-existent refs
        :param existing_refs: list of existing references
        :param save: optional bool whether TagDB to be saved to disk
        :return: None
        """
        changed = False
        # Iterating `self` yields only tags with handles, and `self[tag]`
        # returns a copy, so removing entries while iterating is safe.
        for tag in self:
            for ref in self[tag]:
                if ref not in existing_refs:
                    self.remove(tag, ref, save=False)
                    changed = True
        if changed and save:
            self.save()

    def save(self):
        """
        Save TagDB to disk
        :return: None
        """
        # Sets are not JSON-serializable, hence converted to lists first
        for_parser = {}
        for tag in self:  # Iterates just associated tags
            for_parser[tag] = list(self[tag])
        with open(self.__tags_file, mode="w") as f:
            json.dump(for_parser, f, indent=4, sort_keys=True)

    def __contains__(self, tag: str) -> bool:
        """
        Implements `tag in TagDB()`
        :param tag: str with tag
        :return: bool
        """
        return tag in self.__tags and len(self.__tags[tag]) > 0

    def __getitem__(self, tag: str) -> set:
        """
        Implements `handles = TagDB()[tag]`
        :param tag: str with tag
        :return: set of str of handles (may be empty)
        """
        return self.tag_to_handles(tag)

    def __setitem__(self, tag: str, handle: str) -> None:
        """
        Implements `TagDB()[tag] = handle` for tagging a handle
        :param tag: str with tag
        :param handle: str with handle
        :return: None
        """
        self.add(tag, handle)

    def __delitem__(self, tag: str) -> None:
        """
        Implements `del TagDB()[tag]`, dropping a tag completely
        :param tag: str with tag
        :return: None
        """
        self.drop(tag)

    def __iter__(self):
        """
        Implements iterating over all tags that have associated handles
        :return: iterator of str tags
        """
        for tag in self.__tags:
            if len(self.__tags[tag]) > 0:
                yield tag

    @staticmethod
    def is_valid_tag(tag):
        """A valid tag is a purely alphanumeric string that is not a number."""
        return type(tag) is str and tag.isalnum() and not tag.isdigit() and " " not in tag

    def tag_to_handles(self, tag: str) -> set:
        """
        Converts a tag to its associated handles
        :param tag: str with tag
        :return: set of str handles (may be empty)
        """
        try:
            # Return a copy so callers can't mutate internal state
            return self.__tags[tag].copy()
        except KeyError:
            return set()

    def handle_to_tags(self, handle: str) -> set:
        """
        Converts a handle to its associated tags
        :param handle: str with handle
        :return: set of str with tags (may be empty)
        """
        tags = set()
        for tag in self.__tags:
            if handle in self.__tags[tag]:
                tags.add(tag)
        return tags

    def exists(self, tag: str, handle: str = None) -> bool:
        """
        Check whether a tag exists, or exists for a specific handle
        :param tag: str with tag
        :param handle: optional str with handle
        :return: bool
        """
        if handle is None:
            return tag in self.__tags
        if tag in self.__tags:
            return handle in self.__tags[tag]
        return False

    def list(self, tag: str = None) -> list:
        """
        Returns a list of tags, or handles associated with tag
        :param tag: optional str with tag
        :return: list of str of tags (or handles)
        """
        if tag is None:
            return sorted(self.__tags.keys())
        return list(self.tag_to_handles(tag))

    def add(self, tag: str, handle: str, save: bool = True):
        """
        Associate tag with handle
        :param tag: str with tag
        :param handle: str with handle
        :param save: optional bool whether TagDB to be saved to disk
        :return: None
        """
        try:
            self.__tags[tag].add(handle)
        except KeyError:
            self.__tags[tag] = {handle}
        if save:
            self.save()

    def remove(self, tag: str, handle: str, save: bool = True):
        """
        Disassociate tag from handle
        :param tag: str with tag
        :param handle: str with handle
        :param save: optional bool whether TagDB to be saved to disk
        :return: None
        """
        # Bug fix: set.remove() raises KeyError (never ValueError) for a
        # missing element, so the former `except ValueError` branch was dead
        # code and a missing handle was misreported as a missing tag.
        # Check both conditions explicitly instead.
        if tag not in self.__tags:
            logger.warning("Tag `%s` does not exist" % tag)
            return  # Nothing changed, so no need to save
        if handle not in self.__tags[tag]:
            logger.warning("Handle `%s` is not associated with tag `%s`" % (handle, tag))
            return  # Nothing changed, so no need to save
        self.__tags[tag].remove(handle)
        if save:
            self.save()

    def drop(self, tag, save: bool = True):
        """
        Completely delete a tag and all of its handle associations
        :param tag: str with tag
        :param save: optional bool whether TagDB to be saved to disk
        :return: None
        """
        if tag in self.__tags and len(self.__tags[tag]) > 0:
            del self.__tags[tag]
        else:
            logger.debug("Not dropping non-existent tag `%s`" % tag)
            return  # No need to save
        if save:
            self.save()
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import json
import logging
import os
from queue import Queue, Empty
import subprocess
from threading import Thread

logger = logging.getLogger(__name__)
# Package root (tlscanary/), one level up from tools/; used to locate the
# bundled JS worker scripts in tlscanary/js/.
module_dir = os.path.realpath(os.path.join(os.path.split(__file__)[0], os.path.pardir))


def read_from_worker(worker, response_queue):
    """Reader thread that reads messages from the worker.
    The convention is that all worker output that parses
    as JSON is routed to the response queue, else it is
    interpreted as a JavaScript error or warning.
    """
    global logger

    logger.debug('Reader thread started for worker %s' % worker)
    # iter(..., b'') terminates the loop once the pipe is closed (EOF)
    for line in iter(worker.stdout.readline, b''):
        line = line.decode("utf-8").strip()
        try:
            # Response() raises ValueError for anything that is not JSON
            response_queue.put(Response(line))
            logger.debug("Received worker message: %s" % line)
        except ValueError:
            if line.startswith("JavaScript error:"):
                logger.error("JS error from worker %s: %s" % (worker, line))
            elif line.startswith("JavaScript warning:"):
                logger.warning("JS warning from worker %s: %s" % (worker, line))
            else:
                logger.critical("Invalid output from worker %s: %s" % (worker, line))
    logger.debug('Reader thread finished for worker %s' % worker)
    worker.stdout.close()


class XPCShellWorker(object):
    """XPCShell worker implementing an asynchronous, JSON-based message system"""

    def __init__(self, app, script=None, head_script=None, profile=None, prefs=None):
        """
        Constructor

        :param app: Firefox app object providing .exe, .gredir and .browser
        :param script: optional str path to the worker script (default: js/scan_worker.js)
        :param head_script: optional str path to the head script (default: js/worker_common.js)
        :param profile: optional str profile directory to activate after spawn
        :param prefs: optional prefs collection sent to the worker after spawn
        """
        global module_dir

        self.__app = app
        if script is None:
            self.__script = os.path.join(module_dir, "js", "scan_worker.js")
        else:
            self.__script = script
        if head_script is None:
            self.__head_script = os.path.join(module_dir, "js", "worker_common.js")
        else:
            self.__head_script = head_script
        self.__profile = profile
        self.__prefs = prefs
        self.__worker_thread = None  # subprocess.Popen handle, despite the name
        self.__reader_thread = None
        self.__response_queue = Queue()

    def spawn(self):
        """Spawn the worker process and its dedicated reader thread.

        :return: bool False if the optional profile/prefs handshake fails, else True
        """
        global logger, module_dir

        cmd = [self.__app.exe, '-xpcshell', "-g", self.__app.gredir, "-a", self.__app.browser, "-f", self.__head_script, self.__script]
        logger.debug("Executing worker shell command `%s`" % ' '.join(cmd))

        # NOTE(review): bufsize=1 (line buffering) only applies in text mode;
        # with binary pipes Python 3 warns and uses the default — confirm.
        self.__worker_thread = subprocess.Popen(
            cmd,
            cwd=self.__app.browser,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            bufsize=1)  # `1` means line-buffered

        # Spawn a reader thread, because stdio reads are blocking
        self.__reader_thread = Thread(target=read_from_worker, name="Reader",
                                      args=(self.__worker_thread, self.__response_queue))
        self.__reader_thread.daemon = True  # Thread dies with worker
        self.__reader_thread.start()

        if self.__profile is not None:
            logger.debug("Changing worker profile to `%s`" % self.__profile)
            if self.send(Command("useprofile", path=self.__profile)):
                response = self.wait()
            else:
                # .wait() would wait forever if .send() was not successful
                response = None
            if response is None or response.original_cmd["mode"] != "useprofile" or response.result != "ACK":
                logger.error("Worker failed to set profile `%s`" % self.__profile)
                return False

        if self.__prefs is not None:
            logger.debug("Setting worker prefs to `%s`" % self.__prefs)
            if self.send(Command("setprefs", prefs=self.__prefs)):
                response = self.wait()
            else:
                # .wait() would wait forever if .send() was not successful
                response = None
            if response is None or response.original_cmd["mode"] != "setprefs" or response.result != "ACK":
                logger.error("Worker failed to set prefs `%s`" % self.__prefs)
                return False

        return True

    def terminate(self):
        """Signal the worker process to quit"""
        # The reader thread dies when the Firefox process quits
        self.__worker_thread.terminate()

    def kill(self):
        """Kill the worker process"""
        self.__worker_thread.kill()

    def is_running(self):
        """Check whether the worker is still fully running"""
        if self.__worker_thread is None:
            return False
        return self.__worker_thread.poll() is None

    def send(self, cmd):
        """Send a command message to the worker.

        :param cmd: Command instance
        :return: bool False when the worker pipe is gone, else True
        """
        global logger

        cmd_string = str(cmd)
        logger.debug("Sending worker message: `%s`" % cmd_string)
        try:
            self.__worker_thread.stdin.write((cmd_string + "\n").encode("utf-8"))
            self.__worker_thread.stdin.flush()
        except IOError:
            logger.debug("Can't write to worker. Message `%s` wasn't heard." % cmd_string)
            return False
        return True

    def receive(self):
        """Read queued messages from worker. Returns [] if there were none."""

        global logger

        # Drain the reader queue without blocking
        responses = []
        try:
            while True:
                responses.append(self.__response_queue.get_nowait())
        except Empty:
            pass

        return responses

    def wait(self):
        """Wait for and return the next single message from the worker."""
        return self.__response_queue.get()


class Command(object):
    """JSON-serializable command message sent to the worker."""

    def __init__(self, mode, id=None, **kwargs):
        """
        :param mode: str command mode (required)
        :param id: optional command ID echoed back by the worker
        :param kwargs: arbitrary command arguments
        """
        if mode is None:
            raise Exception("Refusing to init mode-less command")
        self.__id = id
        self.__mode = mode
        self.__args = kwargs

    def as_dict(self):
        return {"id": self.__id, "mode": self.__mode, "args": self.__args}

    def __str__(self):
        return json.dumps(self.as_dict())


class Response(object):
    """Parsed JSON message received from the worker."""

    def __init__(self, message_string):
        """
        :param message_string: str with one JSON message from the worker
        :raise ValueError: when message_string is not valid JSON
        """
        global logger

        self.id = None
        self.worker_id = None
        self.original_cmd = None
        self.success = None
        self.result = None
        self.elapsed_ms = None
        # Bug fix: these two were only assigned when present in the message,
        # which made as_dict() raise AttributeError on short messages.
        self.command_time = None
        self.response_time = None
        message = json.loads(message_string)  # May throw ValueError
        if "id" in message:
            self.id = message["id"]
        if "original_cmd" in message:
            self.original_cmd = message["original_cmd"]
        if "worker_id" in message:
            self.worker_id = message["worker_id"]
        if "success" in message:
            self.success = message["success"]
        if "result" in message:
            self.result = message["result"]
        if "command_time" in message:
            self.command_time = message["command_time"]
        if "response_time" in message:
            self.response_time = message["response_time"]
        # A well-formed worker message carries exactly these seven keys
        if len(message) != 7:
            logger.error("Worker response has unexpected format: %s" % message_string)

    def as_dict(self):
        return {
            "id": self.id,
            "original_cmd": self.original_cmd,
            "worker_id": self.worker_id,
            "success": self.success,
            "result": self.result,
            "command_time": self.command_time,
            "response_time": self.response_time,
        }
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import logging
import time
from worq.pool.thread import WorkerPool
from worq import get_broker, get_queue, TaskSpace

from tlscanary.tools import xpcshell_worker as xw


logger = logging.getLogger(__name__)
# Task namespace shared by broker and queue. `pool` is module-global state,
# which is why run_scans below is not re-entrant.
ts = TaskSpace(__name__)
pool = None


def init(worq_url):
    """Create a worq broker for `worq_url` and expose this module's tasks on it."""
    global ts
    broker = get_broker(worq_url)
    broker.expose(ts)
    return broker


def start_pool(worq_url, num_workers=1, **kw):
    """Start and return a worq thread pool with `num_workers` workers."""
    broker = init(worq_url)
    new_pool = WorkerPool(broker, workers=num_workers)
    new_pool.start(**kw)
    return new_pool


def stop():
    """Stop the module-global worker pool, if one is running."""
    global logger, pool
    logger.debug("Stopping worker pool %s" % pool)
    if pool is not None:
        pool.stop()
        pool = None


class ScanResult(object):
    """Class to hold and evaluate scan responses."""

    def __init__(self, response):
        # response: xpcshell_worker.Response for a completed `scan` command
        self.response = response
        self.success = self.evaluate_success(response)
        self.host = self.get_host()
        self.rank = self.get_rank()

    def get_host(self):
        # Host is echoed back inside the original command's arguments
        return self.response.original_cmd["args"]["host"]

    def get_rank(self):
        return self.response.original_cmd["args"]["rank"]

    @staticmethod
    def evaluate_success(response):
        """
        Decide whether a scan response counts as a successful connection.

        :param response: xpcshell_worker.Response
        :return: bool
        """
        global logger

        # if .success is true, the result came through the requests
        # `load` handler with state == 4 (fully loaded).
        if response.success:
            return True

        # Else, check whether the error was due to a redirect error, with the first hop being OK.
        uri = response.result["info"]["original_uri"]
        status = response.result["info"]["status"]
        origin = response.result["origin"]
        if origin == "error_handler" and status == 0:  # NS_OK
            logger.debug("Ignored redirect by `%s`" % uri)
            return True

        # Else, the request had some sort of issue
        return False

    def as_dict(self):
        """Return a JSON-serializable dict representation of this result."""
        return {
            "response": self.response.as_dict(),
            "success": self.success,
            "host": self.host,
            "rank": self.rank
        }


@ts.task
def scan_urls(app, target_list, profile=None, prefs=None, get_certs=False, timeout=10):
    """
    Worq task: scan a list of (rank, host) targets with one XPCShell worker.

    :param app: Firefox app object to run xpcshell from
    :param target_list: list of (rank, host) tuples
    :param profile: optional str profile directory path
    :param prefs: optional prefs handed to the worker
    :param get_certs: bool whether to include certificates in results
    :param timeout: float seconds per-scan timeout
    :return: dict mapping host to ScanResult
    """
    global logger

    logger.debug("scan_urls task called with %s" % repr(target_list))

    # Spawn a worker instance
    xpcw = xw.XPCShellWorker(app, profile=profile, prefs=prefs)
    xpcw.spawn()

    # Enqueue all host scans for this worker instance
    wakeup_cmd = xw.Command("wakeup")
    cmd_count = 0
    for rank, host in target_list:
        scan_cmd = xw.Command("scan", host=host, rank=rank, include_certificates=get_certs, timeout=timeout)
        xpcw.send(scan_cmd)
        # Nudge the worker's event loop every tenth command
        if cmd_count % 10 == 0:
            xpcw.send(wakeup_cmd)
        cmd_count += 1

    # Fetch results from queue, until all results are in or until the last
    # scan must have run into timeout. Note that ACKs come in strict sequence of
    # their respective commands.
    results = {}
    timeout_time = time.time() + timeout + 1
    while time.time() < timeout_time:
        for response in xpcw.receive():
            if response.result == "ACK":
                # Reset timeout when scan commands are ACKed.
                if response.original_cmd["mode"] == "scan":
                    timeout_time = time.time() + timeout + 1
                # Ignore other ACKs.
                continue
            # Else we know this is the result of a scan command.
            result = ScanResult(response)
            results[result.host] = result
        if len(results) >= len(target_list):
            break
        if xpcw.send(wakeup_cmd):
            time.sleep(0.1)
        else:
            # Worker pipe is gone; no more results will arrive
            break

    if len(results) < len(target_list):
        logger.warning("Worker task dropped results, yielded %d instead of %d" % (len(results), len(target_list)))

    # Wind down the worker
    xpcw.send(xw.Command("quit"))
    xpcw.terminate()

    logger.debug("Worker task finished, returning %d results" % len(results))

    return results
@ts.task
def collect(result_dicts):
    """Worq task: merge per-chunk result dicts into one combined dict."""
    combined_results = {}
    for result in result_dicts:
        combined_results.update(result)
    return combined_results


def __as_chunks(flat_list, chunk_size):
    """Yield successive chunk_size-sized slices of flat_list."""
    for i in range(0, len(flat_list), chunk_size):
        yield flat_list[i:i + chunk_size]


# CAVE: run_scans is not re-entrant due to use of global variables.
def run_scans(app, target_list, profile=None, prefs=None, num_workers=4, targets_per_worker=50, worq_url="memory://",
              get_certs=False, timeout=10, progress_callback=None):
    """
    Scan target_list in parallel chunks and return the combined results.

    :param app: Firefox app object handed to each worker
    :param target_list: list of (rank, host) tuples
    :param profile: optional str profile directory for the workers
    :param prefs: optional prefs for the workers
    :param num_workers: int number of pool workers
    :param targets_per_worker: int chunk size per scan task
    :param worq_url: str worq broker URL (in-memory broker by default)
    :param get_certs: bool whether to collect certificates
    :param timeout: float per-scan timeout in seconds
    :param progress_callback: optional callable receiving an int result count
    :return: dict mapping host to ScanResult
    """
    global logger, pool

    pool = start_pool(worq_url, timeout=1, num_workers=num_workers)

    chunks = __as_chunks(target_list, targets_per_worker)
    try:
        queue = get_queue(worq_url, target=__name__)

        # Enqueue tasks to be executed in parallel
        scan_results = [queue.scan_urls(app, targets, profile=profile, prefs=prefs,
                                        get_certs=get_certs, timeout=timeout)
                        for targets in chunks]
        result = queue.collect(scan_results)

        queue_len = len(queue)
        logged_len = 0  # Required to correct for "overlogging" due to chunking

        while True:
            finished = result.wait(timeout=10)
            # Progress is inferred from how much the queue shrank since last poll
            current_queue_len = len(queue)
            chunks_done = queue_len - current_queue_len
            logger.debug("After queue wait: %d old - %d new = %d done" %
                         (queue_len, current_queue_len, chunks_done))
            queue_len = current_queue_len
            # Check finished first to ensure that the final chunk is not logged,
            # because the final chunk might not have the full chunk size.
            if finished:
                break
            if progress_callback is not None and chunks_done > 0:
                # We must assume the maximum chunk size here to calculate the number of results
                progress_callback(chunks_done * targets_per_worker)
                logged_len += chunks_done * targets_per_worker

    except KeyboardInterrupt:
        logger.critical("Ctrl-C received. Winding down workers...")
        stop()
        logger.debug("Signaled workers to quit")
        raise KeyboardInterrupt

    finally:
        # Always tear down the global pool, even on success
        stop()

    # Log the results of the final chunk
    if progress_callback is not None:
        actual_len = len(result.value)
        logger.debug("Chunkwise logging reported on %d results, actually received %d" % (logged_len, actual_len))
        # Correct the progress count for over- or under-reporting caused by
        # assuming full chunks above
        len_correction = actual_len - logged_len
        if len_correction != 0:
            logger.debug("Logging correction for %d results" % len_correction)
            progress_callback(len_correction)

    return result.value