├── ready ├── __init__.py ├── checks │ ├── __init__.py │ ├── dns.py │ ├── status.py │ ├── report_to.py │ ├── ns.py │ ├── redirect.py │ ├── bad_response.py │ ├── swagger.py │ ├── graphql.py │ ├── cookies.py │ ├── corp_coop_coep.py │ ├── leaky_headers.py │ ├── hsts.py │ ├── content.py │ ├── well_known.py │ ├── csp.py │ ├── ssl.py │ ├── email.py │ └── html.py ├── __init__.pyc ├── result.py ├── thttp.py └── ready.py ├── MANIFEST.in ├── .gitignore ├── pyproject.toml ├── tox.ini ├── SECURITY.txt ├── test ├── test_status.py ├── test_dns.py ├── test_leaky_headers.py ├── test_report_to.py ├── test_redirect.py ├── test_swagger.py ├── test_ns.py ├── test_corp_coop_coep.py ├── test_cookies.py ├── test_content.py ├── test_hsts.py ├── test_well_known.py ├── test_csp.py ├── test_bad_response.py └── test_html.py ├── LICENSE ├── setup.cfg ├── .github └── workflows │ ├── codeql-analysis.yml │ ├── publish.yml │ └── ci.yml └── README.md /ready/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ready/checks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md LICENSE -------------------------------------------------------------------------------- /ready/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sesh/ready/main/ready/__init__.pyc -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | ready.egg-info 3 | __pycache__ 4 | .mypy_cache 5 | out 6 | many.py 7 | domains.txt 8 | Pipfile.lock 
-------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.ruff] 2 | line-length = 129 3 | 4 | [tool.black] 5 | line-length = 129 6 | 7 | [tool.isort] 8 | profile = "black" 9 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | requires = tox>=4 3 | env_list = py{39,310,311,312} 4 | 5 | [testenv] 6 | description = run unit tests 7 | commands = python -m unittest discover test 8 | -------------------------------------------------------------------------------- /SECURITY.txt: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | Only the latest PyPI release is supported. 6 | This library is small, you should audit the code and ensure that it's of a similar quality to the rest of the code in your project. 7 | 8 | ## Reporting a Vulnerability 9 | 10 | Security issues can be reported to security@brntn.me. 
# Check: Response should be a 200 (after redirects)
def check_http_response_should_be_200(responses, **kwargs):
    """Verify the final HTTPS response (after redirects) has status 200.

    ``responses["response"]`` may be None/falsy when the request failed
    entirely; the message must still be buildable in that case.
    Returns a ReadyResult via result().
    """
    response = responses["response"]

    # Use getattr with a default: the f-string below is evaluated even when
    # the pass/fail condition short-circuits, and the original two-argument
    # getattr(None, "status") raised AttributeError for a missing response.
    status = getattr(response, "status", None)
    url = getattr(response, "url", None)

    return result(
        response is not None and status == 200,
        f"Response should be a 200 ({status} - {url})",
        "https_status",
        **kwargs,
    )
# Check: HTTP -> HTTPS redirection occurs
def check_http_to_https_redirect(responses, **kwargs):
    """Confirm that a plain-HTTP request ultimately landed on an https:// URL."""
    http_response = responses["http_response"]

    # Guard clause: no HTTP response was captured at all, so we cannot tell
    # whether a redirect happens — report a failure, but only as a warning.
    if not http_response:
        return result(
            False,
            f"HTTP -> HTTPS redirection (no HTTP response)",
            "redirect_http",
            warn_on_fail=True,
            **kwargs,
        )

    final_url = http_response.url
    return result(
        final_url.startswith("https://"),
        f"HTTP -> HTTPS redirection ({final_url})",
        "redirect_http",
        **kwargs,
    )
self.assertTrue(result.passed) 12 | 13 | r = Response(None, "", None, 499, None, {}, None) 14 | result = check_http_response_should_be_200({"response": r}, print_output=False) 15 | self.assertFalse(result.passed) 16 | -------------------------------------------------------------------------------- /test/test_dns.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from ready.checks.dns import check_aaaa_record_exists 4 | from ready.thttp import Response 5 | 6 | 7 | class DnsChecksTestCase(TestCase): 8 | def test_check_aaaa_record_exists(self): 9 | r = Response(None, "", {"Answer": [{"data": ""}]}, 200, None, {}, None) 10 | 11 | result = check_aaaa_record_exists({"dns_aaaa_response": r}, print_output=False) 12 | self.assertTrue(result.passed) 13 | 14 | r = Response(None, "", {"Answer": []}, 200, None, {}, None) 15 | 16 | result = check_aaaa_record_exists({"dns_aaaa_response": r}, print_output=False) 17 | self.assertFalse(result.passed) 18 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2021 Brenton Cleeland 2 | 3 | Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. 4 | 5 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
6 | -------------------------------------------------------------------------------- /test/test_leaky_headers.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from ready.checks.leaky_headers import check_should_not_include_leaky_headers 4 | from ready.thttp import Response 5 | 6 | 7 | class LeakyHeadersChecksTestCase(TestCase): 8 | def test_check_should_not_include_leaky_headers(self): 9 | r = Response(None, "", None, None, None, {}, None) 10 | result = check_should_not_include_leaky_headers({"response": r}, print_output=False) 11 | self.assertTrue(result.passed) 12 | 13 | r = Response(None, "", None, None, None, {"x-hosted-by": "22.11"}, None) 14 | result = check_should_not_include_leaky_headers({"response": r}, print_output=False) 15 | self.assertFalse(result.passed) 16 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = ready-check 3 | version = 1.7.0 4 | author = Brenton Cleeland 5 | author_email = brenton@brntn.me 6 | description = A developer-friendly web scanning tool 7 | long_description = file: README.md 8 | long_description_content_type = text/markdown 9 | url = https://github.com/sesh/ready 10 | project_urls = 11 | Bug Tracker = https://github.com/sesh/ready/issues 12 | classifiers = 13 | Programming Language :: Python :: 3 14 | 15 | [options] 16 | packages = find: 17 | python_requires = >=3.9 18 | install_requires = 19 | thttp 20 | tld 21 | beautifulsoup4 22 | cryptography 23 | 24 | [options.entry_points] 25 | console_scripts = 26 | ready = ready:ready.cli 27 | 28 | 29 | [coverage:run] 30 | omit = 31 | ready/thttp.py 32 | -------------------------------------------------------------------------------- /test/test_report_to.py: -------------------------------------------------------------------------------- 1 | from unittest import 
TestCase 2 | 3 | from ready.checks.report_to import check_report_to_header_must_not_be_included_in_response 4 | from ready.thttp import Response 5 | 6 | 7 | class ReportToChecksTestCase(TestCase): 8 | def test_check_report_to_header_must_not_be_included_in_response(self): 9 | r = Response(None, "", None, None, None, {}, None) 10 | result = check_report_to_header_must_not_be_included_in_response({"response": r}, print_output=False) 11 | self.assertTrue(result.passed) 12 | 13 | r = Response(None, "", None, None, None, {"report-to": "some-value"}, None) 14 | result = check_report_to_header_must_not_be_included_in_response({"response": r}, print_output=False) 15 | self.assertFalse(result.passed) 16 | -------------------------------------------------------------------------------- /test/test_redirect.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from ready.checks.redirect import check_http_to_https_redirect 4 | from ready.thttp import Response 5 | 6 | 7 | class RedirectChecksTestCase(TestCase): 8 | def test_check_http_to_https_redirect(self): 9 | r = Response(None, "", None, 200, "https://ready.invalid", {}, None) 10 | result = check_http_to_https_redirect({"http_response": r}, print_output=False) 11 | self.assertTrue(result.passed) 12 | 13 | r = Response(None, "", None, 200, "http://ready.invalid", {}, None) 14 | result = check_http_to_https_redirect({"http_response": r}, print_output=False) 15 | self.assertFalse(result.passed) 16 | 17 | result = check_http_to_https_redirect({"http_response": None}, print_output=False) 18 | self.assertFalse(result.passed) 19 | -------------------------------------------------------------------------------- /test/test_swagger.py: -------------------------------------------------------------------------------- 1 | from ready.checks.swagger import check_swagger_should_not_return_200, SWAGGER_PATHS 2 | from unittest import TestCase 3 | 4 | 5 | from ready.thttp 
class ReadyResult:
    """Plain value object describing the outcome of a single check."""

    def __init__(self, passed, message, check, warn_on_fail, *, domain=None):
        self.passed = passed                # truthy when the check succeeded
        self.message = message              # human-readable one-line summary
        self.check = check                  # short machine identifier of the check
        self.warn_on_fail = warn_on_fail    # failure is a warning, not an error
        self.domain = domain                # domain the check ran against, if known


def result(
    passed,
    message,
    check,
    *,
    warn_on_fail=False,
    colour=True,
    print_output=True,
    **kwargs,
):
    """Build a ReadyResult and optionally print a coloured status line.

    Extra keyword arguments are accepted so callers can pass through
    check-wide options; only ``domain`` is consumed here.
    """
    red, green, yellow, reset = (
        ("\033[0;31m", "\033[0;32m", "\033[0;33m", "\033[0m") if colour else ("", "", "", "")
    )

    if passed:
        prefix = f"[ {green}OK{reset} ]"
    elif warn_on_fail:
        prefix = f"[{yellow}WARN{reset}]"
    else:
        prefix = f"[{red}FAIL{reset}]"

    if print_output:  # pragma: no cover
        print(f"{prefix} {message}")

    return ReadyResult(passed, message, check, warn_on_fail, domain=kwargs.get("domain"))
-------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from ready.checks.ns import check_at_least_two_nameservers_configured 4 | from ready.thttp import Response 5 | 6 | 7 | class NameserversTestCase(TestCase): 8 | def test_subdomain(self): 9 | # this should make an additional dns request 10 | responses = { 11 | "dns_ns_response": Response(None, None, {"Answer": []}, None, None, None, None), 12 | "dns_ns_response_fld": Response( 13 | None, 14 | None, 15 | { 16 | "Answer": [ 17 | {"data": "ns1.example.com"}, 18 | {"data": "ns2.example.com"}, 19 | {"data": "ns3.example.com"}, 20 | {"data": "ns3.example.com"}, 21 | ] 22 | }, 23 | None, 24 | None, 25 | None, 26 | None, 27 | ), 28 | } 29 | result = check_at_least_two_nameservers_configured(responses, domain="subdomain.example.com", print_output=False) 30 | self.assertTrue(result.passed) 31 | -------------------------------------------------------------------------------- /ready/checks/bad_response.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from ready.result import result 4 | 5 | 6 | # Check: Response should not contain hints of a Cloudflare captcha page 7 | def check_bad_response_cloudflare(responses, **kwargs): 8 | return result( 9 | 'div id="cf-content"' not in responses["response"].content.decode(errors="ignore"), 10 | f"Response should not contain hints of a Cloudflare captcha page", 11 | "bad_cloudflare", 12 | warn_on_fail=True, 13 | **kwargs, 14 | ) 15 | 16 | 17 | # Check: Response should not contain hints of a Kasada error page 18 | def check_bad_response_kasada(responses, **kwargs): 19 | uuid_pattern = r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" 20 | failing = False 21 | 22 | if responses["response"].status == 429: 23 | if re.search(uuid_pattern + r"/" + uuid_pattern, responses["response"].content.decode(errors="ignore")): 24 | failing = True 25 | 26 | return 
SWAGGER_PATHS = [
    "core/latest/swagger-ui/index.html",
    "csp/gateway/slc/api/swagger-ui.html",
    "swagger",
    "swagger-resources",
    "swagger-ui",
    "swagger-ui.html",
    "swagger.json",
    "swagger.yaml",
    "swagger/index.html",
    "swagger/swagger-ui.htm",
    "swagger/swagger-ui.html",
    "swagger/ui",
    "swagger/v1/swagger.json",
    "swaggerui",
]


# Check: Swagger URLs should not return 200 (requires --fuzz)
def check_swagger_should_not_return_200(responses, **kwargs):
    """Probe well-known Swagger/OpenAPI UI paths and fail if any responds 2xx.

    Issues one extra request per path in SWAGGER_PATHS, resolved against the
    final URL of the main response. Returns a ReadyResult via result().
    """
    url = responses["response"].url

    swagger_responses = []
    for path in SWAGGER_PATHS:
        response = request(urljoin(url, path))
        # Any 2xx status is treated as an exposed endpoint, not just 200.
        if response.status < 299:
            swagger_responses.append(response)

    return result(
        len(swagger_responses) == 0,
        f"Swagger URLs should not return 200 ({[(r.url, r.status) for r in swagger_responses]})",
        # Bug fix: the check id was "cors_header_exists", copied from an
        # unrelated check; use an identifier that describes this check.
        "swagger",
        **kwargs,
    )
# WIP: GraphQL introspection should not be enabled (requires --fuzz)
def check_graphql_introspection_should_not_be_enabled(requests, **kwargs):
    """Work in progress: probe common GraphQL endpoints for introspection.

    Currently only defines the candidate paths and returns None; no requests
    are made yet.
    """
    # https://owasp.org/www-project-web-security-testing-guide/latest/4-Web_Application_Security_Testing/12-API_Testing/01-Testing_GraphQL#introspection-queries
    # https://the-bilal-rizwan.medium.com/graphql-common-vulnerabilities-how-to-exploit-them-464f9fdce696
    #
    # Bug fix: the original list had no commas between the string literals,
    # so implicit concatenation collapsed all the paths into a single
    # meaningless string.
    paths = [
        "graphql",
        "api",
        "api/graphql",
        "api/graphiql",
        "graphiql",
        "v1/graphql",
        "v2/graphql",
        "v3/graphql",
        "v1/graphiql",
        "v2/graphiql",
        "v3/graphiql",
        "console",
        "playground",
        "gql",
        "index.php%3Fgraphql",
        "graphql/",
        "api/",
        "api/graphql/",
        "api/graphiql/",
        "graphiql/",
        "v1/graphql/",
        "v2/graphql/",
        "v3/graphql/",
        "v1/graphiql/",
        "v2/graphiql/",
        "v3/graphiql/",
        "console/",
        "playground/",
        "gql",
    ]
11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ main ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ main ] 20 | schedule: 21 | - cron: '26 3 * * 6' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'python' ] 36 | 37 | steps: 38 | - name: Checkout repository 39 | uses: actions/checkout@v2 40 | 41 | - name: Initialize CodeQL 42 | uses: github/codeql-action/init@v2 43 | with: 44 | languages: ${{ matrix.language }} 45 | 46 | - name: Perform CodeQL Analysis 47 | uses: github/codeql-action/analyze@v2 48 | -------------------------------------------------------------------------------- /ready/checks/cookies.py: -------------------------------------------------------------------------------- 1 | from ready.result import result 2 | 3 | 4 | # Check: Cookies should set the SameSite flag 5 | def check_cookies_should_be_samesite(responses, **kwargs): 6 | cookies = responses["response"].headers.get("set-cookie", "") 7 | cookie_note = cookies or "no cookie set" 8 | 9 | return result( 10 | not cookies or "samesite=" in cookies.lower(), 11 | f"Cookies should set the SameSite flag ({cookie_note})", 12 | "cookies_samesite", 13 | **kwargs, 14 | ) 15 | 16 | 17 | # Check: Cookies should set the Secure flag 18 | def check_cookies_should_be_secure(responses, **kwargs): 19 | cookies = responses["response"].headers.get("set-cookie", "") 20 | cookie_note = cookies or "no cookie set" 21 | 22 | return result( 23 | not cookies or "secure;" in cookies.lower(), 24 | f"Cookies should set the Secure flag ({cookie_note})", 25 | "cookies_secure", 26 | **kwargs, 27 | ) 28 | 29 | 30 | # Check: Cookies should set the HttpOnly flag 31 | def check_cookies_should_be_httponly(responses, **kwargs): 32 | cookies = 
def _cross_origin_header_check(responses, header, pretty_name, expected, check_id, **kwargs):
    """Shared helper: warn unless the given Cross-Origin-* header equals *expected*."""
    headers = responses["response"].headers
    return result(
        headers.get(header, "") == expected,
        f"{pretty_name} header should be {expected} ({headers.get(header)})",
        check_id,
        warn_on_fail=True,
        **kwargs,
    )


# Check: Cross-Origin-Resource-Policy should be "same-origin"
def check_cross_origin_resource_policy_should_be_sameorigin(responses, **kwargs):
    return _cross_origin_header_check(
        responses,
        "cross-origin-resource-policy",
        "Cross-Origin-Resource-Policy",
        "same-origin",
        "http_corp",
        **kwargs,
    )


# Check: cross-origin-opener-policy should be "same-origin"
def check_cross_origin_opener_policy_should_be_sameorigin(responses, **kwargs):
    return _cross_origin_header_check(
        responses,
        "cross-origin-opener-policy",
        "Cross-Origin-Opener-Policy",
        "same-origin",
        "http_coop",
        **kwargs,
    )


# Check: Cross-Origin-Embedder-Policy should be "require-corp"
def check_cross_origin_embedder_policy_should_be_require_corp(responses, **kwargs):
    return _cross_origin_header_check(
        responses,
        "cross-origin-embedder-policy",
        "Cross-Origin-Embedder-Policy",
        "require-corp",
        "http_coep",
        **kwargs,
    )
-------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish Package 2 | 3 | on: 4 | release: 5 | types: [created] 6 | workflow_dispatch: 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | python-version: ["3.9", "3.10", "3.11"] 14 | 15 | steps: 16 | - uses: actions/checkout@v3 17 | 18 | - name: Set up Python ${{ matrix.python-version }} 19 | uses: actions/setup-python@v4 20 | with: 21 | python-version: ${{ matrix.python-version }} 22 | 23 | - name: Install dependencies 24 | run: | 25 | python -m pip install tld beautifulsoup4 26 | 27 | - name: Test with unittest 28 | run: | 29 | python -m unittest discover test 30 | 31 | pypi-publish: 32 | name: Upload release to PyPI 33 | runs-on: ubuntu-latest 34 | needs: [test] 35 | environment: 36 | name: pypi 37 | url: https://pypi.org/p/ready-check 38 | permissions: 39 | id-token: write 40 | 41 | steps: 42 | - uses: actions/checkout@v3 43 | 44 | - name: Set up Python 3.11 45 | uses: actions/setup-python@v4 46 | with: 47 | python-version: 3.11 48 | 49 | - name: Install test dependencies 50 | run: | 51 | pip install -e '.' 
52 | 53 | - name: Install publishing dependencies 54 | run: | 55 | pip install setuptools wheel twine build 56 | 57 | - name: Build python package 58 | run: | 59 | python -m build 60 | 61 | - name: Publish package to PyPI 62 | uses: pypa/gh-action-pypi-publish@release/v1 63 | -------------------------------------------------------------------------------- /ready/checks/leaky_headers.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from ready.result import result 4 | 5 | LEAKY_HEADERS = [ 6 | "apigw-requestid", 7 | "cdn-cache", 8 | "cf-edge-cache", 9 | "fastly-debug-states", 10 | "fly-request-id", 11 | "ghost-fastly", 12 | "served-by", 13 | "server", 14 | "x-appversion", 15 | "x-aspnet-version", 16 | "x-aspnetmvc-version", 17 | "x-backend-name", 18 | "x-backend-server", 19 | "x-backend", 20 | "x-build-id", 21 | "x-build", 22 | "x-cache-info", 23 | "x-cache-key", 24 | "x-cache-rule", 25 | "x-cached-by", 26 | "x-cdn-rule", 27 | "x-cdn", 28 | "x-cf-worker", 29 | "x-client-ip", 30 | "x-diaspora-version", 31 | "x-drupal-theme", 32 | "x-fastly-request-id", 33 | "x-fw-version", 34 | "x-generator", 35 | "x-github-backend", 36 | "x-hosted-by", 37 | "x-httpd", 38 | "x-kinja-revision", 39 | "x-lambda-id", 40 | "x-last-commmit-hash", 41 | "x-litespeed-cache", 42 | "x-nextjs-page", 43 | "x-nodejs", 44 | "x-origin-server", 45 | "x-powered-by-plesk", 46 | "x-powered-by", 47 | "x-powered", 48 | "x-protected-by", 49 | "x-provided-by", 50 | "x-section", 51 | "x-server-powered-by", 52 | "x-server", 53 | "x-tumblr-user", 54 | "x-varnish", 55 | "x-vercel-id", 56 | "x-version", 57 | "via", 58 | ] 59 | 60 | 61 | # Check: Headers that leak information should not be in the response 62 | def check_should_not_include_leaky_headers(responses, **kwargs): 63 | leaky = [x for x in LEAKY_HEADERS if re.search(r"\d\.\d", responses["response"].headers.get(x, "")) != None] 64 | return result( 65 | len(leaky) == 0, 66 | f"Headers that leak 
information should not be in the response ({leaky})", 67 | "leaky_headers", 68 | **kwargs, 69 | ) 70 | -------------------------------------------------------------------------------- /test/test_corp_coop_coep.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from ready.checks.corp_coop_coep import ( 4 | check_cross_origin_resource_policy_should_be_sameorigin, 5 | check_cross_origin_opener_policy_should_be_sameorigin, 6 | check_cross_origin_embedder_policy_should_be_require_corp, 7 | ) 8 | from ready.thttp import Response 9 | 10 | 11 | class CorpCoopCoepChecksTestCase(TestCase): 12 | def test_check_cross_origin_resource_policy_should_be_sameorigin(self): 13 | r = Response( 14 | None, 15 | "", 16 | None, 17 | 200, 18 | None, 19 | { 20 | "cross-origin-resource-policy": "same-origin", 21 | }, 22 | None, 23 | ) 24 | result = check_cross_origin_resource_policy_should_be_sameorigin({"response": r}, print_output=False) 25 | self.assertTrue(result.passed) 26 | 27 | r = Response(None, "", None, 200, None, {}, None) 28 | result = check_cross_origin_resource_policy_should_be_sameorigin({"response": r}, print_output=False) 29 | self.assertFalse(result.passed) 30 | 31 | def test_check_cross_origin_opener_policy_should_be_sameorigin(self): 32 | r = Response( 33 | None, 34 | "", 35 | None, 36 | 200, 37 | None, 38 | { 39 | "cross-origin-opener-policy": "same-origin", 40 | }, 41 | None, 42 | ) 43 | result = check_cross_origin_opener_policy_should_be_sameorigin({"response": r}, print_output=False) 44 | self.assertTrue(result.passed) 45 | 46 | r = Response(None, "", None, 200, None, {}, None) 47 | result = check_cross_origin_opener_policy_should_be_sameorigin({"response": r}, print_output=False) 48 | self.assertFalse(result.passed) 49 | 50 | def test_check_cross_origin_embedder_policy_should_be_require_corp(self): 51 | r = Response( 52 | None, 53 | "", 54 | None, 55 | 200, 56 | None, 57 | { 58 | 
"cross-origin-embedder-policy": "require-corp", 59 | }, 60 | None, 61 | ) 62 | result = check_cross_origin_embedder_policy_should_be_require_corp({"response": r}, print_output=False) 63 | self.assertTrue(result.passed) 64 | 65 | r = Response(None, "", None, 200, None, {}, None) 66 | result = check_cross_origin_embedder_policy_should_be_require_corp({"response": r}, print_output=False) 67 | self.assertFalse(result.passed) 68 | -------------------------------------------------------------------------------- /test/test_cookies.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from ready.checks.cookies import ( 4 | check_cookies_should_be_samesite, 5 | check_cookies_should_be_secure, 6 | check_cookies_should_be_httponly, 7 | ) 8 | from ready.thttp import Response 9 | 10 | 11 | # logged_in=no; Path=/; Domain=github.com; Expires=Mon, 10 Mar 2025 21:34:03 GMT; HttpOnly; Secure; SameSite=Lax 12 | 13 | 14 | class ContentChecksTestCase(TestCase): 15 | def test_check_cookies_should_be_samesite(self): 16 | r = Response(None, "", None, 200, None, {}, None) 17 | result = check_cookies_should_be_samesite({"response": r}, print_output=False) 18 | self.assertTrue(result.passed) 19 | 20 | r = Response(None, "", None, 200, None, {"set-cookie": "admin=no; Path=/; Domain=ready.invalid; SameSite=Lax"}, None) 21 | result = check_cookies_should_be_samesite({"response": r}, print_output=False) 22 | self.assertTrue(result.passed) 23 | 24 | r = Response(None, "", None, 200, None, {"set-cookie": "admin=no; Path=/; Domain=ready.invalid"}, None) 25 | result = check_cookies_should_be_samesite({"response": r}, print_output=False) 26 | self.assertFalse(result.passed) 27 | 28 | def test_check_cookies_should_be_secure(self): 29 | r = Response(None, "", None, 200, None, {}, None) 30 | result = check_cookies_should_be_secure({"response": r}, print_output=False) 31 | self.assertTrue(result.passed) 32 | 33 | r = Response(None, 
# Check: HSTS Header should have a long max-age
def check_hsts_header_should_have_a_long_max_age(responses, **kwargs):
    """Pass when the Strict-Transport-Security max-age is at least one year.

    Fails when the header is missing, contains no parseable max-age
    directive, or the max-age is shorter than 31536000 seconds (365 days).
    """
    hsts = responses["response"].headers.get("strict-transport-security", "")

    # Bug fix: the original used re.match, which only matches at the start of
    # the string, so a valid header such as
    # "includeSubDomains; max-age=63072000" was incorrectly failed.
    # re.search finds the directive anywhere in the header. This also replaces
    # the previous bare-except / raise-Exception control flow (which could hit
    # an unbound `hsts` in the except branch) with a plain boolean check.
    m = re.search(r"max-age=(\d+)", hsts, re.IGNORECASE)
    long_enough = m is not None and int(m.group(1)) >= 31536000

    return result(
        long_enough,
        f"HSTS Header should have a long max-age ({hsts})",
        "ssl_hsts_duration",
        **kwargs,
    )
See: https://github.com/sesh/ready/issues/22 63 | if responses.get("response_fld"): 64 | hsts = responses["response_fld"].headers.get("strict-transport-security", "") + " (from apex domain)" 65 | 66 | return result( 67 | "preload" in hsts.lower() and "includesubdomains" in hsts.lower(), 68 | f"HSTS Header should have preload and includeSubDomains ({hsts})", 69 | "ssl_hsts_preload", 70 | **kwargs, 71 | ) 72 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: CI 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | jobs: 13 | test: 14 | runs-on: ubuntu-latest 15 | strategy: 16 | matrix: 17 | python-version: ["3.9", "3.10", "3.11", "3.12"] 18 | 19 | steps: 20 | - uses: actions/checkout@v4 21 | 22 | - name: Set up Python ${{ matrix.python-version }} 23 | uses: actions/setup-python@v4 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | 27 | - name: Install dependencies 28 | run: | 29 | python -m pip install tld beautifulsoup4 cryptography 30 | 31 | - name: Test with unittest 32 | run: | 33 | python -m unittest discover test 34 | 35 | test_without_dependencies: 36 | runs-on: ubuntu-latest 37 | 38 | steps: 39 | - uses: actions/checkout@v4 40 | 41 | - name: Set up Python ${{ matrix.python-version }} 42 | uses: actions/setup-python@v4 43 | with: 44 | python-version: 3 45 | 46 | - name: Test with unittest 47 | run: | 48 | python -m unittest discover test 49 | 50 | coverage: 51 | runs-on: ubuntu-latest 52 | 53 | steps: 54 | - uses: actions/checkout@v4 55 | 56 | - name: Set up Python ${{ matrix.python-version }} 57 | uses: 
actions/setup-python@v4 58 | with: 59 | python-version: 3 60 | 61 | - name: Install coverage 62 | run: | 63 | python -m pip install coverage httpie tld beautifulsoup4 cryptography 64 | 65 | - name: Run coverage 66 | run: | 67 | python -m coverage run -m unittest discover test 68 | coverage report -m 69 | coverage json 70 | jq '.totals.percent_covered' coverage.json > coverage.txt 71 | cat coverage.txt 72 | 73 | bandit: 74 | runs-on: ubuntu-latest 75 | 76 | steps: 77 | - uses: actions/checkout@v4 78 | 79 | - name: Set up Python 80 | uses: actions/setup-python@v4 81 | with: 82 | python-version: 3 83 | 84 | - name: Install bandit 85 | run: | 86 | python -m pip install bandit 87 | 88 | - name: Run bandit scan 89 | run: | 90 | bandit -r . 91 | 92 | black: 93 | runs-on: ubuntu-latest 94 | 95 | steps: 96 | - uses: actions/checkout@v4 97 | 98 | - name: Set up Python 99 | uses: actions/setup-python@v4 100 | with: 101 | python-version: 3 102 | 103 | - name: Install black 104 | run: | 105 | python -m pip install black 106 | 107 | - name: Run black 108 | run: | 109 | black --check . 
110 | -------------------------------------------------------------------------------- /ready/checks/content.py: -------------------------------------------------------------------------------- 1 | from ready.result import result 2 | 3 | 4 | # Check: Response should include a Content-Type 5 | def check_http_response_should_include_content_type(responses, **kwargs): 6 | return result( 7 | responses["response"].headers.get("content-type") != None, 8 | f"Response should include a Content-Type ({responses['response'].headers.get('content-type')})", 9 | "http_content_type", 10 | **kwargs, 11 | ) 12 | 13 | 14 | # Check: Response should be gzipped 15 | def check_http_response_should_be_gzipped(responses, **kwargs): 16 | return result( 17 | "gzip" in responses["response"].headers.get("content-encoding", ""), 18 | f"Response should be gzipped ({responses['response'].headers.get('content-encoding', '')})", 19 | "http_gzipped", 20 | **kwargs, 21 | ) 22 | 23 | 24 | # Check: Content-Type header should contain charset 25 | def check_http_content_type_header_contains_charset(responses, **kwargs): 26 | return result( 27 | "charset=" in responses["response"].headers.get("content-type", ""), 28 | f'Content-Type header should contain charset ({responses["response"].headers.get("content-type", "")})', 29 | "http_charset", 30 | **kwargs, 31 | ) 32 | 33 | 34 | # Check: Expires header should not be used without Cache-Control 35 | def check_http_expires_header_not_used_without_cache_control(responses, **kwargs): 36 | # see: https://github.com/sesh/ready/issues/24 37 | # nginx sets Cache-Control and Expires on documents 38 | # this test should: 39 | # - pass if Expires is unset 40 | # - pass if both Expires and Cache-Control are set 41 | # - fail in all other scenarios 42 | 43 | expires = responses["response"].headers.get("expires") 44 | cache_control = responses["response"].headers.get("cache-control") 45 | 46 | check_passed = False 47 | if not expires or (expires and cache_control): 48 | 
check_passed = True 49 | 50 | return result( 51 | check_passed, 52 | f"Expires header should not be used without Cache-Control (Expires: {expires}, Cache-Control: {cache_control})", 53 | "http_expires", 54 | **kwargs, 55 | ) 56 | 57 | 58 | # Check: Cache-Control header should be included in the response 59 | def check_http_cache_control_is_included(responses, **kwargs): 60 | return result( 61 | "cache-control" in responses["response"].headers, 62 | f"Cache-Control header should be included in the response ({responses['response'].headers.get('cache-control')})", 63 | "http_expires", 64 | **kwargs, 65 | ) 66 | 67 | 68 | # Check: P3P header is deprecated and should not be returned 69 | def check_http_p3p_header_is_not_set(responses, **kwargs): 70 | return result( 71 | "p3p" not in responses["response"].headers, 72 | f"P3P header is deprecated and should not be returned ({responses['response'].headers.get('p3p')})", 73 | "http_p3p", 74 | **kwargs, 75 | ) 76 | -------------------------------------------------------------------------------- /ready/checks/well_known.py: -------------------------------------------------------------------------------- 1 | from ready.result import result 2 | import datetime 3 | 4 | 5 | def get_utc_time(): 6 | try: 7 | return datetime.datetime.now(datetime.UTC) 8 | except Exception as e: # pragma: no cover 9 | # python <= 3.10 10 | print(e) 11 | return datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) 12 | 13 | 14 | # Check: Robots.txt exists and is a text file 15 | def check_robots_txt_exists(responses, **kwargs): 16 | robots_response = responses["robots_txt_response"] 17 | 18 | return result( 19 | robots_response and robots_response.status == 200 and "text/plain" in robots_response.headers.get("content-type", ""), 20 | "Robots.txt exists and is a text file", 21 | "wellknown_robots", 22 | **kwargs, 23 | ) 24 | 25 | 26 | # Check: Security.txt exists and is a text file that contains required attributes 27 | def 
check_security_txt_exists(responses, **kwargs): 28 | security_txt_response = responses["security_txt_response"] 29 | 30 | return result( 31 | security_txt_response 32 | and security_txt_response.status == 200 33 | and "text/plain" in security_txt_response.headers.get("content-type", "") 34 | and b"Contact:" in security_txt_response.content 35 | and b"Expires:" in security_txt_response.content, 36 | "Security.txt exists and is a text file that contains required attributes", 37 | "wellknown_security", 38 | **kwargs, 39 | ) 40 | 41 | 42 | # Check: Security.txt has an expiry date in the future 43 | def check_security_txt_not_expired(responses, **kwargs): 44 | security_txt_response = responses["security_txt_response"] 45 | 46 | for line in security_txt_response.content.splitlines(): 47 | line = line.decode() 48 | 49 | if line.strip().startswith("Expires:"): 50 | date = line.replace("Expires:", "").strip() 51 | 52 | if date.endswith("Z"): 53 | # required for 3.9 / 3.10 support, fromisoformat was updated in 3.11 to support a wider range of values 54 | date = date.replace("Z", "+00:00") 55 | try: 56 | dt = datetime.datetime.fromisoformat(date.upper()) 57 | 58 | return result( 59 | dt > get_utc_time(), 60 | f"Security.txt has an expiry date in the future ({dt})", 61 | "wellknown_security_not_expired", 62 | **kwargs, 63 | ) 64 | except Exception as e: 65 | print(e) 66 | break 67 | 68 | return result( 69 | False, 70 | "Security.txt has an expiry date in the future (missing file or expires line)", 71 | "wellknown_security_not_expired", 72 | **kwargs, 73 | ) 74 | 75 | 76 | # Check: Favicon is served at /favicon.ico 77 | def check_favicon_is_served(responses, **kwargs): 78 | favicon_response = responses["favicon_response"] 79 | return result( 80 | favicon_response 81 | and favicon_response.status == 200 82 | and favicon_response.headers.get("content-type", "").startswith("image/"), 83 | "Favicon is served at /favicon.ico", 84 | "wellknown_favicon", 85 | **kwargs, 86 | ) 87 | 
-------------------------------------------------------------------------------- /test/test_content.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from ready.checks.content import ( 4 | check_http_response_should_include_content_type, 5 | check_http_response_should_be_gzipped, 6 | check_http_content_type_header_contains_charset, 7 | check_http_cache_control_is_included, 8 | check_http_p3p_header_is_not_set, 9 | check_http_expires_header_not_used_without_cache_control, 10 | ) 11 | from ready.thttp import Response 12 | 13 | 14 | class ContentChecksTestCase(TestCase): 15 | def test_check_http_response_should_include_content_type(self): 16 | r = Response(None, "", None, 200, None, {"content-type": "example/example"}, None) 17 | result = check_http_response_should_include_content_type({"response": r}, print_output=False) 18 | self.assertTrue(result.passed) 19 | 20 | r = Response(None, "", None, 200, None, {}, None) 21 | result = check_http_response_should_include_content_type({"response": r}, print_output=False) 22 | self.assertFalse(result.passed) 23 | 24 | def test_check_http_response_should_be_gzipped(self): 25 | r = Response(None, "", None, 200, None, {"content-encoding": "gzip; example"}, None) 26 | result = check_http_response_should_be_gzipped({"response": r}, print_output=False) 27 | self.assertTrue(result.passed) 28 | 29 | r = Response(None, "", None, 200, None, {}, None) 30 | result = check_http_response_should_be_gzipped({"response": r}, print_output=False) 31 | self.assertFalse(result.passed) 32 | 33 | def test_check_http_content_type_header_contains_charset(self): 34 | r = Response(None, "", None, 200, None, {"content-type": "example/example; charset=utf-8"}, None) 35 | result = check_http_content_type_header_contains_charset({"response": r}, print_output=False) 36 | self.assertTrue(result.passed) 37 | 38 | r = Response(None, "", None, 200, None, {"content-type": 
"example/example"}, None) 39 | result = check_http_content_type_header_contains_charset({"response": r}, print_output=False) 40 | self.assertFalse(result.passed) 41 | 42 | def test_check_http_cache_control_is_included(self): 43 | r = Response(None, "", None, 200, None, {"cache-control": "nocache"}, None) 44 | result = check_http_cache_control_is_included({"response": r}, print_output=False) 45 | self.assertTrue(result.passed) 46 | 47 | r = Response(None, "", None, 200, None, {}, None) 48 | result = check_http_cache_control_is_included({"response": r}, print_output=False) 49 | self.assertFalse(result.passed) 50 | 51 | def test_check_http_p3p_header_is_not_set(self): 52 | r = Response(None, "", None, 200, None, {}, None) 53 | result = check_http_p3p_header_is_not_set({"response": r}, print_output=False) 54 | self.assertTrue(result.passed) 55 | 56 | r = Response(None, "", None, 200, None, {"p3p": "some-value"}, None) 57 | result = check_http_p3p_header_is_not_set({"response": r}, print_output=False) 58 | self.assertFalse(result.passed) 59 | 60 | def test_check_http_expires_header_not_used_without_cache_control(self): 61 | r = Response(None, "", None, 200, None, {"cache-control": "maxage=20"}, None) 62 | result = check_http_expires_header_not_used_without_cache_control({"response": r}, print_output=False) 63 | self.assertTrue(result.passed) 64 | 65 | r = Response(None, "", None, 200, None, {"cache-control": "maxage=20", "expires": "Wed, 21 Oct 2015 07:28:00 GMT"}, None) 66 | result = check_http_expires_header_not_used_without_cache_control({"response": r}, print_output=False) 67 | self.assertTrue(result.passed) 68 | 69 | r = Response(None, "", None, 200, None, {"expires": "Wed, 21 Oct 2015 07:28:00 GMT"}, None) 70 | result = check_http_expires_header_not_used_without_cache_control({"response": r}, print_output=False) 71 | self.assertFalse(result.passed) 72 | -------------------------------------------------------------------------------- /test/test_hsts.py: 
-------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from ready.checks.hsts import ( 4 | check_hsts_header_should_be_included_in_response, 5 | check_hsts_header_should_have_a_long_max_age, 6 | check_hsts_header_should_have_includesubdomains, 7 | check_hsts_header_should_have_preload, 8 | ) 9 | from ready.thttp import Response 10 | 11 | 12 | class HstsChecksTestCase(TestCase): 13 | def test_check_hsts_header_should_be_included_in_response(self): 14 | r = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0"}, None) 15 | result = check_hsts_header_should_be_included_in_response({"response": r}, print_output=False) 16 | self.assertTrue(result.passed) 17 | 18 | r = Response(None, "", None, 200, None, {}, None) 19 | result = check_hsts_header_should_be_included_in_response({"response": r}, print_output=False) 20 | self.assertFalse(result.passed) 21 | 22 | def test_check_hsts_header_should_have_a_long_max_age(self): 23 | r = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=31536001"}, None) 24 | result = check_hsts_header_should_have_a_long_max_age({"response": r}, print_output=False) 25 | self.assertTrue(result.passed) 26 | 27 | r = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0"}, None) 28 | result = check_hsts_header_should_have_a_long_max_age({"response": r}, print_output=False) 29 | self.assertFalse(result.passed) 30 | 31 | def test_check_hsts_header_should_have_includesubdomains(self): 32 | r = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0; includeSubDomains"}, None) 33 | result = check_hsts_header_should_have_includesubdomains({"response": r}, print_output=False) 34 | self.assertTrue(result.passed) 35 | 36 | r = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0;"}, None) 37 | r_fld = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0; 
includeSubDomains"}, None) 38 | result = check_hsts_header_should_have_includesubdomains({"response": r, "response_fld": r_fld}, print_output=False) 39 | self.assertTrue(result.passed) 40 | 41 | r = Response(None, "", None, 200, None, {}, None) 42 | r_fld = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0; includeSubDomains"}, None) 43 | result = check_hsts_header_should_have_includesubdomains({"response": r, "response_fld": r_fld}, print_output=False) 44 | self.assertTrue(result.passed) 45 | 46 | r = Response(None, "", None, 200, None, {}, None) 47 | result = check_hsts_header_should_have_includesubdomains({"response": r}, print_output=False) 48 | self.assertFalse(result.passed) 49 | 50 | def test_check_hsts_header_should_have_preload(self): 51 | r = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0; preload; includeSubDomains"}, None) 52 | result = check_hsts_header_should_have_preload({"response": r}, print_output=False) 53 | self.assertTrue(result.passed) 54 | 55 | r = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0;"}, None) 56 | r_fld = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0; preload; includeSubDomains"}, None) 57 | result = check_hsts_header_should_have_preload({"response": r, "response_fld": r_fld}, print_output=False) 58 | self.assertTrue(result.passed) 59 | 60 | r = Response(None, "", None, 200, None, {}, None) 61 | r_fld = Response(None, "", None, 200, None, {"strict-transport-security": "max-age=0; preload; includeSubDomains"}, None) 62 | result = check_hsts_header_should_have_preload({"response": r, "response_fld": r_fld}, print_output=False) 63 | self.assertTrue(result.passed) 64 | 65 | r = Response(None, "", None, 200, None, {}, None) 66 | result = check_hsts_header_should_have_preload({"response": r}, print_output=False) 67 | self.assertFalse(result.passed) 68 | 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 🚀 `ready` is a tool for developers to check how production ready their website is. 2 | 3 | 4 | ## Usage 5 | 6 | The simplest way to quickly check your site is with `uvx`: 7 | 8 | ``` 9 | uvx --from ready-check ready <domain> 10 | ``` 11 | 12 | Alternatively, install the tool from PyPI with: 13 | 14 | ``` 15 | pip install ready-check 16 | ``` 17 | 18 | Running the checks for a domain is as simple as: 19 | 20 | ``` 21 | ready <domain> 22 | ``` 23 | 24 | For more options, check the output of `--help`. 25 | 26 | 27 | ### Usage during development 28 | 29 | If you have cloned the repository and would like to run the checks with your local version, simply run: 30 | 31 | ``` 32 | python3 -m ready.ready <domain> [--request-filter=<filter>] [--check-filter=<filter>] 33 | ``` 34 | 35 | ### Optional Dependencies 36 | 37 | There are no required dependencies, but two optional dependencies that enable some additional behaviour: 38 | 39 | - Installing the `tld` package adds support for using the fully-qualified domain name for some DNS-related checks. This is handy if you want to check a subdomain. 40 | - Installing `beautifulsoup4` adds support for extracting some headers from the HTML document as well as the headers. This technique can be used for sites that use static hosting like Github Pages. 41 | 42 | Note: if you install from PyPI these dependencies are installed.
43 | 44 | 45 | ## Check list 46 | 47 | - Cookies should set the SameSite flag 48 | - Cookies should set the Secure flag 49 | - Cookies should set the HttpOnly flag 50 | - Swagger URLs should not return 200 (requires --fuzz) 51 | - HSTS Header should be included in response 52 | - HSTS Header should have a long max-age 53 | - HSTS Header should have includeSubdomains 54 | - HSTS Header should have preload 55 | - An AAAA DNS record exists (IPv6 Support) 56 | - HTTP -> HTTPS redirection occurs 57 | - Permissions-Policy should exist if the response is HTML 58 | - frame-ancestors should be in CSP or X-Frame-Options should exist if the response is HTML 59 | - X-Content-Type-options should be "nosniff" 60 | - Referrer-Policy should be set 61 | - X-XSS-Protection header should not exist 62 | - HTML should start with "<!doctype html>" 63 | - `<html>` tag should include lang 64 | - HTML should include meta charset tag 65 | - HTML should include `<title>` 66 | - HTML should include link with rel="icon" 67 | - HTML should not use schemeless urls for links or hrefs 68 | - HTML should not use unnecessary HTML entities 69 | - All script tags should use subresource integrity 70 | - X-DNS-Prefetch-Control should be set to off 71 | - CDNs should not be used for Javascript or CSS assets 72 | - RSS and JSON feeds should return Access-Control-Allow-Origin header 73 | - Cache-Control max-age should be <= 86400 for HTML documents 74 | - Content-Security-Policy header should exist 75 | - Content-Security-Policy header should start with default-src 'none' 76 | - Content-Security-Policy must include either default-src or script-src 77 | - Content-Security-Policy header must not include unsafe-inline 78 | - Content-Security-Policy header must not include unsafe-eval 79 | - Content-Security-Policy header must not include report-sample 80 | - Content-Security-Policy header must not include report-uri 81 | - Content-Security-Policy header should not include report-to 82 | - Content-Security-Policy header should include
upgrade-insecure-requests 83 | - Content-Security-Policy header only includes valid directives 84 | - At least two nameservers should be configured 85 | - Cross-Origin-Resource-Policy should be "same-origin" 86 | - cross-origin-opener-policy should be "same-origin" 87 | - Cross-Origin-Embedder-Policy should be "require-corp" 88 | - Report-To Header must not be included in response 89 | - Response should not contain hints of a Cloudflare captcha page 90 | - Response should not contain hints of a Kasada error page 91 | - Response should include a Content-Type 92 | - Response should be gzipped 93 | - Content-Type header should contain charset 94 | - Expires header should not be used without Cache-Control 95 | - Cache-Control header should be included in the response 96 | - P3P header is deprecated and should not be returned 97 | - SPF TXT record should exist 98 | - SPF TXT record should contain "-all" 99 | - SPF DNS record is deprecated and should not exist 100 | - SPF includes use less than 10 DNS requests 101 | - DMARC record should exist 102 | - DMARC record should contain p=reject 103 | - SPF should be "v=spf1 -all" if there are no MX records or MX record is "." 
104 | - Robots.txt exists and is a text file 105 | - Security.txt exists and is a text file that contains required attributes 106 | - Security.txt has an expiry date in the future 107 | - Favicon is served at /favicon.ico 108 | - Headers that leak information should not be in the response 109 | - SSL certificate should be trusted 110 | - SSL expiry should be less than one year 111 | - SSL expiry should be greater than five days 112 | - SSL connection fails when using TLS 1.1 113 | - SSL connection fails when using TLS 1.0 114 | - DNS CAA should be enabled 115 | - DNS CAA should include accounturi 116 | - DNS CAA should include validationmethods 117 | - Response should be a 200 (after redirects) 118 | 119 | 120 | 121 | ## Other Tools 122 | 123 | This tool overlaps with a bunch of other online site checking tools. 124 | Here's a few that I have used in the past: 125 | 126 | - https://webhint.io/ 127 | - https://pagespeed.web.dev/ 128 | - https://internet.nl/ 129 | - https://www.ssllabs.com/ssltest/ 130 | - https://securityheaders.com/ 131 | - https://csp-evaluator.withgoogle.com/ 132 | - https://observatory.mozilla.org/ 133 | - https://tools.pingdom.com/ 134 | - https://web-check.xyz/ 135 | -------------------------------------------------------------------------------- /test/test_well_known.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from datetime import timedelta 3 | from ready.checks.well_known import ( 4 | check_robots_txt_exists, 5 | check_security_txt_exists, 6 | check_security_txt_not_expired, 7 | check_favicon_is_served, 8 | get_utc_time, 9 | ) 10 | from ready.thttp import Response 11 | 12 | 13 | class WellKnownChecksTestCase(TestCase): 14 | def test_check_robots_txt_exists(self): 15 | r = Response(None, b"", None, 200, None, {"content-type": "text/plain"}, None) 16 | result = check_robots_txt_exists({"robots_txt_response": r}, print_output=False) 17 | 
self.assertTrue(result.passed) 18 | 19 | r = Response(None, b"", None, 404, None, {}, None) 20 | result = check_robots_txt_exists({"robots_txt_response": r}, print_output=False) 21 | self.assertFalse(result.passed) 22 | 23 | def test_check_security_txt_exists(self): 24 | # Test with valid security.txt response 25 | security_response = Response( 26 | None, 27 | b"Contact: security@ready.invalid\nExpires: 2030-12-31T00:00:00Z", 28 | None, 29 | 200, 30 | None, 31 | {"content-type": "text/plain"}, 32 | None, 33 | ) 34 | result = check_security_txt_exists({"security_txt_response": security_response}, print_output=False) 35 | self.assertTrue(result.passed) 36 | 37 | # Test with None security.txt response 38 | result = check_security_txt_exists({"security_txt_response": None}, print_output=False) 39 | self.assertFalse(result.passed) 40 | 41 | # Test with non-200 status code 42 | security_response = Response(None, b"", None, 404, None, {"content-type": "text/plain"}, None) 43 | result = check_security_txt_exists({"security_txt_response": security_response}, print_output=False) 44 | self.assertFalse(result.passed) 45 | 46 | # Test with incorrect content-type 47 | security_response = Response(None, b"", None, 200, None, {"content-type": "application/json"}, None) 48 | result = check_security_txt_exists({"security_txt_response": security_response}, print_output=False) 49 | self.assertFalse(result.passed) 50 | 51 | # Test with missing required attributes 52 | security_response = Response( 53 | None, b"Contact: security@ready.invalid", None, 200, None, {"content-type": "text/plain"}, None 54 | ) 55 | result = check_security_txt_exists({"security_txt_response": security_response}, print_output=False) 56 | self.assertFalse(result.passed) 57 | 58 | def test_check_favicon_is_served(self): 59 | # Test with valid favicon response 60 | favicon_response = Response(None, b"", None, 200, None, {"content-type": "image/x-icon"}, None) 61 | result = 
check_favicon_is_served({"favicon_response": favicon_response}, print_output=False) 62 | self.assertTrue(result.passed) 63 | 64 | # Test with invalid content-type 65 | favicon_response = Response(None, b"", None, 200, None, {"content-type": "text/html"}, None) 66 | result = check_favicon_is_served({"favicon_response": favicon_response}, print_output=False) 67 | self.assertFalse(result.passed) 68 | 69 | # Test with non-200 status code 70 | favicon_response = Response(None, b"", None, 404, None, {}, None) 71 | result = check_favicon_is_served({"favicon_response": favicon_response}, print_output=False) 72 | self.assertFalse(result.passed) 73 | 74 | # Test with None response 75 | result = check_favicon_is_served({"favicon_response": None}, print_output=False) 76 | self.assertFalse(result.passed) 77 | 78 | def test_check_security_txt_not_expired(self): 79 | # Test with valid future expiry date 80 | future_date = get_utc_time() + timedelta(days=30) 81 | content = f"Expires: {future_date.isoformat()}\n".encode() 82 | security_response = Response(None, content, None, 200, {"content-type": "text/plain"}, {}, None) 83 | result = check_security_txt_not_expired({"security_txt_response": security_response}, print_output=False) 84 | self.assertTrue(result.passed) 85 | 86 | content = f"Expires: 2030-12-31T00:00:00Z\n".encode() 87 | security_response = Response(None, content, None, 200, {"content-type": "text/plain"}, {}, None) 88 | result = check_security_txt_not_expired({"security_txt_response": security_response}, print_output=False) 89 | self.assertTrue(result.passed) 90 | 91 | content = f"Expires: Not a date\n".encode() 92 | security_response = Response(None, content, None, 200, {"content-type": "text/plain"}, {}, None) 93 | result = check_security_txt_not_expired({"security_txt_response": security_response}, print_output=False) 94 | self.assertFalse(result.passed) 95 | 96 | # Test with expired expiry date 97 | past_date = get_utc_time() - timedelta(days=30) 98 | content = 
f"Expires: {past_date.isoformat()}\n".encode() 99 | security_response = Response(None, content, None, 200, {"content-type": "text/plain"}, {}, None) 100 | result = check_security_txt_not_expired({"security_txt_response": security_response}, print_output=False) 101 | self.assertFalse(result.passed) 102 | 103 | # Test with missing expiry date 104 | content = b"Contact: security@example.com\n" 105 | security_response = Response(None, content, None, 200, {"content-type": "text/plain"}, {}, None) 106 | result = check_security_txt_not_expired({"security_txt_response": security_response}, print_output=False) 107 | self.assertFalse(result.passed) 108 | -------------------------------------------------------------------------------- /ready/checks/csp.py: -------------------------------------------------------------------------------- 1 | from ready.result import result 2 | 3 | USE_BS4 = True 4 | 5 | try: 6 | from bs4 import BeautifulSoup 7 | except ImportError: # pragma: no cover 8 | USE_BS4 = False 9 | 10 | 11 | def extract_csp(response): 12 | if "content-security-policy" in response.headers: 13 | return response.headers["content-security-policy"].lower() 14 | 15 | if USE_BS4: 16 | soup = BeautifulSoup(response.content, "html.parser") 17 | meta_tags = soup.find_all("meta") 18 | for t in meta_tags: 19 | if t.attrs.get("http-equiv", "").lower() == "content-security-policy": 20 | return t.attrs.get("content", "").lower() 21 | else: # pragma: no cover 22 | print("No Content-Security-Policy header, and beautifulsoup4 is not installed to inspect HTML") 23 | 24 | return None 25 | 26 | 27 | def _trunc(s, max_length=200): 28 | if not s: 29 | return "" 30 | 31 | if len(s) > max_length: 32 | return s[:max_length] + "..." 
33 | 34 | return s 35 | 36 | 37 | # Check: Content-Security-Policy header should exist 38 | def check_csp_should_exist(responses, **kwargs): 39 | csp = extract_csp(responses["response"]) 40 | 41 | return result( 42 | csp != None and csp != "", 43 | f"Content-Security-Policy header should exist ({_trunc(csp)})", 44 | "csp", 45 | **kwargs, 46 | ) 47 | 48 | 49 | # Check: Content-Security-Policy header should start with default-src 'none' 50 | def check_csp_should_start_with_defaultsrc_none(responses, **kwargs): 51 | csp = extract_csp(responses["response"]) 52 | 53 | return result( 54 | csp != None and csp.startswith("default-src 'none'"), 55 | f"Content-Security-Policy header should start with default-src 'none' ({_trunc(csp)})", 56 | "csp_defaultsrc_none", 57 | **kwargs, 58 | ) 59 | 60 | 61 | # Check: Content-Security-Policy must include either default-src or script-src 62 | def check_csp_includes_default_or_script_directive(responses, **kwargs): 63 | csp = extract_csp(responses["response"]) 64 | 65 | return result( 66 | csp != None and ("default-src" in csp or "script-src" in csp), 67 | f"Content-Security-Policy must include either default-src or script-src ({_trunc(csp)})", 68 | "csp_required_directives", 69 | **kwargs, 70 | ) 71 | 72 | 73 | # Check: Content-Security-Policy header must not include unsafe-inline 74 | # NOTE: this checks everywhere, not just in script-src 75 | def check_csp_must_not_include_unsafe_eval(responses, **kwargs): 76 | csp = extract_csp(responses["response"]) 77 | 78 | return result( 79 | csp != None and "unsafe-eval" not in csp, 80 | f"Content-Security-Policy header must not include unsafe-eval ({_trunc(csp)})", 81 | "csp_no_unsafe_inline", 82 | **kwargs, 83 | ) 84 | 85 | 86 | # Check: Content-Security-Policy header must not include unsafe-eval 87 | def check_csp_must_not_include_unsafe_inline(responses, **kwargs): 88 | csp = extract_csp(responses["response"]) 89 | 90 | return result( 91 | csp != None and "unsafe-inline" not in csp, 92 | 
f"Content-Security-Policy header must not include unsafe-inline ({_trunc(csp)})", 93 | "csp_no_unsafe_inline", 94 | **kwargs, 95 | ) 96 | 97 | 98 | # Check: Content-Security-Policy header must not include report-sample 99 | def check_csp_must_not_include_report_sample(responses, **kwargs): 100 | csp = extract_csp(responses["response"]) 101 | 102 | return result( 103 | csp != None and "script-sample" not in csp, 104 | f"Content-Security-Policy header must not include report-sample ({_trunc(csp)})", 105 | "csp_no_report_sample", 106 | **kwargs, 107 | ) 108 | 109 | 110 | # Check: Content-Security-Policy header must not include report-uri 111 | # NOTE: report-uri is being replaced by report-to but browser support is spotty so report-uri should still exist 112 | def check_csp_must_not_include_reporturi(responses, **kwargs): 113 | csp = extract_csp(responses["response"]) 114 | 115 | return result( 116 | csp is None or (("report-uri https://" not in csp)), 117 | f"Content-Security-Policy header must not include report-uri ({_trunc(csp)})", 118 | "csp_report_uri", 119 | warn_on_fail=False, 120 | **kwargs, 121 | ) 122 | 123 | 124 | # Check: Content-Security-Policy header should not include report-to 125 | def check_csp_should_not_include_reportto(responses, **kwargs): 126 | csp = extract_csp(responses["response"]) 127 | 128 | return result( 129 | csp is None or "report-to" not in csp, 130 | f"Content-Security-Policy header should not include report-to ({_trunc(csp)})", 131 | "csp_report_to", 132 | warn_on_fail=True, 133 | **kwargs, 134 | ) 135 | 136 | 137 | # Check: Content-Security-Policy header should include upgrade-insecure-requests 138 | def check_csp_upgrade_insecure_requests(responses, **kwargs): 139 | csp = extract_csp(responses["response"]) 140 | 141 | return result( 142 | csp != None and "upgrade-insecure-requests" in csp, 143 | f"Content-Security-Policy header should include upgrade-insecure-requests ({_trunc(csp)})", 144 | "csp_upgrade_insecure_requests", 145 | 
# Check: Content-Security-Policy header only includes valid directives
def check_csp_should_only_include_valid_directives(responses, **kwargs):
    """Every directive name in the policy must come from the known CSP directive list.

    Fix: the empty fragments produced by a trailing (or doubled) ";" are now
    skipped -- previously any policy ending in ";" failed, because the empty
    string is never a valid directive name.
    """
    csp = extract_csp(responses["response"])

    directives = []

    if csp:
        for directive in csp.split(";"):
            directive = directive.strip()

            # skip the empty fragment a trailing ";" produces
            if not directive:
                continue

            # keep only the directive name, dropping any source values
            directives.append(directive.split()[0])

    valid_directives = [
        "base-uri",
        "block-all-mixed-content",
        "child-src",
        "connect-src",
        "default-src",
        "font-src",
        "form-action",
        "frame-ancestors",
        "frame-src",
        "img-src",
        "manifest-src",
        "media-src",
        "navigate-to",
        "object-src",
        "plugin-types",
        "prefetch-src",
        "report-to",
        "report-uri",
        "require-sri-for",
        "require-trusted-types-for",
        "sandbox",
        "script-src-attr",
        "script-src-elem",
        "script-src",
        "style-src-attr",
        "style-src-elem",
        "style-src",
        "trusted-types",
        "upgrade-insecure-requests",
        "worker-src",
    ]

    return result(
        csp is not None and all(x in valid_directives for x in directives),
        f"Content-Security-Policy header only includes valid directives ({directives})",
        "csp_valid_directives",
        warn_on_fail=False,
        **kwargs,
    )


# Timeout (seconds) applied to every outbound socket/TLS probe in this module.
CONNECTION_TIMEOUT = 5.0


class SSLConnectionFailed(Exception):
    # NOTE(review): no raise sites in this chunk -- presumably raised elsewhere; confirm before removing.
    pass


class UnknownSSLFailure(Exception):
    # NOTE(review): no raise sites in this chunk -- presumably raised elsewhere; confirm before removing.
    pass
def connect_with_specific_protocol(domain, protocol, ipv6=False):
    """Attempt a TLS handshake with `domain`:443 using a specific ssl.PROTOCOL_* value.

    Returns True when the handshake succeeds, False otherwise.

    Fix: the original computed `successful` but never returned it, so the
    function always returned None and the TLS 1.0/1.1 checks could never
    observe a successful (i.e. insecure) handshake.
    """
    successful = False
    ssl_sock = None

    try:
        sock_type = socket.AF_INET6 if ipv6 else socket.AF_INET
        sock = socket.socket(sock_type, socket.SOCK_STREAM)

        context = ssl.SSLContext(protocol=protocol)
        ssl_sock = context.wrap_socket(sock, server_hostname=domain)
        ssl_sock.settimeout(CONNECTION_TIMEOUT)
        ssl_sock.connect((domain, 443))
        ssl_sock.close()
        successful = True
    except Exception:
        # any DNS/socket/TLS failure means this protocol is not usable
        successful = False
    finally:
        if ssl_sock:
            ssl_sock.close()

    return successful


def get_ssl_expiry(domain, ipv6=False):
    """Return the certificate's notAfter date (datetime.date), or None on any failure."""
    ssl_sock = None
    try:
        sock_type = socket.AF_INET6 if ipv6 else socket.AF_INET
        sock = socket.socket(sock_type, socket.SOCK_STREAM)
        sock.settimeout(CONNECTION_TIMEOUT)

        context = ssl.create_default_context()
        ssl_sock = context.wrap_socket(sock, server_hostname=domain)
        ssl_sock.settimeout(CONNECTION_TIMEOUT)
        ssl_sock.connect((domain, 443))

        cert = ssl_sock.getpeercert()
        end = datetime.fromtimestamp(ssl.cert_time_to_seconds(cert["notAfter"]))
        return end.date()
    except Exception:
        return None
    finally:
        # make sure the socket is released even when the handshake fails midway
        if ssl_sock:
            ssl_sock.close()


def get_ssl_certificate(domain, ipv6=False, binary=False):
    """Return the peer certificate (decoded dict, or DER bytes when binary=True), or None on failure."""
    ssl_sock = None
    try:
        sock_type = socket.AF_INET6 if ipv6 else socket.AF_INET
        sock = socket.socket(sock_type, socket.SOCK_STREAM)
        sock.settimeout(CONNECTION_TIMEOUT)

        context = ssl.create_default_context()
        ssl_sock = context.wrap_socket(sock, server_hostname=domain)
        ssl_sock.settimeout(CONNECTION_TIMEOUT)
        ssl_sock.connect((domain, 443))

        return ssl_sock.getpeercert(binary_form=binary)
    except Exception:
        return None
    finally:
        if ssl_sock:
            ssl_sock.close()
# Check: SSL expiry should be less than one year
def check_ssl_expiry_should_be_less_than_one_year(responses, **kwargs):
    """Certificates must not be valid for longer than the CA/B-mandated 398 days.

    Fix: `ssl_expiry_days and ...` treated a certificate expiring today
    (0 days) as a failure because 0 is falsy; an explicit None check keeps
    0 days passing the "< 398" comparison.
    """
    ssl_expiry = get_ssl_expiry(kwargs["domain_with_no_path"], ipv6=kwargs["is_ipv6"])
    ssl_expiry_days = (ssl_expiry - date.today()).days if ssl_expiry else None

    return result(
        ssl_expiry_days is not None and ssl_expiry_days < 398,
        f"SSL expiry should be less than 398 days ({ssl_expiry_days} days)",
        "ssl_expiry_max",
        **kwargs,
    )


# Check: SSL expiry should be greater than five days
def check_ssl_expiry_should_be_greater_than_five_days(responses, **kwargs):
    """A certificate about to lapse (or whose expiry could not be read) fails."""
    ssl_expiry = get_ssl_expiry(kwargs["domain_with_no_path"], ipv6=kwargs["is_ipv6"])
    ssl_expiry_days = (ssl_expiry - date.today()).days if ssl_expiry else None

    return result(
        ssl_expiry_days is not None and ssl_expiry_days > 5,
        f"SSL expiry should be greater than five days ({ssl_expiry_days} days)",
        "ssl_expiry_min",
        **kwargs,
    )


# Check: SSL connection fails when using TLS 1.1
def check_ssl_connection_fails_with_tls_1_1(responses, **kwargs):
    """The server must refuse the deprecated TLS 1.1 protocol."""
    domain = kwargs["domain"]
    connection_successful = connect_with_specific_protocol(domain, ssl.PROTOCOL_TLSv1_1, ipv6=kwargs["is_ipv6"])

    return result(
        not connection_successful,
        f"SSL connection fails when using TLS 1.1",
        "ssl_tls_1_1",
        **kwargs,
    )
# Check: DNS CAA should be enabled
# https://blog.qualys.com/product-tech/2017/03/13/caa-mandated-by-cabrowser-forum
def check_dns_caa_record_should_exist(responses, **kwargs):
    """Every CAA answer (DNS type 257) must carry an issue or iodef property."""

    def _caa_data(key):
        answers = responses[key].json.get("Answer", [])
        return [answer["data"] for answer in answers if "data" in answer and answer.get("type", 0) == 257]

    records = _caa_data("dns_caa_response")

    # fall back to the first-level domain's records when the host has none
    if not records and "dns_caa_response_fld" in responses:
        records = _caa_data("dns_caa_response_fld")

    return result(
        records and all(["issue" in r or "iodef" in r for r in records]),
        f"DNS CAA should be enabled ({records})",
        "ssl_dns_caa",
        **kwargs,
    )


# Check: DNS CAA should include accounturi
def check_dns_caa_record_should_include_accounturi(responses, **kwargs):
    """Warn unless every CAA issue record pins the ACME account via accounturi."""

    def _caa_data(key):
        answers = responses[key].json.get("Answer", [])
        return [answer["data"] for answer in answers if "data" in answer and answer.get("type", 0) == 257]

    records = _caa_data("dns_caa_response")

    if not records and "dns_caa_response_fld" in responses:
        records = _caa_data("dns_caa_response_fld")

    # filter to just the issue records
    records = [r for r in records if "issue " in r]

    return result(
        records and all(["accounturi=" in r for r in records]),
        f"DNS CAA should include accounturi ({records})",
        "ssl_dns_caa_accounturi",
        warn_on_fail=True,
        **kwargs,
    )
r["data"] for r in responses["dns_caa_response"].json.get("Answer", []) if "data" in r and r.get("type", 0) == 257 200 | ] 201 | 202 | if not records and "dns_caa_response_fld" in responses: 203 | records = [ 204 | r["data"] 205 | for r in responses["dns_caa_response_fld"].json.get("Answer", []) 206 | if "data" in r and r.get("type", 0) == 257 207 | ] 208 | 209 | # filter to just the issue records 210 | records = [r for r in records if "issue " in r] 211 | 212 | return result( 213 | records and all(["validationmethods=" in r for r in records]), 214 | f"DNS CAA should include validationmethods ({records})", 215 | "ssl_dns_caa_validationmethods", 216 | warn_on_fail=True, 217 | **kwargs, 218 | ) 219 | -------------------------------------------------------------------------------- /ready/checks/email.py: -------------------------------------------------------------------------------- 1 | import json 2 | import re 3 | 4 | from ready import thttp 5 | from ready.result import result 6 | 7 | 8 | # Check: SPF TXT record should exist 9 | def check_spf_record_should_exist(responses, **kwargs): 10 | records = [ 11 | r["data"] 12 | for r in responses["dns_txt_response"].json.get("Answer", []) 13 | if r["data"].strip('"').strip("'").startswith("v=spf") 14 | ] 15 | 16 | if not records and "dns_txt_response_fld" in responses: 17 | records = [ 18 | r["data"] 19 | for r in responses["dns_txt_response_fld"].json.get("Answer", []) 20 | if r["data"].strip('"').strip("'").startswith("v=spf") 21 | ] 22 | 23 | return result( 24 | len(records) > 0, 25 | f"SPF TXT record should exist ({records})", 26 | "email_spf", 27 | **kwargs, 28 | ) 29 | 30 | 31 | # Check: SPF TXT record should contain "-all" 32 | def check_spf_txt_record_should_disallow_all(responses, **kwargs): 33 | records = [ 34 | r["data"] 35 | for r in responses["dns_txt_response"].json.get("Answer", []) 36 | if r["data"].strip('"').strip("'").startswith("v=spf") 37 | ] 38 | 39 | if not records and "dns_txt_response_fld" in 
responses: 40 | records = [ 41 | r["data"] 42 | for r in responses["dns_txt_response_fld"].json.get("Answer", []) 43 | if r["data"].strip('"').strip("'").startswith("v=spf") 44 | ] 45 | 46 | return result( 47 | records and all(["-all" in r for r in records]), 48 | f'SPF TXT record should contain "-all" ({records})', 49 | "email_spf_disallow_all", 50 | **kwargs, 51 | ) 52 | 53 | 54 | # Check: SPF DNS record is deprecated and should not exist 55 | def check_spf_dns_record_does_not_exist(responses, **kwargs): 56 | records = [r["data"] for r in responses["dns_spf_response"].json.get("Answer", []) if "data" in r and r["type"] == 99] 57 | 58 | if "dns_spf_response_fld" in responses: 59 | records.extend([r["data"] for r in responses["dns_spf_response_fld"].json.get("Answer", []) if "data" in r]) 60 | 61 | return result( 62 | len(records) == 0, 63 | f"SPF DNS record is deprecated and should not exist ({records})", 64 | "email_spf_dns", 65 | **kwargs, 66 | ) 67 | 68 | 69 | def _spf_for_domain(domain, depth=0, lookups=[], dns_resolver=None): 70 | if domain in lookups: 71 | return [] 72 | 73 | response = thttp.request(f"{dns_resolver}?name={domain}&type=TXT") 74 | lookups.append(domain) 75 | 76 | j = json.loads(response.content) 77 | 78 | spf_records = [(domain, x["data"]) for x in j.get("Answer", []) if x["data"].strip('"').strip("'").startswith("v=spf")] 79 | 80 | results = [x for x in spf_records] 81 | 82 | for _, record in spf_records: 83 | depth += 1 84 | if depth > 13: 85 | return results 86 | 87 | matches = re.findall(r"include\:([^\s]+)", record) 88 | 89 | for d in matches: 90 | results.extend(_spf_for_domain(d, depth, lookups, dns_resolver)) 91 | 92 | matches = re.findall(r"redirect\=([^\s]+)", record) 93 | 94 | for d in matches: 95 | results.extend(_spf_for_domain(d, depth, lookups, dns_resolver)) 96 | 97 | return results 98 | 99 | 100 | # Check: SPF includes use less than 10 DNS requests 101 | def check_spf_uses_less_than_10_requests(responses, **kwargs): 102 | 
# Check: DMARC record should exist
def check_dmarc_record_should_exist(responses, **kwargs):
    """Every DMARC TXT answer must start with v=DMARC1."""
    answers = responses["dns_dmarc_response"].json.get("Answer", [])
    records = [answer["data"] for answer in answers if "data" in answer]

    # fall back to the first-level domain's records when the host has none
    if not records and "dns_dmarc_response_fld" in responses:
        fld_answers = responses["dns_dmarc_response_fld"].json.get("Answer", [])
        records = [answer["data"] for answer in fld_answers if "data" in answer]

    return result(
        records and all([r.strip('"').strip("'").startswith("v=DMARC1") for r in records]),
        f"DMARC record should exist ({records})",
        "email_dmarc_exists",
        **kwargs,
    )


# Check: DMARC record should contain p=reject
def check_dmarc_record_should_reject_failures(responses, **kwargs):
    """At least one DMARC record must carry a p=reject policy tag."""
    answers = responses["dns_dmarc_response"].json.get("Answer", [])
    records = [answer["data"] for answer in answers if "data" in answer]

    if not records and "dns_dmarc_response_fld" in responses:
        fld_answers = responses["dns_dmarc_response_fld"].json.get("Answer", [])
        records = [answer["data"] for answer in fld_answers if "data" in answer]

    has_reject = False

    for record in records:
        tags = [part.strip().lower() for part in record.split(";")]
        if "p=reject" in tags:
            has_reject = True

    return result(
        has_reject,
        f"DMARC record should contain p=reject ({records})",
        "email_dmarc_none",
        **kwargs,
    )
(MX records exist: {mx_record_data})", 205 | "email_spf_disallow_all_with_empty_mx", 206 | **kwargs, 207 | ) 208 | -------------------------------------------------------------------------------- /test/test_csp.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase, skipIf 2 | 3 | from ready.checks.csp import ( 4 | extract_csp, 5 | check_csp_should_exist, 6 | check_csp_should_start_with_defaultsrc_none, 7 | check_csp_includes_default_or_script_directive, 8 | check_csp_must_not_include_unsafe_eval, 9 | check_csp_must_not_include_unsafe_inline, 10 | check_csp_must_not_include_report_sample, 11 | check_csp_must_not_include_reporturi, 12 | check_csp_should_not_include_reportto, 13 | check_csp_upgrade_insecure_requests, 14 | check_csp_should_only_include_valid_directives, 15 | ) 16 | from ready.thttp import Response 17 | 18 | SKIP_BS4_TESTS = False 19 | try: 20 | import bs4 21 | except ImportError: 22 | SKIP_BS4_TESTS = True 23 | 24 | 25 | response_with_html_csp = Response( 26 | None, 27 | """<!doctype html> 28 | <meta 29 | http-equiv="Content-Security-Policy" 30 | content="default-src 'self'; img-src https://*; child-src 'none';" /> 31 | </head>""", 32 | None, 33 | None, 34 | None, 35 | {}, 36 | None, 37 | ) 38 | 39 | 40 | def response_with_csp(csp): 41 | return Response(None, "", None, None, None, {"content-security-policy": csp}, None) 42 | 43 | 44 | class ExtractContentSecurityPolicyTestCase(TestCase): 45 | @skipIf(SKIP_BS4_TESTS, "beautifulsoup is not available") 46 | def test_extract_csp_meta_tag(self): 47 | from bs4 import BeautifulSoup 48 | 49 | csp = extract_csp(response_with_html_csp) 50 | self.assertEqual(csp, "default-src 'self'; img-src https://*; child-src 'none';") 51 | 52 | def test_extract_csp_from_header(self): 53 | csp = extract_csp(response_with_csp("default-src 'none';")) 54 | self.assertEqual(csp, "default-src 'none';") 55 | 56 | def test_extract_csp_empry(self): 57 | csp = 
extract_csp(Response(None, "", None, None, None, {}, None)) 58 | self.assertEqual(csp, None) 59 | 60 | 61 | class ContentSecurityPolicyChecksTestCase(TestCase): 62 | def test_check_csp_should_exist(self): 63 | result = check_csp_should_exist({"response": response_with_csp("default-src 'none';")}, print_output=False) 64 | self.assertTrue(result.passed) 65 | 66 | result = check_csp_should_exist({"response": Response(None, "", None, None, None, {}, None)}, print_output=False) 67 | self.assertFalse(result.passed) 68 | 69 | def test_check_csp_should_start_with_defaultsrc_none(self): 70 | result = check_csp_should_start_with_defaultsrc_none( 71 | {"response": response_with_csp("default-src 'none';")}, print_output=False 72 | ) 73 | self.assertTrue(result.passed) 74 | 75 | result = check_csp_should_start_with_defaultsrc_none( 76 | {"response": response_with_csp("default-src 'self';")}, print_output=False 77 | ) 78 | self.assertFalse(result.passed) 79 | 80 | def test_check_csp_includes_default_or_script_directive(self): 81 | result = check_csp_includes_default_or_script_directive( 82 | {"response": response_with_csp("default-src 'none';")}, print_output=False 83 | ) 84 | self.assertTrue(result.passed) 85 | 86 | result = check_csp_includes_default_or_script_directive( 87 | {"response": response_with_csp("script-src 'none';")}, print_output=False 88 | ) 89 | self.assertTrue(result.passed) 90 | 91 | result = check_csp_includes_default_or_script_directive( 92 | {"response": response_with_csp("default-src 'none'; script-src 'self'")}, print_output=False 93 | ) 94 | self.assertTrue(result.passed) 95 | 96 | result = check_csp_includes_default_or_script_directive( 97 | {"response": response_with_csp("upgrade-insecure-requests")}, print_output=False 98 | ) 99 | self.assertFalse(result.passed) 100 | 101 | def test_check_csp_must_not_include_unsafe_eval(self): 102 | result = check_csp_must_not_include_unsafe_eval( 103 | {"response": response_with_csp("default-src 'none';")}, 
print_output=False 104 | ) 105 | self.assertTrue(result.passed) 106 | 107 | result = check_csp_must_not_include_unsafe_eval( 108 | {"response": response_with_csp("default-src 'none'; unsafe-eval;")}, print_output=False 109 | ) 110 | self.assertFalse(result.passed) 111 | 112 | result = check_csp_must_not_include_unsafe_eval( 113 | {"response": response_with_csp("default-src 'none'; UnSafe-EVAL")}, print_output=False 114 | ) 115 | self.assertFalse(result.passed) 116 | 117 | def test_check_csp_must_not_include_unsafe_inline(self): 118 | result = check_csp_must_not_include_unsafe_inline( 119 | {"response": response_with_csp("default-src 'none';")}, print_output=False 120 | ) 121 | self.assertTrue(result.passed) 122 | 123 | result = check_csp_must_not_include_unsafe_inline( 124 | {"response": response_with_csp("default-src 'none'; unsafe-inline;")}, print_output=False 125 | ) 126 | self.assertFalse(result.passed) 127 | 128 | result = check_csp_must_not_include_unsafe_inline( 129 | {"response": response_with_csp("default-src 'none'; UnSafe-inLINe")}, print_output=False 130 | ) 131 | self.assertFalse(result.passed) 132 | 133 | def test_check_csp_must_not_include_report_sample(self): 134 | result = check_csp_must_not_include_report_sample( 135 | {"response": response_with_csp("default-src 'none';")}, print_output=False 136 | ) 137 | self.assertTrue(result.passed) 138 | 139 | result = check_csp_must_not_include_report_sample( 140 | {"response": response_with_csp("default-src 'none'; unsafe-inline; script-sample")}, print_output=False 141 | ) 142 | self.assertFalse(result.passed) 143 | 144 | def test_check_csp_must_not_include_reporturi(self): 145 | result = check_csp_must_not_include_reporturi({"response": response_with_csp("default-src 'none';")}, print_output=False) 146 | self.assertTrue(result.passed) 147 | 148 | result = check_csp_must_not_include_reporturi( 149 | {"response": response_with_csp("default-src 'none'; report-uri https://example.org")}, print_output=False 
150 | ) 151 | self.assertFalse(result.passed) 152 | 153 | def test_check_csp_should_not_include_reportto(self): 154 | result = check_csp_should_not_include_reportto( 155 | {"response": response_with_csp("default-src 'none';")}, print_output=False 156 | ) 157 | self.assertTrue(result.passed) 158 | 159 | result = check_csp_should_not_include_reportto( 160 | {"response": response_with_csp("default-src 'none'; report-to example")}, print_output=False 161 | ) 162 | self.assertFalse(result.passed) 163 | 164 | def test_check_csp_upgrade_insecure_requests(self): 165 | result = check_csp_upgrade_insecure_requests( 166 | {"response": response_with_csp("default-src 'none'; upgrade-insecure-requests")}, print_output=False 167 | ) 168 | self.assertTrue(result.passed) 169 | 170 | result = check_csp_upgrade_insecure_requests({"response": response_with_csp("default-src 'none'")}, print_output=False) 171 | self.assertFalse(result.passed) 172 | 173 | def test_check_csp_should_only_include_valid_directives(self): 174 | result = check_csp_should_only_include_valid_directives( 175 | {"response": response_with_csp("default-src 'none'; upgrade-insecure-requests; invalid-directive;")}, 176 | print_output=False, 177 | ) 178 | self.assertFalse(result.passed) 179 | 180 | def test_checks_fail_with_missing_csp(self): 181 | checks = [ 182 | check_csp_should_exist, 183 | check_csp_should_start_with_defaultsrc_none, 184 | check_csp_includes_default_or_script_directive, 185 | check_csp_upgrade_insecure_requests, 186 | ] 187 | 188 | for c in checks: 189 | result = c({"response": Response(None, "", None, None, None, {}, None)}, print_output=False) 190 | self.assertFalse(result.passed) 191 | 192 | result = c({"response": response_with_csp("")}, print_output=False) 193 | self.assertFalse(result.passed) 194 | 195 | def test_long_csp_should_be_truncated(self): 196 | result = check_csp_should_exist( 197 | { 198 | "response": response_with_csp( 199 | "default-src 'none'; base-uri 'self'; child-src 
ready.invalid/assets-cdn/worker/ gist.ready.invalid/assets-cdn/worker/; connect-src 'self' uploads.ready.invalid collector.ready.invalid raw.ready.invalid api.ready.invalid objects-origin.ready.invalid *.actions.ready.invalid wss://*.actions.ready.invalid insights.ready.invalid wss://alive.ready.invalid github.ready.invalid; font-src github.ready.invalid; form-action 'self' ready.invalid gist.ready.invalid objects-origin.ready.invalid; frame-ancestors 'none'; frame-src viewscreen.ready.invalid notebooks.ready.invalid; img-src 'self' data: github.ready.invalid media.ready.invalid camo.ready.invalid identicons.ready.invalid avatars.ready.invalid objects.ready.invalid secured-user-images.ready.invalid/ user-images.ready.invalid/ private-user-images.ready.invalid opengraph.ready.invalid customer-stories-feed.ready.invalid spotlights-feed.ready.invalid objects-origin.ready.invalid *.ready.invalid; manifest-src 'self'; media-src ready.invalid user-images.ready.invalid/ secured-user-images.ready.invalid/ private-user-images.ready.invalid github-production-user-asset-6210df.s3.amazonaws.com gist.ready.invalid github.ready.invalid; script-src github.ready.invalid; style-src 'unsafe-inline' github.ready.invalid; upgrade-insecure-requests; worker-src ready.invalid/assets-cdn/worker/ gist.ready.invalid/assets-cdn/worker/" 200 | ) 201 | }, 202 | print_output=False, 203 | ) 204 | self.assertTrue("uploads.ready.invalid" in result.message) 205 | self.assertFalse("worker-src ready.invalid" in result.message) 206 | -------------------------------------------------------------------------------- /ready/checks/html.py: -------------------------------------------------------------------------------- 1 | import re 2 | from urllib.parse import urljoin 3 | 4 | from ready import thttp 5 | from ready.checks.csp import extract_csp 6 | from ready.result import result 7 | 8 | USE_BS4 = True 9 | 10 | try: 11 | from bs4 import BeautifulSoup 12 | except ImportError: # pragma: no cover 13 | 
# Check: Permissions-Policy should exist if the response is HTML
def check_permissions_policy_should_exist(responses, **kwargs):
    """The response should send a Permissions-Policy header."""
    policy = responses["response"].headers.get("permissions-policy")

    return result(
        policy is not None,
        f"Permissions-Policy should exist if the response is HTML ({policy})",
        "html_permissions_policy",
        **kwargs,
    )


# Check: frame-ancestors should be in CSP or X-Frame-Options should exist if the response is HTML
def check_frame_ancestors_should_exist(responses, **kwargs):
    """Clickjacking defence: either X-Frame-Options or a frame-ancestors CSP directive."""
    csp = extract_csp(responses["response"])
    xfo = responses["response"].headers.get("x-frame-options")

    has_frame_ancestors = csp is not None and "frame-ancestors" in csp

    return result(
        xfo is not None or has_frame_ancestors,
        f"frame-ancestors should be in CSP or X-Frame-Options should exist if the response is HTML (X-Frame-Options: {xfo}, CSP: {csp})",
        "html_frame_ancestors",
        **kwargs,
    )


# Check: X-Content-Type-options should be "nosniff"
def check_x_content_type_options_should_be_nosniff(responses, **kwargs):
    """MIME sniffing must be disabled via X-Content-Type-Options: nosniff."""
    header_value = responses["response"].headers.get("x-content-type-options", "")

    return result(
        header_value == "nosniff",
        f'X-Content-Type-options should be "nosniff" ({header_value})',
        "html_x_content_type_options_nosniff",
        **kwargs,
    )


# Check: Referrer-Policy should be set
def check_referrer_policy_should_be_set(responses, **kwargs):
    """The response should send any Referrer-Policy header."""
    referrer_policy = responses["response"].headers.get("referrer-policy")

    return result(
        referrer_policy is not None,
        f'Referrer-Policy should be set ({responses["response"].headers.get("referrer-policy", "")})',
        "html_referrer_policy",
        **kwargs,
    )
"x-xss-protection" not in responses["response"].headers, 62 | f'X-XSS-Protection header should not exist ({responses["response"].headers.get("x-xss-protection")})', 63 | "html_x_xss_protection_not_set", 64 | warn_on_fail=True, 65 | **kwargs, 66 | ) 67 | 68 | 69 | # Check: HTML should start with "<!doctype html>" 70 | def check_html_starts_with_doctype(responses, **kwargs): 71 | return result( 72 | responses["response"].content.lower().strip().startswith(b"<!doctype html>"), 73 | f'HTML should start with "<!doctype html>"', 74 | "html_doctype", 75 | **kwargs, 76 | ) 77 | 78 | 79 | # Check: `<html>` tag should include lang 80 | def check_html_tag_includes_lang(responses, **kwargs): 81 | if "<html" in responses["response"].content.decode(errors="ignore"): 82 | html_tag = responses["response"].content.decode(errors="ignore").split("<html")[1].split(">")[0].replace("'", '"') 83 | html_tag = "<html" + html_tag + ">" 84 | else: 85 | html_tag = "no tag" 86 | return result( 87 | "lang=" in html_tag, 88 | f"<html> tag should include lang ({html_tag})", 89 | "html_tag_includes_lang", 90 | **kwargs, 91 | ) 92 | 93 | 94 | # Check: HTML should include meta charset tag 95 | def check_html_meta_charset(responses, **kwargs): 96 | return result( 97 | b"<meta charset=" in responses["response"].content.lower(), 98 | f"HTML should include meta charset tag", 99 | "html_meta_charset", 100 | **kwargs, 101 | ) 102 | 103 | 104 | # Check: HTML should include `<title>` 105 | def check_html_includes_title(responses, **kwargs): 106 | return result( 107 | b"<title>" in responses["response"].content.lower(), 108 | f"HTML should include title", 109 | "html_includes_title", 110 | **kwargs, 111 | ) 112 | 113 | 114 | # Check: HTML should include link with rel="icon" 115 | def check_html_includes_rel_icon(responses, **kwargs): 116 | link_re = re.compile(r"<link (.+)>") 117 | links = [l.replace("'", '"') for l in link_re.findall(responses["response"].content.decode())] 118 | 119 | return result( 120 | 
# Check: HTML should not use schemeless urls for links or hrefs
def check_html_should_not_use_schemeless_urls(responses, **kwargs):
    """Protocol-relative URLs (href="//host/...") should be replaced with explicit https."""
    return result(
        b'="//' not in responses["response"].content and b"='//" not in responses["response"].content,
        "HTML should not use schemeless urls for links or hrefs",
        "html_schemeless",
        **kwargs,
    )


# Check: HTML should not use unnecessary HTML entities
def check_html_should_not_use_unnecessary_entities(responses, **kwargs):
    """Flag named/numeric entities other than the handful that are actually required.

    Fix: the allow list previously held bytes (b"nbsp", ...) while the
    entities are extracted from *decoded* text as str, so the membership test
    could never match and even allowed entities such as &amp; were reported.
    """
    allow_list = ["nbsp", "amp", "quot", "lt", "gt"]

    # The longest entity on the registered entity list is "CounterClockwiseContourIntegral"
    # https://html.spec.whatwg.org/entities.json
    entities = re.findall(r"&([\w#]{1,32});", responses["response"].content.decode())
    entities = [e for e in entities if e not in allow_list]

    return result(
        len(entities) == 0,
        f"HTML should not use unnecessary HTML entities ({[e for e in set(entities)]})",
        "html_unnecessary_entities",
        warn_on_fail=True,
        **kwargs,
    )


# Check: All script tags should use subresource integrity
def check_html_script_tags_use_sri(responses, **kwargs):
    """Every <script> tag with attributes must carry an integrity attribute."""
    script_tags = re.findall(r"<script ([^\>]+)", responses["response"].content.decode())

    return result(
        all(["integrity" in tag for tag in script_tags]),
        f"All script tags should use subresource integrity",
        "html_sri_js",
        **kwargs,
    )
f"X-DNS-Prefetch-Control should be set to off ({responses['response'].headers.get('x-dns-prefetch-control', '')})", 172 | "html_x_dns_prefetch", 173 | **kwargs, 174 | ) 175 | 176 | 177 | # Check: CDNs should not be used for Javascript or CSS assets 178 | def check_cdns_should_not_be_used(responses, **kwargs): 179 | # XXX: This list was compiled by myself from a number of random web sources, if a better maintained list 180 | # exists then I would love to replace this 181 | cdn_domains = [ 182 | "cdn.jsdelivr.net", 183 | "cdn.statically.io", 184 | "bootstrapcdn.com", 185 | "cdnjs.cloudflare.com", 186 | "sentry-cdn.com", 187 | "ajax.googleapis.com", 188 | "code.jquery.com", 189 | "ajax.aspnetcdn.com", 190 | ] 191 | 192 | script_tags = re.findall(r"<script ([^\>]+)", responses["response"].content.decode()) 193 | link_tags = re.findall(r"<link (.+)>", responses["response"].content.decode()) 194 | 195 | for tag in script_tags + link_tags: 196 | if any([x in tag for x in cdn_domains]): 197 | return result(False, "CDNs should not be used for Javascript or CSS assets", "html_cdn_usage", **kwargs) 198 | 199 | return result(True, "CDNs should not be used for Javascript or CSS assets", "html_cdn_usage", **kwargs) 200 | 201 | 202 | # Check: RSS and JSON feeds should return Access-Control-Allow-Origin header 203 | def check_rss_should_return_cors_header(responses, **kwargs): 204 | if USE_BS4: 205 | feed_urls = [] 206 | feed_types = [ 207 | "application/rss+xml", 208 | "application/feed+json", 209 | ] 210 | 211 | soup = BeautifulSoup(responses["response"].content, features="html.parser") 212 | 213 | links = soup.find_all("link") 214 | 215 | for link in links: 216 | if "alternate" in link.attrs.get("rel", ""): 217 | if link.attrs.get("type", "") in feed_types: 218 | feed_urls.append(urljoin(responses["response"].url, link.attrs.get("href"))) 219 | 220 | cors_values = [] 221 | for url in feed_urls: 222 | # TODO: with the urljoin above this block probably isn't needed 223 | if 
url.startswith("//"): # pragma: no cover 224 | url = "https:" + url 225 | elif url.startswith("/"): # pragma: no cover 226 | url = responses["response"].url.rstrip("/") + url 227 | 228 | if url.startswith("http"): 229 | response = thttp.request(url) 230 | cors_values.append(response.headers.get("access-control-allow-origin")) 231 | 232 | return result( 233 | all([x is not None for x in cors_values]), 234 | f"RSS and JSON feeds should return Access-Control-Allow-Origin header ({', '.join(feed_urls) if feed_urls else 'no feeds'})", 235 | "feeds_cors_enabled", 236 | **kwargs, 237 | ) 238 | else: # pragma: no cover 239 | return result( 240 | False, 241 | f"RSS and JSON feeds should return Access-Control-Allow-Origin header (skipped because beautifulsoup is missing)", 242 | "feeds_cors_enabled", 243 | warn_on_fail=True, 244 | **kwargs, 245 | ) 246 | 247 | 248 | # Check: Cache-Control max-age should be <= 86400 for HTML documents 249 | def check_html_should_not_be_cached_for_more_than_24_hours(responses, **kwargs): 250 | cc_header = responses["response"].headers.get("cache-control", "") 251 | error = "no Cache-Control header" 252 | 253 | if "max-age=" in cc_header: 254 | max_age = re.search(r"max-age=(?P<age>\d+)", cc_header) 255 | 256 | if max_age: 257 | age = int(max_age.group("age")) 258 | return result( 259 | age <= 86400, 260 | f"Cache-Control max-age should be <= 86400 for HTML documents ({cc_header})", 261 | "html_cache_duration", 262 | **kwargs, 263 | ) 264 | else: 265 | error = f"match error: {cc_header}" 266 | 267 | return result( 268 | False, 269 | f"Cache-Control max-age should be <= 86400 for HTML documents ({error})", 270 | "html_cache_duration", 271 | warn_on_fail=True, 272 | **kwargs, 273 | ) 274 | -------------------------------------------------------------------------------- /test/test_bad_response.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from 
ready.checks.bad_response import ( 4 | check_bad_response_cloudflare, 5 | check_bad_response_kasada, 6 | ) 7 | from ready.thttp import Response 8 | 9 | 10 | class CloudflareTestCase(TestCase): 11 | def test_cloudflare_blocked_content(self): 12 | content = b"""<!DOCTYPE html>\n<html>\n<head>\n <meta content="width=device-width, initial-scale=1, maximum-scale=1" name="viewport">\n <title>Checking your Browser - GitLab\n \n\n\n\n\n

\n GitLab
\n

\n
\n
\n \n
\n
\n
\n
\n
\n
\n

Checking your browser before accessing gitlab.com.

\n \n

This process is automatic. Your browser will redirect to your requested content shortly.

\n

Please allow up to 5 seconds...

\n \n
\n
\n \n \n
\n \n \n\n
\n\n
\n
\n\n""" 13 | 14 | responses = {"response": Response(None, content, None, 200, None, None, None)} 15 | 16 | result = check_bad_response_cloudflare(responses, print_output=False) 17 | self.assertFalse(result.passed) 18 | 19 | 20 | class KasadaTestCase(TestCase): 21 | def test_kasada_blocked_content(self): 22 | content = b"""""" 23 | 24 | responses = {"response": Response(None, content, None, 429, None, None, None)} 25 | result = check_bad_response_kasada(responses, print_output=False) 26 | self.assertFalse(result.passed) 27 | 28 | def test_kasada_normal_content(self): 29 | content = b"""< 30 | 31 | 32 | 33 | 34 | 35 | Minimal base.html 36 | 37 | 38 | 39 | 40 |

base.html

41 |

The absolute minimum base.html to get your project started.

42 |

Usage:

43 |
44 |               curl https://basehtml.xyz > base.html
45 |             
46 | more info 47 | 48 | 49 | """ 50 | 51 | responses = {"response": Response(None, content, None, 200, None, None, None)} 52 | result = check_bad_response_kasada(responses, print_output=False) 53 | self.assertTrue(result.passed) 54 | -------------------------------------------------------------------------------- /ready/thttp.py: -------------------------------------------------------------------------------- 1 | """ 2 | UNLICENSED 3 | This is free and unencumbered software released into the public domain. 4 | 5 | https://github.com/sesh/thttp 6 | """ 7 | 8 | import gzip 9 | import json as json_lib 10 | import ssl 11 | from base64 import b64encode 12 | from collections import namedtuple 13 | from http.cookiejar import CookieJar 14 | from urllib.error import HTTPError, URLError 15 | from urllib.parse import urlencode 16 | from urllib.request import ( 17 | HTTPCookieProcessor, 18 | HTTPRedirectHandler, 19 | HTTPSHandler, 20 | Request, 21 | build_opener, 22 | ) 23 | 24 | Response = namedtuple("Response", "request content json status url headers cookiejar") 25 | 26 | 27 | JSON_HEADERS = ["application/x-javascript", "application/json"] 28 | 29 | 30 | class NoRedirect(HTTPRedirectHandler): 31 | def redirect_request(self, req, fp, code, msg, headers, newurl): 32 | return None 33 | 34 | 35 | def request( 36 | url, 37 | params={}, 38 | json=None, 39 | data=None, 40 | headers={}, 41 | method="GET", 42 | verify=True, 43 | redirect=True, 44 | cookiejar=None, 45 | basic_auth=None, 46 | timeout=None, 47 | ): 48 | """ 49 | Returns a (named)tuple with the following properties: 50 | - request 51 | - content 52 | - json (dict; or None) 53 | - headers (dict; all lowercase keys) 54 | - https://stackoverflow.com/questions/5258977/are-http-headers-case-sensitive 55 | - status 56 | - url (final url, after any redirects) 57 | - cookiejar 58 | """ 59 | method = method.upper() 60 | headers = {k.lower(): v for k, v in headers.items()} # lowecase headers 61 | 62 | if params: 63 | url += "?" 
+ urlencode(params) # build URL from params 64 | if json and data: 65 | raise Exception("Cannot provide both json and data parameters") 66 | if method not in ["POST", "PATCH", "PUT"] and (json or data): 67 | raise Exception("Request method must POST, PATCH or PUT if json or data is provided") 68 | if not timeout: 69 | timeout = 60 70 | 71 | if json: # if we have json, stringify and put it in our data variable 72 | headers["content-type"] = "application/json" 73 | data = json_lib.dumps(json).encode("utf-8") 74 | elif data: 75 | data = urlencode(data).encode() 76 | 77 | if basic_auth and len(basic_auth) == 2 and "authorization" not in headers: 78 | username, password = basic_auth 79 | headers["authorization"] = f'Basic {b64encode(f"{username}:{password}".encode()).decode("ascii")}' 80 | 81 | if not cookiejar: 82 | cookiejar = CookieJar() 83 | 84 | ctx = ssl.create_default_context() 85 | if not verify: # ignore ssl errors 86 | ctx.check_hostname = False 87 | ctx.verify_mode = ssl.CERT_NONE 88 | 89 | handlers = [] 90 | handlers.append(HTTPSHandler(context=ctx)) 91 | handlers.append(HTTPCookieProcessor(cookiejar=cookiejar)) 92 | 93 | if not redirect: 94 | no_redirect = NoRedirect() 95 | handlers.append(no_redirect) 96 | 97 | opener = build_opener(*handlers) 98 | req = Request(url, data=data, headers=headers, method=method) 99 | 100 | try: 101 | with opener.open(req, timeout=timeout) as resp: 102 | status, content, resp_url = (resp.getcode(), resp.read(), resp.geturl()) 103 | headers = {k.lower(): v for k, v in list(resp.info().items())} 104 | 105 | if "gzip" in headers.get("content-encoding", ""): 106 | content = gzip.decompress(content) 107 | 108 | json = json_lib.loads(content) if any([x in headers.get("content-type", "").lower() for x in JSON_HEADERS]) else None 109 | except HTTPError as e: 110 | status, content, resp_url = (e.code, e.read(), e.geturl()) 111 | headers = {k.lower(): v for k, v in list(e.headers.items())} 112 | 113 | if "gzip" in 
headers.get("content-encoding", ""): 114 | content = gzip.decompress(content) 115 | 116 | json = json_lib.loads(content) if any([x in headers.get("content-type", "").lower() for x in JSON_HEADERS]) else None 117 | 118 | return Response(req, content, json, status, resp_url, headers, cookiejar) 119 | 120 | 121 | import unittest 122 | 123 | 124 | class RequestTestCase(unittest.TestCase): 125 | def test_cannot_provide_json_and_data(self): 126 | with self.assertRaises(Exception): 127 | request( 128 | "https://httpbingo.org/post", 129 | json={"name": "Brenton"}, 130 | data="This is some form data", 131 | ) 132 | 133 | def test_should_fail_if_json_or_data_and_not_p_method(self): 134 | with self.assertRaises(Exception): 135 | request("https://httpbingo.org/post", json={"name": "Brenton"}) 136 | 137 | with self.assertRaises(Exception): 138 | request("https://httpbingo.org/post", json={"name": "Brenton"}, method="HEAD") 139 | 140 | def test_should_set_content_type_for_json_request(self): 141 | response = request("https://httpbingo.org/post", json={"name": "Brenton"}, method="POST") 142 | self.assertEqual(response.request.headers["Content-type"], "application/json") 143 | 144 | def test_should_work(self): 145 | response = request("https://httpbingo.org/get") 146 | self.assertEqual(response.status, 200) 147 | 148 | def test_should_create_url_from_params(self): 149 | response = request( 150 | "https://httpbingo.org/get", 151 | params={"name": "brenton", "library": "tiny-request"}, 152 | ) 153 | self.assertEqual(response.url, "https://httpbingo.org/get?name=brenton&library=tiny-request") 154 | 155 | def test_should_return_headers(self): 156 | response = request("https://httpbingo.org/response-headers", params={"Test-Header": "value"}) 157 | self.assertEqual(response.headers["test-header"], "value") 158 | 159 | def test_should_populate_json(self): 160 | response = request("https://httpbingo.org/json") 161 | self.assertTrue("slideshow" in response.json) 162 | 163 | def 
test_should_return_response_for_404(self): 164 | response = request("https://httpbingo.org/404") 165 | self.assertEqual(response.status, 404) 166 | self.assertTrue("text/plain" in response.headers["content-type"]) 167 | 168 | def test_should_fail_with_bad_ssl(self): 169 | with self.assertRaises(URLError): 170 | response = request("https://expired.badssl.com/") 171 | 172 | def test_should_load_bad_ssl_with_verify_false(self): 173 | response = request("https://expired.badssl.com/", verify=False) 174 | self.assertEqual(response.status, 200) 175 | 176 | def test_should_form_encode_non_json_post_requests(self): 177 | response = request("https://httpbingo.org/post", data={"name": "test-user"}, method="POST") 178 | self.assertEqual(response.json["form"]["name"], ["test-user"]) 179 | 180 | def test_should_follow_redirect(self): 181 | response = request( 182 | "https://httpbingo.org/redirect-to", 183 | params={"url": "https://duckduckgo.com/"}, 184 | ) 185 | self.assertEqual(response.url, "https://duckduckgo.com/") 186 | self.assertEqual(response.status, 200) 187 | 188 | def test_should_not_follow_redirect_if_redirect_false(self): 189 | response = request( 190 | "https://httpbingo.org/redirect-to", 191 | params={"url": "https://duckduckgo.com/"}, 192 | redirect=False, 193 | ) 194 | self.assertEqual(response.status, 302) 195 | 196 | def test_cookies(self): 197 | response = request( 198 | "https://httpbingo.org/cookies/set", 199 | params={"cookie": "test"}, 200 | redirect=False, 201 | ) 202 | response = request("https://httpbingo.org/cookies", cookiejar=response.cookiejar) 203 | self.assertEqual(response.json["cookie"], "test") 204 | 205 | def test_basic_auth(self): 206 | response = request("http://httpbingo.org/basic-auth/user/passwd", basic_auth=("user", "passwd")) 207 | self.assertEqual(response.json["authorized"], True) 208 | 209 | def test_should_handle_gzip(self): 210 | response = request("http://httpbingo.org/gzip", headers={"Accept-Encoding": "gzip"}) 211 | 
# NOTE(review): a few network-dependent RequestTestCase methods that appeared
# here (gzip handling, timeout) were truncated by the source mangling and are
# not reproduced.

# Mapping of HTTP status code (as a string) to its IANA reason phrase.
STATUS_CODES = {
    "100": "Continue",
    "101": "Switching Protocols",
    "102": "Processing",
    "103": "Early Hints",
    "200": "OK",
    "201": "Created",
    "202": "Accepted",
    "203": "Non-Authoritative Information",
    "204": "No Content",
    "205": "Reset Content",
    "206": "Partial Content",
    "207": "Multi-Status",
    "208": "Already Reported",
    "226": "IM Used",
    "300": "Multiple Choices",
    "301": "Moved Permanently",
    "302": "Found",
    "303": "See Other",
    "304": "Not Modified",
    "305": "Use Proxy",
    "307": "Temporary Redirect",
    "308": "Permanent Redirect",
    "400": "Bad Request",
    "401": "Unauthorized",
    "402": "Payment Required",
    "403": "Forbidden",
    "404": "Not Found",
    "405": "Method Not Allowed",
    "406": "Not Acceptable",
    "407": "Proxy Authentication Required",
    "408": "Request Timeout",
    "409": "Conflict",
    "410": "Gone",
    "411": "Length Required",
    "412": "Precondition Failed",
    "413": "Content Too Large",
    "414": "URI Too Long",
    "415": "Unsupported Media Type",
    "416": "Range Not Satisfiable",
    "417": "Expectation Failed",
    "418": "I'm a Teapot",
    "421": "Misdirected Request",
    "422": "Unprocessable Content",
    "423": "Locked",
    "424": "Failed Dependency",
    "425": "Too Early",
    "426": "Upgrade Required",
    "427": "Unassigned: ",
    "428": "Precondition Required",
    "429": "Too Many Requests",
    "431": "Request Header Fields Too Large",
    "451": "Unavailable For Legal Reasons",
    "500": "Internal Server Error",
    "501": "Not Implemented",
    "502": "Bad Gateway",
    "503": "Service Unavailable",
    "504": "Gateway Timeout",
    "505": "HTTP Version Not Supported",
    "506": "Variant Also Negotiates",
    "507": "Insufficient Storage",
    "508": "Loop Detected",
    "509": "Unassigned: ",
    "510": "Not Extended",
    "511": "Network Authentication Required",
}


def pretty(response, content=False):
    """Print a colourised summary of a thttp Response to stdout.

    Prints the status line and sorted headers; when `content` is True also
    prints a blank line followed by the body (pretty-printed JSON when the
    response parsed as JSON, otherwise the decoded text).
    """
    RESET = "\033[0m"
    HIGHLIGHT = "\033[34m"

    # status code
    # BUG FIX: a non-standard status (e.g. 599) previously raised KeyError;
    # unknown codes now fall back to a generic reason phrase.
    reason = STATUS_CODES.get(str(response.status), "Unknown")
    print(HIGHLIGHT + str(response.status) + " " + RESET + reason)

    # headers
    for k in sorted(response.headers.keys()):
        print(HIGHLIGHT + k + RESET + ": " + response.headers[k])

    if content:
        # blank line
        print()

        # response body
        if response.json:
            print(json_lib.dumps(response.json, indent=2))
        else:
            print(response.content.decode())
import bs4 28 | except ImportError: 29 | SKIP_BS4_TESTS = True 30 | 31 | 32 | class HtmlChecksTestCase(TestCase): 33 | def test_check_permissions_policy_should_exist(self): 34 | r = Response(None, "", None, 200, None, {"permissions-policy": "camera=()"}, None) 35 | result = check_permissions_policy_should_exist({"response": r}, print_output=False) 36 | self.assertTrue(result.passed) 37 | 38 | r = Response(None, "", None, 200, None, {}, None) 39 | result = check_permissions_policy_should_exist({"response": r}, print_output=False) 40 | self.assertFalse(result.passed) 41 | 42 | def test_check_frame_ancestors_should_exist(self): 43 | r = Response(None, "", None, 200, None, {"x-frame-options": "DENY"}, None) 44 | result = check_frame_ancestors_should_exist({"response": r}, print_output=False) 45 | self.assertTrue(result.passed) 46 | 47 | r = Response(None, "", None, 200, None, {"content-security-policy": "frame-ancestors 'none'"}, None) 48 | result = check_frame_ancestors_should_exist({"response": r}, print_output=False) 49 | self.assertTrue(result.passed) 50 | 51 | r = Response(None, "", None, 200, None, {}, None) 52 | result = check_frame_ancestors_should_exist({"response": r}, print_output=False) 53 | self.assertFalse(result.passed) 54 | 55 | def test_check_x_content_type_options_should_be_nosniff(self): 56 | r = Response(None, "", None, 200, None, {"x-content-type-options": "nosniff"}, None) 57 | result = check_x_content_type_options_should_be_nosniff({"response": r}, print_output=False) 58 | self.assertTrue(result.passed) 59 | 60 | r = Response(None, "", None, 200, None, {"x-content-type-options": "other-value"}, None) 61 | result = check_x_content_type_options_should_be_nosniff({"response": r}, print_output=False) 62 | self.assertFalse(result.passed) 63 | 64 | r = Response(None, "", None, 200, None, {}, None) 65 | result = check_x_content_type_options_should_be_nosniff({"response": r}, print_output=False) 66 | self.assertFalse(result.passed) 67 | 68 | def 
test_check_referrer_policy_should_be_set(self): 69 | r = Response(None, "", None, 200, None, {"referrer-policy": "no-referrer"}, None) 70 | result = check_referrer_policy_should_be_set({"response": r}, print_output=False) 71 | self.assertTrue(result.passed) 72 | 73 | r = Response(None, "", None, 200, None, {}, None) 74 | result = check_referrer_policy_should_be_set({"response": r}, print_output=False) 75 | self.assertFalse(result.passed) 76 | 77 | def test_check_x_xss_protection_should_not_exist(self): 78 | r = Response(None, "", None, 200, None, {}, None) 79 | result = check_x_xss_protection_should_not_exist({"response": r}, print_output=False) 80 | self.assertTrue(result.passed) 81 | 82 | r = Response(None, "", None, 200, None, {"x-xss-protection": "some-value"}, None) 83 | result = check_x_xss_protection_should_not_exist({"response": r}, print_output=False) 84 | self.assertFalse(result.passed) 85 | 86 | def test_check_html_starts_with_doctype(self): 87 | r = Response(None, b" ", None, 200, None, {}, None) 88 | result = check_html_starts_with_doctype({"response": r}, print_output=False) 89 | self.assertTrue(result.passed) 90 | 91 | r = Response(None, b"aaaa", None, 200, None, {}, None) 92 | result = check_html_starts_with_doctype({"response": r}, print_output=False) 93 | self.assertFalse(result.passed) 94 | 95 | def test_check_html_tag_includes_lang(self): 96 | r = Response(None, b"", None, 200, None, {}, None) 97 | result = check_html_tag_includes_lang({"response": r}, print_output=False) 98 | self.assertTrue(result.passed) 99 | 100 | r = Response(None, b"", None, 200, None, {}, None) 101 | result = check_html_tag_includes_lang({"response": r}, print_output=False) 102 | self.assertFalse(result.passed) 103 | 104 | r = Response(None, b"", None, 200, None, {}, None) 105 | result = check_html_tag_includes_lang({"response": r}, print_output=False) 106 | self.assertFalse(result.passed) 107 | 108 | def test_check_html_meta_charset(self): 109 | r = Response(None, b"", 
None, 200, None, {}, None) 110 | result = check_html_meta_charset({"response": r}, print_output=False) 111 | self.assertTrue(result.passed) 112 | 113 | r = Response(None, b"", None, 200, None, {}, None) 114 | result = check_html_meta_charset({"response": r}, print_output=False) 115 | self.assertFalse(result.passed) 116 | 117 | def test_check_html_includes_title(self): 118 | r = Response(None, b"Test", None, 200, None, {}, None) 119 | result = check_html_includes_title({"response": r}, print_output=False) 120 | self.assertTrue(result.passed) 121 | 122 | r = Response(None, b"", None, 200, None, {}, None) 123 | result = check_html_includes_title({"response": r}, print_output=False) 124 | self.assertFalse(result.passed) 125 | 126 | def test_check_html_includes_rel_icon(self): 127 | r = Response(None, b"", None, 200, None, {}, None) 128 | result = check_html_includes_rel_icon({"response": r}, print_output=False) 129 | self.assertTrue(result.passed) 130 | 131 | r = Response(None, b"", None, 200, None, {}, None) 132 | result = check_html_includes_rel_icon({"response": r}, print_output=False) 133 | self.assertTrue(result.passed) 134 | 135 | r = Response(None, b"", None, 200, None, {}, None) 136 | result = check_html_includes_rel_icon({"response": r}, print_output=False) 137 | self.assertTrue(result.passed) 138 | 139 | r = Response(None, b"", None, 200, None, {}, None) 140 | result = check_html_includes_rel_icon({"response": r}, print_output=False) 141 | self.assertTrue(result.passed) 142 | 143 | r = Response(None, b"", None, 200, None, {}, None) 144 | result = check_html_includes_rel_icon({"response": r}, print_output=False) 145 | self.assertFalse(result.passed) 146 | 147 | def test_check_html_should_not_use_schemeless_urls(self): 148 | r = Response(None, b"", None, 200, None, {}, None) 149 | result = check_html_should_not_use_schemeless_urls({"response": r}, print_output=False) 150 | self.assertTrue(result.passed) 151 | 152 | r = Response(None, b"='//'", None, 200, None, 
{}, None) 153 | result = check_html_should_not_use_schemeless_urls({"response": r}, print_output=False) 154 | self.assertFalse(result.passed) 155 | 156 | r = Response(None, b'="//"', None, 200, None, {}, None) 157 | result = check_html_should_not_use_schemeless_urls({"response": r}, print_output=False) 158 | self.assertFalse(result.passed) 159 | 160 | def test_check_html_should_not_use_unnecessary_entities(self): 161 | r = Response(None, b"", None, 200, None, {}, None) 162 | result = check_html_should_not_use_unnecessary_entities({"response": r}, print_output=False) 163 | self.assertTrue(result.passed) 164 | 165 | r = Response(None, b" ", None, 200, None, {}, None) 166 | result = check_html_should_not_use_unnecessary_entities({"response": r}, print_output=False) 167 | self.assertFalse(result.passed) 168 | 169 | r = Response(None, b" &"", None, 200, None, {}, None) 170 | result = check_html_should_not_use_unnecessary_entities({"response": r}, print_output=False) 171 | self.assertFalse(result.passed) 172 | 173 | def test_check_html_script_tags_use_sri(self): 174 | r = Response(None, b'', None, 200, None, {}, None) 175 | result = check_html_script_tags_use_sri({"response": r}, print_output=False) 176 | self.assertTrue(result.passed) 177 | 178 | r = Response(None, b'', None, 200, None, {}, None) 179 | result = check_html_script_tags_use_sri({"response": r}, print_output=False) 180 | self.assertFalse(result.passed) 181 | 182 | r = Response( 183 | None, 184 | b'', 185 | None, 186 | 200, 187 | None, 188 | {}, 189 | None, 190 | ) 191 | result = check_html_script_tags_use_sri({"response": r}, print_output=False) 192 | self.assertFalse(result.passed) 193 | 194 | def test_check_x_dns_prefetch_control_is_off(self): 195 | r = Response(None, b"", None, 200, None, {"x-dns-prefetch-control": "off"}, None) 196 | result = check_x_dns_prefetch_control_is_off({"response": r}, print_output=False) 197 | self.assertTrue(result.passed) 198 | 199 | r = Response(None, b"", None, 200, None, 
{"x-dns-prefetch-control": "on"}, None) 200 | result = check_x_dns_prefetch_control_is_off({"response": r}, print_output=False) 201 | self.assertFalse(result.passed) 202 | 203 | r = Response(None, b"", None, 200, None, {}, None) 204 | result = check_x_dns_prefetch_control_is_off({"response": r}, print_output=False) 205 | self.assertFalse(result.passed) 206 | 207 | def test_check_cdns_should_not_be_used(self): 208 | r = Response( 209 | None, b'', None, 200, None, {}, None 210 | ) 211 | result = check_cdns_should_not_be_used({"response": r}, print_output=False) 212 | self.assertTrue(result.passed) 213 | 214 | r = Response(None, b'', None, 200, None, {}, None) 215 | result = check_cdns_should_not_be_used({"response": r}, print_output=False) 216 | self.assertFalse(result.passed) 217 | 218 | r = Response(None, b'', None, 200, None, {}, None) 219 | result = check_cdns_should_not_be_used({"response": r}, print_output=False) 220 | self.assertFalse(result.passed) 221 | 222 | r = Response( 223 | None, 224 | b'', 225 | None, 226 | 200, 227 | None, 228 | {}, 229 | None, 230 | ) 231 | result = check_cdns_should_not_be_used({"response": r}, print_output=False) 232 | self.assertFalse(result.passed) 233 | 234 | def test_check_html_should_not_be_cached_for_more_than_24_hours(self): 235 | r = Response(None, b"", None, 200, None, {"cache-control": "max-age=86400"}, None) 236 | result = check_html_should_not_be_cached_for_more_than_24_hours({"response": r}, print_output=False) 237 | self.assertTrue(result.passed) 238 | 239 | r = Response(None, b"", None, 200, None, {"cache-control": "max-age=90000"}, None) 240 | result = check_html_should_not_be_cached_for_more_than_24_hours({"response": r}, print_output=False) 241 | self.assertFalse(result.passed) 242 | 243 | r = Response(None, b"", None, 200, None, {}, None) 244 | result = check_html_should_not_be_cached_for_more_than_24_hours({"response": r}, print_output=False) 245 | self.assertFalse(result.passed) 246 | 247 | r = Response(None, 
b"", None, 200, None, {"cache-control": "max-age=abc"}, None) 248 | result = check_html_should_not_be_cached_for_more_than_24_hours({"response": r}, print_output=False) 249 | self.assertFalse(result.passed) 250 | 251 | @skipIf(SKIP_BS4_TESTS, "beautifulsoup is not available") 252 | def test_check_rss_should_return_cors_header(self): 253 | mock_response = Response(None, b"", None, 200, None, {"access-control-allow-origin": "*"}, None) 254 | with patch("ready.checks.html.thttp.request", return_value=mock_response): 255 | r = Response( 256 | None, 257 | b'', 258 | None, 259 | 200, 260 | "https://ready.invalid", 261 | {}, 262 | None, 263 | ) 264 | result = check_rss_should_return_cors_header({"response": r}, print_output=False) 265 | self.assertTrue(result.passed) 266 | 267 | r = Response( 268 | None, 269 | b'', 270 | None, 271 | 200, 272 | "https://ready.invalid", 273 | {}, 274 | None, 275 | ) 276 | result = check_rss_should_return_cors_header({"response": r}, print_output=False) 277 | self.assertTrue(result.passed) 278 | 279 | r = Response( 280 | None, 281 | b'', 282 | None, 283 | 200, 284 | "https://ready.invalid", 285 | {}, 286 | None, 287 | ) 288 | result = check_rss_should_return_cors_header({"response": r}, print_output=False) 289 | self.assertTrue(result.passed) 290 | 291 | mock_response = Response(None, b"", None, 200, None, {}, None) 292 | with patch("ready.checks.html.thttp.request", return_value=mock_response): 293 | r = Response( 294 | None, 295 | b'', 296 | None, 297 | 200, 298 | {}, 299 | {}, 300 | None, 301 | ) 302 | result = check_rss_should_return_cors_header({"response": r}, print_output=False) 303 | self.assertFalse(result.passed) 304 | -------------------------------------------------------------------------------- /ready/ready.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | import os 4 | import sys 5 | import urllib 6 | 7 | from importlib import resources 8 | from . 
import checks as checks_module 9 | 10 | VERSION = "1.7.0" 11 | 12 | from ready.checks.bad_response import ( 13 | check_bad_response_cloudflare, 14 | check_bad_response_kasada, 15 | ) 16 | from ready.checks.content import ( 17 | check_http_cache_control_is_included, 18 | check_http_content_type_header_contains_charset, 19 | check_http_expires_header_not_used_without_cache_control, 20 | check_http_p3p_header_is_not_set, 21 | check_http_response_should_be_gzipped, 22 | check_http_response_should_include_content_type, 23 | ) 24 | from ready.checks.cookies import ( 25 | check_cookies_should_be_httponly, 26 | check_cookies_should_be_samesite, 27 | check_cookies_should_be_secure, 28 | ) 29 | from ready.checks.corp_coop_coep import ( 30 | check_cross_origin_embedder_policy_should_be_require_corp, 31 | check_cross_origin_opener_policy_should_be_sameorigin, 32 | check_cross_origin_resource_policy_should_be_sameorigin, 33 | ) 34 | from ready.checks.csp import ( 35 | check_csp_includes_default_or_script_directive, 36 | check_csp_must_not_include_report_sample, 37 | check_csp_must_not_include_unsafe_eval, 38 | check_csp_must_not_include_unsafe_inline, 39 | check_csp_should_exist, 40 | check_csp_should_not_include_reportto, 41 | check_csp_must_not_include_reporturi, 42 | check_csp_should_only_include_valid_directives, 43 | check_csp_should_start_with_defaultsrc_none, 44 | check_csp_upgrade_insecure_requests, 45 | ) 46 | from ready.checks.dns import check_aaaa_record_exists 47 | from ready.checks.email import ( 48 | check_dmarc_record_should_exist, 49 | check_dmarc_record_should_reject_failures, 50 | check_spf_dash_all, 51 | check_spf_dns_record_does_not_exist, 52 | check_spf_record_should_exist, 53 | check_spf_txt_record_should_disallow_all, 54 | check_spf_uses_less_than_10_requests, 55 | ) 56 | from ready.checks.hsts import ( 57 | check_hsts_header_should_be_included_in_response, 58 | check_hsts_header_should_have_a_long_max_age, 59 | 
check_hsts_header_should_have_includesubdomains, 60 | check_hsts_header_should_have_preload, 61 | ) 62 | from ready.checks.html import ( 63 | check_cdns_should_not_be_used, 64 | check_frame_ancestors_should_exist, 65 | check_html_includes_rel_icon, 66 | check_html_includes_title, 67 | check_html_meta_charset, 68 | check_html_script_tags_use_sri, 69 | check_html_should_not_be_cached_for_more_than_24_hours, 70 | check_html_should_not_use_schemeless_urls, 71 | check_html_should_not_use_unnecessary_entities, 72 | check_html_starts_with_doctype, 73 | check_html_tag_includes_lang, 74 | check_permissions_policy_should_exist, 75 | check_referrer_policy_should_be_set, 76 | check_rss_should_return_cors_header, 77 | check_x_content_type_options_should_be_nosniff, 78 | check_x_dns_prefetch_control_is_off, 79 | check_x_xss_protection_should_not_exist, 80 | ) 81 | from ready.checks.leaky_headers import check_should_not_include_leaky_headers 82 | from ready.checks.ns import check_at_least_two_nameservers_configured 83 | from ready.checks.redirect import check_http_to_https_redirect 84 | from ready.checks.report_to import check_report_to_header_must_not_be_included_in_response 85 | from ready.checks.ssl import ( 86 | check_dns_caa_record_should_exist, 87 | check_dns_caa_record_should_include_accounturi, 88 | check_dns_caa_record_should_include_validationmethods, 89 | check_ssl_certificate_should_be_trusted, 90 | check_ssl_connection_fails_with_tls_1_0, 91 | check_ssl_connection_fails_with_tls_1_1, 92 | check_ssl_expiry_should_be_greater_than_five_days, 93 | check_ssl_expiry_should_be_less_than_one_year, 94 | ) 95 | from ready.checks.status import check_http_response_should_be_200 96 | from ready.checks.swagger import check_swagger_should_not_return_200 97 | from ready.checks.well_known import ( 98 | check_favicon_is_served, 99 | check_robots_txt_exists, 100 | check_security_txt_exists, 101 | check_security_txt_not_expired, 102 | ) 103 | from ready.thttp import pretty, request 104 
USE_FLD = True

try:
    from tld import get_fld
except ImportError:
    USE_FLD = False


# Request headers that mimic a current macOS Safari so sites serve their
# normal content rather than a bot-detection page.
DEFAULT_HEADERS = {
    # MacOS Safari
    "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.2.1 Safari/605.1.15",
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
    "accept-language": "en-AU,en;q=0.9",
    "accept-encoding": "gzip",
}

# Named DNS-over-HTTPS resolvers selectable with --dns-resolver=<name>.
DNS_RESOLVERS = {
    "quad9": "https://dns.quad9.net:5053/dns-query",
    "google": "https://dns.google/resolve",
    "doh.li": "https://doh.li/dns-query",
    "dns.sb": "https://doh.dns.sb/dns-query",
}


def response_or_none(url, name="", request_filter="", **kwargs):
    """Make an HTTP request and return the response, or None on any failure.

    If ``request_filter`` is set and is not a substring of ``name``, the
    request is skipped entirely (development aid to limit outbound requests).
    """
    if request_filter and request_filter not in name:
        print(f"Skipping HTTP request {name}")
        return None

    try:
        return request(url, **kwargs)
    except urllib.error.URLError:
        return None
    except Exception as e:
        # Best effort: report and treat any other failure as "no response".
        print(url, type(e))
        return None


def ready(
    domain,
    print_headers=False,
    print_content=False,
    json_output=False,
    hide_output=False,
    fuzz=False,
    check_filter=None,
    request_filter=None,
    dns_resolver="https://dns.google/resolve",
    extra_args=None,
):
    """Run every readiness check against ``domain`` and return the results.

    Args:
        domain: Domain (optionally with a path), without a scheme.
        print_headers: Print the headers of the main HTTPS response.
        print_content: Print the body of the main HTTPS response.
        json_output: Print a JSON report of all results to stdout.
        hide_output: Suppress per-check text output.
        fuzz: Include checks that probe extra URLs (only run on your own domain).
        check_filter: Only run checks whose function name contains this string.
        request_filter: Only make HTTP requests whose name contains this string.
        dns_resolver: Full URI of the DNS-over-HTTPS resolver to use.
        extra_args: Extra keyword arguments forwarded to every check.

    Returns:
        A list of check results, or None when the domain did not respond
        over HTTPS and no request filter was in place.
    """
    # Copy so the caller's dict is never mutated; also fixes the shared
    # mutable-default-argument bug (this function writes into extra_args).
    extra_args = {} if extra_args is None else dict(extra_args)

    domain_with_no_path = urllib.parse.urlparse("https://" + domain).hostname

    if USE_FLD:
        fld = get_fld(domain, fix_protocol=True)
    else:
        fld = "Disabled. Install tld to improve support for subdomains."

    if not hide_output:
        print(f"URL (no scheme): {domain}, Domain (no path): {domain_with_no_path}, Second Level Domain: {fld}")

    responses = {
        "http_response": response_or_none(
            f"http://{domain}", "http_response", request_filter, verify=False, headers=DEFAULT_HEADERS, timeout=3
        ),
        "response": response_or_none(
            f"https://{domain}", "response", request_filter, verify=False, headers=DEFAULT_HEADERS, timeout=3
        ),
    }

    if not responses["response"]:
        print(f"No response from https://{domain}")

        if request_filter:
            print("Request filter in place, continuing...")
        else:
            return None

    responses["security_txt_response"] = response_or_none(
        f"https://{domain_with_no_path}/.well-known/security.txt",
        "security_txt_response",
        request_filter,
        headers=DEFAULT_HEADERS,
        timeout=3,
    )

    responses["robots_txt_response"] = response_or_none(
        f"https://{domain_with_no_path}/robots.txt", "robots_txt_response", request_filter, headers=DEFAULT_HEADERS, timeout=3
    )

    responses["favicon_response"] = response_or_none(
        f"https://{domain_with_no_path}/favicon.ico",
        "favicon_response",
        request_filter,
        verify=False,
        headers=DEFAULT_HEADERS,
        timeout=3,
    )

    # DNS lookups go over DoH so they can be filtered like the HTTP requests.
    # Fixed: the A-record lookup was previously registered under the name
    # "dns_aaaa_response", which broke --request-filter matching for it.
    dns_lookups = [
        ("dns_ns_response", domain_with_no_path, "NS"),
        ("dns_mx_response", domain_with_no_path, "MX"),
        ("dns_txt_response", domain_with_no_path, "TXT"),
        ("dns_spf_response", domain_with_no_path, "SPF"),
        ("dns_caa_response", domain_with_no_path, "CAA"),
        ("dns_a_response", domain_with_no_path, "A"),
        ("dns_aaaa_response", domain_with_no_path, "AAAA"),
        ("dns_dmarc_response", f"_dmarc.{domain_with_no_path}", "TXT"),
    ]
    for name, record_domain, record_type in dns_lookups:
        responses[name] = response_or_none(f"{dns_resolver}?name={record_domain}&type={record_type}", name, request_filter)

    if USE_FLD and domain != fld:
        responses["response_fld"] = response_or_none(
            f"https://{fld}", "response_fld", request_filter, verify=False, headers=DEFAULT_HEADERS, timeout=3
        )

        # Fixed: these previously passed no name/request_filter, so any
        # --request-filter silently skipped every fld-level DNS lookup.
        fld_dns_lookups = [
            ("dns_ns_response_fld", fld, "NS"),
            ("dns_mx_response_fld", fld, "MX"),
            ("dns_spf_response_fld", fld, "SPF"),
            ("dns_txt_response_fld", fld, "TXT"),
            ("dns_dmarc_response_fld", f"_dmarc.{fld}", "TXT"),
            ("dns_caa_response_fld", fld, "CAA"),
        ]
        for name, record_domain, record_type in fld_dns_lookups:
            responses[name] = response_or_none(f"{dns_resolver}?name={record_domain}&type={record_type}", name, request_filter)

    is_html = responses["response"] and "html" in responses["response"].headers.get("content-type", "")

    # An IPv6-only domain has AAAA records but no A records.
    # Fixed: this previously read the non-existent key "dns_a_records",
    # so is_ipv6 was always False.
    extra_args["is_ipv6"] = False
    if responses.get("dns_a_response") and responses.get("dns_aaaa_response"):
        a_records = [x["data"] for x in responses["dns_a_response"].json.get("Answer", [])]
        aaaa_records = [x["data"] for x in responses["dns_aaaa_response"].json.get("Answer", [])]
        extra_args["is_ipv6"] = len(a_records) == 0 and len(aaaa_records) > 0

    # TODO: accept argument to _not_ print to stdout
    if print_headers:
        pretty(responses["response"], content=False)
        print()

    if print_content:
        print(responses["response"].content)

    # bad response checks go first
    checks = [
        check_bad_response_kasada,
        check_bad_response_cloudflare,
    ]

    checks.extend(
        [
            check_http_to_https_redirect,
            check_http_response_should_be_200,
            check_http_response_should_include_content_type,
            check_aaaa_record_exists,
            check_hsts_header_should_be_included_in_response,
            check_hsts_header_should_have_a_long_max_age,
            check_hsts_header_should_have_includesubdomains,
            check_hsts_header_should_have_preload,
            check_csp_should_exist,
            check_csp_should_start_with_defaultsrc_none,
            check_csp_includes_default_or_script_directive,
            check_csp_must_not_include_unsafe_inline,
            check_csp_must_not_include_unsafe_eval,
            check_csp_must_not_include_report_sample,
            check_csp_upgrade_insecure_requests,
            check_csp_must_not_include_reporturi,
            check_csp_should_not_include_reportto,
            check_csp_should_only_include_valid_directives,
            check_report_to_header_must_not_be_included_in_response,
            check_robots_txt_exists,
            check_security_txt_exists,
            check_security_txt_not_expired,
            check_favicon_is_served,
            check_http_response_should_be_gzipped,
            check_http_content_type_header_contains_charset,
            check_http_expires_header_not_used_without_cache_control,
            check_http_cache_control_is_included,
            check_http_p3p_header_is_not_set,
            check_referrer_policy_should_be_set,
            check_cross_origin_resource_policy_should_be_sameorigin,
            check_cross_origin_opener_policy_should_be_sameorigin,
            check_cross_origin_embedder_policy_should_be_require_corp,
            check_should_not_include_leaky_headers,
            check_ssl_expiry_should_be_less_than_one_year,
            check_ssl_expiry_should_be_greater_than_five_days,
            check_ssl_certificate_should_be_trusted,
            check_ssl_connection_fails_with_tls_1_1,
            check_ssl_connection_fails_with_tls_1_0,
            check_dns_caa_record_should_exist,
            check_dns_caa_record_should_include_accounturi,
            check_dns_caa_record_should_include_validationmethods,
            check_at_least_two_nameservers_configured,
            check_cookies_should_be_samesite,
            check_cookies_should_be_secure,
            check_cookies_should_be_httponly,
            check_spf_dash_all,
            check_spf_record_should_exist,
            check_spf_dns_record_does_not_exist,
            check_spf_txt_record_should_disallow_all,
            check_dmarc_record_should_exist,
            check_dmarc_record_should_reject_failures,
            check_spf_uses_less_than_10_requests,
        ]
    )

    # HTML-specific checks only make sense when the response is HTML.
    if is_html:
        checks.extend(
            [
                check_permissions_policy_should_exist,
                check_frame_ancestors_should_exist,
                check_x_content_type_options_should_be_nosniff,
                check_x_xss_protection_should_not_exist,
                check_html_starts_with_doctype,
                check_html_tag_includes_lang,
                check_html_meta_charset,
                check_html_includes_title,
                check_html_includes_rel_icon,
                check_html_should_not_use_schemeless_urls,
                check_html_script_tags_use_sri,
                check_html_should_not_use_unnecessary_entities,
                check_html_should_not_be_cached_for_more_than_24_hours,
                check_x_dns_prefetch_control_is_off,
                check_cdns_should_not_be_used,
                check_rss_should_return_cors_header,
            ]
        )

    if fuzz:
        checks.extend(
            [
                check_swagger_should_not_return_200,
            ]
        )

    extra_args["print_output"] = not hide_output
    extra_args["dns_resolver"] = dns_resolver

    results = []
    for check in checks:
        if check_filter and check_filter not in check.__name__:
            continue

        result = check(responses, domain=domain, domain_with_no_path=domain_with_no_path, **extra_args)
        if result:
            results.append(result)

    if json_output:
        print(
            json.dumps(
                {
                    "domain": domain,
                    "score": score_from_results(results),
                    "checks": {
                        r.check: {
                            "passed": r.passed,
                            "message": r.message,
                        }
                        for r in results
                    },
                    # timezone-aware replacement for the deprecated utcnow();
                    # output format is unchanged
                    "when": datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
                    "version": VERSION,
                },
                indent=2,
            )
        )

    return results


def score_from_results(results):
    """Return a score out of 100; each non-warning failure deducts 3 points."""
    hard_failures = [x for x in results if not x.passed and not x.warn_on_fail]
    return 100 - 3 * len(hard_failures)


def parse_args(args):
    """Parse command line arguments into a dict.

    ``--flag`` becomes ``{"--flag": True}``; ``--key=value`` becomes
    ``{"--key": value}`` (converted to int when numeric; everything after the
    first ``=`` is kept as the value). Positional arguments are collected
    under the ``"[]"`` key.
    """
    result = {}
    for arg in args:
        # Fixed: flags were previously detected with `"--" in arg`, which
        # also swallowed positional values that merely contain "--";
        # startswith matches the positional-argument test below.
        if not arg.startswith("--"):
            continue
        key, sep, value = arg.partition("=")
        if not sep:
            result[key] = True
        elif value.isnumeric():
            result[key] = int(value)
        else:
            result[key] = value

    result["[]"] = [a for a in args if not a.startswith("--")]
    return result


def usage():
    """Print command line usage information."""
    print("ready")
    print("")
    print("Usage: ready.py [--headers] [--content] [--json] [--quiet] [--score] [--fuzz] ")
    print("")
    print("  --headers          Output the headers from the HTTPS request made to the domain")
    print("  --content          Output the content from the HTTPS request made to the domain")
    print("  --fuzz             Include checks that fuzz urls (only run this on your own domain)")
    print("  --json             Provide JSON output")
    print("  --quiet            No text output")
    print("  --score            Print a score out of 100 for this domain")
    print("  --doc              Print the list of check names")
    print("  --version          Print version information")

    print("\nDevelopment / experimental options for filtering checks and HTTP requests during testing:")
    print("")
    print("  --check-filter=    Only run checks that match the provided filter")
    print("  --request-filter=  Only make HTTP requests that match the provided filter")


def cli():
    """Command line entry point: parse arguments and run the checks."""
    args = parse_args(sys.argv[1:])

    if "--version" in args:
        print(f"ready {VERSION}")
        sys.exit()

    if "--doc" in args:
        # Check names are declared as "# Check: ..." comments in each module.
        # read_text() avoids the file-handle leak of the previous open() loop.
        for f in resources.files(checks_module).iterdir():
            if f.name.endswith(".py"):
                for line in f.read_text().splitlines():
                    if line.strip().startswith("# Check: "):
                        print(line.strip().replace("# Check: ", "- "))
        sys.exit()

    if "--help" in args or not args["[]"]:
        usage()
        sys.exit()

    # str() guards against "--dns-resolver" passed without a value (stored as
    # True by parse_args), which previously crashed on .startswith().
    resolver_name = str(args.get("--dns-resolver", "google"))
    if not resolver_name.startswith("http"):
        if resolver_name.lower() in DNS_RESOLVERS:
            dns_resolver = DNS_RESOLVERS[resolver_name.lower()]
        else:
            print(
                f"{resolver_name} is not a valid DNS resolver name. Provide one of {DNS_RESOLVERS.keys()} or a full URI for the DoH resolver."
            )
            sys.exit(1)
    else:
        dns_resolver = resolver_name

    results = ready(
        args["[]"][0],
        print_headers=args.get("--headers", False),
        print_content=args.get("--content", False),
        json_output=args.get("--json", False),
        hide_output=args.get("--quiet", False),
        fuzz=args.get("--fuzz", False),
        check_filter=args.get("--check-filter", ""),
        request_filter=args.get("--request-filter", ""),
        dns_resolver=dns_resolver,
    )

    # ready() returns None when the domain did not respond; previously this
    # crashed with a TypeError when --score was also passed.
    if "--score" in args and results is not None:
        print(f"Score: {score_from_results(results)}/100")


if __name__ == "__main__":
    cli()