├── .gitignore ├── .gitmodules ├── LICENSE_CATLFISH ├── README ├── certkeys.py ├── certtools.py ├── chromium-0001-Support-requireCT-in-HSTS.patch ├── fetchallcerts.py ├── fetchalllogkeys.py ├── findcerts.py ├── findcerts_extra.py ├── parse-sct.py ├── submit-cert.py ├── update-all.sh └── write-sct.py /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.pyc -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "pyx509"] 2 | path = pyx509 3 | url = git@github.com:tomrittervg/pyx509.git 4 | [submodule "pyasn1"] 5 | path = pyasn1 6 | url = git@github.com:tomrittervg/pyasn1.git 7 | -------------------------------------------------------------------------------- /LICENSE_CATLFISH: -------------------------------------------------------------------------------- 1 | This file contains the license for Catlfish, an Erlang implementation 2 | of a Certificate Transparency log server (RFC 6962). 3 | 4 | It also contains the licenses for other components used by Catlfish. 5 | -------------------------------------------------------------------------------- 6 | Catlfish is distributed under this license: 7 | 8 | Copyright (c) 2014-2015, NORDUnet A/S 9 | 10 | Redistribution and use in source and binary forms, with or without 11 | modification, are permitted provided that the following conditions are 12 | met: 13 | 14 | * Redistributions of source code must retain the above copyright 15 | notice, this list of conditions and the following disclaimer. 16 | 17 | * Redistributions in binary form must reproduce the above copyright 18 | notice, this list of conditions and the following disclaimer in the 19 | documentation and/or other materials provided with distribution. 
20 | 21 | * Neither the name of the copyright holders nor the names of the 22 | contributors may be used to endorse or promote products derived from 23 | this software without specific prior written permission. 24 | 25 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 26 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 27 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 28 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 29 | HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 30 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 31 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS 32 | OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 33 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR 34 | TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE 35 | USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 36 | DAMAGE. 37 | -------------------------------------------------------------------------------- /README: -------------------------------------------------------------------------------- 1 | See: 2 | - https://ritter.vg/blog-querying_ct_logs.html 3 | - https://ritter.vg/blog-require_certificate_transparency.html 4 | 5 | Certain files from https://git.nordu.net/catlfish.git: 6 | certkeys.py 7 | certtools.py 8 | fetchallcerts.py 9 | See LICENSE_CATLFISH 10 | 11 | See also https://github.com/alex/ct-submitter which also submits certs to logs. 
12 | -------------------------------------------------------------------------------- /certkeys.py: -------------------------------------------------------------------------------- 1 | 2 | publickeys = { 3 | "https://ct.googleapis.com/pilot/": 4 | "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfahLEimAoz2t01p3uMziiLOl/fHTD" 5 | "M0YDOhBRuiBARsV4UvxG2LdNgoIGLrtCzWE0J5APC2em4JlvR8EEEFMoA==", 6 | 7 | "https://flimsy.ct.nordu.net/": 8 | "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4qWq6afhBUi0OdcWUYhyJLNXTkGqQ9" 9 | "PMS5lqoCgkV2h1ZvpNjBH2u8UbgcOQwqDo66z6BWQJGolozZYmNHE2kQ==", 10 | } 11 | -------------------------------------------------------------------------------- /certtools.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2014, NORDUnet A/S. 2 | # See LICENSE for licensing information. 3 | 4 | import subprocess 5 | import json 6 | import base64 7 | import urllib 8 | import urllib2 9 | import ssl 10 | import urlparse 11 | import struct 12 | import sys 13 | import hashlib 14 | import ecdsa 15 | import datetime 16 | import cStringIO 17 | import zipfile 18 | import shutil 19 | from certkeys import publickeys 20 | 21 | def get_cert_info(s): 22 | p = subprocess.Popen( 23 | ["openssl", "x509", "-noout", "-subject", "-issuer", "-inform", "der"], 24 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, 25 | stderr=subprocess.PIPE) 26 | parsed = p.communicate(s) 27 | if parsed[1]: 28 | print "ERROR:", parsed[1] 29 | sys.exit(1) 30 | result = {} 31 | for line in parsed[0].split("\n"): 32 | (key, sep, value) = line.partition("=") 33 | if sep == "=": 34 | result[key] = value 35 | return result 36 | 37 | 38 | def get_pemlike(filename, marker): 39 | return get_pemlike_from_file(open(filename), marker) 40 | 41 | def get_pemlike_from_file(f, marker): 42 | entries = [] 43 | entry = "" 44 | inentry = False 45 | 46 | for line in f: 47 | line = line.strip() 48 | if line == "-----BEGIN " + marker + "-----": 49 | entry = "" 50 | inentry = True 51 | elif 
line == "-----END " + marker + "-----": 52 | entries.append(base64.decodestring(entry)) 53 | inentry = False 54 | elif inentry: 55 | entry += line 56 | return entries 57 | 58 | def get_certs_from_file(certfile): 59 | return get_pemlike(certfile, "CERTIFICATE") 60 | 61 | def get_certs_from_string(s): 62 | f = cStringIO.StringIO(s) 63 | return get_pemlike_from_file(f, "CERTIFICATE") 64 | 65 | def get_precerts_from_string(s): 66 | f = cStringIO.StringIO(s) 67 | return get_pemlike_from_file(f, "PRECERTIFICATE") 68 | 69 | def get_eckey_from_file(keyfile): 70 | keys = get_pemlike(keyfile, "EC PRIVATE KEY") 71 | assert len(keys) == 1 72 | return keys[0] 73 | 74 | def get_public_key_from_file(keyfile): 75 | keys = get_pemlike(keyfile, "PUBLIC KEY") 76 | assert len(keys) == 1 77 | return keys[0] 78 | 79 | def get_root_cert(issuer): 80 | accepted_certs = \ 81 | json.loads(open("googlelog-accepted-certs.txt").read())["certificates"] 82 | 83 | root_cert = None 84 | 85 | for accepted_cert in accepted_certs: 86 | subject = get_cert_info(base64.decodestring(accepted_cert))["subject"] 87 | if subject == issuer: 88 | root_cert = base64.decodestring(accepted_cert) 89 | 90 | return root_cert 91 | 92 | class sslparameters: 93 | sslcontext = None 94 | 95 | def create_ssl_context(cafile=None): 96 | try: 97 | sslparameters.sslcontext = ssl.create_default_context(cafile=cafile) 98 | except AttributeError: 99 | sslparameters.sslcontext = None 100 | 101 | def get_opener(): 102 | try: 103 | opener = urllib2.build_opener(urllib2.HTTPSHandler(context=sslparameters.sslcontext)) 104 | except TypeError: 105 | opener = urllib2.build_opener(urllib2.HTTPSHandler()) 106 | return opener 107 | 108 | def urlopen(url, data=None): 109 | return get_opener().open(url, data) 110 | 111 | def pyopenssl_https_get(url): 112 | """ 113 | HTTPS GET-function to use when running old Python < 2.7 114 | """ 115 | from OpenSSL import SSL 116 | import socket 117 | 118 | # TLSv1 is the best we can get on Python 2.6 119 | 
context = SSL.Context(SSL.TLSv1_METHOD) 120 | sock = SSL.Connection(context, socket.socket(socket.AF_INET, socket.SOCK_STREAM)) 121 | 122 | url_without_scheme = url.split('https://')[-1] 123 | host = url_without_scheme.split('/')[0] 124 | path = url_without_scheme.split('/', 1)[1] 125 | http_get_request = ("GET /{path} HTTP/1.1\r\n" 126 | "Host: {host}\r\n" 127 | "\r\n" 128 | ).format(path=path, host=host) 129 | 130 | sock.connect((host, 443)) 131 | sock.write(http_get_request) 132 | response = sock.recv(1024) 133 | response_lines = response.rsplit('\n') 134 | 135 | # We are only interested in the actual response, 136 | # without headers, contained in the last line. 137 | return response_lines[len(response_lines) - 1] 138 | 139 | def get_sth(baseurl): 140 | result = urlopen(baseurl + "ct/v1/get-sth").read() 141 | return json.loads(result) 142 | 143 | def get_proof_by_hash(baseurl, hash, tree_size): 144 | try: 145 | params = urllib.urlencode({"hash":base64.b64encode(hash), 146 | "tree_size":tree_size}) 147 | result = \ 148 | urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params).read() 149 | return json.loads(result) 150 | except urllib2.HTTPError, e: 151 | print "ERROR:", e.read() 152 | sys.exit(1) 153 | 154 | def get_consistency_proof(baseurl, tree_size1, tree_size2): 155 | try: 156 | params = urllib.urlencode({"first":tree_size1, 157 | "second":tree_size2}) 158 | result = \ 159 | urlopen(baseurl + "ct/v1/get-sth-consistency?" 
+ params).read() 160 | return json.loads(result)["consistency"] 161 | except urllib2.HTTPError, e: 162 | print "ERROR:", e.read() 163 | sys.exit(1) 164 | 165 | def tls_array(data, length_len): 166 | length_bytes = struct.pack(">Q", len(data))[-length_len:] 167 | return length_bytes + data 168 | 169 | def unpack_tls_array(packed_data, length_len): 170 | padded_length = ["\x00"] * 8 171 | padded_length[-length_len:] = packed_data[:length_len] 172 | (length,) = struct.unpack(">Q", "".join(padded_length)) 173 | unpacked_data = packed_data[length_len:length_len+length] 174 | assert len(unpacked_data) == length, \ 175 | "data is only %d bytes long, but length is %d bytes" % \ 176 | (len(unpacked_data), length) 177 | rest_data = packed_data[length_len+length:] 178 | return (unpacked_data, rest_data) 179 | 180 | def add_chain(baseurl, submission): 181 | try: 182 | result = urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission)).read() 183 | return json.loads(result) 184 | except urllib2.HTTPError, e: 185 | print "ERROR", e.code,":", e.read() 186 | if e.code == 400: 187 | return None 188 | sys.exit(1) 189 | except ValueError, e: 190 | print "==== FAILED REQUEST ====" 191 | print submission 192 | print "======= RESPONSE =======" 193 | print result 194 | print "========================" 195 | raise e 196 | 197 | def add_prechain(baseurl, submission): 198 | try: 199 | result = urlopen(baseurl + "ct/v1/add-pre-chain", 200 | json.dumps(submission)).read() 201 | return json.loads(result) 202 | except urllib2.HTTPError, e: 203 | print "ERROR", e.code,":", e.read() 204 | if e.code == 400: 205 | return None 206 | sys.exit(1) 207 | except ValueError, e: 208 | print "==== FAILED REQUEST ====" 209 | print submission 210 | print "======= RESPONSE =======" 211 | print result 212 | print "========================" 213 | raise e 214 | 215 | def get_entries(baseurl, start, end): 216 | params = urllib.urlencode({"start":start, "end":end}) 217 | try: 218 | result = urlopen(baseurl + 
"ct/v1/get-entries?" + params).read() 219 | return json.loads(result) 220 | except urllib2.HTTPError, e: 221 | print "ERROR:", e.read() 222 | sys.exit(1) 223 | 224 | def extract_precertificate(precert_chain_entry): 225 | (precert, certchain) = unpack_tls_array(precert_chain_entry, 3) 226 | return (precert, certchain) 227 | 228 | def decode_certificate_chain(packed_certchain): 229 | (unpacked_certchain, rest) = unpack_tls_array(packed_certchain, 3) 230 | assert len(rest) == 0 231 | certs = [] 232 | while len(unpacked_certchain): 233 | (cert, rest) = unpack_tls_array(unpacked_certchain, 3) 234 | certs.append(cert) 235 | unpacked_certchain = rest 236 | return certs 237 | 238 | def decode_signature(signature): 239 | (hash_alg, signature_alg) = struct.unpack(">bb", signature[0:2]) 240 | (unpacked_signature, rest) = unpack_tls_array(signature[2:], 2) 241 | assert rest == "" 242 | return (hash_alg, signature_alg, unpacked_signature) 243 | 244 | def encode_signature(hash_alg, signature_alg, unpacked_signature): 245 | signature = struct.pack(">bb", hash_alg, signature_alg) 246 | signature += tls_array(unpacked_signature, 2) 247 | return signature 248 | 249 | def check_signature(baseurl, signature, data, publickey=None): 250 | if publickey == None: 251 | if baseurl in publickeys: 252 | publickey = base64.decodestring(publickeys[baseurl]) 253 | else: 254 | print >>sys.stderr, "Public key for", baseurl, \ 255 | "not found, specify key file with --publickey" 256 | sys.exit(1) 257 | (hash_alg, signature_alg, unpacked_signature) = decode_signature(signature) 258 | assert hash_alg == 4, \ 259 | "hash_alg is %d, expected 4" % (hash_alg,) # sha256 260 | assert signature_alg == 3, \ 261 | "signature_alg is %d, expected 3" % (signature_alg,) # ecdsa 262 | 263 | vk = ecdsa.VerifyingKey.from_der(publickey) 264 | vk.verify(unpacked_signature, data, hashfunc=hashlib.sha256, 265 | sigdecode=ecdsa.util.sigdecode_der) 266 | 267 | def parse_auth_header(authheader): 268 | splittedheader = 
authheader.split(";") 269 | (signature, rawoptions) = (splittedheader[0], splittedheader[1:]) 270 | options = dict([(e.partition("=")[0], e.partition("=")[2]) for e in rawoptions]) 271 | return (base64.b64decode(signature), options) 272 | 273 | def check_auth_header(authheader, expected_key, publickeydir, data, path): 274 | if expected_key == None: 275 | return True 276 | (signature, options) = parse_auth_header(authheader) 277 | keyname = options.get("key") 278 | if keyname != expected_key: 279 | raise Exception("Response claimed to come from %s, expected %s" % (keyname, expected_key)) 280 | publickey = get_public_key_from_file(publickeydir + "/" + keyname + ".pem") 281 | vk = ecdsa.VerifyingKey.from_der(publickey) 282 | vk.verify(signature, "%s\0%s\0%s" % ("REPLY", path, data), hashfunc=hashlib.sha256, 283 | sigdecode=ecdsa.util.sigdecode_der) 284 | return True 285 | 286 | def http_request(url, data=None, key=None, verifynode=None, publickeydir="."): 287 | opener = get_opener() 288 | 289 | (keyname, keyfile) = key 290 | privatekey = get_eckey_from_file(keyfile) 291 | sk = ecdsa.SigningKey.from_der(privatekey) 292 | parsed_url = urlparse.urlparse(url) 293 | if data == None: 294 | data_to_sign = parsed_url.query 295 | method = "GET" 296 | else: 297 | data_to_sign = data 298 | method = "POST" 299 | signature = sk.sign("%s\0%s\0%s" % (method, parsed_url.path, data_to_sign), hashfunc=hashlib.sha256, 300 | sigencode=ecdsa.util.sigencode_der) 301 | opener.addheaders = [('X-Catlfish-Auth', base64.b64encode(signature) + ";key=" + keyname)] 302 | result = opener.open(url, data) 303 | authheader = result.info().get('X-Catlfish-Auth') 304 | data = result.read() 305 | check_auth_header(authheader, verifynode, publickeydir, data, parsed_url.path) 306 | return data 307 | 308 | def get_signature(baseurl, data, key=None): 309 | try: 310 | params = json.dumps({"plop_version":1, "data": base64.b64encode(data)}) 311 | result = http_request(baseurl + "plop/v1/signing/sth", params, 
key=key) 312 | parsed_result = json.loads(result) 313 | return base64.b64decode(parsed_result.get(u"result")) 314 | except urllib2.URLError, e: 315 | print >>sys.stderr, "ERROR: get_signature", e.reason 316 | sys.exit(1) 317 | except urllib2.HTTPError, e: 318 | print "ERROR: get_signature", e.read() 319 | raise e 320 | 321 | def create_signature(baseurl, data, key=None): 322 | unpacked_signature = get_signature(baseurl, data, key) 323 | return encode_signature(4, 3, unpacked_signature) 324 | 325 | def check_sth_signature(baseurl, sth, publickey=None): 326 | signature = base64.decodestring(sth["tree_head_signature"]) 327 | 328 | version = struct.pack(">b", 0) 329 | signature_type = struct.pack(">b", 1) 330 | timestamp = struct.pack(">Q", sth["timestamp"]) 331 | tree_size = struct.pack(">Q", sth["tree_size"]) 332 | hash = base64.decodestring(sth["sha256_root_hash"]) 333 | tree_head = version + signature_type + timestamp + tree_size + hash 334 | 335 | check_signature(baseurl, signature, tree_head, publickey=publickey) 336 | 337 | def create_sth_signature(tree_size, timestamp, root_hash, baseurl, key=None): 338 | version = struct.pack(">b", 0) 339 | signature_type = struct.pack(">b", 1) 340 | timestamp_packed = struct.pack(">Q", timestamp) 341 | tree_size_packed = struct.pack(">Q", tree_size) 342 | tree_head = version + signature_type + timestamp_packed + tree_size_packed + root_hash 343 | 344 | return create_signature(baseurl, tree_head, key=key) 345 | 346 | def check_sct_signature(baseurl, signed_entry, sct, precert=False, publickey=None): 347 | if publickey == None: 348 | publickey = base64.decodestring(publickeys[baseurl]) 349 | calculated_logid = hashlib.sha256(publickey).digest() 350 | received_logid = base64.decodestring(sct["id"]) 351 | assert calculated_logid == received_logid, \ 352 | "log id is incorrect:\n should be %s\n got %s" % \ 353 | (calculated_logid.encode("hex_codec"), 354 | received_logid.encode("hex_codec")) 355 | 356 | signature = 
base64.decodestring(sct["signature"]) 357 | 358 | version = struct.pack(">b", sct["sct_version"]) 359 | signature_type = struct.pack(">b", 0) 360 | timestamp = struct.pack(">Q", sct["timestamp"]) 361 | if precert: 362 | entry_type = struct.pack(">H", 1) 363 | else: 364 | entry_type = struct.pack(">H", 0) 365 | signed_struct = version + signature_type + timestamp + \ 366 | entry_type + signed_entry + \ 367 | tls_array(base64.decodestring(sct["extensions"]), 2) 368 | 369 | check_signature(baseurl, signature, signed_struct, publickey=publickey) 370 | 371 | def pack_mtl(timestamp, leafcert): 372 | entry_type = struct.pack(">H", 0) 373 | extensions = "" 374 | 375 | timestamped_entry = struct.pack(">Q", timestamp) + entry_type + \ 376 | tls_array(leafcert, 3) + tls_array(extensions, 2) 377 | version = struct.pack(">b", 0) 378 | leaf_type = struct.pack(">b", 0) 379 | merkle_tree_leaf = version + leaf_type + timestamped_entry 380 | return merkle_tree_leaf 381 | 382 | def pack_mtl_precert(timestamp, cleanedcert, issuer_key_hash): 383 | entry_type = struct.pack(">H", 1) 384 | extensions = "" 385 | 386 | timestamped_entry = struct.pack(">Q", timestamp) + entry_type + \ 387 | pack_precert(cleanedcert, issuer_key_hash) + tls_array(extensions, 2) 388 | version = struct.pack(">b", 0) 389 | leaf_type = struct.pack(">b", 0) 390 | merkle_tree_leaf = version + leaf_type + timestamped_entry 391 | return merkle_tree_leaf 392 | 393 | def pack_precert(cleanedcert, issuer_key_hash): 394 | assert len(issuer_key_hash) == 32 395 | 396 | return issuer_key_hash + tls_array(cleanedcert, 3) 397 | 398 | def pack_cert(cert): 399 | return tls_array(cert, 3) 400 | 401 | def unpack_mtl(merkle_tree_leaf): 402 | version = merkle_tree_leaf[0:1] 403 | leaf_type = merkle_tree_leaf[1:2] 404 | timestamped_entry = merkle_tree_leaf[2:] 405 | (timestamp, entry_type) = struct.unpack(">QH", timestamped_entry[0:10]) 406 | if entry_type == 0: 407 | issuer_key_hash = None 408 | (leafcert, rest_entry) = 
unpack_tls_array(timestamped_entry[10:], 3) 409 | elif entry_type == 1: 410 | issuer_key_hash = timestamped_entry[10:42] 411 | (leafcert, rest_entry) = unpack_tls_array(timestamped_entry[42:], 3) 412 | return (leafcert, timestamp, issuer_key_hash) 413 | 414 | def get_leaf_hash(merkle_tree_leaf): 415 | leaf_hash = hashlib.sha256() 416 | leaf_hash.update(struct.pack(">b", 0)) 417 | leaf_hash.update(merkle_tree_leaf) 418 | 419 | return leaf_hash.digest() 420 | 421 | def timing_point(timer_dict=None, name=None): 422 | t = datetime.datetime.now() 423 | if timer_dict: 424 | starttime = timer_dict["lasttime"] 425 | stoptime = t 426 | deltatime = stoptime - starttime 427 | timer_dict["deltatimes"].append((name, deltatime.seconds * 1000000 + deltatime.microseconds)) 428 | timer_dict["lasttime"] = t 429 | return None 430 | else: 431 | timer_dict = {"deltatimes":[], "lasttime":t} 432 | return timer_dict 433 | 434 | def internal_hash(pair): 435 | if len(pair) == 1: 436 | return pair[0] 437 | else: 438 | hash = hashlib.sha256() 439 | hash.update(struct.pack(">b", 1)) 440 | hash.update(pair[0]) 441 | hash.update(pair[1]) 442 | return hash.digest() 443 | 444 | def chunks(l, n): 445 | return [l[i:i+n] for i in range(0, len(l), n)] 446 | 447 | def next_merkle_layer(layer): 448 | return [internal_hash(pair) for pair in chunks(layer, 2)] 449 | 450 | def build_merkle_tree(layer0): 451 | if len(layer0) == 0: 452 | return [[hashlib.sha256().digest()]] 453 | layers = [] 454 | current_layer = layer0 455 | layers.append(current_layer) 456 | while len(current_layer) > 1: 457 | current_layer = next_merkle_layer(current_layer) 458 | layers.append(current_layer) 459 | return layers 460 | 461 | def print_inclusion_proof(proof): 462 | audit_path = proof[u'audit_path'] 463 | n = proof[u'leaf_index'] 464 | level = 0 465 | for s in audit_path: 466 | entry = base64.b16encode(base64.b64decode(s)) 467 | n ^= 1 468 | print level, n, entry 469 | n >>= 1 470 | level += 1 471 | 472 | def 
get_one_cert(store, i): 473 | filename = i / 10000 474 | zf = zipfile.ZipFile("%s/%04d.zip" % (store, i / 10000)) 475 | cert = zf.read("%08d" % i) 476 | zf.close() 477 | return cert 478 | 479 | def get_hash_from_certfile(cert): 480 | for line in cert.split("\n"): 481 | if line.startswith("-----"): 482 | return None 483 | if line.startswith("Leafhash: "): 484 | return base64.b16decode(line[len("Leafhash: "):]) 485 | return None 486 | 487 | def get_timestamp_from_certfile(cert): 488 | for line in cert.split("\n"): 489 | if line.startswith("-----"): 490 | return None 491 | if line.startswith("Timestamp: "): 492 | return int(line[len("Timestamp: "):]) 493 | return None 494 | 495 | def get_proof(store, tree_size, n): 496 | hash = get_hash_from_certfile(get_one_cert(store, n)) 497 | return get_proof_by_hash(args.baseurl, hash, tree_size) 498 | 499 | def get_certs_from_zipfiles(zipfiles, firstleaf, lastleaf): 500 | for i in range(firstleaf, lastleaf + 1): 501 | try: 502 | yield zipfiles[i / 10000].read("%08d" % i) 503 | except KeyError: 504 | return 505 | 506 | def get_merkle_hash_64k(store, blocknumber, write_to_cache=False, treesize=None): 507 | firstleaf = blocknumber * 65536 508 | lastleaf = firstleaf + 65535 509 | if treesize != None: 510 | assert firstleaf < treesize 511 | usecache = lastleaf < treesize 512 | lastleaf = min(lastleaf, treesize - 1) 513 | else: 514 | usecache = True 515 | 516 | hashfilename = "%s/%04x.64khash" % (store, blocknumber) 517 | if usecache: 518 | try: 519 | hash = base64.b16decode(open(hashfilename).read()) 520 | assert len(hash) == 32 521 | return ("hash", hash) 522 | except IOError: 523 | pass 524 | firstfile = firstleaf / 10000 525 | lastfile = lastleaf / 10000 526 | zipfiles = {} 527 | for i in range(firstfile, lastfile + 1): 528 | try: 529 | zipfiles[i] = zipfile.ZipFile("%s/%04d.zip" % (store, i)) 530 | except IOError: 531 | break 532 | certs = get_certs_from_zipfiles(zipfiles, firstleaf, lastleaf) 533 | layer0 = 
[get_hash_from_certfile(cert) for cert in certs] 534 | tree = build_merkle_tree(layer0) 535 | calculated_hash = tree[-1][0] 536 | for zf in zipfiles.values(): 537 | zf.close() 538 | if len(layer0) != lastleaf - firstleaf + 1: 539 | return ("incomplete", (len(layer0), calculated_hash)) 540 | if write_to_cache: 541 | f = open(hashfilename, "w") 542 | f.write(base64.b16encode(calculated_hash)) 543 | f.close() 544 | return ("hash", calculated_hash) 545 | 546 | def get_tree_head(store, treesize): 547 | merkle_64klayer = [] 548 | 549 | for blocknumber in range(0, (treesize / 65536) + 1): 550 | (resulttype, result) = get_merkle_hash_64k(store, blocknumber, treesize=treesize) 551 | if resulttype == "incomplete": 552 | print >>sys.stderr, "Couldn't read until tree size", treesize 553 | (incompletelength, hash) = result 554 | print >>sys.stderr, "Stopped at", blocknumber * 65536 + incompletelength 555 | sys.exit(1) 556 | assert resulttype == "hash" 557 | hash = result 558 | merkle_64klayer.append(hash) 559 | #print >>sys.stderr, print blocknumber * 65536, 560 | sys.stdout.flush() 561 | tree = build_merkle_tree(merkle_64klayer) 562 | calculated_root_hash = tree[-1][0] 563 | return calculated_root_hash 564 | 565 | def get_intermediate_hash(store, treesize, level, index): 566 | if level >= 16: 567 | merkle_64klayer = [] 568 | 569 | levelsize = (2**(level-16)) 570 | 571 | for blocknumber in range(index * levelsize, (index + 1) * levelsize): 572 | if blocknumber * (2 ** 16) >= treesize: 573 | break 574 | #print "looking at block", blocknumber 575 | (resulttype, result) = get_merkle_hash_64k(store, blocknumber, treesize=treesize) 576 | if resulttype == "incomplete": 577 | print >>sys.stderr, "Couldn't read until tree size", treesize 578 | (incompletelength, hash) = result 579 | print >>sys.stderr, "Stopped at", blocknumber * 65536 + incompletelength 580 | sys.exit(1) 581 | assert resulttype == "hash" 582 | hash = result 583 | #print "block hash", base64.b16encode(hash) 584 | 
merkle_64klayer.append(hash) 585 | #print >>sys.stderr, print blocknumber * 65536, 586 | sys.stdout.flush() 587 | tree = build_merkle_tree(merkle_64klayer) 588 | return tree[-1][0] 589 | else: 590 | levelsize = 2 ** level 591 | firstleaf = index * levelsize 592 | lastleaf = firstleaf + levelsize - 1 593 | #print "firstleaf", firstleaf 594 | #print "lastleaf", lastleaf 595 | assert firstleaf < treesize 596 | lastleaf = min(lastleaf, treesize - 1) 597 | #print "modified lastleaf", lastleaf 598 | firstfile = firstleaf / 10000 599 | lastfile = lastleaf / 10000 600 | #print "files", firstfile, lastfile 601 | zipfiles = {} 602 | for i in range(firstfile, lastfile + 1): 603 | try: 604 | zipfiles[i] = zipfile.ZipFile("%s/%04d.zip" % (store, i)) 605 | except IOError: 606 | break 607 | certs = get_certs_from_zipfiles(zipfiles, firstleaf, lastleaf) 608 | layer0 = [get_hash_from_certfile(cert) for cert in certs] 609 | #print "layer0", repr(layer0) 610 | tree = build_merkle_tree(layer0) 611 | calculated_hash = tree[-1][0] 612 | for zf in zipfiles.values(): 613 | zf.close() 614 | assert len(layer0) == lastleaf - firstleaf + 1 615 | return calculated_hash 616 | 617 | def bits(n): 618 | p = 0 619 | while n > 0: 620 | n >>= 1 621 | p += 1 622 | return p 623 | 624 | def merkle_height(n): 625 | if n == 0: 626 | return 1 627 | return bits(n - 1) 628 | 629 | def node_above((pathp, pathl), levels=1): 630 | return (pathp >> levels, pathl + levels) 631 | 632 | def node_even((pathp, pathl)): 633 | return pathp & 1 == 0 634 | 635 | def node_odd((pathp, pathl)): 636 | return pathp & 1 == 1 637 | 638 | def node_lower((path1p, path1l), (path2p, path2l)): 639 | return path1l < path2l 640 | 641 | def node_higher((path1p, path1l), (path2p, path2l)): 642 | return path1l > path2l 643 | 644 | def node_level((path1p, path1l)): 645 | return path1l 646 | 647 | def node_outside((path1p, path1l), (path2p, path2l)): 648 | assert path1l == path2l 649 | return path1p > path2p 650 | 651 | def 
combine_two_hashes((path1, hash1), (path2, hash2), treesize): 652 | assert not node_higher(path1, path2) 653 | edge_node = (treesize - 1, 0) 654 | 655 | if node_lower(path1, path2): 656 | assert path1 == node_above(edge_node, levels=node_level(path1)) 657 | while node_even(path1): 658 | path1 = node_above(path1) 659 | 660 | assert node_above(path1) == node_above(path2) 661 | assert (node_even(path1) and node_odd(path2)) or (node_odd(path1) and node_even(path2)) 662 | 663 | if node_outside(path2, node_above(edge_node, levels=node_level(path2))): 664 | return (node_above(path1), hash1) 665 | 666 | if node_even(path1): 667 | newhash = internal_hash((hash1, hash2)) 668 | else: 669 | newhash = internal_hash((hash2, hash1)) 670 | 671 | return (node_above(path1), newhash) 672 | 673 | def path_as_string(pos, level, treesize): 674 | height = merkle_height(treesize) 675 | path = "{0:0{width}b}".format(pos, width=height - level) 676 | if height == level: 677 | return "" 678 | return path 679 | 680 | def nodes_for_subtree(subtreesize, treesize): 681 | height = merkle_height(treesize) 682 | nodes = [] 683 | level = 0 684 | pos = subtreesize 685 | while pos > 0 and pos & 1 == 0: 686 | pos >>= 1 687 | level += 1 688 | if pos & 1: 689 | nodes.append((pos ^ 1, level)) 690 | #print pos, level 691 | while level < height: 692 | pos_level0 = pos * (2 ** level) 693 | #print pos, level 694 | if pos_level0 < treesize: 695 | nodes.append((pos, level)) 696 | pos >>= 1 697 | pos ^= 1 698 | level += 1 699 | return nodes 700 | 701 | def nodes_for_index(pos, treesize): 702 | height = merkle_height(treesize) 703 | nodes = [] 704 | level = 0 705 | pos ^= 1 706 | #print pos, level 707 | while level < height: 708 | pos_level0 = pos * (2 ** level) 709 | #print pos, level 710 | if pos_level0 < treesize: 711 | nodes.append((pos, level)) 712 | pos >>= 1 713 | pos ^= 1 714 | level += 1 715 | return nodes 716 | 717 | def verify_consistency_proof(consistency_proof, first, second, oldhash_input): 718 | if 
2 ** bits(first - 1) == first: 719 | consistency_proof = [oldhash_input] + consistency_proof 720 | chain = zip(nodes_for_subtree(first, second), consistency_proof) 721 | assert len(nodes_for_subtree(first, second)) == len(consistency_proof) 722 | (_, hash) = reduce(lambda e1, e2: combine_two_hashes(e1, e2, second), chain) 723 | (_, oldhash) = reduce(lambda e1, e2: combine_two_hashes(e1, e2, first), chain) 724 | return (oldhash, hash) 725 | 726 | def verify_inclusion_proof(inclusion_proof, index, treesize, leafhash): 727 | chain = zip([(index, 0)] + nodes_for_index(index, treesize), [leafhash] + inclusion_proof) 728 | assert len(nodes_for_index(index, treesize)) == len(inclusion_proof) 729 | (_, hash) = reduce(lambda e1, e2: combine_two_hashes(e1, e2, treesize), chain) 730 | return hash 731 | 732 | def extract_original_entry(entry): 733 | leaf_input = base64.decodestring(entry["leaf_input"]) 734 | (leaf_cert, timestamp, issuer_key_hash) = unpack_mtl(leaf_input) 735 | extra_data = base64.decodestring(entry["extra_data"]) 736 | if issuer_key_hash != None: 737 | (precert, extra_data) = extract_precertificate(extra_data) 738 | leaf_cert = precert 739 | certchain = decode_certificate_chain(extra_data) 740 | return ([leaf_cert] + certchain, timestamp, issuer_key_hash) 741 | 742 | def mv_file(fromfn, tofn): 743 | shutil.move(fromfn, tofn) 744 | 745 | def write_file(fn, jsondata): 746 | tempname = fn + ".new" 747 | open(tempname, 'w').write(json.dumps(jsondata)) 748 | mv_file(tempname, fn) 749 | -------------------------------------------------------------------------------- /chromium-0001-Support-requireCT-in-HSTS.patch: -------------------------------------------------------------------------------- 1 | From 41daba989e5d349be7bf7244d639bf053387f1d3 Mon Sep 17 00:00:00 2001 2 | From: Tom Ritter 3 | Date: Tue, 10 Feb 2015 08:11:51 +0000 4 | Subject: [PATCH] Support "requireCT" in HSTS 5 | 6 | Patch to Chromium 42.0.2292.0 to support a "requireCT" directive for HSTS that 
requires at least one SCT to the present for a certificate presented for a domain. 7 | This patch is hardly complete - it doesn't add unit tests, it hasn't been tested on Android, it doesn't do anything to the preload list, it has a few TODOs marked around for things that are probably relevant. 8 | But it serves as an adequate POC. 9 | --- 10 | chrome/app/generated_resources.grd | 10 ++ 11 | .../captive_portal/captive_portal_browsertest.cc | 3 +- 12 | chrome/browser/net/predictor_unittest.cc | 4 +- 13 | .../browser/resources/net_internals/hsts_view.js | 4 +- 14 | chrome/browser/ssl/ssl_error_info.cc | 8 ++ 15 | chrome/browser/ssl/ssl_error_info.h | 1 + 16 | .../ui/webui/net_internals/net_internals_ui.cc | 8 +- 17 | chrome/common/localized_error.cc | 7 ++ 18 | chrome/renderer/security_filter_peer.cc | 1 + 19 | content/browser/ssl/ssl_policy.cc | 3 + 20 | net/base/net_error_list.h | 6 +- 21 | net/cert/cert_policy_enforcer.cc | 50 ++++++++- 22 | net/cert/cert_policy_enforcer.h | 10 ++ 23 | net/cert/cert_status_flags.cc | 6 ++ 24 | net/cert/cert_status_flags_list.h | 1 + 25 | net/http/http_security_headers.cc | 18 +++- 26 | net/http/http_security_headers.h | 5 +- 27 | net/http/http_security_headers_unittest.cc | 120 +++++++++++---------- 28 | net/http/transport_security_persister.cc | 11 ++ 29 | net/http/transport_security_state.cc | 34 +++++- 30 | net/http/transport_security_state.h | 14 ++- 31 | net/socket/ssl_client_socket_nss.cc | 26 ++++- 32 | net/socket/ssl_client_socket_nss.h | 2 +- 33 | net/socket/ssl_client_socket_openssl.cc | 28 ++++- 34 | net/socket/ssl_client_socket_openssl.h | 2 +- 35 | net/spdy/spdy_session.cc | 2 + 36 | net/url_request/url_request_http_job.cc | 14 ++- 37 | net/url_request/url_request_unittest.cc | 6 +- 38 | 28 files changed, 307 insertions(+), 97 deletions(-) 39 | 40 | diff --git a/chrome/app/generated_resources.grd b/chrome/app/generated_resources.grd 41 | index 8c42545..ded1099 100644 42 | --- a/chrome/app/generated_resources.grd 43 | 
+++ b/chrome/app/generated_resources.grd 44 | @@ -9462,6 +9462,16 @@ I don't think this site should be blocked! 45 | The server's certificate appears to be a forgery. 46 | 47 | 48 | + 49 | + Missing Public Audit Records 50 | + 51 | + 52 | + The server presented a certificate that doesn't contain public audit records. These records are required for certain, high-security websites in order to protect you. 53 | + 54 | + 55 | + The server's certificate is unauditable. 56 | + 57 | + 58 | 59 | The SSL renegotiation extension was missing from the secure handshake. For some sites, which are known to support the renegotiation extension, Chrome requires a more secure handshake to prevent a class of known attacks. The omission of this extension suggests that your connection was intercepted and manipulated in transit. 60 | 61 | diff --git a/chrome/browser/captive_portal/captive_portal_browsertest.cc b/chrome/browser/captive_portal/captive_portal_browsertest.cc 62 | index 7fd8389..e970767 100644 63 | --- a/chrome/browser/captive_portal/captive_portal_browsertest.cc 64 | +++ b/chrome/browser/captive_portal/captive_portal_browsertest.cc 65 | @@ -868,7 +868,8 @@ void AddHstsHost(net::URLRequestContextGetter* context_getter, 66 | 67 | base::Time expiry = base::Time::Now() + base::TimeDelta::FromDays(1000); 68 | bool include_subdomains = false; 69 | - transport_security_state->AddHSTS(host, expiry, include_subdomains); 70 | + bool require_ct = false; 71 | + transport_security_state->AddHSTS(host, expiry, include_subdomains, require_ct); 72 | } 73 | 74 | } // namespace 75 | diff --git a/chrome/browser/net/predictor_unittest.cc b/chrome/browser/net/predictor_unittest.cc 76 | index 4fa7dd9..1f8ea7d 100644 77 | --- a/chrome/browser/net/predictor_unittest.cc 78 | +++ b/chrome/browser/net/predictor_unittest.cc 79 | @@ -722,7 +722,7 @@ TEST_F(PredictorTest, HSTSRedirect) { 80 | const base::Time expiry = 81 | base::Time::Now() + base::TimeDelta::FromSeconds(1000); 82 | 
net::TransportSecurityState state; 83 | - state.AddHSTS(kHttpUrl.host(), expiry, false); 84 | + state.AddHSTS(kHttpUrl.host(), expiry, false, false); 85 | 86 | Predictor predictor(true, true); 87 | TestPredictorObserver observer; 88 | @@ -747,7 +747,7 @@ TEST_F(PredictorTest, HSTSRedirectSubresources) { 89 | const base::Time expiry = 90 | base::Time::Now() + base::TimeDelta::FromSeconds(1000); 91 | net::TransportSecurityState state; 92 | - state.AddHSTS(kHttpUrl.host(), expiry, false); 93 | + state.AddHSTS(kHttpUrl.host(), expiry, false, false); 94 | 95 | SimplePredictor predictor(true, true); 96 | TestPredictorObserver observer; 97 | diff --git a/chrome/browser/resources/net_internals/hsts_view.js b/chrome/browser/resources/net_internals/hsts_view.js 98 | index 2310b9f..44a844c 100644 99 | --- a/chrome/browser/resources/net_internals/hsts_view.js 100 | +++ b/chrome/browser/resources/net_internals/hsts_view.js 101 | @@ -120,10 +120,10 @@ var HSTSView = (function() { 102 | 103 | var keys = [ 104 | 'static_sts_domain', 'static_upgrade_mode', 105 | - 'static_sts_include_subdomains', 'static_sts_observed', 106 | + 'static_sts_include_subdomains', 'static_sts_require_ct', 'static_sts_observed', 107 | 'static_pkp_domain', 'static_pkp_include_subdomains', 108 | 'static_pkp_observed', 'static_spki_hashes', 'dynamic_sts_domain', 109 | - 'dynamic_upgrade_mode', 'dynamic_sts_include_subdomains', 110 | + 'dynamic_upgrade_mode', 'dynamic_sts_include_subdomains', 'dynamic_sts_require_ct', 111 | 'dynamic_sts_observed', 'dynamic_pkp_domain', 112 | 'dynamic_pkp_include_subdomains', 'dynamic_pkp_observed', 113 | 'dynamic_spki_hashes', 114 | diff --git a/chrome/browser/ssl/ssl_error_info.cc b/chrome/browser/ssl/ssl_error_info.cc 115 | index 80aaf9b..bf1e994 100644 116 | --- a/chrome/browser/ssl/ssl_error_info.cc 117 | +++ b/chrome/browser/ssl/ssl_error_info.cc 118 | @@ -158,6 +158,12 @@ SSLErrorInfo SSLErrorInfo::CreateError(ErrorType error_type, 119 | short_description = 
l10n_util::GetStringUTF16( 120 | IDS_ERRORPAGES_DETAILS_PINNING_FAILURE); 121 | break; 122 | + case CERT_MISSING_CT_RECORDS: 123 | + details = l10n_util::GetStringUTF16( 124 | + IDS_ERRORPAGES_SUMMARY_CTREQUIRED_FAILURE); 125 | + short_description = l10n_util::GetStringUTF16( 126 | + IDS_ERRORPAGES_DETAILS_CTREQUIRED_FAILURE); 127 | + break; 128 | case UNKNOWN: 129 | details = l10n_util::GetStringUTF16(IDS_CERT_ERROR_UNKNOWN_ERROR_DETAILS); 130 | short_description = 131 | @@ -204,6 +210,8 @@ SSLErrorInfo::ErrorType SSLErrorInfo::NetErrorToErrorType(int net_error) { 132 | return CERT_WEAK_KEY_DH; 133 | case net::ERR_SSL_PINNED_KEY_NOT_IN_CERT_CHAIN: 134 | return CERT_PINNED_KEY_MISSING; 135 | + case net::ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT: 136 | + return CERT_MISSING_CT_RECORDS; 137 | default: 138 | NOTREACHED(); 139 | return UNKNOWN; 140 | diff --git a/chrome/browser/ssl/ssl_error_info.h b/chrome/browser/ssl/ssl_error_info.h 141 | index 2713374..aa7f6bd 100644 142 | --- a/chrome/browser/ssl/ssl_error_info.h 143 | +++ b/chrome/browser/ssl/ssl_error_info.h 144 | @@ -37,6 +37,7 @@ class SSLErrorInfo { 145 | CERT_WEAK_KEY_DH, 146 | CERT_PINNED_KEY_MISSING, 147 | CERT_VALIDITY_TOO_LONG, 148 | + CERT_MISSING_CT_RECORDS, 149 | END_OF_ENUM 150 | }; 151 | 152 | diff --git a/chrome/browser/ui/webui/net_internals/net_internals_ui.cc b/chrome/browser/ui/webui/net_internals/net_internals_ui.cc 153 | index 93022ab..5bb53a2 100644 154 | --- a/chrome/browser/ui/webui/net_internals/net_internals_ui.cc 155 | +++ b/chrome/browser/ui/webui/net_internals/net_internals_ui.cc 156 | @@ -804,6 +804,8 @@ void NetInternalsMessageHandler::IOThreadImpl::OnHSTSQuery( 157 | static_cast(static_state.sts.upgrade_mode)); 158 | result->SetBoolean("static_sts_include_subdomains", 159 | static_state.sts.include_subdomains); 160 | + result->SetBoolean("static_sts_require_ct", 161 | + static_state.sts.require_ct); 162 | result->SetDouble("static_sts_observed", 163 | 
static_state.sts.last_observed.ToDoubleT()); 164 | result->SetDouble("static_sts_expiry", 165 | @@ -831,6 +833,8 @@ void NetInternalsMessageHandler::IOThreadImpl::OnHSTSQuery( 166 | static_cast(dynamic_state.sts.upgrade_mode)); 167 | result->SetBoolean("dynamic_sts_include_subdomains", 168 | dynamic_state.sts.include_subdomains); 169 | + result->SetBoolean("dynamic_sts_require_ct", 170 | + dynamic_state.sts.require_ct); 171 | result->SetBoolean("dynamic_pkp_include_subdomains", 172 | dynamic_state.pkp.include_subdomains); 173 | result->SetDouble("dynamic_sts_observed", 174 | @@ -867,6 +871,8 @@ void NetInternalsMessageHandler::IOThreadImpl::OnHSTSAdd( 175 | } 176 | bool sts_include_subdomains; 177 | CHECK(list->GetBoolean(1, &sts_include_subdomains)); 178 | + bool sts_require_ct = false; 179 | + //TODO: Track down this list thing and make it support requireCT 180 | bool pkp_include_subdomains; 181 | CHECK(list->GetBoolean(2, &pkp_include_subdomains)); 182 | std::string hashes_str; 183 | @@ -884,7 +890,7 @@ void NetInternalsMessageHandler::IOThreadImpl::OnHSTSAdd( 184 | return; 185 | } 186 | 187 | - transport_security_state->AddHSTS(domain, expiry, sts_include_subdomains); 188 | + transport_security_state->AddHSTS(domain, expiry, sts_include_subdomains, sts_require_ct); 189 | transport_security_state->AddHPKP(domain, expiry, pkp_include_subdomains, 190 | hashes); 191 | } 192 | diff --git a/chrome/common/localized_error.cc b/chrome/common/localized_error.cc 193 | index 5472b77..b82a8ba 100644 194 | --- a/chrome/common/localized_error.cc 195 | +++ b/chrome/common/localized_error.cc 196 | @@ -263,6 +263,13 @@ const LocalizedErrorMap net_error_options[] = { 197 | IDS_ERRORPAGES_DETAILS_SSL_PROTOCOL_ERROR, 198 | SUGGEST_LEARNMORE, 199 | }, 200 | + {net::ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT, 201 | + IDS_ERRORPAGES_TITLE_LOAD_FAILED, 202 | + IDS_ERRORPAGES_HEADING_CTREQUIRED_FAILURE, 203 | + IDS_ERRORPAGES_SUMMARY_CTREQUIRED_FAILURE, 204 | + 
IDS_ERRORPAGES_DETAILS_CTREQUIRED_FAILURE, 205 | + SUGGEST_NONE, 206 | + }, 207 | {net::ERR_SSL_PINNED_KEY_NOT_IN_CERT_CHAIN, 208 | IDS_ERRORPAGES_TITLE_LOAD_FAILED, 209 | IDS_ERRORPAGES_HEADING_PINNING_FAILURE, 210 | diff --git a/chrome/renderer/security_filter_peer.cc b/chrome/renderer/security_filter_peer.cc 211 | index e43e7ec..218151c 100644 212 | --- a/chrome/renderer/security_filter_peer.cc 213 | +++ b/chrome/renderer/security_filter_peer.cc 214 | @@ -40,6 +40,7 @@ SecurityFilterPeer::CreateSecurityFilterPeerForDeniedRequest( 215 | case net::ERR_CERT_NAME_CONSTRAINT_VIOLATION: 216 | case net::ERR_INSECURE_RESPONSE: 217 | case net::ERR_SSL_PINNED_KEY_NOT_IN_CERT_CHAIN: 218 | + case net::ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT: 219 | if (content::IsResourceTypeFrame(resource_type)) 220 | return CreateSecurityFilterPeerForFrame(peer, os_error); 221 | // Any other content is entirely filtered-out. 222 | diff --git a/content/browser/ssl/ssl_policy.cc b/content/browser/ssl/ssl_policy.cc 223 | index 610f741..f5d86e5 100644 224 | --- a/content/browser/ssl/ssl_policy.cc 225 | +++ b/content/browser/ssl/ssl_policy.cc 226 | @@ -79,6 +79,7 @@ void SSLPolicy::OnCertError(SSLCertErrorHandler* handler) { 227 | case net::ERR_CERT_INVALID: 228 | case net::ERR_SSL_WEAK_SERVER_EPHEMERAL_DH_KEY: 229 | case net::ERR_SSL_PINNED_KEY_NOT_IN_CERT_CHAIN: 230 | + case net::ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT: 231 | if (handler->fatal()) 232 | options_mask |= STRICT_ENFORCEMENT; 233 | if (expired_previous_decision) 234 | @@ -110,6 +111,7 @@ void SSLPolicy::OnRequestStarted(SSLRequestInfo* info) { 235 | // this information back through WebKit and out some FrameLoaderClient 236 | // methods. 
237 | 238 | + //TODO May need to tie in CT here 239 | if (net::IsCertStatusError(info->ssl_cert_status())) 240 | backend_->HostRanInsecureContent(info->url().host(), info->child_id()); 241 | } 242 | @@ -137,6 +139,7 @@ void SSLPolicy::UpdateEntry(NavigationEntryImpl* entry, 243 | if (web_contents->DisplayedInsecureContent()) 244 | entry->GetSSL().content_status |= SSLStatus::DISPLAYED_INSECURE_CONTENT; 245 | 246 | + //TODO May need to add CT here 247 | if (net::IsCertStatusError(entry->GetSSL().cert_status)) { 248 | // Minor errors don't lower the security style to 249 | // SECURITY_STYLE_AUTHENTICATION_BROKEN. 250 | diff --git a/net/base/net_error_list.h b/net/base/net_error_list.h 251 | index e1d65e1..328568b 100644 252 | --- a/net/base/net_error_list.h 253 | +++ b/net/base/net_error_list.h 254 | @@ -443,13 +443,17 @@ NET_ERROR(CERT_NAME_CONSTRAINT_VIOLATION, -212) 255 | // The certificate's validity period is too long. 256 | NET_ERROR(CERT_VALIDITY_TOO_LONG, -213) 257 | 258 | +// The certificate was not validated through Certificate Transparency, but 259 | +// the host requires CT validation (HSTS requireCT). 260 | +NET_ERROR(SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT, -214) 261 | + 262 | // Add new certificate error codes here. 263 | // 264 | // Update the value of CERT_END whenever you add a new certificate error 265 | // code. 266 | 267 | // The value immediately past the last certificate error code. 268 | -NET_ERROR(CERT_END, -214) 269 | +NET_ERROR(CERT_END, -215) 270 | 271 | // The URL is invalid. 
272 | NET_ERROR(INVALID_URL, -300) 273 | diff --git a/net/cert/cert_policy_enforcer.cc b/net/cert/cert_policy_enforcer.cc 274 | index 25e9325..9762013 100644 275 | --- a/net/cert/cert_policy_enforcer.cc 276 | +++ b/net/cert/cert_policy_enforcer.cc 277 | @@ -59,7 +59,13 @@ uint32_t ApproximateMonthDifference(const base::Time& start, 278 | return month_diff; 279 | } 280 | 281 | -bool HasRequiredNumberOfSCTs(const X509Certificate& cert, 282 | +bool HasRequiredNumberOfSCTsForCTRequired(const X509Certificate& cert, 283 | + const ct::CTVerifyResult& ct_result) { 284 | + // For now, let's not set up a big complicated policy for this. 285 | + return ct_result.verified_scts.size() >= 1; 286 | +} 287 | + 288 | +bool HasRequiredNumberOfSCTsForEVPolicy(const X509Certificate& cert, 289 | const ct::CTVerifyResult& ct_result) { 290 | // TODO(eranm): Count the number of *independent* SCTs once the information 291 | // about log operators is available, crbug.com/425174 292 | @@ -204,7 +210,24 @@ void CheckCTEVPolicyCompliance(X509Certificate* cert, 293 | return; 294 | } 295 | 296 | - if (HasRequiredNumberOfSCTs(*cert, ct_result)) { 297 | + if (HasRequiredNumberOfSCTsForEVPolicy(*cert, ct_result)) { 298 | + result->status = CT_ENOUGH_SCTS; 299 | + return; 300 | + } 301 | + 302 | + result->status = CT_NOT_COMPLIANT; 303 | +} 304 | + 305 | +void CheckCTRequiredPolicyCompliance(X509Certificate* cert, 306 | + const ct::CTVerifyResult& ct_result, 307 | + ComplianceDetails* result) { 308 | + result->ct_presence_required = true; 309 | + 310 | + if (!IsBuildTimely()) 311 | + return; 312 | + result->build_timely = true; 313 | + 314 | + if (HasRequiredNumberOfSCTsForCTRequired(*cert, ct_result)) { 315 | result->status = CT_ENOUGH_SCTS; 316 | return; 317 | } 318 | @@ -252,4 +275,27 @@ bool CertPolicyEnforcer::DoesConformToCTEVPolicy( 319 | return false; 320 | } 321 | 322 | +bool CertPolicyEnforcer::DoesConformToCTRequiredPolicy( 323 | + X509Certificate* cert, 324 | + const ct::CTVerifyResult& 
ct_result, 325 | + const BoundNetLog& net_log) { 326 | + ComplianceDetails details; 327 | + CheckCTRequiredPolicyCompliance(cert, ct_result, &details); 328 | + 329 | + if (!details.ct_presence_required) 330 | + return true; 331 | + 332 | + // Returning false here would be bad. We'd be failing _closed_ for 333 | + // these sites, rather than just stripping the EV indicator. 334 | + // So instead fail open. (And hope that Chrome does something about 335 | + // not being able to update for 10 weeks...) 336 | + if (!details.build_timely) 337 | + return true; 338 | + 339 | + if (details.status == CT_ENOUGH_SCTS) 340 | + return true; 341 | + 342 | + return false; 343 | +} 344 | + 345 | } // namespace net 346 | diff --git a/net/cert/cert_policy_enforcer.h b/net/cert/cert_policy_enforcer.h 347 | index 5d6b64b..181482e 100644 348 | --- a/net/cert/cert_policy_enforcer.h 349 | +++ b/net/cert/cert_policy_enforcer.h 350 | @@ -41,6 +41,16 @@ class NET_EXPORT CertPolicyEnforcer { 351 | const ct::CTVerifyResult& ct_result, 352 | const BoundNetLog& net_log); 353 | 354 | + // Returns true if the collection of SCTs for the given certificate 355 | + // conforms with the CT Policy for domains that have opted into 'CT Required' 356 | + // processing. 357 | + // |cert| is the certificate for which the SCTs apply. 358 | + // |ct_result| must contain the result of verifying any SCTs associated with 359 | + // |cert| prior to invoking this method. 
360 | + bool DoesConformToCTRequiredPolicy( 361 | + X509Certificate* cert, 362 | + const ct::CTVerifyResult& ct_result, 363 | + const BoundNetLog& net_log); 364 | private: 365 | bool require_ct_for_ev_; 366 | }; 367 | diff --git a/net/cert/cert_status_flags.cc b/net/cert/cert_status_flags.cc 368 | index e8d9aab..ef271a1 100644 369 | --- a/net/cert/cert_status_flags.cc 370 | +++ b/net/cert/cert_status_flags.cc 371 | @@ -47,6 +47,8 @@ CertStatus MapNetErrorToCertStatus(int error) { 372 | return CERT_STATUS_WEAK_KEY; 373 | case ERR_SSL_PINNED_KEY_NOT_IN_CERT_CHAIN: 374 | return CERT_STATUS_PINNED_KEY_MISSING; 375 | + case ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT: 376 | + return CERT_STATUS_CT_RECORDS_UNAVAILABLE; 377 | case ERR_CERT_NAME_CONSTRAINT_VIOLATION: 378 | return CERT_STATUS_NAME_CONSTRAINT_VIOLATION; 379 | case ERR_CERT_VALIDITY_TOO_LONG: 380 | @@ -68,6 +70,10 @@ int MapCertStatusToNetError(CertStatus cert_status) { 381 | if (cert_status & CERT_STATUS_PINNED_KEY_MISSING) 382 | return ERR_SSL_PINNED_KEY_NOT_IN_CERT_CHAIN; 383 | 384 | + // May or may not be enforced 385 | + if (cert_status & CERT_STATUS_CT_RECORDS_UNAVAILABLE) 386 | + return ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT; 387 | + 388 | // Recoverable errors 389 | if (cert_status & CERT_STATUS_AUTHORITY_INVALID) 390 | return ERR_CERT_AUTHORITY_INVALID; 391 | diff --git a/net/cert/cert_status_flags_list.h b/net/cert/cert_status_flags_list.h 392 | index 932e938..27ffa58 100644 393 | --- a/net/cert/cert_status_flags_list.h 394 | +++ b/net/cert/cert_status_flags_list.h 395 | @@ -31,3 +31,4 @@ CERT_STATUS_FLAG(IS_EV, 1 << 16) 396 | CERT_STATUS_FLAG(REV_CHECKING_ENABLED, 1 << 17) 397 | // Bit 18 was CERT_STATUS_IS_DNSSEC 398 | CERT_STATUS_FLAG(SHA1_SIGNATURE_PRESENT, 1 << 19) 399 | +CERT_STATUS_FLAG(CT_RECORDS_UNAVAILABLE, 1 << 20) 400 | diff --git a/net/http/http_security_headers.cc b/net/http/http_security_headers.cc 401 | index aff4a30..8256f0a 402 | --- a/net/http/http_security_headers.cc 403 | 
+++ b/net/http/http_security_headers.cc 404 | @@ -169,14 +169,18 @@ bool ParseAndAppendPin(const std::string& value, 405 | // through 4), the UA MUST process the recognized directives. 406 | bool ParseHSTSHeader(const std::string& value, 407 | base::TimeDelta* max_age, 408 | - bool* include_subdomains) { 409 | + bool* include_subdomains, 410 | + bool* require_ct) { 411 | uint32 max_age_candidate = 0; 412 | bool include_subdomains_candidate = false; 413 | + bool require_ct_candidate = false; 414 | 415 | // We must see max-age exactly once. 416 | int max_age_observed = 0; 417 | // We must see includeSubdomains exactly 0 or 1 times. 418 | int include_subdomains_observed = 0; 419 | + // We must see requireCT exactly 0 or 1 times. 420 | + int require_ct_observed = 0; 421 | 422 | enum ParserState { 423 | START, 424 | @@ -184,6 +188,7 @@ bool ParseHSTSHeader(const std::string& value, 425 | AFTER_MAX_AGE_EQUALS, 426 | AFTER_MAX_AGE, 427 | AFTER_INCLUDE_SUBDOMAINS, 428 | + AFTER_REQUIRE_CT, 429 | AFTER_UNKNOWN_LABEL, 430 | DIRECTIVE_END 431 | } state = START; 432 | @@ -207,6 +212,11 @@ bool ParseHSTSHeader(const std::string& value, 433 | state = AFTER_INCLUDE_SUBDOMAINS; 434 | include_subdomains_observed++; 435 | include_subdomains_candidate = true; 436 | + } else if (LowerCaseEqualsASCII(tokenizer.token(), 437 | + "requirect")) { 438 | + state = AFTER_REQUIRE_CT; 439 | + require_ct_observed++; 440 | + require_ct_candidate = true; 441 | } else { 442 | state = AFTER_UNKNOWN_LABEL; 443 | } 444 | @@ -232,6 +242,7 @@ bool ParseHSTSHeader(const std::string& value, 445 | 446 | case AFTER_MAX_AGE: 447 | case AFTER_INCLUDE_SUBDOMAINS: 448 | + case AFTER_REQUIRE_CT: 449 | if (IsAsciiWhitespace(*tokenizer.token_begin())) 450 | continue; 451 | else if (*tokenizer.token_begin() == ';') 452 | @@ -251,7 +262,8 @@ bool ParseHSTSHeader(const std::string& value, 453 | 454 | // We've consumed all the input. Let's see what state we ended up in. 
455 | if (max_age_observed != 1 || 456 | - (include_subdomains_observed != 0 && include_subdomains_observed != 1)) { 457 | + (include_subdomains_observed != 0 && include_subdomains_observed != 1) || 458 | + (require_ct_observed != 0 && require_ct_observed != 1)) { 459 | return false; 460 | } 461 | 462 | @@ -259,9 +271,11 @@ bool ParseHSTSHeader(const std::string& value, 463 | case DIRECTIVE_END: 464 | case AFTER_MAX_AGE: 465 | case AFTER_INCLUDE_SUBDOMAINS: 466 | + case AFTER_REQUIRE_CT: 467 | case AFTER_UNKNOWN_LABEL: 468 | *max_age = base::TimeDelta::FromSeconds(max_age_candidate); 469 | *include_subdomains = include_subdomains_candidate; 470 | + *require_ct = require_ct_candidate; 471 | return true; 472 | case START: 473 | case AFTER_MAX_AGE_LABEL: 474 | diff --git a/net/http/http_security_headers.h b/net/http/http_security_headers.h 475 | index 12e6be9..2ede208 476 | --- a/net/http/http_security_headers.h 477 | +++ b/net/http/http_security_headers.h 478 | @@ -19,7 +19,7 @@ namespace net { 479 | const int64 kMaxHSTSAgeSecs = 86400 * 365; // 1 year 480 | 481 | // Parses |value| as a Strict-Transport-Security header value. If successful, 482 | -// returns true and sets |*max_age| and |*include_subdomains|. 483 | +// returns true and sets |*max_age|, |*include_subdomains|, and |*require_ct|. 484 | // Otherwise returns false and leaves the output parameters unchanged. 485 | // 486 | // value is the right-hand side of: 487 | @@ -28,7 +28,8 @@ const int64 kMaxHSTSAgeSecs = 86400 * 365; // 1 year 488 | // [ directive ] *( ";" [ directive ] ) 489 | bool NET_EXPORT_PRIVATE ParseHSTSHeader(const std::string& value, 490 | base::TimeDelta* max_age, 491 | - bool* include_subdomains); 492 | + bool* include_subdomains, 493 | + bool* require_ct); 494 | 495 | // Parses |value| as a Public-Key-Pins header value. 
If successful, returns 496 | // true and populates the |*max_age|, |*include_subdomains|, and |*hashes| 497 | diff --git a/net/http/http_security_headers_unittest.cc b/net/http/http_security_headers_unittest.cc 498 | index 234c5f0..d97cb78 499 | --- a/net/http/http_security_headers_unittest.cc 500 | +++ b/net/http/http_security_headers_unittest.cc 501 | @@ -70,75 +70,76 @@ class HttpSecurityHeadersTest : public testing::Test { 502 | TEST_F(HttpSecurityHeadersTest, BogusHeaders) { 503 | base::TimeDelta max_age; 504 | bool include_subdomains = false; 505 | + bool require_ct = false; 506 | 507 | EXPECT_FALSE( 508 | - ParseHSTSHeader(std::string(), &max_age, &include_subdomains)); 509 | - EXPECT_FALSE(ParseHSTSHeader(" ", &max_age, &include_subdomains)); 510 | - EXPECT_FALSE(ParseHSTSHeader("abc", &max_age, &include_subdomains)); 511 | - EXPECT_FALSE(ParseHSTSHeader(" abc", &max_age, &include_subdomains)); 512 | - EXPECT_FALSE(ParseHSTSHeader(" abc ", &max_age, &include_subdomains)); 513 | - EXPECT_FALSE(ParseHSTSHeader("max-age", &max_age, &include_subdomains)); 514 | + ParseHSTSHeader(std::string(), &max_age, &include_subdomains, &require_ct)); 515 | + EXPECT_FALSE(ParseHSTSHeader(" ", &max_age, &include_subdomains, &require_ct)); 516 | + EXPECT_FALSE(ParseHSTSHeader("abc", &max_age, &include_subdomains, &require_ct)); 517 | + EXPECT_FALSE(ParseHSTSHeader(" abc", &max_age, &include_subdomains, &require_ct)); 518 | + EXPECT_FALSE(ParseHSTSHeader(" abc ", &max_age, &include_subdomains, &require_ct)); 519 | + EXPECT_FALSE(ParseHSTSHeader("max-age", &max_age, &include_subdomains, &require_ct)); 520 | EXPECT_FALSE(ParseHSTSHeader(" max-age", &max_age, 521 | - &include_subdomains)); 522 | + &include_subdomains, &require_ct)); 523 | EXPECT_FALSE(ParseHSTSHeader(" max-age ", &max_age, 524 | - &include_subdomains)); 525 | - EXPECT_FALSE(ParseHSTSHeader("max-age=", &max_age, &include_subdomains)); 526 | + &include_subdomains, &require_ct)); 527 | + 
EXPECT_FALSE(ParseHSTSHeader("max-age=", &max_age, &include_subdomains, &require_ct)); 528 | EXPECT_FALSE(ParseHSTSHeader(" max-age=", &max_age, 529 | - &include_subdomains)); 530 | + &include_subdomains, &require_ct)); 531 | EXPECT_FALSE(ParseHSTSHeader(" max-age =", &max_age, 532 | - &include_subdomains)); 533 | + &include_subdomains, &require_ct)); 534 | EXPECT_FALSE(ParseHSTSHeader(" max-age= ", &max_age, 535 | - &include_subdomains)); 536 | + &include_subdomains, &require_ct)); 537 | EXPECT_FALSE(ParseHSTSHeader(" max-age = ", &max_age, 538 | - &include_subdomains)); 539 | + &include_subdomains, &require_ct)); 540 | EXPECT_FALSE(ParseHSTSHeader(" max-age = xy", &max_age, 541 | - &include_subdomains)); 542 | + &include_subdomains, &require_ct)); 543 | EXPECT_FALSE(ParseHSTSHeader(" max-age = 3488a923", &max_age, 544 | - &include_subdomains)); 545 | + &include_subdomains, &require_ct)); 546 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488a923 ", &max_age, 547 | - &include_subdomains)); 548 | + &include_subdomains, &require_ct)); 549 | EXPECT_FALSE(ParseHSTSHeader("max-ag=3488923", &max_age, 550 | - &include_subdomains)); 551 | + &include_subdomains, &require_ct)); 552 | EXPECT_FALSE(ParseHSTSHeader("max-aged=3488923", &max_age, 553 | - &include_subdomains)); 554 | + &include_subdomains, &require_ct)); 555 | EXPECT_FALSE(ParseHSTSHeader("max-age==3488923", &max_age, 556 | - &include_subdomains)); 557 | + &include_subdomains, &require_ct)); 558 | EXPECT_FALSE(ParseHSTSHeader("amax-age=3488923", &max_age, 559 | - &include_subdomains)); 560 | + &include_subdomains, &require_ct)); 561 | EXPECT_FALSE(ParseHSTSHeader("max-age=-3488923", &max_age, 562 | - &include_subdomains)); 563 | + &include_subdomains, &require_ct)); 564 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488923 e", &max_age, 565 | - &include_subdomains)); 566 | + &include_subdomains, &require_ct)); 567 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488923 includesubdomain", 568 | - &max_age, &include_subdomains)); 569 
| + &max_age, &include_subdomains, &require_ct)); 570 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488923includesubdomains", 571 | - &max_age, &include_subdomains)); 572 | + &max_age, &include_subdomains, &require_ct)); 573 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488923=includesubdomains", 574 | - &max_age, &include_subdomains)); 575 | + &max_age, &include_subdomains, &require_ct)); 576 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488923 includesubdomainx", 577 | - &max_age, &include_subdomains)); 578 | + &max_age, &include_subdomains, &require_ct)); 579 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488923 includesubdomain=", 580 | - &max_age, &include_subdomains)); 581 | + &max_age, &include_subdomains, &require_ct)); 582 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488923 includesubdomain=true", 583 | - &max_age, &include_subdomains)); 584 | + &max_age, &include_subdomains, &require_ct)); 585 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488923 includesubdomainsx", 586 | - &max_age, &include_subdomains)); 587 | + &max_age, &include_subdomains, &require_ct)); 588 | EXPECT_FALSE(ParseHSTSHeader("max-age=3488923 includesubdomains x", 589 | - &max_age, &include_subdomains)); 590 | + &max_age, &include_subdomains, &require_ct)); 591 | EXPECT_FALSE(ParseHSTSHeader("max-age=34889.23 includesubdomains", 592 | - &max_age, &include_subdomains)); 593 | + &max_age, &include_subdomains, &require_ct)); 594 | EXPECT_FALSE(ParseHSTSHeader("max-age=34889 includesubdomains", 595 | - &max_age, &include_subdomains)); 596 | + &max_age, &include_subdomains, &require_ct)); 597 | EXPECT_FALSE(ParseHSTSHeader(";;;; ;;;", 598 | - &max_age, &include_subdomains)); 599 | + &max_age, &include_subdomains, &require_ct)); 600 | EXPECT_FALSE(ParseHSTSHeader(";;;; includeSubDomains;;;", 601 | - &max_age, &include_subdomains)); 602 | + &max_age, &include_subdomains, &require_ct)); 603 | EXPECT_FALSE(ParseHSTSHeader(" includeSubDomains; ", 604 | - &max_age, &include_subdomains)); 605 | + &max_age, &include_subdomains, 
&require_ct)); 606 | EXPECT_FALSE(ParseHSTSHeader(";", 607 | - &max_age, &include_subdomains)); 608 | + &max_age, &include_subdomains, &require_ct)); 609 | EXPECT_FALSE(ParseHSTSHeader("max-age; ;", 610 | - &max_age, &include_subdomains)); 611 | + &max_age, &include_subdomains, &require_ct)); 612 | 613 | // Check the out args were not updated by checking the default 614 | // values for its predictable fields. 615 | @@ -245,90 +246,91 @@ TEST_F(HttpSecurityHeadersTest, ValidSTSHeaders) { 616 | base::TimeDelta max_age; 617 | base::TimeDelta expect_max_age; 618 | bool include_subdomains = false; 619 | + bool require_ct = false; 620 | 621 | EXPECT_TRUE(ParseHSTSHeader("max-age=243", &max_age, 622 | - &include_subdomains)); 623 | + &include_subdomains, &require_ct)); 624 | expect_max_age = base::TimeDelta::FromSeconds(243); 625 | EXPECT_EQ(expect_max_age, max_age); 626 | EXPECT_FALSE(include_subdomains); 627 | 628 | EXPECT_TRUE(ParseHSTSHeader("max-age=3488923;", &max_age, 629 | - &include_subdomains)); 630 | + &include_subdomains, &require_ct)); 631 | 632 | EXPECT_TRUE(ParseHSTSHeader(" Max-agE = 567", &max_age, 633 | - &include_subdomains)); 634 | + &include_subdomains, &require_ct)); 635 | expect_max_age = base::TimeDelta::FromSeconds(567); 636 | EXPECT_EQ(expect_max_age, max_age); 637 | EXPECT_FALSE(include_subdomains); 638 | 639 | EXPECT_TRUE(ParseHSTSHeader(" mAx-aGe = 890 ", &max_age, 640 | - &include_subdomains)); 641 | + &include_subdomains, &require_ct)); 642 | expect_max_age = base::TimeDelta::FromSeconds(890); 643 | EXPECT_EQ(expect_max_age, max_age); 644 | EXPECT_FALSE(include_subdomains); 645 | 646 | EXPECT_TRUE(ParseHSTSHeader("max-age=123;incLudesUbdOmains", &max_age, 647 | - &include_subdomains)); 648 | + &include_subdomains, &require_ct)); 649 | expect_max_age = base::TimeDelta::FromSeconds(123); 650 | EXPECT_EQ(expect_max_age, max_age); 651 | EXPECT_TRUE(include_subdomains); 652 | 653 | EXPECT_TRUE(ParseHSTSHeader("incLudesUbdOmains; max-age=123", 
&max_age, 654 | - &include_subdomains)); 655 | + &include_subdomains, &require_ct)); 656 | expect_max_age = base::TimeDelta::FromSeconds(123); 657 | EXPECT_EQ(expect_max_age, max_age); 658 | EXPECT_TRUE(include_subdomains); 659 | 660 | EXPECT_TRUE(ParseHSTSHeader(" incLudesUbdOmains; max-age=123", 661 | - &max_age, &include_subdomains)); 662 | + &max_age, &include_subdomains, &require_ct)); 663 | expect_max_age = base::TimeDelta::FromSeconds(123); 664 | EXPECT_EQ(expect_max_age, max_age); 665 | EXPECT_TRUE(include_subdomains); 666 | 667 | EXPECT_TRUE(ParseHSTSHeader( 668 | " incLudesUbdOmains; max-age=123; pumpkin=kitten", &max_age, 669 | - &include_subdomains)); 670 | + &include_subdomains, &require_ct)); 671 | expect_max_age = base::TimeDelta::FromSeconds(123); 672 | EXPECT_EQ(expect_max_age, max_age); 673 | EXPECT_TRUE(include_subdomains); 674 | 675 | EXPECT_TRUE(ParseHSTSHeader( 676 | " pumpkin=894; incLudesUbdOmains; max-age=123 ", &max_age, 677 | - &include_subdomains)); 678 | + &include_subdomains, &require_ct)); 679 | expect_max_age = base::TimeDelta::FromSeconds(123); 680 | EXPECT_EQ(expect_max_age, max_age); 681 | EXPECT_TRUE(include_subdomains); 682 | 683 | EXPECT_TRUE(ParseHSTSHeader( 684 | " pumpkin; incLudesUbdOmains; max-age=123 ", &max_age, 685 | - &include_subdomains)); 686 | + &include_subdomains, &require_ct)); 687 | expect_max_age = base::TimeDelta::FromSeconds(123); 688 | EXPECT_EQ(expect_max_age, max_age); 689 | EXPECT_TRUE(include_subdomains); 690 | 691 | EXPECT_TRUE(ParseHSTSHeader( 692 | " pumpkin; incLudesUbdOmains; max-age=\"123\" ", &max_age, 693 | - &include_subdomains)); 694 | + &include_subdomains, &require_ct)); 695 | expect_max_age = base::TimeDelta::FromSeconds(123); 696 | EXPECT_EQ(expect_max_age, max_age); 697 | EXPECT_TRUE(include_subdomains); 698 | 699 | EXPECT_TRUE(ParseHSTSHeader( 700 | "animal=\"squirrel; distinguished\"; incLudesUbdOmains; max-age=123", 701 | - &max_age, &include_subdomains)); 702 | + &max_age, 
&include_subdomains, &require_ct)); 703 | expect_max_age = base::TimeDelta::FromSeconds(123); 704 | EXPECT_EQ(expect_max_age, max_age); 705 | EXPECT_TRUE(include_subdomains); 706 | 707 | EXPECT_TRUE(ParseHSTSHeader("max-age=394082; incLudesUbdOmains", 708 | - &max_age, &include_subdomains)); 709 | + &max_age, &include_subdomains, &require_ct)); 710 | expect_max_age = base::TimeDelta::FromSeconds(394082); 711 | EXPECT_EQ(expect_max_age, max_age); 712 | EXPECT_TRUE(include_subdomains); 713 | 714 | EXPECT_TRUE(ParseHSTSHeader( 715 | "max-age=39408299 ;incLudesUbdOmains", &max_age, 716 | - &include_subdomains)); 717 | + &include_subdomains, &require_ct)); 718 | expect_max_age = base::TimeDelta::FromSeconds( 719 | std::min(kMaxHSTSAgeSecs, static_cast(GG_INT64_C(39408299)))); 720 | EXPECT_EQ(expect_max_age, max_age); 721 | @@ -336,7 +338,7 @@ TEST_F(HttpSecurityHeadersTest, ValidSTSHeaders) { 722 | 723 | EXPECT_TRUE(ParseHSTSHeader( 724 | "max-age=394082038 ; incLudesUbdOmains", &max_age, 725 | - &include_subdomains)); 726 | + &include_subdomains, &require_ct)); 727 | expect_max_age = base::TimeDelta::FromSeconds( 728 | std::min(kMaxHSTSAgeSecs, static_cast(GG_INT64_C(394082038)))); 729 | EXPECT_EQ(expect_max_age, max_age); 730 | @@ -344,7 +346,7 @@ TEST_F(HttpSecurityHeadersTest, ValidSTSHeaders) { 731 | 732 | EXPECT_TRUE(ParseHSTSHeader( 733 | "max-age=394082038 ; incLudesUbdOmains;", &max_age, 734 | - &include_subdomains)); 735 | + &include_subdomains, &require_ct)); 736 | expect_max_age = base::TimeDelta::FromSeconds( 737 | std::min(kMaxHSTSAgeSecs, static_cast(GG_INT64_C(394082038)))); 738 | EXPECT_EQ(expect_max_age, max_age); 739 | @@ -352,7 +354,7 @@ TEST_F(HttpSecurityHeadersTest, ValidSTSHeaders) { 740 | 741 | EXPECT_TRUE(ParseHSTSHeader( 742 | ";; max-age=394082038 ; incLudesUbdOmains; ;", &max_age, 743 | - &include_subdomains)); 744 | + &include_subdomains, &require_ct)); 745 | expect_max_age = base::TimeDelta::FromSeconds( 746 | std::min(kMaxHSTSAgeSecs, 
static_cast(GG_INT64_C(394082038)))); 747 | EXPECT_EQ(expect_max_age, max_age); 748 | @@ -360,7 +362,7 @@ TEST_F(HttpSecurityHeadersTest, ValidSTSHeaders) { 749 | 750 | EXPECT_TRUE(ParseHSTSHeader( 751 | ";; max-age=394082038 ;", &max_age, 752 | - &include_subdomains)); 753 | + &include_subdomains, &require_ct)); 754 | expect_max_age = base::TimeDelta::FromSeconds( 755 | std::min(kMaxHSTSAgeSecs, static_cast(GG_INT64_C(394082038)))); 756 | EXPECT_EQ(expect_max_age, max_age); 757 | @@ -368,7 +370,7 @@ TEST_F(HttpSecurityHeadersTest, ValidSTSHeaders) { 758 | 759 | EXPECT_TRUE(ParseHSTSHeader( 760 | ";; ; ; max-age=394082038;;; includeSubdomains ;; ;", &max_age, 761 | - &include_subdomains)); 762 | + &include_subdomains, &require_ct)); 763 | expect_max_age = base::TimeDelta::FromSeconds( 764 | std::min(kMaxHSTSAgeSecs, static_cast(GG_INT64_C(394082038)))); 765 | EXPECT_EQ(expect_max_age, max_age); 766 | @@ -376,7 +378,7 @@ TEST_F(HttpSecurityHeadersTest, ValidSTSHeaders) { 767 | 768 | EXPECT_TRUE(ParseHSTSHeader( 769 | "incLudesUbdOmains ; max-age=394082038 ;;", &max_age, 770 | - &include_subdomains)); 771 | + &include_subdomains, &require_ct)); 772 | expect_max_age = base::TimeDelta::FromSeconds( 773 | std::min(kMaxHSTSAgeSecs, static_cast(GG_INT64_C(394082038)))); 774 | EXPECT_EQ(expect_max_age, max_age); 775 | @@ -384,14 +386,14 @@ TEST_F(HttpSecurityHeadersTest, ValidSTSHeaders) { 776 | 777 | EXPECT_TRUE(ParseHSTSHeader( 778 | " max-age=0 ; incLudesUbdOmains ", &max_age, 779 | - &include_subdomains)); 780 | + &include_subdomains, &require_ct)); 781 | expect_max_age = base::TimeDelta::FromSeconds(0); 782 | EXPECT_EQ(expect_max_age, max_age); 783 | EXPECT_TRUE(include_subdomains); 784 | 785 | EXPECT_TRUE(ParseHSTSHeader( 786 | " max-age=999999999999999999999999999999999999999999999 ;" 787 | - " incLudesUbdOmains ", &max_age, &include_subdomains)); 788 | + " incLudesUbdOmains ", &max_age, &include_subdomains, &require_ct)); 789 | expect_max_age = 
base::TimeDelta::FromSeconds( 790 | kMaxHSTSAgeSecs); 791 | EXPECT_EQ(expect_max_age, max_age); 792 | diff --git a/net/http/transport_security_persister.cc b/net/http/transport_security_persister.cc 793 | index 82a4b51..8164a36 794 | --- a/net/http/transport_security_persister.cc 795 | +++ b/net/http/transport_security_persister.cc 796 | @@ -67,6 +67,7 @@ std::string ExternalStringToHashedDomain(const std::string& external) { 797 | } 798 | 799 | const char kIncludeSubdomains[] = "include_subdomains"; 800 | +const char kRequireCT[] = "require_ct"; 801 | const char kStsIncludeSubdomains[] = "sts_include_subdomains"; 802 | const char kPkpIncludeSubdomains[] = "pkp_include_subdomains"; 803 | const char kMode[] = "mode"; 804 | @@ -147,6 +148,8 @@ bool TransportSecurityPersister::SerializeData(std::string* output) { 805 | base::DictionaryValue* serialized = new base::DictionaryValue; 806 | serialized->SetBoolean(kStsIncludeSubdomains, 807 | domain_state.sts.include_subdomains); 808 | + serialized->SetBoolean(kRequireCT, 809 | + domain_state.sts.require_ct); 810 | serialized->SetBoolean(kPkpIncludeSubdomains, 811 | domain_state.pkp.include_subdomains); 812 | serialized->SetDouble(kStsObserved, 813 | @@ -231,9 +234,17 @@ bool TransportSecurityPersister::Deserialize(const std::string& serialized, 814 | parsed_include_subdomains = true; 815 | } 816 | 817 | + bool require_ct; 818 | + bool parsed_require_ct = false; 819 | + if (parsed->GetBoolean(kRequireCT, &require_ct)) { 820 | + domain_state.sts.require_ct = require_ct; 821 | + parsed_require_ct = true; 822 | + } 823 | + 824 | std::string mode_string; 825 | double expiry = 0; 826 | if (!parsed_include_subdomains || 827 | + !parsed_require_ct || 828 | !parsed->GetString(kMode, &mode_string) || 829 | !parsed->GetDouble(kExpiry, &expiry)) { 830 | LOG(WARNING) << "Could not parse some elements of entry " << i.key() 831 | diff --git a/net/http/transport_security_state.cc b/net/http/transport_security_state.cc 832 | index 
a174e98..f8ea963 833 | --- a/net/http/transport_security_state.cc 834 | +++ b/net/http/transport_security_state.cc 835 | @@ -108,6 +108,20 @@ bool TransportSecurityState::ShouldSSLErrorsBeFatal(const std::string& host) { 836 | return GetDynamicDomainState(host, &state); 837 | } 838 | 839 | +bool TransportSecurityState::ShouldRequireCT(const std::string& host) { 840 | + DomainState dynamic_state; 841 | + if (GetDynamicDomainState(host, &dynamic_state)) 842 | + return dynamic_state.ShouldRequireCT(); 843 | + 844 | + DomainState static_state; 845 | + if (GetStaticDomainState(host, &static_state) && 846 | + static_state.ShouldRequireCT()) { 847 | + return true; 848 | + } 849 | + 850 | + return false; 851 | +} 852 | + 853 | bool TransportSecurityState::ShouldUpgradeToSSL(const std::string& host) { 854 | DomainState dynamic_state; 855 | if (GetDynamicDomainState(host, &dynamic_state)) 856 | @@ -171,7 +185,8 @@ void TransportSecurityState::AddHSTSInternal( 857 | const std::string& host, 858 | TransportSecurityState::DomainState::UpgradeMode upgrade_mode, 859 | const base::Time& expiry, 860 | - bool include_subdomains) { 861 | + bool include_subdomains, 862 | + bool require_ct) { 863 | DCHECK(CalledOnValidThread()); 864 | 865 | // Copy-and-modify the existing DomainState for this host (if any). 
866 | @@ -184,6 +199,7 @@ void TransportSecurityState::AddHSTSInternal( 867 | 868 | domain_state.sts.last_observed = base::Time::Now(); 869 | domain_state.sts.include_subdomains = include_subdomains; 870 | + domain_state.sts.require_ct = require_ct; 871 | domain_state.sts.expiry = expiry; 872 | domain_state.sts.upgrade_mode = upgrade_mode; 873 | EnableHost(host, domain_state); 874 | @@ -670,7 +686,8 @@ bool TransportSecurityState::AddHSTSHeader(const std::string& host, 875 | base::Time now = base::Time::Now(); 876 | base::TimeDelta max_age; 877 | bool include_subdomains; 878 | - if (!ParseHSTSHeader(value, &max_age, &include_subdomains)) { 879 | + bool require_ct; 880 | + if (!ParseHSTSHeader(value, &max_age, &include_subdomains, &require_ct)) { 881 | return false; 882 | } 883 | 884 | @@ -682,7 +699,7 @@ bool TransportSecurityState::AddHSTSHeader(const std::string& host, 885 | upgrade_mode = DomainState::MODE_FORCE_HTTPS; 886 | } 887 | 888 | - AddHSTSInternal(host, upgrade_mode, now + max_age, include_subdomains); 889 | + AddHSTSInternal(host, upgrade_mode, now + max_age, include_subdomains, require_ct); 890 | return true; 891 | } 892 | 893 | @@ -708,10 +725,11 @@ bool TransportSecurityState::AddHPKPHeader(const std::string& host, 894 | 895 | void TransportSecurityState::AddHSTS(const std::string& host, 896 | const base::Time& expiry, 897 | - bool include_subdomains) { 898 | + bool include_subdomains, 899 | + bool require_ct) { 900 | DCHECK(CalledOnValidThread()); 901 | AddHSTSInternal(host, DomainState::MODE_FORCE_HTTPS, expiry, 902 | - include_subdomains); 903 | + include_subdomains, require_ct); 904 | } 905 | 906 | void TransportSecurityState::AddHPKP(const std::string& host, 907 | @@ -782,6 +800,7 @@ bool TransportSecurityState::GetStaticDomainState(const std::string& host, 908 | 909 | out->sts.upgrade_mode = DomainState::MODE_FORCE_HTTPS; 910 | out->sts.include_subdomains = false; 911 | + out->sts.require_ct = false; 912 | out->pkp.include_subdomains = false; 
913 | 914 | if (!IsBuildTimely()) 915 | @@ -895,6 +914,7 @@ void TransportSecurityState::AddOrUpdateEnabledHosts( 916 | TransportSecurityState::DomainState::DomainState() { 917 | sts.upgrade_mode = MODE_DEFAULT; 918 | sts.include_subdomains = false; 919 | + sts.require_ct = false; 920 | pkp.include_subdomains = false; 921 | } 922 | 923 | @@ -939,6 +959,10 @@ bool TransportSecurityState::DomainState::ShouldUpgradeToSSL() const { 924 | return sts.upgrade_mode == MODE_FORCE_HTTPS; 925 | } 926 | 927 | +bool TransportSecurityState::DomainState::ShouldRequireCT() const { 928 | + return sts.require_ct; 929 | +} 930 | + 931 | bool TransportSecurityState::DomainState::ShouldSSLErrorsBeFatal() const { 932 | // Both HSTS and HPKP cause fatal SSL errors, so enable this on the presense 933 | // of either. (If neither is active, no DomainState will be returned.) 934 | diff --git a/net/http/transport_security_state.h b/net/http/transport_security_state.h 935 | index 6a4615c..ea4fed6 936 | --- a/net/http/transport_security_state.h 937 | +++ b/net/http/transport_security_state.h 938 | @@ -81,6 +81,9 @@ class NET_EXPORT TransportSecurityState 939 | 940 | // Are subdomains subject to this policy state? 941 | bool include_subdomains; 942 | + 943 | + // Does this host require Certificate Transparency 944 | + bool require_ct; 945 | 946 | // The domain which matched during a search for this DomainState entry. 947 | // Updated by |GetDynamicDomainState| and |GetStaticDomainState|. 948 | @@ -142,6 +145,10 @@ class NET_EXPORT TransportSecurityState 949 | // redirected to HTTPS (also if WS should be upgraded to WSS). 950 | bool ShouldUpgradeToSSL() const; 951 | 952 | + // ShouldUpgradeToSSL returns true iff we should fail a connection if it 953 | + // omits Cert Transparency records 954 | + bool ShouldRequireCT() const; 955 | + 956 | // ShouldSSLErrorsBeFatal returns true iff HTTPS errors should cause 957 | // hard-fail behavior (e.g. if HSTS is set for the domain). 
958 | bool ShouldSSLErrorsBeFatal() const; 959 | @@ -170,6 +177,7 @@ class NET_EXPORT TransportSecurityState 960 | // interface; direct access to DomainStates is best left to tests. 961 | bool ShouldSSLErrorsBeFatal(const std::string& host); 962 | bool ShouldUpgradeToSSL(const std::string& host); 963 | + bool ShouldRequireCT(const std::string& host); 964 | bool CheckPublicKeyPins(const std::string& host, 965 | bool is_issued_by_known_root, 966 | const HashValueVector& hashes, 967 | @@ -247,7 +255,8 @@ class NET_EXPORT TransportSecurityState 968 | // HSTS header (used for net-internals and unit tests). 969 | void AddHSTS(const std::string& host, 970 | const base::Time& expiry, 971 | - bool include_subdomains); 972 | + bool include_subdomains, 973 | + bool require_ct); 974 | 975 | // Adds explicitly-specified data as if it was processed from an 976 | // HPKP header (used for net-internals and unit tests). 977 | @@ -303,7 +312,8 @@ class NET_EXPORT TransportSecurityState 978 | void AddHSTSInternal(const std::string& host, 979 | DomainState::UpgradeMode upgrade_mode, 980 | const base::Time& expiry, 981 | - bool include_subdomains); 982 | + bool include_subdomains, 983 | + bool require_ct); 984 | 985 | // Adds HPKP state to |host|. 986 | void AddHPKPInternal(const std::string& host, 987 | diff --git a/net/socket/ssl_client_socket_nss.cc b/net/socket/ssl_client_socket_nss.cc 988 | index 483c5e7..cde0efa 100644 989 | --- a/net/socket/ssl_client_socket_nss.cc 990 | +++ b/net/socket/ssl_client_socket_nss.cc 991 | @@ -3550,8 +3550,11 @@ int SSLClientSocketNSS::DoVerifyCertComplete(int result) { 992 | if (result == OK) { 993 | // Only check Certificate Transparency if there were no other errors with 994 | // the connection. 
995 | - VerifyCT(); 996 | + bool require_ct = transport_security_state_ && transport_security_state_->ShouldRequireCT(host_and_port_.host()); 997 | + result = VerifyCT(require_ct); 998 | + } 999 | 1000 | + if (result == OK) { 1001 | // Only cache the session if the certificate verified successfully. 1002 | core_->CacheSessionIfNecessary(); 1003 | } 1004 | @@ -3563,9 +3566,11 @@ int SSLClientSocketNSS::DoVerifyCertComplete(int result) { 1005 | return result; 1006 | } 1007 | 1008 | -void SSLClientSocketNSS::VerifyCT() { 1009 | - if (!cert_transparency_verifier_) 1010 | - return; 1011 | +int SSLClientSocketNSS::VerifyCT(bool require_ct) { 1012 | + if (!cert_transparency_verifier_ && !require_ct) 1013 | + return OK; 1014 | + else if (!cert_transparency_verifier_ && require_ct) 1015 | + return ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT; 1016 | 1017 | // Note that this is a completely synchronous operation: The CT Log Verifier 1018 | // gets all the data it needs for SCT verification and does not do any 1019 | @@ -3579,6 +3584,7 @@ void SSLClientSocketNSS::VerifyCT() { 1020 | 1021 | if (!policy_enforcer_) { 1022 | server_cert_verify_result_.cert_status &= ~CERT_STATUS_IS_EV; 1023 | + if(require_ct) return ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT; 1024 | } else { 1025 | if (server_cert_verify_result_.cert_status & CERT_STATUS_IS_EV) { 1026 | scoped_refptr ev_whitelist = 1027 | @@ -3594,7 +3600,19 @@ void SSLClientSocketNSS::VerifyCT() { 1028 | server_cert_verify_result_.cert_status &= ~CERT_STATUS_IS_EV; 1029 | } 1030 | } 1031 | + if (require_ct) { 1032 | + if (!policy_enforcer_->DoesConformToCTRequiredPolicy( 1033 | + server_cert_verify_result_.verified_cert.get(), 1034 | + ct_verify_result_, net_log_)) { 1035 | + VLOG(1) << "certificate for " 1036 | + << server_cert_verify_result_.verified_cert->subject() 1037 | + .GetDisplayName() 1038 | + << " does not conform to CT Required policy, aborting."; 1039 | + return ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT; 1040 | 
+ } 1041 | + } 1042 | } 1043 | + return OK; 1044 | } 1045 | 1046 | void SSLClientSocketNSS::EnsureThreadIdAssigned() const { 1047 | diff --git a/net/socket/ssl_client_socket_nss.h b/net/socket/ssl_client_socket_nss.h 1048 | index 10bb57f..bafd76d 100644 1049 | --- a/net/socket/ssl_client_socket_nss.h 1050 | +++ b/net/socket/ssl_client_socket_nss.h 1051 | @@ -143,7 +143,7 @@ class SSLClientSocketNSS : public SSLClientSocket { 1052 | int DoVerifyCert(int result); 1053 | int DoVerifyCertComplete(int result); 1054 | 1055 | - void VerifyCT(); 1056 | + int VerifyCT(bool); 1057 | 1058 | // The following methods are for debugging bug 65948. Will remove this code 1059 | // after fixing bug 65948. 1060 | diff --git a/net/socket/ssl_client_socket_openssl.cc b/net/socket/ssl_client_socket_openssl.cc 1061 | index c4af957..4c8bce1 100644 1062 | --- a/net/socket/ssl_client_socket_openssl.cc 1063 | +++ b/net/socket/ssl_client_socket_openssl.cc 1064 | @@ -1233,8 +1233,11 @@ int SSLClientSocketOpenSSL::DoVerifyCertComplete(int result) { 1065 | if (result == OK) { 1066 | // Only check Certificate Transparency if there were no other errors with 1067 | // the connection. 1068 | - VerifyCT(); 1069 | - 1070 | + bool require_ct = transport_security_state_ && transport_security_state_->ShouldRequireCT(host_and_port_.host()); 1071 | + result = VerifyCT(require_ct); 1072 | + } 1073 | + 1074 | + if (result == OK) { 1075 | // TODO(joth): Work out if we need to remember the intermediate CA certs 1076 | // when the server sends them to us, and do so here. 
1077 | SSLContext::GetInstance()->session_cache()->MarkSSLSessionAsGood(ssl_); 1078 | @@ -1315,9 +1318,11 @@ void SSLClientSocketOpenSSL::UpdateServerCert() { 1079 | } 1080 | } 1081 | 1082 | -void SSLClientSocketOpenSSL::VerifyCT() { 1083 | - if (!cert_transparency_verifier_) 1084 | - return; 1085 | +int SSLClientSocketOpenSSL::VerifyCT(bool require_ct) { 1086 | + if (!cert_transparency_verifier_ && !require_ct) 1087 | + return OK; 1088 | + else if (!cert_transparency_verifier_ && require_ct) 1089 | + return ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT; 1090 | 1091 | const uint8_t* ocsp_response_raw; 1092 | size_t ocsp_response_len; 1093 | @@ -1344,6 +1349,7 @@ void SSLClientSocketOpenSSL::VerifyCT() { 1094 | 1095 | if (!policy_enforcer_) { 1096 | server_cert_verify_result_.cert_status &= ~CERT_STATUS_IS_EV; 1097 | + if(require_ct) return ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT; 1098 | } else { 1099 | if (server_cert_verify_result_.cert_status & CERT_STATUS_IS_EV) { 1100 | scoped_refptr ev_whitelist = 1101 | @@ -1359,7 +1365,19 @@ void SSLClientSocketOpenSSL::VerifyCT() { 1102 | server_cert_verify_result_.cert_status &= ~CERT_STATUS_IS_EV; 1103 | } 1104 | } 1105 | + if (require_ct) { 1106 | + if (!policy_enforcer_->DoesConformToCTRequiredPolicy( 1107 | + server_cert_verify_result_.verified_cert.get(), 1108 | + ct_verify_result_, net_log_)) { 1109 | + VLOG(1) << "certificate for " 1110 | + << server_cert_verify_result_.verified_cert->subject() 1111 | + .GetDisplayName() 1112 | + << " does not conform to CT Required policy, aborting."; 1113 | + return ERR_SSL_CERTIFICATE_NOT_VALIDATED_THROUGH_CT; 1114 | + } 1115 | + } 1116 | } 1117 | + return OK; 1118 | } 1119 | 1120 | void SSLClientSocketOpenSSL::OnHandshakeIOComplete(int result) { 1121 | diff --git a/net/socket/ssl_client_socket_openssl.h b/net/socket/ssl_client_socket_openssl.h 1122 | index 6aaf1e1..9198c73 100644 1123 | --- a/net/socket/ssl_client_socket_openssl.h 1124 | +++ 
b/net/socket/ssl_client_socket_openssl.h 1125 | @@ -124,7 +124,7 @@ class SSLClientSocketOpenSSL : public SSLClientSocket { 1126 | int DoVerifyCertComplete(int result); 1127 | void DoConnectCallback(int result); 1128 | void UpdateServerCert(); 1129 | - void VerifyCT(); 1130 | + int VerifyCT(bool); 1131 | 1132 | void OnHandshakeIOComplete(int result); 1133 | void OnSendComplete(int result); 1134 | diff --git a/net/spdy/spdy_session.cc b/net/spdy/spdy_session.cc 1135 | index a6478df..47b406e 100644 1136 | --- a/net/spdy/spdy_session.cc 1137 | +++ b/net/spdy/spdy_session.cc 1138 | @@ -588,6 +588,8 @@ bool SpdySession::CanPool(TransportSecurityState* transport_security_state, 1139 | if (!ssl_info.cert->VerifyNameMatch(new_hostname, &unused)) 1140 | return false; 1141 | 1142 | + //May need to put a CT check here 1143 | + 1144 | std::string pinning_failure_log; 1145 | if (!transport_security_state->CheckPublicKeyPins( 1146 | new_hostname, 1147 | diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc 1148 | index 68858c1..f572e2b 100644 1149 | --- a/net/url_request/url_request_http_job.cc 1150 | +++ b/net/url_request/url_request_http_job.cc 1151 | @@ -915,13 +915,18 @@ void URLRequestHttpJob::OnStartCompleted(int result) { 1152 | 1153 | SaveCookiesAndNotifyHeadersComplete(net::OK); 1154 | } else if (IsCertificateError(result)) { 1155 | - // We encountered an SSL certificate error. 1156 | + // In either case, we need to update the certificate status so it 1157 | + // reflects an error. What's not clear to me (tom) is if a non-fatal 1158 | + // error should _not_ have the certificate status updated. But right 1159 | + // now, all errors are fatal. So it shouldn't matter? 1160 | + SSLInfo info(transaction_->GetResponseInfo()->ssl_info); 1161 | + info.cert_status = MapNetErrorToCertStatus(result); 1162 | + 1163 | + // We encountered an SSL certificate error. 
1164 | if (result == ERR_SSL_WEAK_SERVER_EPHEMERAL_DH_KEY || 1165 | result == ERR_SSL_PINNED_KEY_NOT_IN_CERT_CHAIN) { 1166 | // These are hard failures. They're handled separately and don't have 1167 | // the correct cert status, so set it here. 1168 | - SSLInfo info(transaction_->GetResponseInfo()->ssl_info); 1169 | - info.cert_status = MapNetErrorToCertStatus(result); 1170 | NotifySSLCertificateError(info, true); 1171 | } else { 1172 | // Maybe overridable, maybe not. Ask the delegate to decide. 1173 | @@ -929,8 +934,7 @@ void URLRequestHttpJob::OnStartCompleted(int result) { 1174 | TransportSecurityState* state = context->transport_security_state(); 1175 | const bool fatal = 1176 | state && state->ShouldSSLErrorsBeFatal(request_info_.url.host()); 1177 | - NotifySSLCertificateError( 1178 | - transaction_->GetResponseInfo()->ssl_info, fatal); 1179 | + NotifySSLCertificateError(info, fatal); 1180 | } 1181 | } else if (result == ERR_SSL_CLIENT_AUTH_CERT_NEEDED) { 1182 | NotifyCertificateRequested( 1183 | diff --git a/net/url_request/url_request_unittest.cc b/net/url_request/url_request_unittest.cc 1184 | index c3bc166..294cea2 100644 1185 | --- a/net/url_request/url_request_unittest.cc 1186 | +++ b/net/url_request/url_request_unittest.cc 1187 | @@ -7165,8 +7165,9 @@ TEST_F(HTTPSRequestTest, HSTSPreservesPosts) { 1188 | TransportSecurityState transport_security_state; 1189 | base::Time expiry = base::Time::Now() + base::TimeDelta::FromDays(1000); 1190 | bool include_subdomains = false; 1191 | + bool require_ct = false; 1192 | transport_security_state.AddHSTS("www.somewhere.com", expiry, 1193 | - include_subdomains); 1194 | + include_subdomains, require_ct); 1195 | 1196 | TestNetworkDelegate network_delegate; // Must outlive URLRequest. 
1197 | 1198 | @@ -7221,7 +7222,8 @@ TEST_F(HTTPSRequestTest, HSTSCrossOriginAddHeaders) { 1199 | TransportSecurityState transport_security_state; 1200 | base::Time expiry = base::Time::Now() + base::TimeDelta::FromDays(1); 1201 | bool include_subdomains = false; 1202 | - transport_security_state.AddHSTS("example.net", expiry, include_subdomains); 1203 | + bool require_ct = false; 1204 | + transport_security_state.AddHSTS("example.net", expiry, include_subdomains, require_ct); 1205 | 1206 | TestNetworkDelegate network_delegate; // Must outlive URLRequest. 1207 | 1208 | -- 1209 | 2.1.4 1210 | 1211 | -------------------------------------------------------------------------------- /fetchallcerts.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Copyright (c) 2014, NORDUnet A/S. 5 | # See LICENSE for licensing information. 6 | 7 | import argparse 8 | import urllib2 9 | import urllib 10 | import json 11 | import base64 12 | import sys 13 | import struct 14 | import hashlib 15 | import itertools 16 | from certtools import * 17 | import zipfile 18 | import os 19 | import time 20 | 21 | parser = argparse.ArgumentParser(description='') 22 | parser.add_argument('baseurl', help="Base URL for CT server") 23 | parser.add_argument('--store', default=None, metavar="dir", help='Store certificates in directory dir') 24 | parser.add_argument('--write-sth', action='store_true', help='Write STH') 25 | parser.add_argument('--no-check-signature', action='store_true', help='Don\'t check signature') 26 | parser.add_argument('--publickey', default=None, metavar="file", help='Public key for the CT log') 27 | parser.add_argument('--cafile', default=None, metavar="file", help='File containing the CA cert') 28 | args = parser.parse_args() 29 | 30 | create_ssl_context(cafile=args.cafile) 31 | 32 | def get_entries_wrapper(baseurl, start, end): 33 | fetched_entries = 0 34 | while start + 
fetched_entries < (end + 1): 35 | print "fetching from", start + fetched_entries 36 | entries = get_entries(baseurl, start + fetched_entries, end)["entries"] 37 | if len(entries) == 0: 38 | break 39 | for entry in entries: 40 | fetched_entries += 1 41 | yield entry 42 | 43 | def print_layer(layer): 44 | for entry in layer: 45 | print base64.b16encode(entry) 46 | 47 | logpublickey = get_public_key_from_file(args.publickey) if args.publickey else None 48 | 49 | sth = get_sth(args.baseurl) 50 | if not args.no_check_signature: 51 | check_sth_signature(args.baseurl, sth, publickey=logpublickey) 52 | tree_size = sth["tree_size"] 53 | root_hash = base64.decodestring(sth["sha256_root_hash"]) 54 | 55 | try: 56 | if args.store: 57 | oldsth = json.load(open(args.store + "/currentsth")) 58 | else: 59 | oldsth = None 60 | except IOError: 61 | oldsth = None 62 | 63 | sth_timestamp = datetime.datetime.fromtimestamp(sth["timestamp"]/1000) 64 | since_timestamp = time.time() - sth["timestamp"]/1000 65 | 66 | print "Log last updated %s, %d seconds ago" % (sth_timestamp.ctime(), since_timestamp) 67 | 68 | print "tree size", tree_size 69 | print "root hash", base64.b16encode(root_hash) 70 | 71 | if oldsth: 72 | if oldsth["tree_size"] == tree_size: 73 | print "Tree size has not changed" 74 | if oldsth["sha256_root_hash"] != sth["sha256_root_hash"]: 75 | print "Root hash is different even though tree size is the same." 76 | print "Log has violated the append-only property." 
77 | print "Old hash:", oldsth["sha256_root_hash"] 78 | print "New hash:", sth["sha256_root_hash"] 79 | sys.exit(1) 80 | if oldsth["timestamp"] == sth["timestamp"]: 81 | print "Timestamp has not changed" 82 | else: 83 | print "Tree size changed, old tree size was", oldsth["tree_size"] 84 | 85 | merkle_64klayer = [] 86 | 87 | if args.store: 88 | ncerts = None 89 | for blocknumber in range(0, (tree_size / 65536) + 1): 90 | (resulttype, result) = get_merkle_hash_64k(args.store, blocknumber, write_to_cache=True) 91 | if resulttype == "incomplete": 92 | (incompletelength, hash) = result 93 | ncerts = blocknumber * 65536 + incompletelength 94 | break 95 | assert resulttype == "hash" 96 | hash = result 97 | merkle_64klayer.append(hash) 98 | print blocknumber * 65536, 99 | sys.stdout.flush() 100 | print 101 | print "ncerts", ncerts 102 | else: 103 | ncerts = 0 104 | 105 | entries = get_entries_wrapper(args.baseurl, ncerts, tree_size - 1) 106 | 107 | if not args.store: 108 | layer0 = [get_leaf_hash(base64.decodestring(entry["leaf_input"])) for entry in entries] 109 | 110 | tree = build_merkle_tree(layer0) 111 | 112 | calculated_root_hash = tree[-1][0] 113 | 114 | else: 115 | currentfilename = None 116 | zf = None 117 | for entry, i in itertools.izip(entries, itertools.count(ncerts)): 118 | try: 119 | (chain, timestamp, issuer_key_hash) = extract_original_entry(entry) 120 | zipfilename = args.store + "/" + ("%04d.zip" % (i / 10000)) 121 | if zipfilename != currentfilename: 122 | if zf: 123 | zf.close() 124 | zf = zipfile.ZipFile(zipfilename, "a", 125 | compression=zipfile.ZIP_DEFLATED) 126 | currentfilename = zipfilename 127 | s = "" 128 | s += "Timestamp: %s\n" % timestamp 129 | leaf_input = base64.decodestring(entry["leaf_input"]) 130 | leaf_hash = get_leaf_hash(leaf_input) 131 | s += "Leafhash: %s\n" % base64.b16encode(leaf_hash) 132 | if issuer_key_hash: 133 | s += "-----BEGIN PRECERTIFICATE-----\n" 134 | s += base64.encodestring(chain[0]).rstrip() + "\n" 135 | s += 
"-----END PRECERTIFICATE-----\n" 136 | s += "\n" 137 | chain = chain[1:] 138 | for cert in chain: 139 | s += "-----BEGIN CERTIFICATE-----\n" 140 | s += base64.encodestring(cert).rstrip() + "\n" 141 | s += "-----END CERTIFICATE-----\n" 142 | s += "\n" 143 | zf.writestr("%08d" % i, s) 144 | except AssertionError, e: 145 | print "error for cert", i, e 146 | if zf: 147 | zf.close() 148 | 149 | for blocknumber in range(ncerts / 65536, (tree_size / 65536) + 1): 150 | (resulttype, result) = get_merkle_hash_64k(args.store, blocknumber, write_to_cache=True) 151 | if resulttype == "incomplete": 152 | (incompletelength, hash) = result 153 | ncerts = blocknumber * 65536 + incompletelength 154 | merkle_64klayer.append(hash) 155 | break 156 | assert resulttype == "hash" 157 | hash = result 158 | merkle_64klayer.append(hash) 159 | print blocknumber * 65536, base64.b16encode(hash) 160 | 161 | tree = build_merkle_tree(merkle_64klayer) 162 | 163 | calculated_root_hash = tree[-1][0] 164 | 165 | assert ncerts == tree_size 166 | 167 | print "calculated root hash", base64.b16encode(calculated_root_hash) 168 | 169 | if oldsth and oldsth["tree_size"] > 0 and oldsth["tree_size"] != tree_size: 170 | consistency_proof = [base64.decodestring(entry) for entry in get_consistency_proof(args.baseurl, oldsth["tree_size"], tree_size)] 171 | (old_treehead, new_treehead) = verify_consistency_proof(consistency_proof, oldsth["tree_size"], tree_size, base64.b64decode(oldsth["sha256_root_hash"])) 172 | assert old_treehead == base64.b64decode(oldsth["sha256_root_hash"]) 173 | assert new_treehead == base64.b64decode(sth["sha256_root_hash"]) 174 | 175 | if calculated_root_hash != root_hash: 176 | print "fetched root hash and calculated root hash different" 177 | sys.exit(1) 178 | 179 | if args.store and args.write_sth: 180 | f = open(args.store + "/currentsth", "w") 181 | f.write(json.dumps(sth)) 182 | f.close() 183 | -------------------------------------------------------------------------------- 
/fetchalllogkeys.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import json 6 | import requests 7 | import argparse 8 | 9 | if __name__ == "__main__": 10 | parser = argparse.ArgumentParser(description='Fetch log keys from Google\'s CT Page') 11 | parser.add_argument("--out", help="Directory to store keys to", required=True) 12 | args = parser.parse_args() 13 | if os.path.exists(args.out) and not os.path.isdir(args.out): 14 | print "Error: Output directory is not a directory!" 15 | sys.exit(-1) 16 | elif not os.path.exists(args.out): 17 | os.mkdir(args.out) 18 | 19 | logs = requests.get("https://www.certificate-transparency.org/known-logs/all_logs_list.json?attredirects=0&d=1") 20 | logs = json.loads(logs.text) 21 | for l in logs['logs']: 22 | name = l['description'].replace(" ", "_").replace("'", "") 23 | url = "https://" + l['url'] + "/" 24 | key = l['key'] 25 | 26 | keyout = os.path.join(args.out, name + ".pem") 27 | print "Writing", keyout 28 | keyout = open(keyout, "w") 29 | keyout.write("-----BEGIN PUBLIC KEY-----\n") 30 | keyout.write(key + "\n") 31 | keyout.write("-----END PUBLIC KEY-----\n") 32 | keyout.write(url) 33 | -------------------------------------------------------------------------------- /findcerts.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | import time 5 | import base64 6 | import random 7 | import ctypes 8 | import hashlib 9 | import zipfile 10 | import datetime 11 | import argparse 12 | 13 | import findcerts_extra 14 | 15 | from multiprocessing import Pool, cpu_count, Array 16 | 17 | from pyx509.pkcs7.asn1_models.X509_certificate import Certificate 18 | from pyx509.pkcs7_models import X509Certificate, PublicKeyInfo, ExtendedKeyUsageExt 19 | from pyx509.pkcs7.asn1_models.decoder_workarounds import decode 20 | 21 | processLeaf = True 22 | 
processIntermediates = False 23 | processRoot = False 24 | 25 | def certificate_interesting(cert): 26 | tbs = cert.tbsCertificate 27 | if "ritter.vg" in str(tbs.subject): 28 | return "ritter.vg" 29 | if tbs.subjAltNameExt: 30 | san = tbs.subjAltNameExt.value 31 | for component_type, name_list in san.values.items(): 32 | for n in name_list: 33 | if "ritter.vg" in n: 34 | return "ritter.vg" 35 | 36 | return None 37 | 38 | #========================================================================= 39 | 40 | class State: 41 | LookForCert = 0 42 | AppendCert = 1 43 | 44 | class Status: 45 | Queued = 0 46 | Processing = 1 47 | Completed = 2 48 | Errored = 3 49 | 50 | def process_zipfile(ziptuple): 51 | global args 52 | global processLeaf, processIntermediates, processRoot 53 | 54 | zipindx, zipfilename = ziptuple 55 | if isinstance(zipfilename, file): 56 | zipfilename = zipfilename.name 57 | if zipindx >= 0: 58 | findcerts_extra.zipfilestate[zipindx] = Status.Processing 59 | 60 | z = zipfile.ZipFile(zipfilename, "r") 61 | findx = 1 62 | hasError = False 63 | numcerts = len(z.namelist()) 64 | numMatchingCerts = 0 65 | for filename in z.namelist(): 66 | lines = z.open(filename, "r").readlines() 67 | 68 | certs = [] 69 | thiscert = "" 70 | currentstate = State.LookForCert 71 | for l in lines: 72 | if currentstate == State.LookForCert and \ 73 | ("-----BEGIN CERTIFICATE-----" in l or "-----BEGIN PRECERTIFICATE-----" in l): 74 | thiscert = "" 75 | currentstate = State.AppendCert 76 | elif currentstate == State.LookForCert and "-----BEGIN" in l: 77 | print "[?] 
Got an unexpected begin line:", l 78 | elif currentstate == State.AppendCert and "-----END" in l: 79 | certs.append(base64.b64decode(thiscert)) 80 | currentstate = State.LookForCert 81 | elif currentstate == State.AppendCert: 82 | thiscert += l 83 | elif currentstate == State.LookForCert and "Timestamp:" in l: 84 | pass 85 | elif currentstate == State.LookForCert and "Leafhash:" in l: 86 | pass 87 | elif currentstate == State.LookForCert and not l.strip(): 88 | pass 89 | else: 90 | print "[!] What the heck? State machine error." 91 | 92 | cindx = 1 93 | for c in certs: 94 | if cindx == len(certs) and not processRoot: 95 | continue 96 | elif cindx == 1 and not processLeaf: 97 | continue 98 | elif cindx not in [1, len(certs)] and not processIntermediates: 99 | continue 100 | 101 | fingerprint = hashlib.sha1(c).hexdigest() 102 | try: 103 | cert = decode(c, asn1Spec=Certificate())[0] 104 | cert = X509Certificate(cert) 105 | 106 | certMatchType = certificate_interesting(cert) 107 | 108 | if certMatchType: 109 | numMatchingCerts += 1 110 | outputname = fingerprint + "_" + str(cindx) + "_" + str(random.random())[2:] 111 | outputpath = os.path.join(args.out, certMatchType, fingerprint[0:2], fingerprint[2]) 112 | if not os.path.exists(outputpath): 113 | try: 114 | os.makedirs(outputpath) 115 | except: 116 | pass 117 | outputfile = open(os.path.join(outputpath, outputname), 'w') 118 | outputfile.write("-----BEGIN CERTIFICATE-----\n") 119 | outputfile.write(base64.b64encode(c) + "\n") 120 | outputfile.write("-----END CERTIFICATE-----\n") 121 | outputfile.write(zipfilename + " " + filename) 122 | outputfile.close() 123 | except Exception, e: 124 | exc_info = sys.exc_info() 125 | try: 126 | outputname = fingerprint + "_" + str(cindx) + "_" + str(random.random())[2:] 127 | outputpath = os.path.join(args.err, fingerprint[0:2], fingerprint[2]) 128 | if not os.path.exists(outputpath): 129 | try: 130 | os.makedirs(outputpath) 131 | except: 132 | pass 133 | outputfile = 
open(os.path.join(outputpath, outputname), 'w') 134 | outputfile.write("-----BEGIN CERTIFICATE-----\n") 135 | outputfile.write(base64.b64encode(c) + "\n") 136 | outputfile.write("-----END CERTIFICATE-----\n") 137 | outputfile.write(zipfilename + " " + filename + "\n") 138 | outputfile.write(str(exc_info) + "\n") 139 | outputfile.write(str(e) + "\n") 140 | outputfile.close() 141 | except: 142 | hasError = True 143 | cindx += 1 144 | findx += 1 145 | 146 | findcerts_extra.resultcount[zipindx] = numMatchingCerts 147 | if zipindx >= 0: 148 | if not hasError: 149 | findcerts_extra.zipfilestate[zipindx] = Status.Completed 150 | else: 151 | findcerts_extra.zipfilestate[zipindx] = Status.Errored 152 | else: 153 | if hasError: 154 | print "Job Status: Errored", zipfilename 155 | else: 156 | print "Job Status: Completed", zipfilename 157 | 158 | def initProcess(share1, share2): 159 | findcerts_extra.zipfilestate = share1 160 | findcerts_extra.resultcount = share2 161 | 162 | 163 | args = None 164 | if __name__ == "__main__": 165 | parser = argparse.ArgumentParser(description='Run a query on every certificate in the CT logs') 166 | parser.add_argument("--data", help="Directory the ct data is in") 167 | parser.add_argument("--out", help="Directory the results will go to", required=True) 168 | parser.add_argument("--err", help="Directory the errors will go to", required=True) 169 | parser.add_argument("--zip", action="append", help="Process a zipfile directly, ignoring the multiprocessing features. Can be specified multiple times", type=argparse.FileType('r')) 170 | parser.add_argument("--log", action="append", help="Limit searching to these logs. 
Case-insensitive match, can be specified multiple times.") 171 | args = parser.parse_args() 172 | if not args.data and not args.zip: 173 | print "Error: Must supply either --data or --zip" 174 | sys.exit(1) 175 | if args.data and (not os.path.exists(args.data) or not os.path.isdir(args.data)): 176 | print "Error: Input directory is missing?" 177 | sys.exit(-1) 178 | if os.path.exists(args.out) and not os.path.isdir(args.out): 179 | print "Error: Output directory is not a directory!" 180 | sys.exit(-1) 181 | if os.path.exists(args.err) and not os.path.isdir(args.err): 182 | print "Error: Error directory is not a directory!" 183 | sys.exit(-1) 184 | if args.log: 185 | for i in range(len(args.log)): 186 | args.log[i] = args.log[i].lower() 187 | 188 | logs = [] 189 | zipfiles = [] 190 | if args.zip: 191 | for z in args.zip: 192 | zipfiles.append(z) 193 | logs.append("User-Specified") 194 | else: 195 | for d in os.listdir(args.data): 196 | if os.path.isdir(os.path.join(args.data, d)): 197 | processThisLog = False 198 | if args.log: 199 | for l in args.log: 200 | processThisLog |= l in d.lower() 201 | else: 202 | processThisLog = True 203 | if processThisLog: 204 | logs.append(d) 205 | for f in os.listdir(os.path.join(args.data, d)): 206 | if ".zip" in f: 207 | zipfiles.append(os.path.join(args.data, d, f)) 208 | 209 | if not zipfiles: 210 | print "[!] No files were found to process!" 211 | sys.exit(0) 212 | 213 | print "[+] Found", cpu_count(), "CPUs and", len(zipfiles), "zipfiles in", len(logs), "log(s):", tuple(logs) 214 | if not args.zip: 215 | print "[+] Running", cpu_count(), "jobs to estimate completion time..." 
216 | 217 | zipfilestate = Array('i', len(zipfiles), lock=False) 218 | resultcount = Array('i', len(zipfiles), lock=False) 219 | 220 | pool = Pool(processes=cpu_count(), initializer=initProcess, initargs=(zipfilestate,resultcount,)) 221 | 222 | bench = [] 223 | for i in range(len(zipfiles[:cpu_count()])): 224 | bench.append((i, zipfiles[i])) 225 | start = time.time() 226 | pool.map(process_zipfile, bench) 227 | runtime = time.time() - start 228 | 229 | chunks = (len(zipfiles[cpu_count():]) / cpu_count()) + 1 230 | total_runtime = runtime * chunks 231 | completion = datetime.datetime.now() + datetime.timedelta(seconds=total_runtime) 232 | print "[+] This is an estimate, but it looks like we'll complete sometime around", completion 233 | 234 | fullworkload = [] 235 | for i in range(len(zipfiles[cpu_count():])): 236 | fullworkload.append((i+cpu_count(), zipfiles[i])) 237 | result = pool.map_async(process_zipfile, fullworkload) 238 | 239 | errors = [] 240 | results = [] 241 | resultCount = 0 242 | while not result.ready(): 243 | q = 0 244 | p = 0 245 | c = 0 246 | e = 0 247 | for z in range(len(zipfilestate)): 248 | if zipfilestate[z] == Status.Queued: 249 | q += 1 250 | elif zipfilestate[z] == Status.Processing: 251 | p += 1 252 | elif zipfilestate[z] == Status.Completed or zipfilestate[z] == Status.Errored: 253 | if zipfiles[z] not in results: 254 | resultCount += resultcount[z] 255 | results.append(zipfiles[z]) 256 | 257 | if zipfilestate[z] == Status.Errored: 258 | e += 1 259 | if zipfiles[z] not in errors: 260 | print "[!] Caught a unhandle-able error:", zipfiles[z] 261 | errors.append(zipfiles[z]) 262 | else: 263 | c += 1 264 | sys.stdout.write("[+] Job Status: " + str(resultCount) + " results. Jobs: "+ str(p) + " in progress, " + str(q) + " queued, " + str(c+e) + " completed (" + str(e) + " Errors). 
\r") 265 | sys.stdout.flush() 266 | time.sleep(5) 267 | else: 268 | for z in zipfiles: 269 | process_zipfile((-1, z)) 270 | -------------------------------------------------------------------------------- /findcerts_extra.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tomrittervg/ct-tools/ca13bd3d8d1928e31577833c6c1a48b6e7b7c579/findcerts_extra.py -------------------------------------------------------------------------------- /parse-sct.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import struct 4 | import base64 5 | import argparse 6 | import datetime 7 | 8 | LOGS = { 9 | 'aviator' : base64.b64decode("aPaY+B9kgr46jO65KB1M/HFRXWeT1ETRCmesu09P+8Q="), 10 | 'pilot' : base64.b64decode("pLkJkLQYWBSHuxOizGdwCjw1mAT5G9+443fNDsgN3BA="), 11 | 'rocketeer' : base64.b64decode("7ku9t3XOYLrhQmkfq+GeZqMPfl+wctiDAMR7iXqo/cs="), 12 | 'digicert' : base64.b64decode("VhQGmi/XwuzT9eG9RLI+x0Z2ubyZEVzA75SYVdaJ0N0="), 13 | 'izenpen' : base64.b64decode("dGG0oJz7PUHXUVlXWy52SaRFqNJ3CbDMVkpkgrfrQaM="), 14 | 'certly' : base64.b64decode("zbUXm3/BwEb+6jETaj+PAC5hgvr4iW/syLL1tatgSQA="), 15 | 'venafi' : base64.b64decode("rDua7X+pZ0dXFZ5tfVdWcvnZgQCUHpve/+yhMTt1eC0="), 16 | 'digicert' : base64.b64decode("VhQGmi/XwuzT9eG9RLI+x0Z2ubyZEVzA75SYVdaJ0N0="), 17 | 'skydiver' : base64.b64decode("u9nfvB+KcbWTlCOXqpJ7RzhXlQqrUugakJZkNo4e0YU=") 18 | } 19 | 20 | parser = argparse.ArgumentParser(description='Read a SCT') 21 | parser.add_argument("--sct", type=str, required=False, help="The SCT to Read, in base64") 22 | parser.add_argument("--file", type=str, required=False, help="The SCT File to Read") 23 | 24 | args = parser.parse_args() 25 | 26 | if args.sct: 27 | sct = base64.b64decode(args.sct) 28 | elif args.file: 29 | sct = "".join(open(args.file).readlines()) 30 | else: 31 | raise Exception("Must specify sct on command line or file") 32 | 33 | #Version 34 | 
print "SCT Version: " + str(ord(sct[0])) 35 | 36 | #Log ID 37 | log = sct[1:33] 38 | found_log = False 39 | for l in LOGS: 40 | if LOGS[l] == log: 41 | print "Log: " + l 42 | found_log = True 43 | if not found_log: 44 | print "Log: Unknown (" + base64.b64encode(log) + ")" 45 | 46 | #Timestamp (8 Bytes) 47 | timestamp = struct.unpack(">Q", sct[33:41])[0] 48 | print "Timestamp: " + str(datetime.datetime.fromtimestamp(timestamp / 1000)) 49 | 50 | 51 | #Extensions 52 | if ord(sct[41]) == 0 and ord(sct[42]) == 0: 53 | print "No extensions" 54 | else: 55 | print "Extensions?!?!?!" 56 | 57 | #Signature (Includes the Hash, Signature, SigLength, and SigData) 58 | print "Signature: " + base64.b64encode(sct[43:]) 59 | -------------------------------------------------------------------------------- /submit-cert.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys 4 | import json 5 | import argparse 6 | import requests 7 | 8 | try: 9 | from requests.packages.urllib3.exceptions import InsecureRequestWarning 10 | requests.packages.urllib3.disable_warnings(InsecureRequestWarning) 11 | except: 12 | pass 13 | 14 | LOGS = { 15 | 'pilot' : 'https://ct.googleapis.com/pilot', 16 | 'aviator' : 'https://ct.googleapis.com/aviator', 17 | 'rocketeer' : 'https://ct.googleapis.com/rocketeer', 18 | 'icarus' : 'https://ct.googleapis.com/icarus', 19 | 'skydiver' : 'https://ct.googleapis.com/skydiver', 20 | 'nimbus2018' : 'https://ct.cloudflare.com/logs/nimbus2018', 21 | 'nimbus2019' : 'https://ct.cloudflare.com/logs/nimbus2019', 22 | 'nimbus2020' : 'https://ct.cloudflare.com/logs/nimbus2020', 23 | 'nimbus2021' : 'https://ct.cloudflare.com/logs/nimbus2021', 24 | # 'certly Log Server' : 'https://log.certly.io', 25 | 'symantec' : 'https://ct.ws.symantec.com', 26 | 'digicert1' : 'https://ct1.digicert-ct.com/log', 27 | 'digicert2' : 'https://ct2.digicert-ct.com/log', 28 | #'Google \'Submariner\' log' : 
'https://ct.googleapis.com/submariner', 29 | # 'Izenpe Log Server' :'https://ct.izenpe.com', 30 | 'venafi' : 'https://ctlog.api.venafi.com', 31 | 'vega' : 'https://vega.ws.symantec.com', 32 | 'sirius' : 'https://sirius.ws.symantec.com', 33 | 'cnnic' : 'https://ctserver.cnnic.cn', 34 | 'startssl' : 'https://ct.startssl.com', 35 | 'sabre' : 'https://sabre.ct.comodo.com', 36 | 'mammoth' : 'https://mammoth.ct.comodo.com', 37 | #'GDCA CT Log Server' : 'https://ct.gdca.com.cn', 38 | 'wosign' : 'https://ct.wosign.com', 39 | #'Akamai Log' : 'https://ct.akamai.com', 40 | 'certificatetransparency.cn' : 'https://www.certificatetransparency.cn/ct', 41 | 'venafigen2' : 'http://ctlog-gen2.api.venafi.com', 42 | } 43 | 44 | parser = argparse.ArgumentParser(description='Submit a certificate to logs') 45 | parser.add_argument("--cert", type=argparse.FileType('r'), action="append", required=True, 46 | help="Certificate chain. Specify multiple times, start with the leaf, continuing to the root.") 47 | parser.add_argument("--log", action='append', type=str, 48 | help="logs to submit the cert to. 
Specify multiple times for explicit choice, or leave blank for all.") 49 | 50 | args = parser.parse_args() 51 | 52 | certdata = [] 53 | num_certs = 0 54 | for c in args.cert: 55 | lines = ''.join(c.readlines()) 56 | if lines.count('-----BEGIN CERTIFICATE-----') > 1: 57 | print "Error: Specify one certificate per file, with multiple --cert arguments, in the order of leaf, intermediate, root" 58 | sys.exit(-1) 59 | lines = lines.replace("-----BEGIN CERTIFICATE-----", "") 60 | lines = lines.replace("-----END CERTIFICATE-----", "") 61 | lines = lines.replace("\r", "") 62 | lines = lines.replace("\n", "") 63 | certdata.append(lines) 64 | 65 | 66 | data = '{"chain" : ["' + '", "'.join(certdata) + '"]}' 67 | 68 | for l in LOGS: 69 | if not args.log or [x for x in args.log if x in l.lower()]: 70 | try: 71 | r = requests.post(LOGS[l] + "/ct/v1/add-chain", data=data, verify=False, timeout=2) 72 | if r.status_code != 200: 73 | print("Error {0} while submitting to {1}".format(r.status_code, l)) 74 | print(r.text) 75 | else: 76 | r = json.loads(r.text) 77 | print(l) 78 | print("\tID", r['id']) 79 | print("\tTimestamp", r['timestamp']) 80 | print("\tSignature", r['signature']) 81 | print("\tCommand: ./write-sct.py --time " + str(r['timestamp']) + " --sig " + str(r['signature']) + " --log " + l) 82 | except Exception as e: 83 | print("Error communicating with", l) 84 | print(e) 85 | -------------------------------------------------------------------------------- /update-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ "$#" -ne 2 ]; then 4 | echo "Usage: $0 key_directory certstore_directory"; 5 | exit 6 | fi 7 | 8 | for i in $1/* 9 | do 10 | echo "================================================================" 11 | echo "Processing $i" 12 | if [ ! 
-d "$2/`basename -s .pem $i`" ]; then 13 | mkdir $2/`basename -s .pem $i` 14 | fi 15 | ./fetchallcerts.py `tail -n 1 $i` --store $2/`basename -s .pem $i` --pub $i 16 | done 17 | -------------------------------------------------------------------------------- /write-sct.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import struct 4 | import base64 5 | import argparse 6 | 7 | LOGS = { 8 | 'aviator' : base64.b64decode("aPaY+B9kgr46jO65KB1M/HFRXWeT1ETRCmesu09P+8Q="), 9 | 'pilot' : base64.b64decode("pLkJkLQYWBSHuxOizGdwCjw1mAT5G9+443fNDsgN3BA="), 10 | 'rocketeer' : base64.b64decode("7ku9t3XOYLrhQmkfq+GeZqMPfl+wctiDAMR7iXqo/cs="), 11 | 'digicert' : base64.b64decode("VhQGmi/XwuzT9eG9RLI+x0Z2ubyZEVzA75SYVdaJ0N0="), 12 | 'izenpen' : base64.b64decode("dGG0oJz7PUHXUVlXWy52SaRFqNJ3CbDMVkpkgrfrQaM="), 13 | 'certly' : base64.b64decode("zbUXm3/BwEb+6jETaj+PAC5hgvr4iW/syLL1tatgSQA="), 14 | 'venafi' : base64.b64decode("rDua7X+pZ0dXFZ5tfVdWcvnZgQCUHpve/+yhMTt1eC0="), 15 | 'digicert1' : base64.b64decode("VhQGmi/XwuzT9eG9RLI+x0Z2ubyZEVzA75SYVdaJ0N0="), 16 | 'digicert2' : base64.b64decode("h3W/51l8+IxDmV+9827/Vo1HVjb/SrVgwbTq/16ggw8="), 17 | 'skydiver' : base64.b64decode("u9nfvB+KcbWTlCOXqpJ7RzhXlQqrUugakJZkNo4e0YU="), 18 | 'icarus' : base64.b64decode("KTxRllTIOWW6qlD8WAfUt2+/WHopctykwwz05UVH9Hg="), 19 | 'vega' : base64.b64decode("vHjh38X2PGhGSTNNoQ+hXwl5aSAJwIG08/aRfz7ZuKU="), 20 | 'wosign' : base64.b64decode("QbLcLonmPOSvG6e7Kb9oxt7m+fHMBH4w3/rjs7olkmM="), 21 | 'cnnic' : base64.b64decode("pXesnO11SN2PAltnokEInfhuD0duwgPC7L7bGF8oJjg="), 22 | 'startssl' : base64.b64decode("NLtq1sPfnAPuqKSZ/3iRSGydXlysktAfe/0bzhnbSO8="), 23 | 'certificatetransparency.cn' : base64.b64decode("4BJ2KekEllZOPQFHmESYqkj4rbFmAOt5AqHvmQmQYnM="), 24 | 'venafigen2' : base64.b64decode("AwGd8/2FppqOvR+sxtqbpz5Gl3T+d/V5/FoIuDKMHWs="), 25 | 'nimbus2018' : base64.b64decode("23Sv7ssp7LH+yj5xbSzluaq7NveEcYPHXZ1PN7Yfv2Q="), 26 | 'nimbus2019' : 
base64.b64decode("dH7agzGtMxCRIZzOJU9CcMK//V5CIAjGNzV55hB7zFY="), 27 | 'nimbus2020' : base64.b64decode("Xqdz+d9WwOe1Nkh90EngMnqRmgyEoRIShBh1loFxRVg="), 28 | 'nimbus2021' : base64.b64decode("RJRlLrDuzq/EQAfYqP4owNrmgr7YyzG1P9MzlrW2gag="), 29 | 'sabre' : base64.b64decode("VYHUwhaQNgFK6gubVzxT8MDkOHhwJQgXL6OqHQcT0ww="), 30 | 'mammoth' : base64.b64decode("b1N2rDHwMRnYmQCkURX/dxUcEdkCwQApBo2yCJo32RM="), 31 | } 32 | 33 | parser = argparse.ArgumentParser(description='Write a SCT') 34 | parser.add_argument("--out", type=argparse.FileType('w'), help="file to write out to") 35 | parser.add_argument("--stdout", action="store_true", help="write to stdout, to be used in echo \"...\" | base64 -d > file") 36 | parser.add_argument("--log", type=str, required=True) 37 | parser.add_argument("--time", "--timestamp", type=int, required=True, help="Timestamp from the JSON response.") 38 | parser.add_argument("--sig", type=str, required=True, help="Signature value from the JSON response, base64 encoded") 39 | 40 | args = parser.parse_args() 41 | if not args.out and not args.stdout: 42 | parser.print_usage() 43 | print "\nError: Either --out or --stdout must be specified" 44 | exit(-1) 45 | if args.log not in LOGS: 46 | print "Error:", args.log, " log not known. 
Choose one of", str(LOGS.keys()) 47 | exit(-1) 48 | 49 | sct = "" 50 | 51 | #SCT Version 1 (1 Byte, 0x00) 52 | sct += "\x00" 53 | 54 | #SCT Log ID (32 Bytes) 55 | sct += LOGS[args.log] 56 | 57 | if len(sct) != 33: 58 | raise Exception("SCT Building has gone wrong.") 59 | 60 | #Timestamp (8 Bytes) 61 | sct += struct.pack(">Q", args.time) 62 | 63 | #Extensions Length (No extensions, 2 bytes) 64 | sct += "\x00\x00" 65 | 66 | if len(sct) != 43: 67 | raise Exception("SCT Building has gone wrong..") 68 | 69 | #Signature (Includes the Hash, Signature, SigLength, and SigData) 70 | sct += base64.b64decode(args.sig) 71 | 72 | #Write it all out to a file 73 | if args.stdout: 74 | print base64.b64encode(sct) 75 | elif args.out: 76 | args.out.write(sct) 77 | else: 78 | raise Exception("Unexpected program mode") 79 | --------------------------------------------------------------------------------