author	Josef Gustafsson <josef.gson@gmail.com>	2015-09-08 12:59:05 +0200
committer	Josef Gustafsson <josef.gson@gmail.com>	2015-09-08 12:59:05 +0200
commit	e52cd6a3f7b6f129966f9b3c1831e675dd49599e (patch)
tree	fd6c267c02389bf3abe4a1cbad6b5d7d71b7eef8
parent	b2f0960622d94facf0a9e444dfba592506bf8444 (diff)
extracting monitor from auditor
-rw-r--r--	monitor/certkeys.py	43
-rw-r--r--	monitor/josef_lib.py	779
-rw-r--r--	monitor/josef_monitor.py	492
-rwxr-xr-x	monitor/josef_reader.py	60
-rw-r--r--	monitor/monitor.cfg	7
-rw-r--r--	tools/certkeys.py	4
-rwxr-xr-x	tools/josef_experimental.py	46
-rwxr-xr-x	tools/josef_experimental_auditor.py	39
8 files changed, 1429 insertions, 41 deletions
diff --git a/monitor/certkeys.py b/monitor/certkeys.py
new file mode 100644
index 0000000..f7c83b1
--- /dev/null
+++ b/monitor/certkeys.py
@@ -0,0 +1,43 @@
+
+publickeys = {
+ "https://ct.googleapis.com/pilot/":
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfahLEimAoz2t01p3uMziiLOl/fHTD"
+ "M0YDOhBRuiBARsV4UvxG2LdNgoIGLrtCzWE0J5APC2em4JlvR8EEEFMoA==",
+
+ "https://plausible.ct.nordu.net/":
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9UV9+jO2MCTzkabodO2F7LM03MUB"
+ "c8MrdAtkcW6v6GA9taTTw9QJqofm0BbdAsbtJL/unyEf0zIkRgXjjzaYqQ==",
+
+ "https://ct1.digicert-ct.com/log/":
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEAkbFvhu7gkAW6MHSrBlpE1n4+HCF"
+ "RkC5OLAjgqhkTH+/uzSfSl8ois8ZxAD2NgaTZe1M9akhYlrYkes4JECs6A==",
+
+ "https://ct.izenpe.com/":
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEJ2Q5DC3cUBj4IQCiDu0s6j51up+T"
+ "ZAkAEcQRF6tczw90rLWXkJMAW7jr9yc92bIKgV8vDXU4lDeZHvYHduDuvg==",
+
+ "https://log.certly.io/":
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAECyPLhWKYYUgEc+tUXfPQB4wtGS2M"
+ "NvXrjwFCCnyYJifBtd2Sk7Cu+Js9DNhMTh35FftHaHu6ZrclnNBKwmbbSA==",
+
+ "https://ct.googleapis.com/aviator/":
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1/TMabLkDpCjiupacAlP7xNi0I1J"
+ "YP8bQFAHDG1xhtolSY1l4QgNRzRrvSe8liE+NPWHdjGxfx3JhTsN9x8/6Q==",
+
+ "https://ct.googleapis.com/rocketeer/":
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEIFsYyDzBi7MxCAC/oJBXK7dHjG+1"
+ "aLCOkHjpoHPqTyghLpzA9BYbqvnV16mAw04vUjyYASVGJCUoI3ctBcJAeg==",
+
+ "https://ct.ws.symantec.com/":
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEluqsHEYMG1XcDfy1lCdGV0JwOmkY"
+ "4r87xNuroPS2bMBTP01CEDPwWJePa75y9CrsHEKqAy8afig1dpkIPSEUhg==",
+
+ "https://ctlog.api.venafi.com/":
+ "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAolpIHxdSlTXLo1s6H1OC"
+ "dpSj/4DyHDc8wLG9wVmLqy1lk9fz4ATVmm+/1iN2Nk8jmctUKK2MFUtlWXZBSpym"
+ "97M7frGlSaQXUWyA3CqQUEuIJOmlEjKTBEiQAvpfDjCHjlV2Be4qTM6jamkJbiWt"
+ "gnYPhJL6ONaGTiSPm7Byy57iaz/hbckldSOIoRhYBiMzeNoA0DiRZ9KmfSeXZ1rB"
+ "8y8X5urSW+iBzf2SaOfzBvDpcoTuAaWx2DPazoOl28fP1hZ+kHUYvxbcMjttjauC"
+ "Fx+JII0dmuZNIwjfeG/GBb9frpSX219k1O4Wi6OEbHEr8at/XQ0y7gTikOxBn/s5"
+ "wQIDAQAB",
+}
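+
+# A log's id, as used in SCTs, is the SHA-256 hash of its DER-encoded
+# public key. Illustrative sketch (not used by the monitor itself):
+#
+#   import base64, hashlib
+#   der = base64.decodestring(publickeys["https://ct.googleapis.com/pilot/"])
+#   log_id = hashlib.sha256(der).digest()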
diff --git a/monitor/josef_lib.py b/monitor/josef_lib.py
new file mode 100644
index 0000000..3c52761
--- /dev/null
+++ b/monitor/josef_lib.py
@@ -0,0 +1,779 @@
+# Copyright (c) 2014, NORDUnet A/S.
+# See LICENSE for licensing information.
+
+import subprocess
+import json
+import base64
+import urllib
+import urllib2
+import ssl
+import urlparse
+import struct
+import sys
+import hashlib
+import ecdsa
+import datetime
+import cStringIO
+import zipfile
+import shutil
+from certkeys import publickeys
+
+from Crypto.Hash import SHA256
+import Crypto.PublicKey.RSA as RSA
+from Crypto.Signature import PKCS1_v1_5
+
+def get_cert_info(s):
+ p = subprocess.Popen(
+ ["openssl", "x509", "-noout", "-subject", "-issuer", "-inform", "der"],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ parsed = p.communicate(s)
+ if parsed[1]:
+ print "ERROR:", parsed[1]
+ sys.exit(1)
+ result = {}
+ for line in parsed[0].split("\n"):
+ (key, sep, value) = line.partition("=")
+ if sep == "=":
+ result[key] = value
+ return result
+
+def my_get_cert_info(s):
+ p = subprocess.Popen(
+ ["openssl", "x509", "-fingerprint", "-text", "-noout", "-inform", "der"],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ parsed = p.communicate(s)
+ if parsed[1]:
+ print "ERROR:", parsed[1]
+ sys.exit(1)
+ result = {}
+ prev = ""
+ for line in parsed[0].split("\n"):
+ if "Subject:" in line:
+ result["subject"] = line.split("Subject: ")[1]
+ if "Issuer:" in line:
+ result["issuer"] = line.split("Issuer: ")[1]
+ if "Subject Alternative Name" in prev:
+ result["SAN"] = line.lstrip()
+ if "Not After" in line:
+ result["not_after"] = line.split(": ")[1]
+ if "Not Before" in line:
+ result["not_before"] = line.split(": ")[1]
+ prev = line
+ return result
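+
+# The dict returned above looks roughly like this (illustrative values;
+# exact strings depend on OpenSSL's text layout):
+#   {"subject": "C=SE, O=Example, CN=example.se",
+#    "issuer": "C=SE, O=Example CA, CN=Example CA",
+#    "SAN": "DNS:example.se, DNS:www.example.se",
+#    "not_before": "Sep  1 00:00:00 2015 GMT",
+#    "not_after": "Sep  1 00:00:00 2016 GMT"}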
+
+def get_pemlike(filename, marker):
+ return get_pemlike_from_file(open(filename), marker)
+
+def get_pemlike_from_file(f, marker):
+ entries = []
+ entry = ""
+ inentry = False
+
+ for line in f:
+ line = line.strip()
+ if line == "-----BEGIN " + marker + "-----":
+ entry = ""
+ inentry = True
+ elif line == "-----END " + marker + "-----":
+ entries.append(base64.decodestring(entry))
+ inentry = False
+ elif inentry:
+ entry += line
+ return entries
+
+def get_certs_from_file(certfile):
+ return get_pemlike(certfile, "CERTIFICATE")
+
+def get_certs_from_string(s):
+ f = cStringIO.StringIO(s)
+ return get_pemlike_from_file(f, "CERTIFICATE")
+
+def get_precerts_from_string(s):
+ f = cStringIO.StringIO(s)
+ return get_pemlike_from_file(f, "PRECERTIFICATE")
+
+def get_eckey_from_file(keyfile):
+ keys = get_pemlike(keyfile, "EC PRIVATE KEY")
+ assert len(keys) == 1
+ return keys[0]
+
+def get_public_key_from_file(keyfile):
+ keys = get_pemlike(keyfile, "PUBLIC KEY")
+ assert len(keys) == 1
+ return keys[0]
+
+def get_root_cert(issuer):
+ accepted_certs = \
+ json.loads(open("googlelog-accepted-certs.txt").read())["certificates"]
+
+ root_cert = None
+
+ for accepted_cert in accepted_certs:
+ subject = get_cert_info(base64.decodestring(accepted_cert))["subject"]
+ if subject == issuer:
+ root_cert = base64.decodestring(accepted_cert)
+
+ return root_cert
+
+class sslparameters:
+ sslcontext = None
+
+def create_ssl_context(cafile=None):
+ try:
+ sslparameters.sslcontext = ssl.create_default_context(cafile=cafile)
+ except AttributeError:
+ sslparameters.sslcontext = None
+
+def get_opener():
+ try:
+ opener = urllib2.build_opener(urllib2.HTTPSHandler(context=sslparameters.sslcontext))
+ except TypeError:
+ opener = urllib2.build_opener(urllib2.HTTPSHandler())
+ return opener
+
+def urlopen(url, data=None):
+ return get_opener().open(url, data)
+
+def pyopenssl_https_get(url):
+ """
+ HTTPS GET-function to use when running old Python < 2.7
+ """
+ from OpenSSL import SSL
+ import socket
+
+ # TLSv1 is the best we can get on Python 2.6
+ context = SSL.Context(SSL.TLSv1_METHOD)
+ sock = SSL.Connection(context, socket.socket(socket.AF_INET, socket.SOCK_STREAM))
+
+ url_without_scheme = url.split('https://')[-1]
+ host = url_without_scheme.split('/')[0]
+ path = url_without_scheme.split('/', 1)[1]
+ http_get_request = ("GET /{path} HTTP/1.1\r\n"
+ "Host: {host}\r\n"
+ "\r\n"
+ ).format(path=path, host=host)
+
+ sock.connect((host, 443))
+ sock.write(http_get_request)
+ response = sock.recv(1024)
+ response_lines = response.rsplit('\n')
+
+ # We are only interested in the actual response,
+ # without headers, contained in the last line.
+ return response_lines[-1]
+
+def get_sth(baseurl):
+ result = urlopen(baseurl + "ct/v1/get-sth").read()
+ return json.loads(result)
+
+def get_proof_by_hash(baseurl, hash, tree_size):
+ try:
+ params = urllib.urlencode({"hash":base64.b64encode(hash),
+ "tree_size":tree_size})
+ result = \
+ urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR:", e.read()
+ sys.exit(1)
+
+def get_consistency_proof(baseurl, tree_size1, tree_size2):
+ try:
+ params = urllib.urlencode({"first":tree_size1,
+ "second":tree_size2})
+ result = \
+ urlopen(baseurl + "ct/v1/get-sth-consistency?" + params).read()
+ return json.loads(result)["consistency"]
+ except urllib2.HTTPError, e:
+ print "ERROR:", e.read()
+ sys.exit(1)
+
+def tls_array(data, length_len):
+ length_bytes = struct.pack(">Q", len(data))[-length_len:]
+ return length_bytes + data
+
+def unpack_tls_array(packed_data, length_len):
+ padded_length = ["\x00"] * 8
+ padded_length[-length_len:] = packed_data[:length_len]
+ (length,) = struct.unpack(">Q", "".join(padded_length))
+ unpacked_data = packed_data[length_len:length_len+length]
+ assert len(unpacked_data) == length, \
+ "data is only %d bytes long, but length is %d bytes" % \
+ (len(unpacked_data), length)
+ rest_data = packed_data[length_len+length:]
+ return (unpacked_data, rest_data)
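+
+# Round-trip sketch (hypothetical data): tls_array() prepends a big-endian
+# length field of length_len bytes; unpack_tls_array() strips it again.
+#   packed = tls_array("abc", 2)                # "\x00\x03abc"
+#   (data, rest) = unpack_tls_array(packed, 2)  # ("abc", "")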
+
+def add_chain(baseurl, submission):
+ try:
+ result = urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission)).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR", e.code,":", e.read()
+ if e.code == 400:
+ return None
+ sys.exit(1)
+ except ValueError, e:
+ print "==== FAILED REQUEST ===="
+ print submission
+ print "======= RESPONSE ======="
+ print result
+ print "========================"
+ raise e
+
+def add_prechain(baseurl, submission):
+ try:
+ result = urlopen(baseurl + "ct/v1/add-pre-chain",
+ json.dumps(submission)).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR", e.code,":", e.read()
+ if e.code == 400:
+ return None
+ sys.exit(1)
+ except ValueError, e:
+ print "==== FAILED REQUEST ===="
+ print submission
+ print "======= RESPONSE ======="
+ print result
+ print "========================"
+ raise e
+
+def get_entries(baseurl, start, end):
+ params = urllib.urlencode({"start":start, "end":end})
+ try:
+ result = urlopen(baseurl + "ct/v1/get-entries?" + params).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR:", e.read()
+ sys.exit(1)
+
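+# Sketch: fetching a batch of entries (a log may return fewer than asked):
+#   batch = get_entries(url, 0, 99)["entries"]
+#   leaf_input = base64.decodestring(batch[0]["leaf_input"])
+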
+def extract_precertificate(precert_chain_entry):
+ (precert, certchain) = unpack_tls_array(precert_chain_entry, 3)
+ return (precert, certchain)
+
+def decode_certificate_chain(packed_certchain):
+ (unpacked_certchain, rest) = unpack_tls_array(packed_certchain, 3)
+ assert len(rest) == 0
+ certs = []
+ while len(unpacked_certchain):
+ (cert, rest) = unpack_tls_array(unpacked_certchain, 3)
+ certs.append(cert)
+ unpacked_certchain = rest
+ return certs
+
+def decode_signature(signature):
+ (hash_alg, signature_alg) = struct.unpack(">bb", signature[0:2])
+ (unpacked_signature, rest) = unpack_tls_array(signature[2:], 2)
+ assert rest == ""
+ return (hash_alg, signature_alg, unpacked_signature)
+
+def encode_signature(hash_alg, signature_alg, unpacked_signature):
+ signature = struct.pack(">bb", hash_alg, signature_alg)
+ signature += tls_array(unpacked_signature, 2)
+ return signature
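+
+# Sketch: a DigitallySigned blob is one byte hash_alg, one byte sig_alg,
+# then a 2-byte length-prefixed signature (der_sig is hypothetical):
+#   blob = encode_signature(4, 3, der_sig)  # sha256 + ecdsa
+#   (hash_alg, sig_alg, sig) = decode_signature(blob)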
+
+def check_signature(baseurl, signature, data, publickey=None):
+ if publickey == None:
+ if baseurl in publickeys:
+ publickey = base64.decodestring(publickeys[baseurl])
+ else:
+ print >>sys.stderr, "Public key for", baseurl, \
+ "not found, specify key file with --publickey"
+ sys.exit(1)
+ (hash_alg, signature_alg, unpacked_signature) = decode_signature(signature)
+ assert hash_alg == 4, \
+ "hash_alg is %d, expected 4" % (hash_alg,) # sha256
+ assert (signature_alg == 3 or signature_alg == 1), \
+ "signature_alg is %d, expected 1 or 3" % (signature_alg,) # ecdsa
+
+ if signature_alg == 3:
+ vk = ecdsa.VerifyingKey.from_der(publickey)
+ vk.verify(unpacked_signature, data, hashfunc=hashlib.sha256,
+ sigdecode=ecdsa.util.sigdecode_der)
+ else:
+ h = SHA256.new(data)
+ rsa_key = RSA.importKey(publickey)
+ verifier = PKCS1_v1_5.new(rsa_key)
+ assert verifier.verify(h, unpacked_signature), \
+ "could not verify RSA signature"
+
+def parse_auth_header(authheader):
+ splittedheader = authheader.split(";")
+ (signature, rawoptions) = (splittedheader[0], splittedheader[1:])
+ options = dict([(e.partition("=")[0], e.partition("=")[2]) for e in rawoptions])
+ return (base64.b64decode(signature), options)
+
+def check_auth_header(authheader, expected_key, publickeydir, data, path):
+ if expected_key == None:
+ return True
+ (signature, options) = parse_auth_header(authheader)
+ keyname = options.get("key")
+ if keyname != expected_key:
+ raise Exception("Response claimed to come from %s, expected %s" % (keyname, expected_key))
+ publickey = get_public_key_from_file(publickeydir + "/" + keyname + ".pem")
+ vk = ecdsa.VerifyingKey.from_der(publickey)
+ vk.verify(signature, "%s\0%s\0%s" % ("REPLY", path, data), hashfunc=hashlib.sha256,
+ sigdecode=ecdsa.util.sigdecode_der)
+ return True
+
+def http_request(url, data=None, key=None, verifynode=None, publickeydir="."):
+ opener = get_opener()
+
+ (keyname, keyfile) = key
+ privatekey = get_eckey_from_file(keyfile)
+ sk = ecdsa.SigningKey.from_der(privatekey)
+ parsed_url = urlparse.urlparse(url)
+ if data == None:
+ data_to_sign = parsed_url.query
+ method = "GET"
+ else:
+ data_to_sign = data
+ method = "POST"
+ signature = sk.sign("%s\0%s\0%s" % (method, parsed_url.path, data_to_sign), hashfunc=hashlib.sha256,
+ sigencode=ecdsa.util.sigencode_der)
+ opener.addheaders = [('X-Catlfish-Auth', base64.b64encode(signature) + ";key=" + keyname)]
+ result = opener.open(url, data)
+ authheader = result.info().get('X-Catlfish-Auth')
+ data = result.read()
+ check_auth_header(authheader, verifynode, publickeydir, data, parsed_url.path)
+ return data
+
+def get_signature(baseurl, data, key=None):
+ try:
+ params = json.dumps({"plop_version":1, "data": base64.b64encode(data)})
+ result = http_request(baseurl + "plop/v1/signing/sth", params, key=key)
+ parsed_result = json.loads(result)
+ return base64.b64decode(parsed_result.get(u"result"))
+ except urllib2.HTTPError, e:
+ print "ERROR: get_signature", e.read()
+ raise e
+
+def create_signature(baseurl, data, key=None):
+ unpacked_signature = get_signature(baseurl, data, key)
+ return encode_signature(4, 3, unpacked_signature)
+
+def check_sth_signature(baseurl, sth, publickey=None):
+ signature = base64.decodestring(sth["tree_head_signature"])
+
+ version = struct.pack(">b", 0)
+ signature_type = struct.pack(">b", 1)
+ timestamp = struct.pack(">Q", sth["timestamp"])
+ tree_size = struct.pack(">Q", sth["tree_size"])
+ hash = base64.decodestring(sth["sha256_root_hash"])
+ tree_head = version + signature_type + timestamp + tree_size + hash
+
+ check_signature(baseurl, signature, tree_head, publickey=publickey)
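+
+# Usage sketch (needs network access; the pilot key ships in certkeys.py):
+#   sth = get_sth("https://ct.googleapis.com/pilot/")
+#   check_sth_signature("https://ct.googleapis.com/pilot/", sth)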
+
+def create_sth_signature(tree_size, timestamp, root_hash, baseurl, key=None):
+ version = struct.pack(">b", 0)
+ signature_type = struct.pack(">b", 1)
+ timestamp_packed = struct.pack(">Q", timestamp)
+ tree_size_packed = struct.pack(">Q", tree_size)
+ tree_head = version + signature_type + timestamp_packed + tree_size_packed + root_hash
+
+ return create_signature(baseurl, tree_head, key=key)
+
+def check_sct_signature(baseurl, signed_entry, sct, precert=False, publickey=None):
+ if publickey == None:
+ publickey = base64.decodestring(publickeys[baseurl])
+ calculated_logid = hashlib.sha256(publickey).digest()
+ received_logid = base64.decodestring(sct["id"])
+ assert calculated_logid == received_logid, \
+ "log id is incorrect:\n should be %s\n got %s" % \
+ (calculated_logid.encode("hex_codec"),
+ received_logid.encode("hex_codec"))
+
+ signature = base64.decodestring(sct["signature"])
+
+ version = struct.pack(">b", sct["sct_version"])
+ signature_type = struct.pack(">b", 0)
+ timestamp = struct.pack(">Q", sct["timestamp"])
+ if precert:
+ entry_type = struct.pack(">H", 1)
+ else:
+ entry_type = struct.pack(">H", 0)
+ signed_struct = version + signature_type + timestamp + \
+ entry_type + signed_entry + \
+ tls_array(base64.decodestring(sct["extensions"]), 2)
+
+ check_signature(baseurl, signature, signed_struct, publickey=publickey)
+
+def pack_mtl(timestamp, leafcert):
+ entry_type = struct.pack(">H", 0)
+ extensions = ""
+
+ timestamped_entry = struct.pack(">Q", timestamp) + entry_type + \
+ tls_array(leafcert, 3) + tls_array(extensions, 2)
+ version = struct.pack(">b", 0)
+ leaf_type = struct.pack(">b", 0)
+ merkle_tree_leaf = version + leaf_type + timestamped_entry
+ return merkle_tree_leaf
+
+def pack_mtl_precert(timestamp, cleanedcert, issuer_key_hash):
+ entry_type = struct.pack(">H", 1)
+ extensions = ""
+
+ timestamped_entry = struct.pack(">Q", timestamp) + entry_type + \
+ pack_precert(cleanedcert, issuer_key_hash) + tls_array(extensions, 2)
+ version = struct.pack(">b", 0)
+ leaf_type = struct.pack(">b", 0)
+ merkle_tree_leaf = version + leaf_type + timestamped_entry
+ return merkle_tree_leaf
+
+def pack_precert(cleanedcert, issuer_key_hash):
+ assert len(issuer_key_hash) == 32
+
+ return issuer_key_hash + tls_array(cleanedcert, 3)
+
+def pack_cert(cert):
+ return tls_array(cert, 3)
+
+def unpack_mtl(merkle_tree_leaf):
+ version = merkle_tree_leaf[0:1]
+ leaf_type = merkle_tree_leaf[1:2]
+ timestamped_entry = merkle_tree_leaf[2:]
+ (timestamp, entry_type) = struct.unpack(">QH", timestamped_entry[0:10])
+ if entry_type == 0:
+ issuer_key_hash = None
+ (leafcert, rest_entry) = unpack_tls_array(timestamped_entry[10:], 3)
+ elif entry_type == 1:
+ issuer_key_hash = timestamped_entry[10:42]
+ (leafcert, rest_entry) = unpack_tls_array(timestamped_entry[42:], 3)
+ return (leafcert, timestamp, issuer_key_hash)
+
+def get_leaf_hash(merkle_tree_leaf):
+ leaf_hash = hashlib.sha256()
+ leaf_hash.update(struct.pack(">b", 0))
+ leaf_hash.update(merkle_tree_leaf)
+
+ return leaf_hash.digest()
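+
+# Sketch: the RFC 6962 leaf hash is SHA-256 over a 0x00 prefix plus the
+# MerkleTreeLeaf structure (der_cert and the timestamp are hypothetical):
+#   mtl = pack_mtl(1441800000000, der_cert)
+#   leaf_hash = get_leaf_hash(mtl)  # 32 bytes, as used in inclusion proofs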
+
+def timing_point(timer_dict=None, name=None):
+ t = datetime.datetime.now()
+ if timer_dict:
+ starttime = timer_dict["lasttime"]
+ stoptime = t
+ deltatime = stoptime - starttime
+ timer_dict["deltatimes"].append((name, deltatime.seconds * 1000000 + deltatime.microseconds))
+ timer_dict["lasttime"] = t
+ return None
+ else:
+ timer_dict = {"deltatimes":[], "lasttime":t}
+ return timer_dict
+
+def internal_hash(pair):
+ if len(pair) == 1:
+ return pair[0]
+ else:
+ hash = hashlib.sha256()
+ hash.update(struct.pack(">b", 1))
+ hash.update(pair[0])
+ hash.update(pair[1])
+ digest = hash.digest()
+ return digest
+
+def chunks(l, n):
+ return [l[i:i+n] for i in range(0, len(l), n)]
+
+def next_merkle_layer(layer):
+ return [internal_hash(pair) for pair in chunks(layer, 2)]
+
+def build_merkle_tree(layer0):
+ if len(layer0) == 0:
+ return [[hashlib.sha256().digest()]]
+ layers = []
+ current_layer = layer0
+ layers.append(current_layer)
+ while len(current_layer) > 1:
+ current_layer = next_merkle_layer(current_layer)
+ layers.append(current_layer)
+ return layers
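+
+# Sketch: a tiny tree over three leaf hashes; layer 0 is the leaves and
+# the single hash in the last layer is the root:
+#   leaves = [hashlib.sha256(c).digest() for c in "abc"]
+#   tree = build_merkle_tree(leaves)
+#   root = tree[-1][0]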
+
+def print_inclusion_proof(proof):
+ audit_path = proof[u'audit_path']
+ n = proof[u'leaf_index']
+ level = 0
+ for s in audit_path:
+ entry = base64.b16encode(base64.b64decode(s))
+ n ^= 1
+ print level, n, entry
+ n >>= 1
+ level += 1
+
+def get_one_cert(store, i):
+ zf = zipfile.ZipFile("%s/%04d.zip" % (store, i / 10000))
+ cert = zf.read("%08d" % i)
+ zf.close()
+ return cert
+
+def get_hash_from_certfile(cert):
+ for line in cert.split("\n"):
+ if line.startswith("-----"):
+ return None
+ if line.startswith("Leafhash: "):
+ return base64.b16decode(line[len("Leafhash: "):])
+ return None
+
+def get_timestamp_from_certfile(cert):
+ for line in cert.split("\n"):
+ if line.startswith("-----"):
+ return None
+ if line.startswith("Timestamp: "):
+ return int(line[len("Timestamp: "):])
+ return None
+
+def get_proof(baseurl, store, tree_size, n):
+ hash = get_hash_from_certfile(get_one_cert(store, n))
+ return get_proof_by_hash(baseurl, hash, tree_size)
+
+def get_certs_from_zipfiles(zipfiles, firstleaf, lastleaf):
+ for i in range(firstleaf, lastleaf + 1):
+ try:
+ yield zipfiles[i / 10000].read("%08d" % i)
+ except KeyError:
+ return
+
+def get_merkle_hash_64k(store, blocknumber, write_to_cache=False, treesize=None):
+ firstleaf = blocknumber * 65536
+ lastleaf = firstleaf + 65535
+ if treesize != None:
+ assert firstleaf < treesize
+ usecache = lastleaf < treesize
+ lastleaf = min(lastleaf, treesize - 1)
+ else:
+ usecache = True
+
+ hashfilename = "%s/%04x.64khash" % (store, blocknumber)
+ if usecache:
+ try:
+ hash = base64.b16decode(open(hashfilename).read())
+ assert len(hash) == 32
+ return ("hash", hash)
+ except IOError:
+ pass
+ firstfile = firstleaf / 10000
+ lastfile = lastleaf / 10000
+ zipfiles = {}
+ for i in range(firstfile, lastfile + 1):
+ try:
+ zipfiles[i] = zipfile.ZipFile("%s/%04d.zip" % (store, i))
+ except IOError:
+ break
+ certs = get_certs_from_zipfiles(zipfiles, firstleaf, lastleaf)
+ layer0 = [get_hash_from_certfile(cert) for cert in certs]
+ tree = build_merkle_tree(layer0)
+ calculated_hash = tree[-1][0]
+ for zf in zipfiles.values():
+ zf.close()
+ if len(layer0) != lastleaf - firstleaf + 1:
+ return ("incomplete", (len(layer0), calculated_hash))
+ if write_to_cache:
+ f = open(hashfilename, "w")
+ f.write(base64.b16encode(calculated_hash))
+ f.close()
+ return ("hash", calculated_hash)
+
+def get_tree_head(store, treesize):
+ merkle_64klayer = []
+
+ for blocknumber in range(0, (treesize / 65536) + 1):
+ (resulttype, result) = get_merkle_hash_64k(store, blocknumber, treesize=treesize)
+ if resulttype == "incomplete":
+ print >>sys.stderr, "Couldn't read until tree size", treesize
+ (incompletelength, hash) = result
+ print >>sys.stderr, "Stopped at", blocknumber * 65536 + incompletelength
+ sys.exit(1)
+ assert resulttype == "hash"
+ hash = result
+ merkle_64klayer.append(hash)
+ #print >>sys.stderr, blocknumber * 65536,
+ sys.stdout.flush()
+ tree = build_merkle_tree(merkle_64klayer)
+ calculated_root_hash = tree[-1][0]
+ return calculated_root_hash
+
+def get_intermediate_hash(store, treesize, level, index):
+ if level >= 16:
+ merkle_64klayer = []
+
+ levelsize = (2**(level-16))
+
+ for blocknumber in range(index * levelsize, (index + 1) * levelsize):
+ if blocknumber * (2 ** 16) >= treesize:
+ break
+ #print "looking at block", blocknumber
+ (resulttype, result) = get_merkle_hash_64k(store, blocknumber, treesize=treesize)
+ if resulttype == "incomplete":
+ print >>sys.stderr, "Couldn't read until tree size", treesize
+ (incompletelength, hash) = result
+ print >>sys.stderr, "Stopped at", blocknumber * 65536 + incompletelength
+ sys.exit(1)
+ assert resulttype == "hash"
+ hash = result
+ #print "block hash", base64.b16encode(hash)
+ merkle_64klayer.append(hash)
+ #print >>sys.stderr, blocknumber * 65536,
+ sys.stdout.flush()
+ tree = build_merkle_tree(merkle_64klayer)
+ return tree[-1][0]
+ else:
+ levelsize = 2 ** level
+ firstleaf = index * levelsize
+ lastleaf = firstleaf + levelsize - 1
+ #print "firstleaf", firstleaf
+ #print "lastleaf", lastleaf
+ assert firstleaf < treesize
+ lastleaf = min(lastleaf, treesize - 1)
+ #print "modified lastleaf", lastleaf
+ firstfile = firstleaf / 10000
+ lastfile = lastleaf / 10000
+ #print "files", firstfile, lastfile
+ zipfiles = {}
+ for i in range(firstfile, lastfile + 1):
+ try:
+ zipfiles[i] = zipfile.ZipFile("%s/%04d.zip" % (store, i))
+ except IOError:
+ break
+ certs = get_certs_from_zipfiles(zipfiles, firstleaf, lastleaf)
+ layer0 = [get_hash_from_certfile(cert) for cert in certs]
+ #print "layer0", repr(layer0)
+ tree = build_merkle_tree(layer0)
+ calculated_hash = tree[-1][0]
+ for zf in zipfiles.values():
+ zf.close()
+ assert len(layer0) == lastleaf - firstleaf + 1
+ return calculated_hash
+
+def bits(n):
+ p = 0
+ while n > 0:
+ n >>= 1
+ p += 1
+ return p
+
+def merkle_height(n):
+ if n == 0:
+ return 1
+ return bits(n - 1)
+
+def node_above((pathp, pathl), levels=1):
+ return (pathp >> levels, pathl + levels)
+
+def node_even((pathp, pathl)):
+ return pathp & 1 == 0
+
+def node_odd((pathp, pathl)):
+ return pathp & 1 == 1
+
+def node_lower((path1p, path1l), (path2p, path2l)):
+ return path1l < path2l
+
+def node_higher((path1p, path1l), (path2p, path2l)):
+ return path1l > path2l
+
+def node_level((path1p, path1l)):
+ return path1l
+
+def node_outside((path1p, path1l), (path2p, path2l)):
+ assert path1l == path2l
+ return path1p > path2p
+
+def combine_two_hashes((path1, hash1), (path2, hash2), treesize):
+ assert not node_higher(path1, path2)
+ edge_node = (treesize - 1, 0)
+
+ if node_lower(path1, path2):
+ assert path1 == node_above(edge_node, levels=node_level(path1))
+ while node_even(path1):
+ path1 = node_above(path1)
+
+ assert node_above(path1) == node_above(path2)
+ assert (node_even(path1) and node_odd(path2)) or (node_odd(path1) and node_even(path2))
+
+ if node_outside(path2, node_above(edge_node, levels=node_level(path2))):
+ return (node_above(path1), hash1)
+
+ if node_even(path1):
+ newhash = internal_hash((hash1, hash2))
+ else:
+ newhash = internal_hash((hash2, hash1))
+
+ return (node_above(path1), newhash)
+
+def path_as_string(pos, level, treesize):
+ height = merkle_height(treesize)
+ path = "{0:0{width}b}".format(pos, width=height - level)
+ if height == level:
+ return ""
+ return path
+
+def nodes_for_subtree(subtreesize, treesize):
+ height = merkle_height(treesize)
+ nodes = []
+ level = 0
+ pos = subtreesize
+ while pos > 0 and pos & 1 == 0:
+ pos >>= 1
+ level += 1
+ if pos & 1:
+ nodes.append((pos ^ 1, level))
+ #print pos, level
+ while level < height:
+ pos_level0 = pos * (2 ** level)
+ #print pos, level
+ if pos_level0 < treesize:
+ nodes.append((pos, level))
+ pos >>= 1
+ pos ^= 1
+ level += 1
+ return nodes
+
+def nodes_for_index(pos, treesize):
+ height = merkle_height(treesize)
+ nodes = []
+ level = 0
+ pos ^= 1
+ while level < height:
+ pos_level0 = pos * (2 ** level)
+ if pos_level0 < treesize:
+ nodes.append((pos, level))
+ pos >>= 1
+ pos ^= 1
+ level += 1
+ return nodes
+
+def verify_consistency_proof(consistency_proof, first, second, oldhash_input):
+ if 2 ** bits(first - 1) == first:
+ consistency_proof = [oldhash_input] + consistency_proof
+ chain = zip(nodes_for_subtree(first, second), consistency_proof)
+ assert len(nodes_for_subtree(first, second)) == len(consistency_proof)
+ (_, hash) = reduce(lambda e1, e2: combine_two_hashes(e1, e2, second), chain)
+ (_, oldhash) = reduce(lambda e1, e2: combine_two_hashes(e1, e2, first), chain)
+ return (oldhash, hash)
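+
+# Sketch: check that an old tree is a prefix of a new one (old_sth and
+# new_sth are hypothetical parsed STHs from the same log):
+#   proof = [base64.b64decode(p) for p in
+#            get_consistency_proof(url, old_sth["tree_size"], new_sth["tree_size"])]
+#   (oldh, newh) = verify_consistency_proof(proof, old_sth["tree_size"],
+#       new_sth["tree_size"], base64.b64decode(old_sth["sha256_root_hash"]))
+#   assert newh == base64.b64decode(new_sth["sha256_root_hash"])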
+
+def verify_inclusion_proof(inclusion_proof, index, treesize, leafhash):
+ chain = zip([(index, 0)] + nodes_for_index(index, treesize), [leafhash] + inclusion_proof)
+ assert len(nodes_for_index(index, treesize)) == len(inclusion_proof)
+ (_, hash) = reduce(lambda e1, e2: combine_two_hashes(e1, e2, treesize), chain)
+ return hash
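+
+# Sketch: prove a single leaf hash is included under an STH (hypothetical):
+#   proof = get_proof_by_hash(url, leaf_hash, sth["tree_size"])
+#   path = [base64.b64decode(p) for p in proof["audit_path"]]
+#   root = verify_inclusion_proof(path, proof["leaf_index"],
+#                                 sth["tree_size"], leaf_hash)
+#   assert root == base64.b64decode(sth["sha256_root_hash"])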
+
+def extract_original_entry(entry):
+ leaf_input = base64.decodestring(entry["leaf_input"])
+ (leaf_cert, timestamp, issuer_key_hash) = unpack_mtl(leaf_input)
+ extra_data = base64.decodestring(entry["extra_data"])
+ if issuer_key_hash != None:
+ (precert, extra_data) = extract_precertificate(extra_data)
+ leaf_cert = precert
+ certchain = decode_certificate_chain(extra_data)
+ return ([leaf_cert] + certchain, timestamp, issuer_key_hash)
+
+def mv_file(fromfn, tofn):
+ shutil.move(fromfn, tofn)
+
+def write_file(fn, sth):
+ tempname = fn + ".new"
+ open(tempname, 'w').write(json.dumps(sth))
+ mv_file(tempname, fn)
diff --git a/monitor/josef_monitor.py b/monitor/josef_monitor.py
new file mode 100644
index 0000000..d84be8e
--- /dev/null
+++ b/monitor/josef_monitor.py
@@ -0,0 +1,492 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import time
+import datetime
+import base64
+import argparse
+import errno
+from josef_lib import *
+
+NAGIOS_OK = 0
+NAGIOS_WARN = 1
+NAGIOS_CRIT = 2
+NAGIOS_UNKNOWN = 3
+
+DEFAULT_CUR_FILE = 'all-sth.json'
+DEFAULT_CERT_FILE = "plausible_cert_data.json"
+
+base_urls = [
+ "https://plausible.ct.nordu.net/",
+ # "https://ct1.digicert-ct.com/log/",
+ # "https://ct.izenpe.com/",
+ # "https://log.certly.io/",
+ # "https://ct.googleapis.com/aviator/",
+ # "https://ct.googleapis.com/pilot/",
+ # "https://ct.googleapis.com/rocketeer/",
+ # "https://ctlog.api.venafi.com/",
+ # "https://ct.ws.symantec.com/",
+]
+
+parser = argparse.ArgumentParser(description="")
+parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
+parser.add_argument('--monitor', action='store_true', help="run full monitoring for all logs")
+parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
+parser.add_argument('--audit3', action='store_true', help="continuously run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
+parser.add_argument('--audit4', action='store_true', help="run one check on one server")
+parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH")
+parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs")
+parser.add_argument('--host', default=None, help="Base URL for CT log")
+parser.add_argument('--roots', action='store_true', help="Check accepted root certificates for all logs")
+parser.add_argument('--cur-sth',
+ metavar='file',
+ default=DEFAULT_CUR_FILE,
+ help="File containing current STH (default=%s)" % DEFAULT_CUR_FILE)
+
+timings = {}
+errors = []
+
+monitored_domains = [
+ # "google.com",
+ # "preishelden.de",
+ # "liu.se",
+ # "nordu.net",
+ # "symantec.com",
+ # "sunet.se",
+ # ".se",
+]
+
+class UTC(datetime.tzinfo):
+ def utcoffset(self, dt):
+ return datetime.timedelta(hours=0)
+ def dst(self, dt):
+ return datetime.timedelta(0)
+
+def reduce_layer(layer):
+ new_layer = []
+ while len(layer) > 1:
+ e1 = layer.pop(0)
+ e2 = layer.pop(0)
+ new_layer.append(internal_hash((e1,e2)))
+ return new_layer
+
+def reduce_tree(entries, layers):
+ # an empty tree reduces to the hash of the empty string
+ if len(entries) == 0 and layers == [[]]:
+ return [[hashlib.sha256().digest()]]
+
+ layer_idx = 0
+ layers[layer_idx] += entries
+
+ while len(layers[layer_idx]) > 1:
+ if len(layers) == layer_idx + 1:
+ layers.append([])
+
+ layers[layer_idx + 1] += reduce_layer(layers[layer_idx])
+ layer_idx += 1
+ return layers
+
+def reduce_subtree_to_root(layers):
+ while len(layers) > 1:
+ if len(layers[1]) == 0:
+ layers[1] = layers[0]
+ else:
+ layers[1] += next_merkle_layer(layers[0])
+ del layers[0]
+
+ if len(layers[0]) > 1:
+ return next_merkle_layer(layers[0])
+ return layers[0]
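+
+# Sketch: the monitor keeps one list of partial layers per log; new leaf
+# hashes go in via reduce_tree() and the root is recomputed on demand
+# (h1..h3 are hypothetical 32-byte leaf hashes):
+#   layers = reduce_tree([h1, h2, h3], [[]])
+#   root = reduce_subtree_to_root([list(l) for l in layers])[0]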
+
+def fetch_all_sth():
+ sths = {}
+ for base_url in base_urls:
+ # Fetch STH
+ try:
+ sths[base_url] = get_sth(base_url)
+ except:
+ sths[base_url] = None
+ error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url
+ print error_str
+ errors.append(error_str)
+ continue
+
+ # Check signature on the STH
+ try:
+ # check_sth_signature(base_url, sths[base_url], logkeys[base_url])
+ check_sth_signature(base_url, sths[base_url], None)
+ except:
+ error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url
+ print error_str
+ errors.append(error_str)
+ continue
+
+ return sths
+
+def verify_progress(old, new):
+ print "Verifying progress"
+ try:
+ for url in new:
+ if new and old and new[url] and old[url]:
+ if new[url]["tree_size"] == old[url]["tree_size"]:
+ if old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]:
+ errors.append(time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url)
+ elif new[url]["tree_size"] < old[url]["tree_size"]:
+ errors.append(time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \
+ (new[url]["tree_size"], old[url]["tree_size"]))
+ if new[url]:
+ age = time.time() - new[url]["timestamp"]/1000
+ sth_time = datetime.datetime.fromtimestamp(new[url]['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
+ roothash = new[url]['sha256_root_hash']
+ if age > 24 * 3600:
+ errors.append(time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time))
+ elif age > 12 * 3600:
+ errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 12h: %s UTC" % (url, sth_time))
+ elif age > 6 * 3600:
+ errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 6h: %s UTC" % (url, sth_time))
+ # elif age > 2 * 3600:
+ # errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 2h: %s UTC" % (url, sth_time))
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to verify progress for " + url
+
+
+def verify_consistency(old, new):
+ for url in old:
+ try:
+ if old[url] and new[url] and old[url]["tree_size"] != new[url]["tree_size"]:
+ consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"])
+ decoded_consistency_proof = []
+ for item in consistency_proof:
+ decoded_consistency_proof.append(base64.b64decode(item))
+ # the proof combinator works on raw digests, so decode the stored root hash
+ res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], base64.b64decode(old[url]["sha256_root_hash"]))
+
+ if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])):
+ print time.strftime('%H:%M:%S') + " Verification of old hash failed! " + old[url]["sha256_root_hash"], str(base64.b64encode(res[0]))
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + str(old[url]["tree_size"]))
+ elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])):
+ print time.strftime('%H:%M:%S') + " Verification of new hash failed! " + new[url]["sha256_root_hash"], str(base64.b64encode(res[1]))
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + str(new[url]["tree_size"]))
+ else:
+ print time.strftime("%H:%M:%S") + " New STH from " + url + ", timestamp: " + \
+ str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK."
+
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Could not verify consistency for " + url
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not verify consistency for " + url)
+
+def verify_inclusion_all(old, new):
+ for url in old:
+ try:
+ if old[url] and new[url]:
+ if old[url]["tree_size"]!= new[url]["tree_size"]:
+ entries = []
+
+ while len(entries) + old[url]["tree_size"] != new[url]["tree_size"]:
+ entries += get_entries(url, old[url]["tree_size"] + len(entries), new[url]["tree_size"] - 1)["entries"]
+ print "Got " + str(len(entries)) + " entries..."
+
+ success = True
+ for i in entries:
+ h = get_leaf_hash(base64.b64decode(i["leaf_input"]))
+ if not verify_inclusion_by_hash(url, h):
+ success = False
+
+ if success:
+ print time.strftime("%H:%M:%S") + " Verifying inclusion for " + str(len(entries)) + " new entries in " + url + " ...OK"
+ else:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
+
+def check_domain(raw_entry, log=None):
+ orig_entry = extract_original_entry(raw_entry)
+ cert_info = my_get_cert_info(orig_entry[0][0])
+ if log:
+ cert_info["log"] = log[8:-1] # strip generic URL stuff
+ return cert_info
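+
+# For one fetched entry this yields something like (illustrative):
+#   {"subject": "... CN=example.se", "issuer": "... CN=Example CA",
+#    "SAN": "DNS:example.se", "not_before": "...", "not_after": "...",
+#    "log": "plausible.ct.nordu.net"}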
+
+
+def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
+ try:
+ print "Initial hash:", hash(str(subtree))
+ sth = old_sth[base_url]
+ new_sth = new_sth_in[base_url]
+ idx = sth["tree_size"]
+ tmp_tree = [list(layer) for layer in subtree] # copy each layer; reduce_tree mutates them in place
+ print "tmp hash:", hash(str(tmp_tree))
+
+ while idx < new_sth["tree_size"]:
+ pre_size = idx
+ entries = get_entries(base_url, idx, new_sth["tree_size"]-1)["entries"]
+ new_leafs = []
+ tmp_cert_data = []
+ for item in entries:
+ tmp_cert_data.append(check_domain(item, base_url))
+ new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
+ append_file(DEFAULT_CERT_FILE, tmp_cert_data)
+ idx += len(new_leafs)
+ print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " \
+ + str(idx -1) + " (" + str(len(new_leafs)) +" entries) from " + base_url
+
+ print "Before reduction:", hash(str(tmp_tree))
+ res_tree = reduce_tree(new_leafs, tmp_tree)
+ print "After reduction:", hash(str(res_tree))
+
+ except:
+ print "Failed to build subtree :("
+ return subtree # keep the previous tree if fetching failed
+
+ if subtree == res_tree:
+ print "Final subtree hash", hash(str(subtree))
+ print "Final restree hash", hash(str(res_tree))
+ return res_tree
+
+def fetch_and_build_subtree(old_sth, base_url):
+ subtree = [[]]
+ try:
+ sth = old_sth[base_url]
+ idx = 0
+
+ while idx < sth["tree_size"]:
+ pre_size = idx
+ entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
+ new_leafs = []
+ tmp_cert_data = []
+ for item in entries:
+ tmp_cert_data.append(check_domain(item, base_url))
+ new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
+ idx += len(new_leafs)
+ append_file(DEFAULT_CERT_FILE, tmp_cert_data)
+ print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx -1) + " from " + base_url
+ subtree = reduce_tree(new_leafs, subtree)
+
+ except:
+ print "Failed to build subtree :("
+
+ return subtree
+
+
+def verify_subtree(old_sth, subtree, base_url):
+ try:
+ sth = old_sth[base_url]
+ tmp = [list(layer) for layer in subtree] # copy each layer; reduce_subtree_to_root consumes its input
+ root = base64.b64encode(reduce_subtree_to_root(tmp)[0])
+
+ if root == sth["sha256_root_hash"]:
+ print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
+ else:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " \
+ + sth["sha256_root_hash"] + ", Tree root: " + root
+ # errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for "
+ # + base_url + ", tre size " + sth["tree_size"])
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url
+ # errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url)
+
+def verify_inclusion_by_hash(base_url, leaf_hash):
+ try:
+ tmp_sth = get_sth(base_url)
+ proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"])
+
+ decoded_inclusion_proof = []
+ for item in proof["audit_path"]:
+ decoded_inclusion_proof.append(base64.b64decode(item))
+
+ root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash))
+
+ if tmp_sth["sha256_root_hash"] == root:
+ return True
+ else:
+ print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url)
+ return False
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url)
+ return False
+
+def verify_inclusion_by_index(base_url, index):
+ try:
+ tmp_sth = get_sth(base_url)
+ proof = get_proof_by_index(base_url, index, tmp_sth["tree_size"])
+
+ decoded_inclusion_proof = []
+ for item in proof["audit_path"]:
+ decoded_inclusion_proof.append(base64.b64decode(item))
+
+ root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, index, tmp_sth["tree_size"], get_leaf_hash(base64.b64decode(proof["leaf_input"]))))
+
+ if tmp_sth["sha256_root_hash"] == root:
+ print time.strftime('%H:%M:%S') + " Verifying inclusion for entry " + str(index) + " in " + base_url + "...OK."
+ else:
+ print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url)
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url)
+
+def get_proof_by_index(baseurl, index, tree_size):
+ try:
+ params = urllib.urlencode({"leaf_index":index,
+ "tree_size":tree_size})
+ result = \
+ urlopen(baseurl + "ct/v1/get-entry-and-proof?" + params).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR:", e.read()
+ sys.exit(1)
+
+def get_all_roots(base_url):
+ result = urlopen(base_url + "ct/v1/get-roots").read()
+ certs = json.loads(result)["certificates"]
+ print time.strftime('%H:%M:%S') + " Received " + str(len(certs)) + " certs from " + base_url
+
+ for accepted_cert in certs:
+ cert_info = get_cert_info(base64.decodestring(accepted_cert))
+ # a self-signed cert (subject == issuer) is a root
+ if cert_info["subject"] == cert_info["issuer"]:
+ print cert_info["subject"]
+
+def print_errors(errors):
+ print "Encountered " + str(len(errors)) + " errors:"
+ for item in errors:
+ print item
+
+def print_timings(timings):
+ for item in timings:
+ m,s = divmod(timings[item]["longest"], 60)
+ h,m = divmod(m, 60)
+ print item + " last seen " + datetime.datetime.fromtimestamp(int(timings[item]["last"])/1000).strftime('%Y-%m-%d %H:%M:%S') \
+ + " longest between two STH: " + str(int(h)) + "h " + str(int(m)) + "m "# + str(int(s)) + "s."
+
+def read_sth(fn):
+ try:
+ f = open(fn)
+ except IOError, e:
+ if e.errno == errno.ENOENT:
+ return None
+ raise e
+ return json.loads(f.read())
+
+def write_file(fn, content):
+ tempname = fn + ".new"
+ open(tempname, 'w').write(json.dumps(content))
+ mv_file(tempname, fn)
+ # print "wrote " + fn
+
+def append_file(fn, content):
+ with open(fn, 'a') as f:
+ for item in content:
+ try:
+ f.write(json.dumps(item) + "\n")
+ except:
+ # print "failed to write " + str(item)
+ pass
+
+def main(args):
+
+ # print time.strftime("%H:%M:%S") + " Starting..."
+ if args.verify_index is None and not args.build_sth and not args.audit and not args.audit2 \
+ and not args.audit3 and not args.audit4 and not args.roots and not args.monitor:
+
+ print time.strftime('%H:%M:%S') + " Nothing to do."
+ return
+ elif args.audit4:
+ pass
+ else:
+ sth = fetch_all_sth()
+
+ if args.verify_index is not None:
+ for url in base_urls:
+ verify_inclusion_by_index(url, int(args.verify_index))
+
+ if args.roots:
+ print time.strftime('%H:%M:%S') + " Getting accepted Root Certs from all logs..."
+ for url in base_urls:
+ get_all_roots(url)
+
+
+ if args.build_sth:
+ print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..."
+ for base_url in base_urls:
+ subtree = fetch_and_build_subtree(sth, base_url)
+ verify_subtree(sth, subtree, base_url)
+ # fetch_and_build_tree(sth, base_urls[2])
+
+ if args.audit:
+ print time.strftime('%H:%M:%S') + " Running auditor1 for " +str(len(base_urls)) + " logs..."
+ old_sth = read_sth(args.cur_sth)
+ if old_sth:
+ verify_consistency(old_sth, sth)
+ else:
+ print "No old sth found..."
+ write_file(args.cur_sth, sth)
+
+
+ if args.audit3:
+ print time.strftime('%H:%M:%S') + " Running auditor3 for " +str(len(base_urls)) + " logs..."
+ while True:
+ time.sleep(30)
+ new_sth = fetch_all_sth()
+ verify_consistency(sth, new_sth)
+ verify_inclusion_all(sth, new_sth)
+ sth = new_sth
+
+ if args.audit2:
+ print time.strftime('%H:%M:%S') + " Running auditor2 for " +str(len(base_urls)) + " logs..."
+ old_sth = read_sth(args.cur_sth)
+ # print "Verifying progress..."
+ verify_progress(old_sth, sth)
+ if old_sth:
+ print "Verifying consistency..."
+ verify_consistency(old_sth, sth)
+ print "Verifying inclusion..."
+ verify_inclusion_all(old_sth, sth)
+ write_file(args.cur_sth, sth)
+
+ if args.monitor:
+ all_subtrees = {}
+ print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..."
+ for url in base_urls:
+ all_subtrees[url] = fetch_and_build_subtree(sth, url)
+ verify_subtree(sth, all_subtrees[url], url)
+
+ while True:
+ time.sleep(30)
+ new_sth = fetch_all_sth()
+ for url in base_urls:
+ if url in sth and url in new_sth and sth[url]["tree_size"] != new_sth[url]["tree_size"]:
+ # print "Len before:", len(all_subtrees[url])
+ all_subtrees[url] = fetch_and_increment_subtree(sth, new_sth, all_subtrees[url], url)
+ # print "Len between:", len(all_subtrees[url])
+ verify_subtree(new_sth, all_subtrees[url], url)
+ # print "Len after:", len(all_subtrees[url])
+ print "Old sth:", sth[url]["sha256_root_hash"]
+ print "New sth:", new_sth[url]["sha256_root_hash"]
+ sth = new_sth
+
+
+if __name__ == '__main__':
+ # try:
+ main(parser.parse_args())
+ if len(errors) == 0:
+ print time.strftime('%H:%M:%S') + " Everything OK."
+ sys.exit(NAGIOS_OK)
+ else:
+ # print "errors found!"
+ print_errors(errors)
+ sys.exit(NAGIOS_WARN)
+ # except:
+ # pass
+ # finally:
+ # # print_timings(timings)
+ # print_errors(errors)
diff --git a/monitor/josef_reader.py b/monitor/josef_reader.py
new file mode 100755
index 0000000..bf415f7
--- /dev/null
+++ b/monitor/josef_reader.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import sys
+from josef_lib import *
+import argparse
+
+
+
+parser = argparse.ArgumentParser(description="")
+parser.add_argument('--domain', default=None, help="RTFM")
+parser.add_argument('--exclude-expired', action='store_true', help="RTFM")
+
+args = parser.parse_args()
+
+monitored_domains = [
+ "google.com",
+ "preishelden.de",
+ "liu.se",
+ "nordu.net",
+ "symantec.com",
+]
+
+
+
+# data = []
+f = open("plausible_cert_data.json")
+for line in f:
+ tmp = json.loads(line)
+ try:
+ success = True
+
+ if args.domain:
+ if args.domain in tmp["subject"].split("CN=")[1] or \
+ args.domain in tmp["SAN"]:
+ pass
+ else:
+ success = False
+
+ if args.exclude_expired:
+ print "EXCLUDE EXPIRED NOT IMPLEMENTED YET"
+
+
+ if success:
+ print tmp["subject"].split("CN=")[1] + " certified by " + tmp["issuer"].split("CN=")[1]
+ except:
+ pass
+
+f.close()
+
+# for item in data[10000:]:
+# try:
+# s = item["subject"].split("CN=")[1]
+# print "\n" + s
+# print item["SAN"]
+# except:
+# pass
+
+# print "\nTotal entries: " + str(len(data))
+
diff --git a/monitor/monitor.cfg b/monitor/monitor.cfg
new file mode 100644
index 0000000..1d17802
--- /dev/null
+++ b/monitor/monitor.cfg
@@ -0,0 +1,7 @@
+# CONFIG FILE FOR MONITOR
+
+# LOGS
+
+# CHECKS
+
+# OUTPUT
diff --git a/tools/certkeys.py b/tools/certkeys.py
index dd0570f..f7c83b1 100644
--- a/tools/certkeys.py
+++ b/tools/certkeys.py
@@ -4,10 +4,6 @@ publickeys = {
"MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfahLEimAoz2t01p3uMziiLOl/fHTD"
"M0YDOhBRuiBARsV4UvxG2LdNgoIGLrtCzWE0J5APC2em4JlvR8EEEFMoA==",
- "https://flimsy.ct.nordu.net/":
- "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4qWq6afhBUi0OdcWUYhyJLNXTkGqQ9"
- "PMS5lqoCgkV2h1ZvpNjBH2u8UbgcOQwqDo66z6BWQJGolozZYmNHE2kQ==",
-
"https://plausible.ct.nordu.net/":
"MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9UV9+jO2MCTzkabodO2F7LM03MUB"
"c8MrdAtkcW6v6GA9taTTw9QJqofm0BbdAsbtJL/unyEf0zIkRgXjjzaYqQ==",
diff --git a/tools/josef_experimental.py b/tools/josef_experimental.py
index 383e385..7f27945 100755
--- a/tools/josef_experimental.py
+++ b/tools/josef_experimental.py
@@ -136,26 +136,30 @@ monitored_domains = [
"symantec.com",
]
-# data = read_sth("plausible_cert_data.json")
-# ss = []
-# for item in data:
-# try:
-# s = item["subject"].split("CN=")[1]
-# print s
-# except:
-# # if not item["subject"] in ss:
-# # print item["subject"]
-# # ss.append(item["subject"])
-# pass
-
-# print "\nTotal entries: " + str(len(data))
-
-base_url = base_urls[0]
-
-entries = get_entries(base_url, 11, 11)["entries"]
-for item in entries:
- orig_entry = extract_original_entry(item)
- cert_info = my_get_cert_info(orig_entry[0][0])
+data = []
+# data = read_sth("cert_data.json")
+f = open("cert_data.json")
+for line in f:
+ data.append(json.loads(line))
+ss = []
+for item in data:
+ try:
+ s = item["subject"].split("CN=")[1]
+ print s
+ except:
+ # if not item["subject"] in ss:
+ # print item["subject"]
+ # ss.append(item["subject"])
+ pass
+
+print "\nTotal entries: " + str(len(data))
+
+# base_url = base_urls[0]
+
+# entries = get_entries(base_url, 11, 11)["entries"]
+# for item in entries:
+# orig_entry = extract_original_entry(item)
+# cert_info = my_get_cert_info(orig_entry[0][0])
# prev = ""
# res = {}
# for line in cert_info:
@@ -172,4 +176,4 @@ for item in entries:
# res["not_after"] = line.split(": ")[1]
# prev = line
- print cert_info
+ # print cert_info
diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py
index 78a3fe2..7efd2dc 100755
--- a/tools/josef_experimental_auditor.py
+++ b/tools/josef_experimental_auditor.py
@@ -14,16 +14,17 @@ NAGIOS_CRIT = 2
NAGIOS_UNKNOWN = 3
DEFAULT_CUR_FILE = 'all-sth.json'
+DEFAULT_CERT_FILE = "cert_data.json"
base_urls = [
# "https://plausible.ct.nordu.net/",
# "https://ct1.digicert-ct.com/log/",
- # "https://ct.izenpe.com/",
+ "https://ct.izenpe.com/",
# "https://log.certly.io/",
# "https://ct.googleapis.com/aviator/",
# "https://ct.googleapis.com/pilot/",
# "https://ct.googleapis.com/rocketeer/",
- # "https://ctlog.api.venafi.com/",
+ "https://ctlog.api.venafi.com/",
"https://ct.ws.symantec.com/",
]
@@ -55,14 +56,6 @@ monitored_domains = [
# ".se",
]
-# cert_data = []
-
-# class cert(subject, issuer, log):
-# def __init__(self):
-# self.subject = subject
-# self.issuer = issuer
- # self.logs = [log]
-
class UTC(datetime.tzinfo):
def utcoffset(self, dt):
return datetime.timedelta(hours=0)
@@ -215,9 +208,12 @@ def check_domain(raw_entry, log=None):
def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
try:
+ print "Initial hash:", hash(str(subtree))
sth = old_sth[base_url]
new_sth = new_sth_in[base_url]
idx = sth["tree_size"]
+ tmp_tree = [list(layer) for layer in subtree] # copy each layer; reduce_tree mutates them in place
+ print "tmp hash:", hash(str(tmp_tree))
while idx < new_sth["tree_size"]:
pre_size = idx
@@ -227,16 +223,22 @@ def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
for item in entries:
tmp_cert_data.append(check_domain(item, base_url))
new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
- append_file("cert_data.json", tmp_cert_data)
+ append_file(DEFAULT_CERT_FILE, tmp_cert_data)
idx += len(new_leafs)
print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " \
+ str(idx -1) + " (" + str(len(new_leafs)) +" entries) from " + base_url
- subtree = reduce_tree(new_leafs, subtree)
+
+ print "Before reduction:", hash(str(tmp_tree))
+ res_tree = reduce_tree(new_leafs, tmp_tree)
+ print "After reduction:", hash(str(res_tree))
except:
print "Failed to build subtree :("
- return subtree
+ return subtree # keep the previous tree if fetching failed
+
+ if subtree == res_tree:
+ print "Final subtree hash", hash(str(subtree))
+ print "Final restree hash", hash(str(res_tree))
+ return res_tree
def fetch_and_build_subtree(old_sth, base_url):
try:
@@ -253,8 +255,8 @@ def fetch_and_build_subtree(old_sth, base_url):
tmp_cert_data.append(check_domain(item, base_url))
new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
idx += len(new_leafs)
- append_file("cert_data.json", tmp_cert_data)
- print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
+ append_file(DEFAULT_CERT_FILE, tmp_cert_data)
+ print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx -1) + " from " + base_url
subtree = reduce_tree(new_leafs, subtree)
except:
@@ -376,7 +378,7 @@ def append_file(fn, content):
with open(fn, 'a') as f:
for item in content:
try:
- f.write(json.dumps(item))
+ f.write(json.dumps(item) + "\n")
except:
# print "failed to write " + str(item)
pass
@@ -454,8 +456,13 @@ def main(args):
new_sth = fetch_all_sth()
for url in base_urls:
if url in sth and url in new_sth and sth[url]["tree_size"] != new_sth[url]["tree_size"]:
+ # print "Len before:", len(all_subtrees[url])
all_subtrees[url] = fetch_and_increment_subtree(sth, new_sth, all_subtrees[url], url)
+ # print "Len between:", len(all_subtrees[url])
verify_subtree(new_sth, all_subtrees[url], url)
+ # print "Len after:", len(all_subtrees[url])
+ print "Old sth:", sth[url]["sha256_root_hash"]
+ print "New sth:", new_sth[url]["sha256_root_hash"]
sth = new_sth