#!/usr/bin/python
# -*- coding: utf-8 -*-

import time
import datetime
import base64
import argparse
import errno
import json
import os.path
import sys
import urllib
import urllib2
from copy import deepcopy
from josef_lib import *
from logs import ctlogs

# NAGIOS_OK = 0
# NAGIOS_WARN = 1
# NAGIOS_CRIT = 2
# NAGIOS_UNKNOWN = 3

INTERVAL = 30  # interval (in seconds) between updates
OUTPUT_DIR = "output/"
DEFAULT_CERT_FILE = OUTPUT_DIR + "cert_data.json"

# Warning/error messages collected by the verification functions below.
errors = []

parser = argparse.ArgumentParser(description="")
# TODO implement silent mode
# parser.add_argument('--silent', action='store_true', help="Dont output to stdout. logging only")

# monitored_domains = [
#     # "google.com",
#     # "preishelden.de",
#     # "liu.se",
#     # "nordu.net",
#     # "symantec.com",
#     # "sunet.se",
#     # ".se",
# ]


class ctlog:
    def __init__(self, name, url, key):
        self.name = name
        self.url = url
        self.key = key
        self.logfile = OUTPUT_DIR + name + ".log"
        self.savefile = OUTPUT_DIR + name + "-state-info.json"
        self.subtree = [[]]
        self.sth = None
        self.entries = 0
        self.log("Starting monitor")

    def build(self):
        # Full rebuild: fetch all entries, verify the resulting tree against
        # the STH, and only then adopt it as the current state.
        self.sth = get_sth(self.url)
        tmp_subtree = fetch_and_build_subtree(self.sth, self.url)
        if verify_subtree(self.sth, tmp_subtree, self.url):
            self.log("Successfully built tree from entries.")
            self.subtree = tmp_subtree
            self.entries = self.sth["tree_size"]
        else:
            self.log("ERROR Failed to build tree from entries.")

    def incremental_build(self):
        # Keeps state current during build; partial builds are possible.
        self.sth = get_sth(self.url)
        while self.entries < self.sth["tree_size"]:
            self.subtree, self.entries = fetch_and_increment_subtree(
                self.entries, self.sth["tree_size"] - 1, self.url, self.subtree)
        if verify_subtree(self.sth, self.subtree, self.url):
            self.log("Successfully built tree from entries.")
        else:
            self.log("ERROR Failed to build tree from entries.")

    def to_dict(self):
        d = {}
        # d["name"] = self.name
        # d["url"] = self.url
        d["entries"] = self.entries
        d["subtree"] = encode_tree(self.subtree)
        d["sth"] = self.sth
        return d

    def save(self):
        self.log("Saving state to file")
        with open(self.savefile, 'w') as f:
            f.write(json.dumps(self.to_dict()))

    def load(self):
        self.log("Loading state from file")
        try:
            with open(self.savefile) as f:
                d = json.loads(f.read())
            self.subtree = decode_tree(d["subtree"])
            self.sth = d["sth"]
            self.entries = d["entries"]
        except IOError, e:
            if e.errno == errno.ENOENT:
                return None
            raise e

    def log(self, string):
        s = time.strftime('%H:%M:%S') + " " + string
        with open(self.logfile, 'a') as f:
            f.write(s + "\n")

    def update_sth(self):
        new_sth = get_sth(self.url)
        sth_time = datetime.datetime.fromtimestamp(
            new_sth['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
        if new_sth["timestamp"] != self.sth["timestamp"]:
            self.log("STH updated. Size: " + str(new_sth["tree_size"]) +
                     ", Time: " + sth_time)
            self.sth = new_sth
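# Example (sketch) of driving a single ctlog by hand, mirroring what main()
# does below. The log name and URL are placeholders; the key is unused until
# STH signature checking is implemented (see the TODOs in main()).
#
#   log = ctlog("pilot", "https://ct.googleapis.com/pilot/", None)
#   log.load()                # resume from saved state, if any
#   log.incremental_build()   # fetch new entries and verify the root hash
#   log.save()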
Size: " + str(new_sth["tree_size"]) + ", Time: " + sth_time) self.sth = new_sth # def fetch_all_sth(): # sths = {} # for base_url in base_urls: # # Fetch STH # try: # sths[base_url] = get_sth(base_url) # except: # sths[base_url] = None # error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url # print error_str # errors.append(error_str) # continue # # Check signature on the STH # try: # # check_sth_signature(base_url, sths[base_url], logkeys[base_url]) # check_sth_signature(base_url, sths[base_url], None) # except: # error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url # print error_str # errors.append(error_str) # continue # return sths def verify_progress(old, new): print "Verifying progress" try: for url in new: if new and old and new[url] and old[url]: if new[url]["tree_size"] == old[url]["tree_size"]: if old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]: errors.append(time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url) elif new[url]["tree_size"] < old[url]["tree_size"]: errors.append(time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \ (new[url]["tree_size"], old[url]["tree_size"])) if new[url]: age = time.time() - new[url]["timestamp"]/1000 sth_time = datetime.datetime.fromtimestamp(new[url]['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S") roothash = new[url]['sha256_root_hash'] if age > 24 * 3600: errors.append(time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time)) elif age > 12 * 3600: errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 12h: %s UTC" % (url, sth_time)) elif age > 6 * 3600: errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 6h: %s UTC" % (url, sth_time)) # elif age > 2 * 3600: # errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 2h: %s UTC" % (url, sth_time)) except: print time.strftime('%H:%M:%S') + " ERROR: Failed to verify progress for " + url def verify_consistency(old, new): for url in old: try: if old[url] and new[url] and old[url]["tree_size"]!= new[url]["tree_size"]: consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"]) decoded_consistency_proof = [] for item in consistency_proof: decoded_consistency_proof.append(base64.b64decode(item)) res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], old[url]["sha256_root_hash"]) if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])): print time.strftime('%H:%M:%S') + " Verification of old hash failed! " + old[url]["sha256_root_hash"], str(base64.b64encode(res[0])) errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + old[url]["tree_size"]) elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])): print time.strftime('%H:%M:%S') + " Verification of new hash failed! " + new[url]["sha256_root_hash"], str(base64.b64encode(res[1])) errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + new[url]["tree_size"]) else: print time.strftime("%H:%M:%S") + " New STH from " + url + ", timestamp: " + \ str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK." 
except: print "ERROR: Could not verify consistency for " + url def verify_inclusion_all(old, new): for url in old: try: if old[url] and new[url]: if old[url]["tree_size"]!= new[url]["tree_size"]: entries = [] while len(entries) + old[url]["tree_size"]!= new[url]["tree_size"]: entries += get_entries(url, str(int(old[url]["tree_size"]) + len(entries)), new[url]["tree_size"] -1)["entries"] print "Got " + str(len(entries)) + " entries..." success = True for i in entries: h = get_leaf_hash(base64.b64decode(i["leaf_input"])) if not verify_inclusion_by_hash(url, h): success = False if success: print time.strftime("%H:%M:%S") + " Verifying inclusion for " + str(len(entries)) + " new entries in " + url + " ...OK" else: print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url) except: print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url) def check_domain(raw_entry, log=None): orig_entry = extract_original_entry(raw_entry) cert_info = my_get_cert_info(orig_entry[0][0]) if log: cert_info["log"] = log[8:-1] # strip generic URL stuff return cert_info def fetch_and_increment_subtree(first, last, url, subtree =[[]]): # try: new_leafs = [] if first <= last: entries = get_entries(url, first, last)["entries"] tmp_cert_data = [] for item in entries: tmp_cert_data.append(check_domain(item, url)) new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"]))) append_file(DEFAULT_CERT_FILE, tmp_cert_data) print time.strftime('%H:%M:%S') + " Got entries " + str(first) + " to " \ + str(first + len(new_leafs)) + " (" + str(len(new_leafs)) +" entries) from " + url subtree = reduce_tree(new_leafs, subtree) # except: # print "Failed to build subtree :(" return subtree, len(new_leafs) + first def fetch_and_build_subtree(sth, base_url): try: subtree = [[]] idx = 0 while idx < sth["tree_size"]: pre_size = idx entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"] new_leafs = [] tmp_cert_data = [] for item in entries: tmp_cert_data.append(check_domain(item, base_url)) new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"]))) idx += len(new_leafs) append_file(DEFAULT_CERT_FILE, tmp_cert_data) print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx -1) + " from " + base_url subtree = reduce_tree(new_leafs, subtree) except: print "Failed to build subtree :(" return subtree def verify_subtree(sth, subtree, base_url): try: tmp = deepcopy(subtree) root = base64.b64encode(reduce_subtree_to_root(tmp)[0]) if root == sth["sha256_root_hash"]: print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK." return True else: print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! 
STH root: " \ + sth["sha256_root_hash"] + ", Tree root: " + root return False except: print time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url return False def verify_inclusion_by_hash(base_url, leaf_hash): try: tmp_sth = get_sth(base_url) proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"]) decoded_inclusion_proof = [] for item in proof["audit_path"]: decoded_inclusion_proof.append(base64.b64decode(item)) root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash)) if tmp_sth["sha256_root_hash"] == root: return True else: print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url) return False except: print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url) return False def verify_inclusion_by_index(base_url, index): try: tmp_sth = get_sth(base_url) proof = get_proof_by_index(base_url, index, tmp_sth["tree_size"]) decoded_inclusion_proof = [] for item in proof["audit_path"]: decoded_inclusion_proof.append(base64.b64decode(item)) root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, index, tmp_sth["tree_size"], get_leaf_hash(base64.b64decode(proof["leaf_input"])))) if tmp_sth["sha256_root_hash"] == root: print time.strftime('%H:%M:%S') + " Verifying inclusion for entry " + str(index) + " in " + base_url + "...OK." else: print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url) except: print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url) def get_proof_by_index(baseurl, index, tree_size): try: params = urllib.urlencode({"leaf_index":index, "tree_size":tree_size}) result = \ urlopen(baseurl + "ct/v1/get-entry-and-proof?" + params).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR:", e.read() sys.exit(0) def get_all_roots(base_url): result = urlopen(base_url + "ct/v1/get-roots").read() certs = json.loads(result)["certificates"] print time.strftime('%H:%M:%S') + " Received " + str(len(certs)) + " certs from " + base_url for accepted_cert in certs: subject = get_cert_info(base64.decodestring(accepted_cert))["subject"] issuer = get_cert_info(base64.decodestring(accepted_cert))["issuer"] if subject == issuer: root_cert = base64.decodestring(accepted_cert) print get_cert_info(root_cert)["subject"] def main(args): # TODO cleanup files # Create logs logs = [] try: for item in ctlogs: logs.append(ctlog(item, ctlogs[item][0], ctlogs[item][1])) # Set up state for log in logs: if os.path.isfile(log.savefile): log.load() log.incremental_build() # Main loop: Monitor while True: time.sleep(INTERVAL) for log in logs: old_sth = log.sth log.update_sth() if old_sth["timestamp"] != log.sth["timestamp"]: # TODO verify signature # TODO fetch updates # TODO verify progress # TODO verify tree # TODO verify consistency proof? pass # Unreachable... usually. 
def main(args):
    # TODO cleanup files

    # Create logs
    logs = []
    try:
        for item in ctlogs:
            logs.append(ctlog(item, ctlogs[item][0], ctlogs[item][1]))

        # Set up state
        for log in logs:
            if os.path.isfile(log.savefile):
                log.load()
            log.incremental_build()

        # Main loop: Monitor
        while True:
            time.sleep(INTERVAL)
            for log in logs:
                old_sth = log.sth
                log.update_sth()
                if old_sth["timestamp"] != log.sth["timestamp"]:
                    # TODO verify signature
                    # TODO fetch updates
                    # TODO verify progress
                    # TODO verify tree
                    # TODO verify consistency proof?
                    pass

        # Unreachable... usually.
        for log in logs:
            log.save()

    except KeyboardInterrupt:
        print 'Received interrupt from user. Saving and exiting....'
        for log in logs:
            log.save()


if __name__ == '__main__':
    main(parser.parse_args())
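# When run directly, the script monitors every log listed in logs.ctlogs and
# writes its state under OUTPUT_DIR:
#   output/<name>.log              - timestamped monitor messages per log
#   output/<name>-state-info.json  - saved subtree/STH state, used to resume
#   output/cert_data.json          - cert info appended for every fetched entry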