summaryrefslogtreecommitdiff
path: root/monitor
diff options
context:
space:
mode:
Diffstat (limited to 'monitor')
-rw-r--r--monitor/josef_lib.py35
-rwxr-xr-xmonitor/josef_monitor.py343
-rw-r--r--monitor/logs.py38
-rw-r--r--monitor/monitor.cfg2
4 files changed, 249 insertions, 169 deletions
diff --git a/monitor/josef_lib.py b/monitor/josef_lib.py
index 89756cf..afa165b 100644
--- a/monitor/josef_lib.py
+++ b/monitor/josef_lib.py
@@ -22,6 +22,41 @@ from Crypto.Hash import SHA256
import Crypto.PublicKey.RSA as RSA
from Crypto.Signature import PKCS1_v1_5
+# def read_sth(fn):
+# try:
+# f = open(fn)
+# except IOError, e:
+# if e.errno == errno.ENOENT:
+# return None
+# raise e
+# return json.loads(f.read())
+
+def encode_tree(tree):
+ res = []
+ for layer in tree:
+ res.append([])
+ for item in layer:
+ tmp = base64.b64encode(item)
+ res[-1].append(tmp)
+ return res
+
+def decode_tree(tree):
+ res = []
+ for layer in tree:
+ res.append([])
+ for item in layer:
+ tmp = base64.b64decode(item)
+ res[-1].append(tmp)
+ return res
+
+def append_file(fn, content):
+ with open(fn, 'a') as f:
+ for item in content:
+ try:
+ f.write(json.dumps(item) + "\n")
+ except:
+ pass
+
def get_cert_info(s):
p = subprocess.Popen(
["openssl", "x509", "-noout", "-subject", "-issuer", "-inform", "der"],
diff --git a/monitor/josef_monitor.py b/monitor/josef_monitor.py
index cf5357c..f3a38bf 100755
--- a/monitor/josef_monitor.py
+++ b/monitor/josef_monitor.py
@@ -8,84 +8,138 @@ import argparse
import errno
from copy import deepcopy
from josef_lib import *
+from logs import ctlogs
+import os.path
-NAGIOS_OK = 0
-NAGIOS_WARN = 1
-NAGIOS_CRIT = 2
-NAGIOS_UNKNOWN = 3
-
-DEFAULT_CUR_FILE = 'all-sth.json'
-DEFAULT_CERT_FILE = "plausible_cert_data.json"
-
-base_urls = [
- # "https://plausible.ct.nordu.net/",
- # "https://ct1.digicert-ct.com/log/",
- # "https://ct.izenpe.com/",
- # "https://log.certly.io/",
- # "https://ct.googleapis.com/aviator/",
- # "https://ct.googleapis.com/pilot/",
- # "https://ct.googleapis.com/rocketeer/",
- # "https://ctlog.api.venafi.com/",
- "https://ct.ws.symantec.com/",
-]
+# NAGIOS_OK = 0
+# NAGIOS_WARN = 1
+# NAGIOS_CRIT = 2
+# NAGIOS_UNKNOWN = 3
+
+INTERVAL = 30 # interval (in seconds) between updates
+
+OUTPUT_DIR = "output/"
+DEFAULT_CERT_FILE = OUTPUT_DIR + "cert_data.json"
parser = argparse.ArgumentParser(description="")
-# parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
-# parser.add_argument('--monitor', action='store_true', help="run full monitoring for all logs")
-# parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
-# parser.add_argument('--audit3', action='store_true', help="continously run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
-# parser.add_argument('--audit4', action='store_true', help="run one check on one server")
-# parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH")
-# parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs" )
-# parser.add_argument('--host', default=None, help="Base URL for CT log")
-# parser.add_argument('--roots', action='store_true', help="Check accepted root certificates for all logs" )
-# parser.add_argument('--cur-sth',
-# metavar='file',
-# default=DEFAULT_CUR_FILE,
-# help="File containing current STH (default=%s)" % DEFAULT_CUR_FILE)
-
-# timings = {}
-# errors = []
-
-monitored_domains = [
- # "google.com",
- # "preishelden.de",
- # "liu.se",
- # "nordu.net",
- # "symantec.com",
- # "sunet.se",
- # ".se",
-]
+# TODO implement silent mode
+# parser.add_argument('--silent', action='store_true', help="Don't output to stdout. Logging only")
+
+
+# monitored_domains = [
+# # "google.com",
+# # "preishelden.de",
+# # "liu.se",
+# # "nordu.net",
+# # "symantec.com",
+# # "sunet.se",
+# # ".se",
+# ]
class ctlog:
- def __init__(name, url):
+ def __init__(self, name, url, key):
self.name = name
+ self.url = url
+ self.key = key
+ self.logfile = OUTPUT_DIR + name + ".log"
+ self.savefile = OUTPUT_DIR + name + "-state-info.json"
+ self.subtree = [[]]
+ self.sth = None
+ self.entries = 0
+
+ self.log("Starting monitor")
+
+
+ def build(self):
+ self.sth = get_sth(self.url)
+ tmp_subtree = fetch_and_build_subtree(self.sth, self.url)
+        if verify_subtree(self.sth, tmp_subtree, self.url):
+            self.log("Successfully built tree from entries.")
+ self.subtree = tmp_subtree
+ self.entries = self.sth["tree_size"]
+ else:
+ self.log("ERROR Failed to build tree from entries.")
+
+ def incremental_build(self):
+ # Keeps state current during build, partial builds are possible.
+ self.sth = get_sth(self.url)
+ while self.entries < self.sth["tree_size"]:
+ self.subtree, self.entries = fetch_and_increment_subtree(self.entries, self.sth["tree_size"] -1, self.url, self.subtree)
+
+ if verify_subtree(self.sth, self.subtree, self.url):
+            self.log("Successfully built tree from entries.")
+ # self.subtree = tmp_subtree
+ # self.entries = self.sth["tree_size"]
+ else:
+ self.log("ERROR Failed to build tree from entries.")
-def fetch_all_sth():
- sths = {}
- for base_url in base_urls:
- # Fetch STH
- try:
- sths[base_url] = get_sth(base_url)
- except:
- sths[base_url] = None
- error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url
- print error_str
- errors.append(error_str)
- continue
+ def to_dict(self):
+ d = {}
+ # d["name"] = self.name
+ # d["url"] = self.url
+ d["entries"] = self.entries
+ d["subtree"] = encode_tree(self.subtree)
+ d["sth"] = self.sth
+ return d
- # Check signature on the STH
- try:
- # check_sth_signature(base_url, sths[base_url], logkeys[base_url])
- check_sth_signature(base_url, sths[base_url], None)
- except:
- error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url
- print error_str
- errors.append(error_str)
- continue
- return sths
+ def save(self):
+ self.log("Saving state to file")
+ open(self.savefile, 'w').write(json.dumps(self.to_dict()))
+
+ def load(self):
+ self.log("Loading state from file")
+ try:
+ f = open(self.savefile)
+ s = f.read()
+ d = json.loads(s)
+ self.subtree = decode_tree(d["subtree"])
+ self.sth = d["sth"]
+ self.entries = d["entries"]
+ except IOError, e:
+ if e.errno == errno.ENOENT:
+ return None
+ raise e
+
+ def log(self, string):
+ s = time.strftime('%H:%M:%S') + " " + string
+ with open(self.logfile, 'a') as f:
+ f.write(s + "\n")
+ f.close()
+
+ def update_sth(self):
+ new_sth = get_sth(self.url)
+ sth_time = datetime.datetime.fromtimestamp(new_sth['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
+ if new_sth["timestamp"] != self.sth["timestamp"]:
+ self.log("STH updated. Size: " + str(new_sth["tree_size"]) + ", Time: " + sth_time)
+ self.sth = new_sth
+
+
+# def fetch_all_sth():
+# sths = {}
+# for base_url in base_urls:
+# # Fetch STH
+# try:
+# sths[base_url] = get_sth(base_url)
+# except:
+# sths[base_url] = None
+# error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url
+# print error_str
+# errors.append(error_str)
+# continue
+
+# # Check signature on the STH
+# try:
+# # check_sth_signature(base_url, sths[base_url], logkeys[base_url])
+# check_sth_signature(base_url, sths[base_url], None)
+# except:
+# error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url
+# print error_str
+# errors.append(error_str)
+# continue
+
+# return sths
def verify_progress(old, new):
print "Verifying progress"
@@ -169,43 +223,26 @@ def check_domain(raw_entry, log=None):
cert_info["log"] = log[8:-1] # strip generic URL stuff
return cert_info
-def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
- try:
- print "Initial hash:", hash(str(subtree))
- sth = old_sth[base_url]
- new_sth = new_sth_in[base_url]
- idx = sth["tree_size"]
- tmp_tree = list(subtree)
- print "tmp hash:", hash(str(tmp_tree))
-
- while idx < new_sth["tree_size"]:
- pre_size = idx
- entries = get_entries(base_url, idx, new_sth["tree_size"]-1)["entries"]
- new_leafs = []
+def fetch_and_increment_subtree(first, last, url, subtree =[[]]):
+ # try:
+ new_leafs = []
+ if first <= last:
+ entries = get_entries(url, first, last)["entries"]
tmp_cert_data = []
for item in entries:
- tmp_cert_data.append(check_domain(item, base_url))
+ tmp_cert_data.append(check_domain(item, url))
new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
append_file(DEFAULT_CERT_FILE, tmp_cert_data)
- idx += len(new_leafs)
- print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " \
- + str(idx -1) + " (" + str(len(new_leafs)) +" entries) from " + base_url
+ print time.strftime('%H:%M:%S') + " Got entries " + str(first) + " to " \
+            + str(first + len(new_leafs) - 1) + " (" + str(len(new_leafs)) +" entries) from " + url
- print "Before reduction:", hash(str(tmp_tree))
- res_tree = reduce_tree(new_leafs, tmp_tree)
- print "After reduction:", hash(str(res_tree))
-
- except:
- print "Failed to build subtree :("
-
- if subtree == res_tree:
- print "Final subtree hash", hash(str(subtree))
- print "Final restree hash", hash(str(res_tree))
- return res_tree
+ subtree = reduce_tree(new_leafs, subtree)
+ # except:
+ # print "Failed to build subtree :("
+ return subtree, len(new_leafs) + first
-def fetch_and_build_subtree(old_sth, base_url):
+def fetch_and_build_subtree(sth, base_url):
try:
- sth = old_sth[base_url]
subtree = [[]]
idx = 0
@@ -221,33 +258,26 @@ def fetch_and_build_subtree(old_sth, base_url):
append_file(DEFAULT_CERT_FILE, tmp_cert_data)
print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx -1) + " from " + base_url
subtree = reduce_tree(new_leafs, subtree)
-
except:
print "Failed to build subtree :("
-
return subtree
-def verify_subtree(old_sth, subtree, base_url):
+def verify_subtree(sth, subtree, base_url):
try:
- sth = old_sth[base_url]
-
- ### BUG ###!!
tmp = deepcopy(subtree)
-
-
root = base64.b64encode(reduce_subtree_to_root(tmp)[0])
if root == sth["sha256_root_hash"]:
print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
+ return True
else:
print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " \
+ sth["sha256_root_hash"] + ", Tree root: " + root
- # errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for "
- # + base_url + ", tre size " + sth["tree_size"])
+ return False
except:
print time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url
- # errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url)
+ return False
def verify_inclusion_by_hash(base_url, leaf_hash):
try:
@@ -314,73 +344,50 @@ def get_all_roots(base_url):
root_cert = base64.decodestring(accepted_cert)
print get_cert_info(root_cert)["subject"]
-def print_errors(errors):
- print "Encountered " + str(len(errors)) + " errors:"
- for item in errors:
- print item
-def print_timings(timings):
- for item in timings:
- m,s = divmod(timings[item]["longest"], 60)
- h,m = divmod(m, 60)
- print item + " last seen " + datetime.datetime.fromtimestamp(int(timings[item]["last"])/1000).strftime('%Y-%m-%d %H:%M:%S') \
- + " longest between two STH: " + str(int(h)) + "h " + str(int(m)) + "m "# + str(int(s)) + "s."
+def main(args):
+ # TODO cleanup files
-def read_sth(fn):
+ # Create logs
+ logs = []
try:
- f = open(fn)
- except IOError, e:
- if e.errno == errno.ENOENT:
- return None
- raise e
- return json.loads(f.read())
-
-def write_file(fn, content):
- tempname = fn + ".new"
- open(tempname, 'w').write(json.dumps(content))
- mv_file(tempname, fn)
- # print "wrote " + fn
-
-def append_file(fn, content):
- with open(fn, 'a') as f:
- for item in content:
- try:
- f.write(json.dumps(item) + "\n")
- except:
- # print "failed to write " + str(item)
- pass
+ for item in ctlogs:
+ logs.append(ctlog(item, ctlogs[item][0], ctlogs[item][1]))
+
+ # Set up state
+ for log in logs:
+ if os.path.isfile(log.savefile):
+ log.load()
+ log.incremental_build()
+
+ # Main loop: Monitor
+ while True:
+ time.sleep(INTERVAL)
+ for log in logs:
+ old_sth = log.sth
+ log.update_sth()
+ if old_sth["timestamp"] != log.sth["timestamp"]:
+ # TODO verify signature
+ # TODO fetch updates
+ # TODO verify progress
+ # TODO verify tree
+ # TODO verify consistency proof?
+ pass
+
+ # Unreachable... usually.
+ for log in logs:
+ log.save()
+
+
+ except KeyboardInterrupt:
+ print 'Received interrupt from user. Saving and exiting....'
+ for log in logs:
+ log.save()
-def main(args):
-
- # TODO read state
- sth = fetch_all_sth()
- all_subtrees = {}
- print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..."
- for url in base_urls:
- all_subtrees[url] = fetch_and_build_subtree(sth, url)
- verify_subtree(sth, all_subtrees[url], url)
-
- while True:
- time.sleep(30)
- new_sth = fetch_all_sth()
- for url in base_urls:
- if url in sth and url in new_sth and sth[url]["tree_size"] != new_sth[url]["tree_size"]:
- all_subtrees[url] = fetch_and_increment_subtree(sth, new_sth, all_subtrees[url], url)
- verify_subtree(new_sth, all_subtrees[url], url)
- print "Old sth:", sth[url]["sha256_root_hash"]
- print "New sth:", new_sth[url]["sha256_root_hash"]
- sth = new_sth
if __name__ == '__main__':
main(parser.parse_args())
- # TODO read config
- if len(errors) == 0:
- print time.strftime('%H:%M:%S') + " Everything OK."
- sys.exit(NAGIOS_OK)
- else:
- print_errors(errors)
- sys.exit(NAGIOS_WARN)
diff --git a/monitor/logs.py b/monitor/logs.py
new file mode 100644
index 0000000..12c58a5
--- /dev/null
+++ b/monitor/logs.py
@@ -0,0 +1,38 @@
+
+ctlogs = {
+ # "pilot":
+ # ["https://ct.googleapis.com/pilot/",
+ # "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfahLEimAoz2t01p3uMziiLOl/fHTDM0YDOhBRuiBARsV4UvxG2LdNgoIGLrtCzWE0J5APC2em4JlvR8EEEFMoA=="],
+
+ # "plausible":
+ # ["https://plausible.ct.nordu.net/",
+ # "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9UV9+jO2MCTzkabodO2F7LM03MUBc8MrdAtkcW6v6GA9taTTw9QJqofm0BbdAsbtJL/unyEf0zIkRgXjjzaYqQ=="],
+
+ # "digicert":
+ # ["https://ct1.digicert-ct.com/log/",
+ # "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEAkbFvhu7gkAW6MHSrBlpE1n4+HCFRkC5OLAjgqhkTH+/uzSfSl8ois8ZxAD2NgaTZe1M9akhYlrYkes4JECs6A=="],
+
+ "izenpe":
+ ["https://ct.izenpe.com/",
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEJ2Q5DC3cUBj4IQCiDu0s6j51up+TZAkAEcQRF6tczw90rLWXkJMAW7jr9yc92bIKgV8vDXU4lDeZHvYHduDuvg=="],
+
+ "certly":
+ ["https://log.certly.io/",
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAECyPLhWKYYUgEc+tUXfPQB4wtGS2MNvXrjwFCCnyYJifBtd2Sk7Cu+Js9DNhMTh35FftHaHu6ZrclnNBKwmbbSA=="],
+
+ # "aviator":
+ # ["https://ct.googleapis.com/aviator/",
+ # "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1/TMabLkDpCjiupacAlP7xNi0I1JYP8bQFAHDG1xhtolSY1l4QgNRzRrvSe8liE+NPWHdjGxfx3JhTsN9x8/6Q=="],
+
+ # "rocketeer":
+ # ["https://ct.googleapis.com/rocketeer/",
+ # "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEIFsYyDzBi7MxCAC/oJBXK7dHjG+1aLCOkHjpoHPqTyghLpzA9BYbqvnV16mAw04vUjyYASVGJCUoI3ctBcJAeg=="],
+
+ "symantec":
+ ["https://ct.ws.symantec.com/",
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEluqsHEYMG1XcDfy1lCdGV0JwOmkY4r87xNuroPS2bMBTP01CEDPwWJePa75y9CrsHEKqAy8afig1dpkIPSEUhg=="],
+
+ "venafi":
+ ["https://ctlog.api.venafi.com/",
+ "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAolpIHxdSlTXLo1s6H1OCdpSj/4DyHDc8wLG9wVmLqy1lk9fz4ATVmm+/1iN2Nk8jmctUKK2MFUtlWXZBSpym97M7frGlSaQXUWyA3CqQUEuIJOmlEjKTBEiQAvpfDjCHjlV2Be4qTM6jamkJbiWtgnYPhJL6ONaGTiSPm7Byy57iaz/hbckldSOIoRhYBiMzeNoA0DiRZ9KmfSeXZ1rB8y8X5urSW+iBzf2SaOfzBvDpcoTuAaWx2DPazoOl28fP1hZ+kHUYvxbcMjttjauCFx+JII0dmuZNIwjfeG/GBb9frpSX219k1O4Wi6OEbHEr8at/XQ0y7gTikOxBn/s5wQIDAQAB"],
+}
diff --git a/monitor/monitor.cfg b/monitor/monitor.cfg
index 1d17802..f686809 100644
--- a/monitor/monitor.cfg
+++ b/monitor/monitor.cfg
@@ -1,7 +1,7 @@
# CONFIG FILE FOR MONITOR
-
# LOGS
+
# CHECKS
# OUTPUT