summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--monitor/josef_lib.py44
-rwxr-xr-x[-rw-r--r--]monitor/josef_monitor.py209
-rwxr-xr-xtools/josef_experimental_auditor.py26
3 files changed, 101 insertions, 178 deletions
diff --git a/monitor/josef_lib.py b/monitor/josef_lib.py
index 3c52761..89756cf 100644
--- a/monitor/josef_lib.py
+++ b/monitor/josef_lib.py
@@ -777,3 +777,47 @@ def write_file(fn, sth):
tempname = fn + ".new"
open(tempname, 'w').write(json.dumps(sth))
mv_file(tempname, fn)
+
+
+class UTC(datetime.tzinfo):
+ def utcoffset(self, dt):
+ return datetime.timedelta(hours=0)
+ def dst(self, dt):
+ return datetime.timedelta(0)
+
+def reduce_layer(layer):
+ new_layer = []
+ while len(layer) > 1:
+ e1 = layer.pop(0)
+ e2 = layer.pop(0)
+ new_layer.append(internal_hash((e1,e2)))
+ return new_layer
+
+def reduce_tree(entries, layers):
+    if len(entries) == 0 and layers == []:
+ return [[hashlib.sha256().digest()]]
+
+ layer_idx = 0
+ layers[layer_idx] += entries
+
+ while len(layers[layer_idx]) > 1:
+ if len(layers) == layer_idx + 1:
+ layers.append([])
+
+ layers[layer_idx + 1] += reduce_layer(layers[layer_idx])
+ layer_idx += 1
+ return layers
+
+def reduce_subtree_to_root(layers):
+ while len(layers) > 1:
+ if len(layers[1]) == 0:
+ layers[1] = layers[0]
+ else:
+ layers[1] += next_merkle_layer(layers[0])
+ del layers[0]
+
+ if len(layers[0]) > 1:
+ return next_merkle_layer(layers[0])
+ return layers[0]
+
+
diff --git a/monitor/josef_monitor.py b/monitor/josef_monitor.py
index d84be8e..cf5357c 100644..100755
--- a/monitor/josef_monitor.py
+++ b/monitor/josef_monitor.py
@@ -6,7 +6,8 @@ import datetime
import base64
import argparse
import errno
-from certtools import *
+from copy import deepcopy
+from josef_lib import *
NAGIOS_OK = 0
NAGIOS_WARN = 1
@@ -17,7 +18,7 @@ DEFAULT_CUR_FILE = 'all-sth.json'
DEFAULT_CERT_FILE = "plausible_cert_data.json"
base_urls = [
- "https://plausible.ct.nordu.net/",
+ # "https://plausible.ct.nordu.net/",
# "https://ct1.digicert-ct.com/log/",
# "https://ct.izenpe.com/",
# "https://log.certly.io/",
@@ -25,26 +26,26 @@ base_urls = [
# "https://ct.googleapis.com/pilot/",
# "https://ct.googleapis.com/rocketeer/",
# "https://ctlog.api.venafi.com/",
- # "https://ct.ws.symantec.com/",
+ "https://ct.ws.symantec.com/",
]
parser = argparse.ArgumentParser(description="")
-parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
-parser.add_argument('--monitor', action='store_true', help="run full monitoring for all logs")
-parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
-parser.add_argument('--audit3', action='store_true', help="continously run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
-parser.add_argument('--audit4', action='store_true', help="run one check on one server")
-parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH")
-parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs" )
-parser.add_argument('--host', default=None, help="Base URL for CT log")
-parser.add_argument('--roots', action='store_true', help="Check accepted root certificates for all logs" )
-parser.add_argument('--cur-sth',
- metavar='file',
- default=DEFAULT_CUR_FILE,
- help="File containing current STH (default=%s)" % DEFAULT_CUR_FILE)
-
-timings = {}
-errors = []
+# parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
+# parser.add_argument('--monitor', action='store_true', help="run full monitoring for all logs")
+# parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
+# parser.add_argument('--audit3', action='store_true', help="continously run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
+# parser.add_argument('--audit4', action='store_true', help="run one check on one server")
+# parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH")
+# parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs" )
+# parser.add_argument('--host', default=None, help="Base URL for CT log")
+# parser.add_argument('--roots', action='store_true', help="Check accepted root certificates for all logs" )
+# parser.add_argument('--cur-sth',
+# metavar='file',
+# default=DEFAULT_CUR_FILE,
+# help="File containing current STH (default=%s)" % DEFAULT_CUR_FILE)
+
+# timings = {}
+errors = []
monitored_domains = [
# "google.com",
@@ -56,46 +57,10 @@ monitored_domains = [
# ".se",
]
-class UTC(datetime.tzinfo):
- def utcoffset(self, dt):
- return datetime.timedelta(hours=0)
- def dst(self, dt):
- return datetime.timedelta(0)
-
-def reduce_layer(layer):
- new_layer = []
- while len(layer) > 1:
- e1 = layer.pop(0)
- e2 = layer.pop(0)
- new_layer.append(internal_hash((e1,e2)))
- return new_layer
-
-def reduce_tree(entries, layers):
- if len(entries) == 0 and layers is []:
- return [[hashlib.sha256().digest()]]
-
- layer_idx = 0
- layers[layer_idx] += entries
-
- while len(layers[layer_idx]) > 1:
- if len(layers) == layer_idx + 1:
- layers.append([])
-
- layers[layer_idx + 1] += reduce_layer(layers[layer_idx])
- layer_idx += 1
- return layers
-
-def reduce_subtree_to_root(layers):
- while len(layers) > 1:
- if len(layers[1]) == 0:
- layers[1] = layers[0]
- else:
- layers[1] += next_merkle_layer(layers[0])
- del layers[0]
+class ctlog:
+    def __init__(self, name, url):
+ self.name = name
- if len(layers[0]) > 1:
- return next_merkle_layer(layers[0])
- return layers[0]
def fetch_all_sth():
sths = {}
@@ -148,7 +113,6 @@ def verify_progress(old, new):
except:
print time.strftime('%H:%M:%S') + " ERROR: Failed to verify progress for " + url
-
def verify_consistency(old, new):
for url in old:
try:
@@ -205,7 +169,6 @@ def check_domain(raw_entry, log=None):
cert_info["log"] = log[8:-1] # strip generic URL stuff
return cert_info
-
def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
try:
print "Initial hash:", hash(str(subtree))
@@ -268,7 +231,11 @@ def fetch_and_build_subtree(old_sth, base_url):
def verify_subtree(old_sth, subtree, base_url):
try:
sth = old_sth[base_url]
- tmp = list(subtree)
+
+ ### BUG ###!!
+ tmp = deepcopy(subtree)
+
+
root = base64.b64encode(reduce_subtree_to_root(tmp)[0])
if root == sth["sha256_root_hash"]:
@@ -385,105 +352,35 @@ def append_file(fn, content):
def main(args):
- # print time.strftime("%H:%M:%S") + " Starting..."
- if args.verify_index is None and not args.build_sth and not args.audit and not args.audit2 \
- and not args.audit3 and not args.audit4 and not args.roots and not args.monitor:
-
- print time.strftime('%H:%M:%S') + " Nothing to do."
- return
- elif args.audit4:
- pass
- else:
- sth = fetch_all_sth()
-
- if args.verify_index is not None:
- for url in base_urls:
- verify_inclusion_by_index(url, int(args.verify_index))
-
- if args.roots:
- print time.strftime('%H:%M:%S') + " Getting accepted Root Certs from all logs..."
- for url in base_urls:
- get_all_roots(url)
-
-
- if args.build_sth:
- print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..."
- for base_url in base_urls:
- subtree = fetch_and_build_subtree(sth, base_url)
- verify_subtree(sth, subtree, base_url)
- # fetch_and_build_tree(sth, base_urls[2])
-
- if args.audit:
- print time.strftime('%H:%M:%S') + " Running auditor1 for " +str(len(base_urls)) + " logs..."
- old_sth = read_sth(args.cur_sth)
- if old_sth:
- verify_consistency(old_sth, sth)
- else:
- print "No old sth found..."
- write_file(args.cur_sth, sth)
-
-
- if args.audit3:
- print time.strftime('%H:%M:%S') + " Running auditor3 for " +str(len(base_urls)) + " logs..."
- while True:
- time.sleep(30)
- new_sth = fetch_all_sth()
- verify_consistency(sth, new_sth)
- verify_inclusion_all(sth, new_sth)
- sth = new_sth
-
- if args.audit2:
- print time.strftime('%H:%M:%S') + " Running auditor2 for " +str(len(base_urls)) + " logs..."
- old_sth = read_sth(args.cur_sth)
- # print "Verifying progress..."
- verify_progress(old_sth, sth)
- if old_sth:
- print "Verifying consistency..."
- verify_consistency(old_sth, sth)
- print "Verifying inclusion..."
- verify_inclusion_all(old_sth, sth)
- write_file(args.cur_sth, sth)
-
- if args.monitor:
- all_subtrees = {}
- print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..."
+ # TODO read state
+ sth = fetch_all_sth()
+ all_subtrees = {}
+ print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..."
+ for url in base_urls:
+ all_subtrees[url] = fetch_and_build_subtree(sth, url)
+ verify_subtree(sth, all_subtrees[url], url)
+
+ while True:
+ time.sleep(30)
+ new_sth = fetch_all_sth()
for url in base_urls:
- all_subtrees[url] = fetch_and_build_subtree(sth, url)
- verify_subtree(sth, all_subtrees[url], url)
-
- while True:
- time.sleep(30)
- new_sth = fetch_all_sth()
- for url in base_urls:
- if url in sth and url in new_sth and sth[url]["tree_size"] != new_sth[url]["tree_size"]:
- # print "Len before:", len(all_subtrees[url])
- all_subtrees[url] = fetch_and_increment_subtree(sth, new_sth, all_subtrees[url], url)
- # print "Len between:", len(all_subtrees[url])
- verify_subtree(new_sth, all_subtrees[url], url)
- # print "Len after:", len(all_subtrees[url])
- print "Old sth:", sth[url]["sha256_root_hash"]
- print "New sth:", new_sth[url]["sha256_root_hash"]
- sth = new_sth
+ if url in sth and url in new_sth and sth[url]["tree_size"] != new_sth[url]["tree_size"]:
+ all_subtrees[url] = fetch_and_increment_subtree(sth, new_sth, all_subtrees[url], url)
+ verify_subtree(new_sth, all_subtrees[url], url)
+ print "Old sth:", sth[url]["sha256_root_hash"]
+ print "New sth:", new_sth[url]["sha256_root_hash"]
+ sth = new_sth
if __name__ == '__main__':
- # try:
- main(parser.parse_args())
- if len(errors) == 0:
- print time.strftime('%H:%M:%S') + " Everything OK."
- sys.exit(NAGIOS_OK)
- else:
- # print "errors found!"
- print_errors(errors)
- sys.exit(NAGIOS_WARN)
- # except:
- # pass
- # finally:
- # # print_timings(timings)
- # print_errors(errors)
-
-
-
+ main(parser.parse_args())
+ # TODO read config
+ if len(errors) == 0:
+ print time.strftime('%H:%M:%S') + " Everything OK."
+ sys.exit(NAGIOS_OK)
+ else:
+ print_errors(errors)
+ sys.exit(NAGIOS_WARN)
diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py
index 7efd2dc..e40e77e 100755
--- a/tools/josef_experimental_auditor.py
+++ b/tools/josef_experimental_auditor.py
@@ -6,6 +6,7 @@ import datetime
import base64
import argparse
import errno
+from copy import deepcopy
from certtools import *
NAGIOS_OK = 0
@@ -208,12 +209,9 @@ def check_domain(raw_entry, log=None):
def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
try:
- print "Initial hash:", hash(str(subtree))
sth = old_sth[base_url]
new_sth = new_sth_in[base_url]
idx = sth["tree_size"]
- tmp_tree = list(subtree)
- print "tmp hash:", hash(str(tmp_tree))
while idx < new_sth["tree_size"]:
pre_size = idx
@@ -227,18 +225,10 @@ def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
idx += len(new_leafs)
print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " \
+ str(idx -1) + " (" + str(len(new_leafs)) +" entries) from " + base_url
-
- print "Before reduction:", hash(str(tmp_tree))
- res_tree = reduce_tree(new_leafs, tmp_tree)
- print "After reduction:", hash(str(res_tree))
-
+ subtree = reduce_tree(new_leafs, subtree)
except:
print "Failed to build subtree :("
-
- if subtree == res_tree:
- print "Final subtree hash", hash(str(subtree))
- print "Final restree hash", hash(str(res_tree))
- return res_tree
+ return subtree
def fetch_and_build_subtree(old_sth, base_url):
try:
@@ -268,7 +258,7 @@ def fetch_and_build_subtree(old_sth, base_url):
def verify_subtree(old_sth, subtree, base_url):
try:
sth = old_sth[base_url]
- tmp = list(subtree)
+ tmp = deepcopy(subtree)
root = base64.b64encode(reduce_subtree_to_root(tmp)[0])
if root == sth["sha256_root_hash"]:
@@ -276,11 +266,8 @@ def verify_subtree(old_sth, subtree, base_url):
else:
print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " \
+ sth["sha256_root_hash"] + ", Tree root: " + root
- # errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for "
- # + base_url + ", tre size " + sth["tree_size"])
except:
print time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url
- # errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url)
def verify_inclusion_by_hash(base_url, leaf_hash):
try:
@@ -456,13 +443,8 @@ def main(args):
new_sth = fetch_all_sth()
for url in base_urls:
if url in sth and url in new_sth and sth[url]["tree_size"] != new_sth[url]["tree_size"]:
- # print "Len before:", len(all_subtrees[url])
all_subtrees[url] = fetch_and_increment_subtree(sth, new_sth, all_subtrees[url], url)
- # print "Len between:", len(all_subtrees[url])
verify_subtree(new_sth, all_subtrees[url], url)
- # print "Len after:", len(all_subtrees[url])
- print "Old sth:", sth[url]["sha256_root_hash"]
- print "New sth:", new_sth[url]["sha256_root_hash"]
sth = new_sth