summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorjosef <josef@guest108.dk-kas.nordu.net>2015-09-03 11:51:42 +0200
committerjosef <josef@guest108.dk-kas.nordu.net>2015-09-03 11:51:42 +0200
commit8db244bd474ca5334933da927968ae4d1c3af525 (patch)
tree997e97348b1a3d82988260e30042096873fd0e22
parent7fccd81a4903507cbd7f5163ab08d37024d9499f (diff)
bugfix
-rwxr-xr-xtools/josef_experimental_auditor.py128
-rwxr-xr-xtools/josef_nagios_auditor.py2
2 files changed, 70 insertions, 60 deletions
diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py
index 120f4e6..1a5b669 100755
--- a/tools/josef_experimental_auditor.py
+++ b/tools/josef_experimental_auditor.py
@@ -15,7 +15,8 @@ NAGIOS_UNKNOWN = 3
DEFAULT_CUR_FILE = 'all-sth.json'
-base_urls = ["https://plausible.ct.nordu.net/",
+base_urls = [
+ "https://plausible.ct.nordu.net/",
"https://ct1.digicert-ct.com/log/",
"https://ct.izenpe.com/",
"https://log.certly.io/",
@@ -23,7 +24,7 @@ base_urls = ["https://plausible.ct.nordu.net/",
"https://ct.googleapis.com/pilot/",
"https://ct.googleapis.com/rocketeer/",
"https://ct.ws.symantec.com/",
- "https://ctlog.api.venafi.com/",
+ # "https://ctlog.api.venafi.com/",
]
logkeys = {}
@@ -140,34 +141,38 @@ def fetch_all_sth():
def verify_progress(old, new):
print "Verifying progress"
- for url in new:
- if new and old and new[url] and old[url]:
- if new[url]["tree_size"] == old[url]["tree_size"]:
- if old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]:
- errors.append(time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url)
- # print "tree size:", newsth["tree_size"],
- # print "old hash:", b64_to_b16(oldsth["sha256_root_hash"])
- # print "new hash:", b64_to_b16(newsth["sha256_root_hash"])
- # sys.exit(NAGIOS_CRIT)
- # TODO
- elif new[url]["tree_size"] < old[url]["tree_size"]:
- # if not args.allow_lag:
- errors.append(time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \
- (new[url]["tree_size"], old[url]["tree_size"]))
- # sys.exit(NAGIOS_CRIT)
- if new[url]:
- age = time.time() - new[url]["timestamp"]/1000
- sth_time = datetime.datetime.fromtimestamp(new[url]['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
- # roothash = b64_to_b16(sth['sha256_root_hash'])
- roothash = new[url]['sha256_root_hash']
- if age > 24 * 3600:
- errors.append(time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time))
- elif age > 12 * 3600:
- errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 12h: %s UTC" % (url, sth_time))
- elif age > 6 * 3600:
- errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 6h: %s UTC" % (url, sth_time))
- # elif age > 2 * 3600:
- # errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 2h: %s UTC" % (url, sth_time))
+ try:
+ for url in new:
+ if new and old and new[url] and old[url]:
+ if new[url]["tree_size"] == old[url]["tree_size"]:
+ if old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]:
+ errors.append(time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url)
+ # print "tree size:", newsth["tree_size"],
+ # print "old hash:", b64_to_b16(oldsth["sha256_root_hash"])
+ # print "new hash:", b64_to_b16(newsth["sha256_root_hash"])
+ # sys.exit(NAGIOS_CRIT)
+ # TODO
+ elif new[url]["tree_size"] < old[url]["tree_size"]:
+ # if not args.allow_lag:
+ errors.append(time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \
+ (new[url]["tree_size"], old[url]["tree_size"]))
+ # sys.exit(NAGIOS_CRIT)
+ if new[url]:
+ age = time.time() - new[url]["timestamp"]/1000
+ sth_time = datetime.datetime.fromtimestamp(new[url]['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
+ # roothash = b64_to_b16(sth['sha256_root_hash'])
+ roothash = new[url]['sha256_root_hash']
+ if age > 24 * 3600:
+ errors.append(time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time))
+ elif age > 12 * 3600:
+ errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 12h: %s UTC" % (url, sth_time))
+ elif age > 6 * 3600:
+ errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 6h: %s UTC" % (url, sth_time))
+ # elif age > 2 * 3600:
+ # errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 2h: %s UTC" % (url, sth_time))
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to verify progress for " + url
+
def verify_consistency(old, new):
for url in old:
@@ -200,7 +205,7 @@ def verify_inclusion_all(old, new):
entries = []
while len(entries) + old[url]["tree_size"]!= new[url]["tree_size"]:
- entries = get_entries(url, str(int(old[url]["tree_size"]) + len(entries)), new[url]["tree_size"] -1)["entries"]
+ entries += get_entries(url, str(int(old[url]["tree_size"]) + len(entries)), new[url]["tree_size"] -1)["entries"]
print "Got " + str(len(entries)) + " entries..."
success = True
@@ -219,36 +224,41 @@ def verify_inclusion_all(old, new):
errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
def fetch_and_build_tree(old_sth, base_url):
- sth = old_sth[base_url]
- subtree = [[]]
- idx = 0
-
- res_strings = [""]
+ try:
+ sth = old_sth[base_url]
+ subtree = [[]]
+ idx = 0
+
+ res_strings = [""]
+
+ print time.strftime('%H:%M:%S') + " Getting all entries from " + base_url
+ while idx < sth["tree_size"]:
+ pre_size = idx
+ entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
+
+ new_leafs = []
+ for item in entries:
+ new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
+ idx += len(new_leafs)
+ print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
+ subtree = reduce_tree(new_leafs, subtree)
+
+ root = base64.b64encode(reduce_subtree_to_root(subtree)[0])
+
+ if root == sth["sha256_root_hash"]:
+ print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
+ res_strings.append("STH for " + base_url + " built successfully.")
+ else:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " + sth["sha256_root_hash"] + ", Tree root: " + root
+ res_strings.append(time.strftime('%H:%M:%S') + " " + base_url + " Failed! STH root: " + sth["sha256_root_hash"] + " Calculated root: " + root)
+                errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for " + base_url + ", tree size " + str(sth["tree_size"]))
- print time.strftime('%H:%M:%S') + " Getting all entries from " + base_url
- while idx < sth["tree_size"]:
- pre_size = idx
- entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
-
- new_leafs = []
- for item in entries:
- new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
- idx += len(new_leafs)
- print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
- subtree = reduce_tree(new_leafs, subtree)
-
- root = base64.b64encode(reduce_subtree_to_root(subtree)[0])
-
- if root == sth["sha256_root_hash"]:
- print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
- res_strings.append("STH for " + base_url + " built successfully.")
- else:
- print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " + sth["sha256_root_hash"] + ", Tree root: " + root
- res_strings.append(time.strftime('%H:%M:%S') + " " + base_url + " Failed! STH root: " + sth["sha256_root_hash"] + " Calculated root: " + root)
- errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for " + base_url + ", tre size " + sth["tree_size"])
+ for item in res_strings:
+ print item + "\n"
- for item in res_strings:
- print item + "\n"
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url)
def verify_inclusion_by_hash(base_url, leaf_hash):
try:
diff --git a/tools/josef_nagios_auditor.py b/tools/josef_nagios_auditor.py
index e950c8b..05390f8 100755
--- a/tools/josef_nagios_auditor.py
+++ b/tools/josef_nagios_auditor.py
@@ -165,7 +165,7 @@ def verify_inclusion_all(url, old, new):
if old["tree_size"]!= new["tree_size"]:
entries = []
while len(entries) + old[url]["tree_size"]!= new[url]["tree_size"]:
- entries = get_entries(url, str(int(old[url]["tree_size"]) + len(entries)), new[url]["tree_size"] -1)["entries"]
+ entries += get_entries(url, str(int(old[url]["tree_size"]) + len(entries)), new[url]["tree_size"] -1)["entries"]
success = True
for i in entries: