summary refs log tree commit diff
path: root/tools/josef_experimental_auditor.py
diff options
context:
space:
mode:
Diffstat (limited to 'tools/josef_experimental_auditor.py')
-rwxr-xr-x  tools/josef_experimental_auditor.py  45
1 file changed, 30 insertions, 15 deletions
diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py
index 96a84ac..690f883 100755
--- a/tools/josef_experimental_auditor.py
+++ b/tools/josef_experimental_auditor.py
@@ -16,16 +16,17 @@ NAGIOS_UNKNOWN = 3
DEFAULT_CUR_FILE = 'all-sth.json'
base_urls = [
- # "https://plausible.ct.nordu.net/",
- # "https://ct1.digicert-ct.com/log/",
- "https://ct.izenpe.com/",
- # "https://log.certly.io/",
- # "https://ct.googleapis.com/aviator/",
- # "https://ct.googleapis.com/pilot/",
- # "https://ct.googleapis.com/rocketeer/",
- "https://ct.ws.symantec.com/",
- # "https://ctlog.api.venafi.com/",
- ]
+ # "https://plausible.ct.nordu.net/",
+ # "https://ct1.digicert-ct.com/log/",
+ "https://ct.izenpe.com/",
+ # "https://log.certly.io/",
+ # "https://ct.googleapis.com/aviator/",
+ # "https://ct.googleapis.com/pilot/",
+ # "https://ct.googleapis.com/rocketeer/",
+ "https://ct.ws.symantec.com/",
+ "https://ctlog.api.venafi.com/",
+]
+
parser = argparse.ArgumentParser(description="")
parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
parser.add_argument('--monitor', action='store_true', help="run full monitoring for all logs")
@@ -44,6 +45,15 @@ parser.add_argument('--cur-sth',
timings = {}
errors = []
+monitored_domains = [
+ # "google.com",
+ "preishelden.de",
+ "liu.se",
+ "nordu.net",
+ "symantec.com",
+ ".se",
+]
+
class UTC(datetime.tzinfo):
def utcoffset(self, dt):
return datetime.timedelta(hours=0)
@@ -186,6 +196,14 @@ def verify_inclusion_all(old, new):
print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
+def check_domain(raw_entry):
+ orig_entry = extract_original_entry(raw_entry)
+ cert_info = get_cert_info(orig_entry[0][0])
+ for md in monitored_domains:
+ if md in cert_info["subject"]:
+ print md + " (" + cert_info["subject"].split("CN=")[1] + ") certifed by " + cert_info["issuer"]
+
+
def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
try:
sth = old_sth[base_url]
@@ -199,6 +217,7 @@ def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
new_leafs = []
for item in entries:
+ check_domain(item)
new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
idx += len(new_leafs)
print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
@@ -215,15 +234,12 @@ def fetch_and_build_subtree(old_sth, base_url):
subtree = [[]]
idx = 0
- # print time.strftime('%H:%M:%S') + " Getting all entries from " + base_url
while idx < sth["tree_size"]:
pre_size = idx
- ### DEBUG!!
- # entries = get_entries(base_url, idx, sth["tree_size"]-100)["entries"]
entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
-
new_leafs = []
for item in entries:
+ check_domain(item)
new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
idx += len(new_leafs)
print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
@@ -427,7 +443,6 @@ def main(args):
if args.monitor:
# Run for one log only
- # url = base_urls[0]
all_subtrees = {}
print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..."
for url in base_urls: