| author | josef <josef.gson@gmail.com> | 2015-10-20 16:33:16 +0200 |
|---|---|---|
| committer | josef <josef.gson@gmail.com> | 2015-10-20 16:33:16 +0200 |
| commit | 213f5cc0184bdb38cae8a530f263a5e0a7f60557 (patch) | |
| tree | f8996ebd11923376c1f659c93cdaa5e83079d3a5 /monitor/josef_experimental.py | |
| parent | 62035bd9ccb0efb21a234418127e20cc56cd7c0a (diff) | |
setting up to run overlap measurement for venafi on glog
Diffstat (limited to 'monitor/josef_experimental.py')
-rwxr-xr-x | monitor/josef_experimental.py | 96 |
1 files changed, 64 insertions, 32 deletions
diff --git a/monitor/josef_experimental.py b/monitor/josef_experimental.py
index fa553a0..580ee11 100755
--- a/monitor/josef_experimental.py
+++ b/monitor/josef_experimental.py
@@ -7,11 +7,69 @@ from josef_lib import *
 # import leveldb
 import argparse
 import json
+import time
 # from josef_leveldb import *
 from datetime import datetime as dt
 # from josef_monitor import verify_inclusion_by_hash
 from monitor_conf import *
 
+def is_new_timestamp(ts):
+    MAX_TIMEDIFF = 300 # 5 min, allows for some clock skew
+    ts_time = datetime.datetime.fromtimestamp(ts / 1000, UTC()).strftime('%Y-%m-%d %H:%M:%S')
+    start_time = datetime.datetime.utcnow().strftime('2015-10-19 00:00:00')
+    # delta_time = datetime.datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S') - datetime.datetime.strptime(ts_time, '%Y-%m-%d %H:%M:%S')
+    # print delta_time.seconds
+    if ts_time < start_time:
+        return False
+    else:
+        return True
+
+def check_inclusion_by_submission(first, last, source, dests):
+    # print entries
+    for s_log in source:
+        try:
+            entries = []
+            while len(entries) <= last - first:
+                print "Getting " + str(first + len(entries)) + " to " + str(last)
+                entries += get_entries(s_log["url"], first + len(entries), last)["entries"]
+                # print "Fetched entries up to " + str(len(first + len(entries)))
+        except:
+            print "Failed to get entries from " + s_log["name"]
+
+        for i in range(len(entries)):
+            item = entries[i]
+            inclusions = []
+            for d_log in dests:
+                try:
+                    entry = extract_original_entry(item)
+                    if entry[2]:
+                        precert = True
+                    else:
+                        precert = False
+                    submission = []
+
+                    for e in entry[0]:
+                        submission.append(base64.b64encode(e))
+
+                    if entry[2]:
+                        res = add_prechain(d_log["url"], {"chain" : submission})
+                    else:
+                        res = add_chain(d_log["url"], {"chain" : submission})
+                    # print_reply(res, entry)
+                    print res
+
+                    if not is_new_timestamp(res["timestamp"]):
+                        inclusions.append(d_log["name"])
+
+                except KeyboardInterrupt:
+                    sys.exit()
+                except Exception ,e:
+                    print Exception, e
+                    pass
+            s = s_log["name"] + "[" + str(first + i) + "] found in " + str(len(inclusions)) + " logs: " + str(inclusions)
+            print s
+            # log(logfile, s)
+            time.sleep(1)
 
 
 def update_roots(log):
@@ -69,39 +127,13 @@ def update_roots(log):
 
 
 if __name__ == '__main__':
-    for log in CTLOGS:
-        url = log["url"]
-        try:
-            get_entries(url,2001,2001)
-        except Exception, e:
-            print "Failed to get entry from " + log["name"], e
-
-    # dbdir = "tmpdb/"
-    # entry = get_entries(ctlogs[0]["url"], 1,1)["entries"]
-    # print extract_original_entry(entry[0])
-    # for url in [CTLOGS[6]["url"]]:
-    # for url in [CTLOGS[0]["url"],CTLOGS[5]["url"],CTLOGS[6]["url"]]:
-    # for log in CTLOGS:
+    # for log in [CTLOGS[4]]:
     #     url = log["url"]
-    # url = CTLOGS[1]["url"]
-    # entries = get_entries(url, 3638637,3638637)["entries"]
-    # entries = get_entries(url, first, last)["entries"]
-    # tmp_cert_data = []
-    # for item in entries:
-    #     tmp_data = check_domain(item, url)
-    #     entry_hash = get_leaf_hash(base64.b64decode(item["leaf_input"]))
-    #     if tmp_data:
-    #         tmp_data["leaf_hash"] = base64.b64encode(entry_hash)
-    #         tmp_cert_data.append(tmp_data)
-    #         print tmp_data
-    #     new_leafs.append(entry_hash)
-    # if self.dbdir:/
-
-    # db_add_certs(dbdir, tmp_cert_data)
-
-    # if CONFIG.DEFAULT_CERT_FILE:
-    #     append_file(CONFIG.DEFAULT_CERT_FILE, tmp_cert_data)
-    # subtree = reduce_tree(new_leafs, subtree)
+    #     try:
+    #         get_entries(url,8,8)
+    #     except Exception, e:
+    #         print "Failed to get entry from " + log["name"], e
+    check_inclusion_by_submission(1,1,[CTLOGS[3]],[CTLOGS[3]])
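A note on the new timestamp check: `is_new_timestamp` defines `MAX_TIMEDIFF` but never uses it, and it decides by comparing formatted date strings against a hard-coded `2015-10-19` start date; the commented-out `delta_time` lines suggest a time-difference check was the intent. Below is a minimal sketch of that delta-based variant, assuming the SCT timestamp is milliseconds since the epoch and reusing the 300-second window from the diff (the parameter name and default are illustrative, not from the commit):

```python
import time

def is_new_timestamp(ts, max_timediff=300):
    # ts: SCT timestamp in milliseconds since the epoch, as returned in the
    # add-chain / add-pre-chain response (assumption based on the diff).
    # The SCT counts as "new" if it was issued within max_timediff seconds
    # of now, which allows for some clock skew between monitor and log.
    age_seconds = time.time() - ts / 1000.0
    return age_seconds < max_timediff
```

Under this reading, an old SCT means the destination log had already logged the submission, which is why `check_inclusion_by_submission` counts an entry as included when `is_new_timestamp` returns False. As committed, `__main__` resubmits a single entry from `CTLOGS[3]` back to the same log; the overlap measurement named in the commit message would presumably use the Venafi log as the source and a wider list of destination logs.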