#!/usr/bin/python
# -*- coding: utf-8 -*-

# Submits entries fetched from one CT log to a set of destination logs and,
# based on the SCT timestamps returned, checks whether the entries were
# already known there (log overlap / inclusion checking).

import sys
import time
import datetime
import base64
import os
import json
import multiprocessing
from precerttools import cleanprecert
from monitor_conf import *
from josef_lib import *


def print_reply(rep, entry):
    t = datetime.datetime.fromtimestamp(rep['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
    log_id = rep["id"]
    for log in CTLOGS:
        if str(log_id) == log["id"]:
            l = log
            break
    else:
        l = {"name": "Log not found: " + str(log_id)}

    s = "Time: " + t
    if is_new_timestamp(rep["timestamp"]):
        print s, "(NEW)"
    else:
        print s, "(OLD)"

    if entry[2]:
        print "Type: Precert"
        signed_entry = pack_precert(cleanprecert(entry[0][0]), entry[2])
    else:
        print "Type: Cert"
        signed_entry = pack_cert(entry[0][0])

    try:
        key = base64.b64decode(l["key"])
        check_sct_signature(l["url"], signed_entry, rep, entry[2], key)
        print "Signature: OK"
    except:
        print "Could not verify signature!"
    print ""


def is_new_timestamp(ts):
    MAX_TIMEDIFF = 300  # 5 min, allows for some clock skew
    ts_time = datetime.datetime.fromtimestamp(ts / 1000, UTC()).strftime('%Y-%m-%d %H:%M:%S')
    # Timestamps earlier than this hard-coded cutoff are treated as "old",
    # i.e. the entry was already known to the log before this test started.
    start_time = "2015-11-02 00:00:00"
    # delta_time = datetime.datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S') - datetime.datetime.strptime(ts_time, '%Y-%m-%d %H:%M:%S')
    # print delta_time.seconds

    if ts_time < start_time:
        return False
    else:
        return True


def check_inclusion_all(first, last, source, dest):
    for s_log in source:
        url = s_log["url"]
        entries = []
        while len(entries) + first != last + 1:
            entries += get_entries(url, str(first + len(entries)), last)["entries"]
            # print "Got " + str(len(entries)) + " entries..."

        for e in entries:
            inclusions = []
            h = get_leaf_hash(base64.b64decode(e["leaf_input"]))
            for log in dest:
                if verify_inclusion_by_hash(log["url"], h):
                    inclusions.append(log["name"])
            print "Entry found in " + str(len(inclusions)) + " logs: ", inclusions


def move_entry(first, last, source, dest):
    for s_log in source:
        entries = get_entries(s_log["url"], first, last)["entries"]
        print "\n\nSource: " + s_log["name"] + "\n"
        for i in range(len(entries)):
            item = entries[i]
            inclusions = []
            for d_log in dest:
                print "Log: " + d_log["name"]
                try:
                    entry = extract_original_entry(item)
                    submission = []
                    for e in entry[0]:
                        submission.append(base64.b64encode(e))

                    # entry[2] holds the precert metadata; it is only set for precerts.
                    if entry[2]:
                        res = add_prechain(d_log["url"], {"chain": submission})
                    else:
                        res = add_chain(d_log["url"], {"chain": submission})
                    print_reply(res, entry)

                    if not is_new_timestamp(res["timestamp"]):
                        inclusions.append(d_log["name"])

                except KeyboardInterrupt:
                    sys.exit()
                except:
                    print "FAILED!\n"
            print s_log["name"] + "[" + str(first + i) + "] found in " + str(len(inclusions)) + " logs: ", inclusions


def check_overlap(source, dests):
    PROCESS_COUNT = 50
    for log in source:
        print "Checking overlap from " + log["name"]
        sth = get_sth(log["url"])
        first = 0
        # last = int(sth["tree_size"])
        last = 23000

        # Split the range into one chunk per worker process.
        processes = []
        chunk_size = (last - first) / PROCESS_COUNT
        print "chunk_size:", chunk_size
        for i in range(PROCESS_COUNT):
            tmp_start = first + (i * chunk_size)
            if i + 1 == PROCESS_COUNT:
                # The last chunk absorbs any remainder.
                tmp_last = last - 1
            else:
                tmp_last = first + ((i + 1) * chunk_size - 1)
            filename = OUTPUT_DIR + log["name"] + "_overlap_" + str(i) + ".tmp"
            p = multiprocessing.Process(target=check_stress_submission,
                                        args=(tmp_start, tmp_last, [log], dests, filename))
            p.start()
            processes.append(p)
            time.sleep(1.3)

        for p in processes:
            p.join()

        # Merge the per-process result files into a single log file.
        with open(OUTPUT_DIR + log["name"] + "_overlap.log", "wb") as outfile:
            for i in range(PROCESS_COUNT):
                filename = OUTPUT_DIR + log["name"] + "_overlap_" + str(i) + ".tmp"
                if os.path.exists(filename):
                    with open(filename, "rb") as infile:
                        outfile.write(infile.read())
                    os.remove(filename)


def check_submission_inner(d_log, item, inclusions):
    entry = extract_original_entry(item)
    submission = []
    for e in entry[0]:
        submission.append(base64.b64encode(e))

    # entry[2] holds the precert metadata; it is only set for precerts.
    if entry[2]:
        res = add_prechain(d_log["url"], {"chain": submission})
    else:
        res = add_chain(d_log["url"], {"chain": submission})

    # An old timestamp means the log already knew this entry.
    if not is_new_timestamp(res["timestamp"]):
        inclusions.append(d_log["name"])


def check_submission_outer(first, last, s_log, dest, logfile=None):
    MAX_CHUNK_SIZE = 65
    MAX_RETRIES = 1
    idx = 0

    while first + idx <= last:
        if first + idx + MAX_CHUNK_SIZE < last:
            tmp_last = first + idx + MAX_CHUNK_SIZE - 1
        else:
            tmp_last = last

        retries = 0
        while retries <= MAX_RETRIES:
            try:
                print "Getting " + str(first + idx) + " to " + str(tmp_last)
                entries = get_entries(s_log["url"], first + idx, tmp_last)["entries"]
                break
            except Exception, e:
                time.sleep(1)
                if retries == MAX_RETRIES:
                    print "ERROR, could not download entries " + str(first + idx) + " to " + str(tmp_last) + ": " + str(e)
                    raise Exception("Failed to get entries")
                else:
                    print "Failed, retrying...(" + str(retries) + ") " + str(e)
                retries += 1

        for i in range(len(entries)):
            item = entries[i]
            inclusions = []
            for d_log in dest:
                retries = 0
                while retries <= MAX_RETRIES:
                    try:
                        check_submission_inner(d_log, item, inclusions)
                        break
                    except KeyboardInterrupt:
                        sys.exit()
                    except Exception, e:
                        time.sleep(1)  # Wait a bit before retrying...
                        retries += 1
            s = s_log["name"] + "[" + str(first + idx + i) + "] found in " + str(len(inclusions)) + " logs: " + str(inclusions)
            print s
            if logfile:
                log(logfile, s)
        idx += len(entries)


def check_stress_submission(first, last, source, dest, filename=None):
    MAX_RETRIES = 4
    for s_log in source:
        retries = 0
        while retries <= MAX_RETRIES:
            try:
                check_submission_outer(first, last, s_log, dest, filename)
                break
            except KeyboardInterrupt:
                sys.exit()
            except Exception, e:
                time.sleep(1)  # Wait a bit before retrying...
                if retries == MAX_RETRIES:
                    print "FAILED!", e
                else:
                    print "Failed, retrying...(" + str(retries) + ")"
                retries += 1


def stress_test(dest):
    NR_OF_ENTRIES = 5
    ENTRY_OFFSET = 5030
    PROCESS_COUNT = 10
    for log in dest:
        print "Stress testing " + log["name"]
        processes = []
        for i in range(PROCESS_COUNT):
            # Execute the submissions in parallel.
            p = multiprocessing.Process(target=check_stress_submission,
                                        args=(ENTRY_OFFSET, ENTRY_OFFSET + NR_OF_ENTRIES, [log], [log]))
            p.start()
            processes.append(p)

        for p in processes:
            p.join()


def log(fn, string):
    s = time_str() + " " + string
    with open(fn, 'a') as f:
        f.write(s + "\n")


if __name__ == "__main__":
    source = CTLOGS[0]
    dests = CTLOGS
    # stress_test([CTLOGS[0]])
    # check_overlap(source, dests)
    check_submission_outer(10243750, 10243750, source, dests)