Diffstat (limited to 'monitor')
-rwxr-xr-x  monitor/josef_experimental.py  96
-rw-r--r--  monitor/josef_lib.py            2
-rwxr-xr-x  monitor/josef_logreader.py      4
-rwxr-xr-x  monitor/josef_mover.py          8
4 files changed, 72 insertions, 38 deletions
diff --git a/monitor/josef_experimental.py b/monitor/josef_experimental.py
index fa553a0..580ee11 100755
--- a/monitor/josef_experimental.py
+++ b/monitor/josef_experimental.py
@@ -7,11 +7,69 @@ from josef_lib import *
# import leveldb
import argparse
import json
+import time
# from josef_leveldb import *
from datetime import datetime as dt
# from josef_monitor import verify_inclusion_by_hash
from monitor_conf import *
+
+def is_new_timestamp(ts):
+    MAX_TIMEDIFF = 300 # 5 min, allows for some clock skew
+    ts_time = datetime.datetime.fromtimestamp(ts / 1000, UTC()).strftime('%Y-%m-%d %H:%M:%S')
+    start_time = '2015-10-19 00:00:00' # measurement start; this timestamp format sorts chronologically
+    # delta_time = datetime.datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S') - datetime.datetime.strptime(ts_time, '%Y-%m-%d %H:%M:%S')
+    # print delta_time.seconds
+    if ts_time < start_time:
+        return False
+    else:
+        return True
+
+def check_inclusion_by_submission(first, last, source, dests):
+    # print entries
+    for s_log in source:
+        try:
+            entries = []
+            while len(entries) <= last - first:
+                print "Getting " + str(first + len(entries)) + " to " + str(last)
+                entries += get_entries(s_log["url"], first + len(entries), last)["entries"]
+                # print "Fetched entries up to " + str(first + len(entries))
+        except:
+            print "Failed to get entries from " + s_log["name"]
+
+        for i in range(len(entries)):
+            item = entries[i]
+            inclusions = []
+            for d_log in dests:
+                try:
+                    entry = extract_original_entry(item)
+                    if entry[2]:
+                        precert = True
+                    else:
+                        precert = False
+                    submission = []
+
+                    for e in entry[0]:
+                        submission.append(base64.b64encode(e))
+
+                    if precert:
+                        res = add_prechain(d_log["url"], {"chain" : submission})
+                    else:
+                        res = add_chain(d_log["url"], {"chain" : submission})
+                    # print_reply(res, entry)
+                    print res
+
+                    if not is_new_timestamp(res["timestamp"]):
+                        inclusions.append(d_log["name"])
+
+                except KeyboardInterrupt:
+                    sys.exit()
+                except Exception, e:
+                    print Exception, e
+            s = s_log["name"] + "[" + str(first + i) + "] found in " + str(len(inclusions)) + " logs: " + str(inclusions)
+            print s
+            # log(logfile, s)
+            time.sleep(1)
+
def update_roots(log):
@@ -69,39 +127,13 @@ def update_roots(log):
if __name__ == '__main__':
-    for log in CTLOGS:
-        url = log["url"]
-        try:
-            get_entries(url,2001,2001)
-        except Exception, e:
-            print "Failed to get entry from " + log["name"], e
-
-    # dbdir = "tmpdb/"
-    # entry = get_entries(ctlogs[0]["url"], 1,1)["entries"]
-    # print extract_original_entry(entry[0])
-    # for url in [CTLOGS[6]["url"]]:
-    # for url in [CTLOGS[0]["url"],CTLOGS[5]["url"],CTLOGS[6]["url"]]:
-    # for log in CTLOGS:
+    # for log in [CTLOGS[4]]:
    #     url = log["url"]
-    #     url = CTLOGS[1]["url"]
-    #     entries = get_entries(url, 3638637,3638637)["entries"]
-    #     entries = get_entries(url, first, last)["entries"]
-    #     tmp_cert_data = []
-    #     for item in entries:
-    #         tmp_data = check_domain(item, url)
-    #         entry_hash = get_leaf_hash(base64.b64decode(item["leaf_input"]))
-    #         if tmp_data:
-    #             tmp_data["leaf_hash"] = base64.b64encode(entry_hash)
-    #             tmp_cert_data.append(tmp_data)
-    #             print tmp_data
-    #         new_leafs.append(entry_hash)
-    #     if self.dbdir:
-
-    #         db_add_certs(dbdir, tmp_cert_data)
-
-    #     if CONFIG.DEFAULT_CERT_FILE:
-    #         append_file(CONFIG.DEFAULT_CERT_FILE, tmp_cert_data)
-    #     subtree = reduce_tree(new_leafs, subtree)
+    #     try:
+    #         get_entries(url,8,8)
+    #     except Exception, e:
+    #         print "Failed to get entry from " + log["name"], e
+    check_inclusion_by_submission(1, 1, [CTLOGS[3]], [CTLOGS[3]])
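
Note: the new check_inclusion_by_submission() leans on RFC 6962 add-chain behavior, where a log that already holds a chain may return its original SCT rather than a fresh one, so an SCT timestamp older than the measurement start signals that the destination log already had the entry. A minimal sketch of that test, assuming the same millisecond SCT timestamps and the 2015-10-19 cutoff used above (naive UTC, no skew allowance):

    import datetime

    CUTOFF = datetime.datetime(2015, 10, 19)  # measurement start, as above

    def seen_before(sct_timestamp_ms):
        # SCT timestamps are milliseconds since the UNIX epoch, in UTC
        ts = datetime.datetime.utcfromtimestamp(sct_timestamp_ms / 1000.0)
        # an SCT issued before the cutoff means the entry predates the measurement
        return ts < CUTOFF
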
diff --git a/monitor/josef_lib.py b/monitor/josef_lib.py
index e7b1e05..922636c 100644
--- a/monitor/josef_lib.py
+++ b/monitor/josef_lib.py
@@ -348,7 +348,7 @@ def add_chain(baseurl, submission):
        result = urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission)).read()
        return json.loads(result)
    except urllib2.HTTPError, e:
-        # print "ERROR", e.code,":", e.read()
+        return "ERROR " + str(e.code) + " : " + e.read()
        # if e.code == 400:
        return None
        # sys.exit(1)
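
Note: this change surfaces HTTP errors instead of swallowing them, but it also splits the return type: callers of add_chain() now get either a parsed JSON dict or an "ERROR <code> : <body>" string (and the "return None" below the new line becomes unreachable). A caller-side sketch under that assumption, where log_url and submission are placeholders:

    res = add_chain(log_url, {"chain": submission})
    if res is None or isinstance(res, basestring):
        # HTTP-level failure: None or an "ERROR <code> : <body>" string
        print "add-chain failed:", res
    else:
        print "SCT timestamp:", res["timestamp"]
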
diff --git a/monitor/josef_logreader.py b/monitor/josef_logreader.py
index 3d28146..b2bafe0 100755
--- a/monitor/josef_logreader.py
+++ b/monitor/josef_logreader.py
@@ -107,8 +107,8 @@ def print_overlap(l):
    for line in log:
        # line = item[TIME_LEN:]
        logs = json.loads(line.split("logs: ")[-1][:-1].replace("'", '"'))
-        if not source in logs: # don't check entries that were submitted after measurement start
-            break
+        # if not source in logs: # don't check entries that were submitted after measurement start
+        #     break
        if len(logs) == 1:
            metadata["unique"] += 1
        if "pilot" in logs or "rocketeer" in logs or "aviator" in logs:
diff --git a/monitor/josef_mover.py b/monitor/josef_mover.py
index 63a155f..7c5eca1 100755
--- a/monitor/josef_mover.py
+++ b/monitor/josef_mover.py
@@ -59,6 +59,7 @@ def is_new_timestamp(ts):
    else:
        return True
+
def check_inclusion_all(first, last, source, dest):
    for s_log in source:
        url = s_log["url"]
@@ -168,6 +169,7 @@ def check_inclusion_by_submission(first, last, source, dest, logfile):
            print s
            # logfile = OUTPUT_DIR + s_log["name"] + "_overlap.log"
            log(logfile, s)
+            time.sleep(1) # to ease up on rate-limiting...
@@ -181,9 +183,9 @@ def log(fn, string):
if __name__ == "__main__":
-    source = [CTLOGS[3]]
+    source = [CTLOGS[8]]
    dests = CTLOGS
-    process_count = 4
+    process_count = 1
    processes = []
    for tmp_log in source:
        sth = get_sth(tmp_log["url"])
@@ -191,7 +193,7 @@ if __name__ == "__main__":
        first = 0
        last = int(sth["tree_size"])
-        last = 8
+        # last = 8
        print "last:", last

        # split into tasks
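
Note: re-enabling the full range means a run now covers the whole tree_size, and with process_count reduced to 1 plus the one-second sleep per entry this is deliberately slow, on the order of tree_size seconds per source log. The actual task split lives below the shown context; a plausible chunking of [first, last] across workers looks like this (sketch only, not necessarily the repo's code):

    def split_range(first, last, workers):
        # divide [first, last] into `workers` contiguous, near-equal chunks
        total = last - first + 1
        chunk = (total + workers - 1) // workers  # ceiling division
        return [(start, min(start + chunk - 1, last))
                for start in range(first, last + 1, chunk)]

    # e.g. split_range(0, 9, 4) -> [(0, 2), (3, 5), (6, 8), (9, 9)]
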