-rwxr-xr-x  monitor/josef_logreader.py |   4
-rwxr-xr-x  monitor/josef_mover.py     | 120
2 files changed, 102 insertions(+), 22 deletions(-)
diff --git a/monitor/josef_logreader.py b/monitor/josef_logreader.py
index b2bafe0..3d28146 100755
--- a/monitor/josef_logreader.py
+++ b/monitor/josef_logreader.py
@@ -107,8 +107,8 @@ def print_overlap(l):
for line in log:
# line = item[TIME_LEN:]
logs = json.loads(line.split("logs: ")[-1][:-1].replace("'", '"'))
- # if not source in logs: # don't check entries that were submitted after measurement start
- # break
+ if not source in logs: # don't check entries that were submitted after measurement start
+ break
if len(logs) == 1:
metadata["unique"] += 1
if "pilot" in logs or "rocketeer" in logs or "aviator" in logs:
diff --git a/monitor/josef_mover.py b/monitor/josef_mover.py
index 7c5eca1..8d566cc 100755
--- a/monitor/josef_mover.py
+++ b/monitor/josef_mover.py
@@ -140,7 +140,7 @@ def check_inclusion_by_submission(first, last, source, dest, logfile):
for i in range(len(entries)):
item = entries[i]
inclusions = []
- for d_log in dests:
+ for d_log in dest:
try:
entry = extract_original_entry(item)
if entry[2]:
@@ -168,44 +168,33 @@ def check_inclusion_by_submission(first, last, source, dest, logfile):
s = s_log["name"] + "[" + str(first + i) + "] found in " + str(len(inclusions)) + " logs: " + str(inclusions)
print s
# logfile = OUTPUT_DIR + s_log["name"] + "_overlap.log"
- log(logfile, s)
+ if logfile:
+ log(logfile, s)
time.sleep(1) # to ease up on rate-limiting...
-
-
-def log(fn, string):
- logfile = fn
- s = time_str() + " " + string
- with open(logfile, 'a') as f:
- f.write(s + "\n")
- f.close()
-
-
-if __name__ == "__main__":
- source = [CTLOGS[8]]
- dests = CTLOGS
+def check_overlap(source, dests):
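+ # split the source log's entry range into chunks and check each chunk's
+ # overlap with the destination logs in a separate process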
process_count = 1
processes = []
for tmp_log in source:
sth = get_sth(tmp_log["url"])
- first = 0
+ first = 6125
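+ # NOTE: hardcoded start index, presumably resuming an earlier run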
last = int(sth["tree_size"])
# last = 8
print "last:",last
# split into tasks
- chunk_size = last/process_count
+ chunk_size = (last - first)/process_count
print "chunk_size:", chunk_size
for i in range(process_count):
if i + 1 == process_count:
- tmp_start = i * chunk_size
+ tmp_start = first + (i * chunk_size)
tmp_last = last - 1
else:
- tmp_start = i * chunk_size
- tmp_last = (i+1) * chunk_size - 1
+ tmp_start = first + (i * chunk_size)
+ tmp_last = first + ((i+1) * chunk_size - 1)
# execute in parallel
filename = OUTPUT_DIR + tmp_log["name"] + "_overlap_" + str(i) + ".tmp"
@@ -228,6 +217,97 @@ if __name__ == "__main__":
infile.close()
os.remove(filename)
outfile.close()
+
+
+def check_stress_submission(first, last, source, dest, logfile):
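+ # resubmit entries [first, last] from each source log to every
+ # destination log and report which logs already included them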
+ # print entries
+ for s_log in source:
+ try:
+ entries = []
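+ # a log may return fewer entries than requested per get-entries
+ # call, so keep fetching until the whole range is in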
+ while len(entries) < last - first:
+ print "Getting " + str(first + len(entries)) + " to " + str(last)
+ entries += get_entries(s_log["url"], first + len(entries), last)["entries"]
+ # print "Fetched entries up to " + str(len(first + len(entries)))
+ except:
+ print "Failed to get entries from " + s_log["name"]
+
+ for i in range(len(entries)):
+ item = entries[i]
+ inclusions = []
+ for d_log in dest:
+ try:
+ entry = extract_original_entry(item)
+ if entry[2]:
+ precert = True
+ else:
+ precert = False
+ submission = []
+
+ for e in entry[0]:
+ submission.append(base64.b64encode(e))
+
+ if entry[2]:
+ res = add_prechain(d_log["url"], {"chain" : submission})
+ else:
+ res = add_chain(d_log["url"], {"chain" : submission})
+ # print_reply(res, entry)
+ # print res
+
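+ # a timestamp that is not fresh means the log had already
+ # included this entry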
+ if not is_new_timestamp(res["timestamp"]):
+ inclusions.append(d_log["name"])
+
+ except KeyboardInterrupt:
+ sys.exit()
+ except Exception, e:
+ print Exception, e
+
+ s = s_log["name"] + "[" + str(first + i) + "] found in " + str(len(inclusions)) + " logs: " + str(inclusions)
+ print s
+
+
+def stress_test(dest):
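+ # fire PROCESS_COUNT parallel submission rounds at each log to probe
+ # rate-limiting and duplicate handling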
+ NR_OF_ENTRIES = 8
+ ENTRY_OFFSET = 1010
+ PROCESS_COUNT = 200
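+ # each process resubmits the same NR_OF_ENTRIES entries, so every log
+ # receives up to PROCESS_COUNT * NR_OF_ENTRIES near-simultaneous submissions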
+
+ for log in dest:
+ print "Stress testing " + log["name"]
+
+ processes = []
+ for i in range(PROCESS_COUNT):
+ # execute in parallel
+ p = multiprocessing.Process(target=check_stress_submission, \
+ args=(ENTRY_OFFSET, ENTRY_OFFSET + NR_OF_ENTRIES, [log], [log], None))
+ p.start()
+ processes.append(p)
+
+ for p in processes:
+ p.join()
+ # print "Done!"
+
+
+ # TODO get entries from log
+ # TODO parallelize
+ # TODO submit really fast
+ # TODO catch errors
+ pass
+
+
+def log(fn, string):
+ logfile = fn
+ s = time_str() + " " + string
+ with open(logfile, 'a') as f:
+ f.write(s + "\n")
+
+
+if __name__ == "__main__":
+ source = [CTLOGS[3]]
+ dests = CTLOGS
+
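+ # stress test a single log; switch to check_overlap to compare one
+ # source log against all configured logs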
+ stress_test([CTLOGS[0]])
+ # check_overlap(source, dests)