Diffstat (limited to 'tools')
-rwxr-xr-x  tools/merge.py      133
-rwxr-xr-x  tools/testcase1.py   13
2 files changed, 146 insertions, 0 deletions
diff --git a/tools/merge.py b/tools/merge.py
new file mode 100755
index 0000000..7120d04
--- /dev/null
+++ b/tools/merge.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Kungliga Tekniska Högskolan
+# (KTH Royal Institute of Technology, Stockholm, Sweden).
+# See LICENSE for licensing information.
+
+import json
+import base64
+import urllib
+import urllib2
+import sys
+
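+# New entries are fetched from the storage nodes; the merged log is pushed to the frontend nodes.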
+frontendnodes = ["https://127.0.0.1:8080/"]
+storagenodes = ["https://127.0.0.1:8081/"]
+
+chainsdir = "../rel/mergedb/chains"
+logorderfile = "../rel/mergedb/logorder"
+
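+# Each line of the logorder file is a hex (base16) encoded entry hash.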
+def parselogrow(row):
+ return base64.b16decode(row)
+
+def get_logorder():
+ f = open(logorderfile, "r")
+ return [parselogrow(row.rstrip()) for row in f]
+
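+# Each chain is stored in its own file under chainsdir, named by the hex encoded entry hash.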
+def write_chain(key, value):
+ f = open(chainsdir + "/" + base64.b16encode(key), "w")
+ f.write(value)
+ f.close()
+
+def read_chain(key):
+ f = open(chainsdir + "/" + base64.b16encode(key), "r")
+ value = f.read()
+ f.close()
+ return value
+
+def add_to_logorder(key):
+ f = open(logorderfile, "a")
+ f.write(base64.b16encode(key) + "\n")
+ f.close()
+
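+# Fetch new entries from a storage node.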
+def get_new_entries(baseurl):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/storage/fetchnewentries").read()
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"entries"]
+ print "ERROR: fetchnewentries", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print "ERROR: fetchnewentries", e.read()
+ sys.exit(1)
+
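+# Ask a frontend node for its current position in the log.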
+def get_curpos(baseurl):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/frontend/currentposition").read()
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"position"]
+ print "ERROR: currentposition", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print "ERROR: currentposition", e.read()
+ sys.exit(1)
+
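+# Send log entry hashes, starting at position "start", to a frontend node.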
+def sendlog(baseurl, submission):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/frontend/sendlog",
+ json.dumps(submission)).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR: sendlog", e.read()
+ sys.exit(1)
+ except ValueError, e:
+ print "==== FAILED REQUEST ===="
+ print submission
+ print "======= RESPONSE ======="
+ print result
+ print "========================"
+ raise e
+
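+# Tell a frontend node to update its tree head to the given tree size.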
+def sendsth(baseurl, submission):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/frontend/sendsth",
+ json.dumps(submission)).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR: sendsth", e.read()
+ sys.exit(1)
+ except ValueError, e:
+ print "==== FAILED REQUEST ===="
+ print submission
+ print "======= RESPONSE ======="
+ print result
+ print "========================"
+ raise e
+
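+# Ask a frontend node which entries it is still missing.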
+def get_missingentries(baseurl):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/frontend/missingentries").read()
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"entries"]
+ print "ERROR: missingentries", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print "ERROR: missingentries", e.read()
+ sys.exit(1)
+
+
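+# Read the existing log order and keep a set of the hashes already in the log.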
+logorder = get_logorder()
+certsinlog = set(logorder)
+
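+# Collect all new entries from every storage node.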
+new_entries = [entry for storagenode in storagenodes for entry in get_new_entries(storagenode)]
+
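+# Store each previously unseen entry and append its hash to the log order.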
+for new_entry in new_entries:
+ hash = base64.b64decode(new_entry["hash"])
+ entry = base64.b64decode(new_entry["entry"])
+ if hash not in certsinlog:
+ write_chain(hash, entry)
+ add_to_logorder(hash)
+ logorder.append(hash)
+ certsinlog.add(hash)
+ print "added", base64.b16encode(hash)
+
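+# Bring each frontend node up to date with the new entries and the new tree size.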
+for frontendnode in frontendnodes:
+ curpos = get_curpos(frontendnode)
+ entries = [base64.b64encode(entry) for entry in logorder[curpos:]]
+ sendlog(frontendnode, {"start": curpos, "hashes": entries})
+ missingentries = get_missingentries(frontendnode)
+ print "missing entries:", missingentries
+    # XXX: handling of missing entries is not implemented yet; there is no test case for it
+ sendsth(frontendnode, {"tree_size": len(logorder)})
diff --git a/tools/testcase1.py b/tools/testcase1.py
index eab6c6f..2d5e0e8 100755
--- a/tools/testcase1.py
+++ b/tools/testcase1.py
@@ -1,4 +1,5 @@
#!/usr/bin/env python
+# -*- coding: utf-8 -*-
# Copyright (c) 2014, NORDUnet A/S.
# See LICENSE for licensing information.
@@ -125,12 +126,16 @@ testgroup("cert1")
result1 = do_add_chain(cc1)
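+# Run the merge so that submitted entries are included in the log before the tree size is checked.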
+subprocess.call(["./merge.py"])
+
print_and_check_tree_size(1)
result2 = do_add_chain(cc1)
assert_equal(result2["timestamp"], result1["timestamp"], "timestamp")
+subprocess.call(["./merge.py"])
+
print_and_check_tree_size(1)
# TODO: add invalid cert and check that it generates an error
@@ -142,6 +147,8 @@ testgroup("cert2")
result3 = do_add_chain(cc2)
+subprocess.call(["./merge.py"])
+
print_and_check_tree_size(2)
get_and_validate_proof(result1["timestamp"], cc1, 0, 1)
@@ -151,6 +158,8 @@ testgroup("cert3")
result4 = do_add_chain(cc3)
+subprocess.call(["./merge.py"])
+
print_and_check_tree_size(3)
get_and_validate_proof(result1["timestamp"], cc1, 0, 2)
@@ -161,6 +170,8 @@ testgroup("cert4")
result5 = do_add_chain(cc4)
+subprocess.call(["./merge.py"])
+
print_and_check_tree_size(4)
get_and_validate_proof(result1["timestamp"], cc1, 0, 2)
@@ -172,6 +183,8 @@ testgroup("cert5")
result6 = do_add_chain(cc5)
+subprocess.call(["./merge.py"])
+
print_and_check_tree_size(5)
get_and_validate_proof(result1["timestamp"], cc1, 0, 3)