summaryrefslogtreecommitdiff
path: root/tools/merge.py
diff options
context:
space:
mode:
authorMagnus Ahltorp <map@kth.se>2014-10-20 14:33:41 +0200
committerLinus Nordberg <linus@nordberg.se>2014-10-24 15:01:33 +0200
commit194c9aa3b8c463fa487dc9ef7e172332a8d94d72 (patch)
tree1d7ca4494f826677e721707e306c4e06ce7a20d1 /tools/merge.py
parent86419f500e74386a4613d4cee0aab66e129d7ed5 (diff)
Added external merging supportmap-external-merge2
Diffstat (limited to 'tools/merge.py')
-rwxr-xr-xtools/merge.py133
1 files changed, 133 insertions, 0 deletions
diff --git a/tools/merge.py b/tools/merge.py
new file mode 100755
index 0000000..7120d04
--- /dev/null
+++ b/tools/merge.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Kungliga Tekniska Högskolan
+# (KTH Royal Institute of Technology, Stockholm, Sweden).
+# See LICENSE for licensing information.
+
+import json
+import base64
+import urllib
+import urllib2
+import sys
+
# Merge endpoints: frontend nodes receive the merged log order and STH,
# storage nodes supply newly submitted entries.
# NOTE(review): hard-coded localhost defaults -- presumably overridden per
# deployment; confirm.
frontendnodes = ["https://127.0.0.1:8080/"]
storagenodes = ["https://127.0.0.1:8081/"]

# Local merge database layout: one file per chain under chainsdir (named by
# the hex-encoded entry hash), plus a logorder file listing hex-encoded
# entry hashes, one per line, in log order.
chainsdir = "../rel/mergedb/chains"
logorderfile = "../rel/mergedb/logorder"
+
def parselogrow(row):
    """Decode one logorder line (a hex-encoded entry hash) into raw bytes.

    casefold=True accepts lower-case as well as upper-case hex digits;
    the original only accepted the upper-case form emitted by b16encode,
    so this is a backward-compatible generalization.
    """
    return base64.b16decode(row, casefold=True)
+
def get_logorder():
    """Read the logorder file and return the list of decoded entry hashes.

    Uses a context manager so the file handle is closed even if a row
    fails to decode (the original never closed the handle).
    """
    with open(logorderfile, "r") as f:
        return [parselogrow(row.rstrip()) for row in f]
+
def write_chain(key, value):
    """Store a chain blob under its hex-encoded hash in chainsdir.

    The context manager guarantees the handle is closed even if the
    write raises (the original leaked it on error).
    """
    with open(chainsdir + "/" + base64.b16encode(key), "w") as f:
        f.write(value)
+
def read_chain(key):
    """Return the chain blob stored under the hex-encoded hash `key`.

    The context manager guarantees the handle is closed even if the
    read raises (the original leaked it on error).
    """
    with open(chainsdir + "/" + base64.b16encode(key), "r") as f:
        return f.read()
+
def add_to_logorder(key):
    """Append the hex-encoded entry hash `key` as a new line of logorderfile.

    The context manager guarantees the handle is closed (and the line
    flushed) even if the write raises.
    """
    with open(logorderfile, "a") as f:
        f.write(base64.b16encode(key) + "\n")
+
+def get_new_entries(baseurl):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/storage/fetchnewentries").read()
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"entries"]
+ print "ERROR: fetchnewentries", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print "ERROR: fetchnewentries", e.read()
+ sys.exit(1)
+
+def get_curpos(baseurl):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/frontend/currentposition").read()
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"position"]
+ print "ERROR: currentposition", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print "ERROR: currentposition", e.read()
+ sys.exit(1)
+
+def sendlog(baseurl, submission):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/frontend/sendlog",
+ json.dumps(submission)).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR: sendlog", e.read()
+ sys.exit(1)
+ except ValueError, e:
+ print "==== FAILED REQUEST ===="
+ print submission
+ print "======= RESPONSE ======="
+ print result
+ print "========================"
+ raise e
+
+def sendsth(baseurl, submission):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/frontend/sendsth",
+ json.dumps(submission)).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR: sendsth", e.read()
+ sys.exit(1)
+ except ValueError, e:
+ print "==== FAILED REQUEST ===="
+ print submission
+ print "======= RESPONSE ======="
+ print result
+ print "========================"
+ raise e
+
+def get_missingentries(baseurl):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/frontend/missingentries").read()
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"entries"]
+ print "ERROR: missingentries", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print "ERROR: missingentries", e.read()
+ sys.exit(1)
+
+
+logorder = get_logorder()
+certsinlog = set(logorder)
+
+new_entries = [entry for storagenode in storagenodes for entry in get_new_entries(storagenode)]
+
+for new_entry in new_entries:
+ hash = base64.b64decode(new_entry["hash"])
+ entry = base64.b64decode(new_entry["entry"])
+ if hash not in certsinlog:
+ write_chain(hash, entry)
+ add_to_logorder(hash)
+ logorder.append(hash)
+ certsinlog.add(hash)
+ print "added", base64.b16encode(hash)
+
+for frontendnode in frontendnodes:
+ curpos = get_curpos(frontendnode)
+ entries = [base64.b64encode(entry) for entry in logorder[curpos:]]
+ sendlog(frontendnode, {"start": curpos, "hashes": entries})
+ missingentries = get_missingentries(frontendnode)
+ print "missing entries:", missingentries
+ # XXX: no test case for missing entries yet, waiting to implement
+ sendsth(frontendnode, {"tree_size": len(logorder)})