author    Linus Nordberg <linus@nordu.net>  2015-09-22 21:59:48 +0200
committer Linus Nordberg <linus@nordu.net>  2015-09-22 21:59:48 +0200
commit    056cf9649f795948182aec73fc6aa42ebfdaed26 (patch)
tree      9205dee2441b17cdc0ba0c9a8017706ccb245051 /tools/mergetools.py
parent    8949606c5c8e4eac53b16ecab03011a884e8c5ac (diff)
Split merge.py into three pieces.
Diffstat (limited to 'tools/mergetools.py')
-rw-r--r--  tools/mergetools.py | 250
1 file changed, 247 insertions(+), 3 deletions(-)
diff --git a/tools/mergetools.py b/tools/mergetools.py
index 947d7f4..820087c 100644
--- a/tools/mergetools.py
+++ b/tools/mergetools.py
@@ -1,10 +1,17 @@
# Copyright (c) 2015, NORDUnet A/S.
# See LICENSE for licensing information.
+
+import os
import base64
import hashlib
import sys
import struct
-from certtools import get_leaf_hash
+import urllib
+import urllib2
+import json
+from certtools import get_leaf_hash, create_sth_signature, \
+    check_sth_signature, get_eckey_from_file, http_request, \
+    decode_certificate_chain
def parselogrow(row):
return base64.b16decode(row, casefold=True)
@@ -22,7 +29,7 @@ def read_chain(chainsdir, key):
filename = base64.b16encode(key).upper()
try:
f = read_chain_open(chainsdir, filename)
- except IOError, e:
+ except IOError:
f = read_chain_open(chainsdir, filename.lower())
value = f.read()
f.close()
@@ -67,7 +74,7 @@ def unwrap_entry(entry):
def wrap_entry(entry):
return tlv_encodelist([("PLOP", entry),
- ("S256", hashlib.sha256(entry).digest())])
+ ("S256", hashlib.sha256(entry).digest())])
def verify_entry(verifycert, entry, hash):
packed = unwrap_entry(entry)
@@ -94,3 +101,240 @@ def verify_entry(verifycert, entry, hash):
if error_code != 0:
print >>sys.stderr, result[1:]
sys.exit(1)
+
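+# Lower-case hex encoding of a binary hash, used for file names and
+# the logorder file.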
+def hexencode(key):
+ return base64.b16encode(key).lower()
+
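+# Write an entry to chainsdir under its hex-encoded hash. With
+# hashed_dir, files are fanned out over aa/bb/cc subdirectories so no
+# single directory grows too large.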
+def write_chain(key, value, chainsdir, hashed_dir=True):
+ filename = hexencode(key)
+ if hashed_dir:
+ path = chainsdir + "/" \
+ + filename[0:2] + "/" + filename[2:4] + "/" + filename[4:6]
+ try:
+ os.makedirs(path)
+        except OSError:
+            # The directory may already exist.
+            pass
+ else:
+ path = chainsdir
+ f = open(path + "/" + filename, "w")
+ f.write(value)
+ f.close()
+
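+# Append the hex-encoded leaf hash to the logorder file.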
+def add_to_logorder(logorderfile, key):
+ f = open(logorderfile, "a")
+ f.write(hexencode(key) + "\n")
+ f.close()
+
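+# Make sure the contents of the logorder file have reached the disk.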
+def fsync_logorder(logorderfile):
+ f = open(logorderfile, "a")
+ os.fsync(f.fileno())
+ f.close()
+
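+# Ask a storage node for its new (not yet merged) entries; exits on
+# failure.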
+def get_new_entries(node, baseurl, own_key, paths):
+ try:
+ result = http_request(baseurl + "plop/v1/storage/fetchnewentries",
+ key=own_key, verifynode=node,
+ publickeydir=paths["publickeys"])
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return [base64.b64decode(entry) for \
+ entry in parsed_result[u"entries"]]
+ print >>sys.stderr, "ERROR: fetchnewentries", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: fetchnewentries", e.read()
+ sys.exit(1)
+
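+# Fetch the entries with the given hashes from a storage node,
+# returned as a dict keyed on hash; exits on failure.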
+def get_entries(node, baseurl, own_key, paths, hashes):
+ try:
+ params = urllib.urlencode({"hash":[base64.b64encode(hash) for \
+ hash in hashes]}, doseq=True)
+ result = http_request(baseurl + "plop/v1/storage/getentry?" + params,
+ key=own_key, verifynode=node,
+ publickeydir=paths["publickeys"])
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+            entries = dict([(base64.b64decode(entry["hash"]),
+                             base64.b64decode(entry["entry"]))
+                            for entry in parsed_result[u"entries"]])
+ assert len(entries) == len(hashes)
+ assert set(entries.keys()) == set(hashes)
+ return entries
+ print >>sys.stderr, "ERROR: getentry", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: getentry", e.read()
+ sys.exit(1)
+
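+# Read a frontend node's current position in the log.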
+def get_curpos(node, baseurl, own_key, paths):
+ try:
+        result = http_request(baseurl + "plop/v1/frontend/currentposition",
+                              key=own_key, verifynode=node,
+                              publickeydir=paths["publickeys"])
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"position"]
+ print >>sys.stderr, "ERROR: currentposition", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: currentposition", e.read()
+ sys.exit(1)
+
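+# Read the verified tree size from a merge backup node.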
+def get_verifiedsize(node, baseurl, own_key, paths):
+ try:
+        result = http_request(baseurl + "plop/v1/merge/verifiedsize",
+                              key=own_key, verifynode=node,
+                              publickeydir=paths["publickeys"])
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"size"]
+ print >>sys.stderr, "ERROR: verifiedsize", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: verifiedsize", e.read()
+ sys.exit(1)
+
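+# Send a chunk of the log order to a frontend node. Returns the
+# parsed response, or None on HTTP errors.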
+def sendlog(node, baseurl, own_key, paths, submission):
+ try:
+ result = http_request(baseurl + "plop/v1/frontend/sendlog",
+                              json.dumps(submission), key=own_key,
+                              verifynode=node,
+                              publickeydir=paths["publickeys"])
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: sendlog", e.read()
+ sys.stderr.flush()
+ return None
+ except ValueError, e:
+ print >>sys.stderr, "==== FAILED REQUEST ===="
+ print >>sys.stderr, submission
+ print >>sys.stderr, "======= RESPONSE ======="
+ print >>sys.stderr, result
+ print >>sys.stderr, "========================"
+ sys.stderr.flush()
+ raise e
+
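+# Like sendlog, but for a merge backup node.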
+def backup_sendlog(node, baseurl, own_key, paths, submission):
+ try:
+ result = http_request(baseurl + "plop/v1/merge/sendlog",
+                              json.dumps(submission), key=own_key,
+                              verifynode=node,
+                              publickeydir=paths["publickeys"])
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: sendlog", e.read()
+ sys.stderr.flush()
+ return None
+ except ValueError, e:
+ print >>sys.stderr, "==== FAILED REQUEST ===="
+ print >>sys.stderr, submission
+ print >>sys.stderr, "======= RESPONSE ======="
+ print >>sys.stderr, result
+ print >>sys.stderr, "========================"
+ sys.stderr.flush()
+ raise e
+
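+# Send a single entry and its leaf hash to a frontend node.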
+def sendentry(node, baseurl, own_key, paths, entry, hash):
+ try:
+ result = http_request(baseurl + "plop/v1/frontend/sendentry",
+                              json.dumps({"entry":base64.b64encode(entry),
+                                          "treeleafhash":base64.b64encode(hash)}),
+                              key=own_key, verifynode=node,
+                              publickeydir=paths["publickeys"])
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: sendentry", e.read()
+ sys.exit(1)
+ except ValueError, e:
+ print >>sys.stderr, "==== FAILED REQUEST ===="
+ print >>sys.stderr, hash
+ print >>sys.stderr, "======= RESPONSE ======="
+ print >>sys.stderr, result
+ print >>sys.stderr, "========================"
+ sys.stderr.flush()
+ raise e
+
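+# Like sendentry, but for a merge backup node.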
+def sendentry_merge(node, baseurl, own_key, paths, entry, hash):
+ try:
+ result = http_request(baseurl + "plop/v1/merge/sendentry",
+                              json.dumps({"entry":base64.b64encode(entry),
+                                          "treeleafhash":base64.b64encode(hash)}),
+                              key=own_key, verifynode=node,
+                              publickeydir=paths["publickeys"])
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: sendentry", e.read()
+ sys.exit(1)
+ except ValueError, e:
+ print >>sys.stderr, "==== FAILED REQUEST ===="
+ print >>sys.stderr, hash
+ print >>sys.stderr, "======= RESPONSE ======="
+ print >>sys.stderr, result
+ print >>sys.stderr, "========================"
+ sys.stderr.flush()
+ raise e
+
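+# Send a signed tree head to a frontend node.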
+def sendsth(node, baseurl, own_key, paths, submission):
+ try:
+ result = http_request(baseurl + "plop/v1/frontend/sendsth",
+                              json.dumps(submission), key=own_key,
+                              verifynode=node,
+                              publickeydir=paths["publickeys"])
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: sendsth", e.read()
+ sys.exit(1)
+ except ValueError, e:
+ print >>sys.stderr, "==== FAILED REQUEST ===="
+ print >>sys.stderr, submission
+ print >>sys.stderr, "======= RESPONSE ======="
+ print >>sys.stderr, result
+ print >>sys.stderr, "========================"
+ sys.stderr.flush()
+ raise e
+
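+# Ask a merge backup node for the root hash at the given tree size.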
+def verifyroot(node, baseurl, own_key, paths, treesize):
+ try:
+ result = http_request(baseurl + "plop/v1/merge/verifyroot",
+                              json.dumps({"tree_size":treesize}),
+                              key=own_key, verifynode=node,
+                              publickeydir=paths["publickeys"])
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: verifyroot", e.read()
+ sys.exit(1)
+ except ValueError, e:
+ print >>sys.stderr, "==== FAILED REQUEST ===="
+        print >>sys.stderr, {"tree_size":treesize}
+ print >>sys.stderr, "======= RESPONSE ======="
+ print >>sys.stderr, result
+ print >>sys.stderr, "========================"
+ sys.stderr.flush()
+ raise e
+
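+# Record the verified tree size on a merge backup node.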
+def setverifiedsize(node, baseurl, own_key, paths, treesize):
+ try:
+ result = http_request(baseurl + "plop/v1/merge/setverifiedsize",
+                              json.dumps({"size":treesize}),
+                              key=own_key, verifynode=node,
+                              publickeydir=paths["publickeys"])
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: setverifiedsize", e.read()
+ sys.exit(1)
+ except ValueError, e:
+ print >>sys.stderr, "==== FAILED REQUEST ===="
+        print >>sys.stderr, {"size":treesize}
+ print >>sys.stderr, "======= RESPONSE ======="
+ print >>sys.stderr, result
+ print >>sys.stderr, "========================"
+ sys.stderr.flush()
+ raise e
+
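+# List the entries a frontend node is still missing.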
+def get_missingentries(node, baseurl, own_key, paths):
+ try:
+        result = http_request(baseurl + "plop/v1/frontend/missingentries",
+                              key=own_key, verifynode=node,
+                              publickeydir=paths["publickeys"])
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"entries"]
+ print >>sys.stderr, "ERROR: missingentries", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: missingentries", e.read()
+ sys.exit(1)
+
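+# List the entries a merge backup node is still missing.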
+def get_missingentriesforbackup(node, baseurl, own_key, paths):
+ try:
+        result = http_request(baseurl + "plop/v1/merge/missingentries",
+                              key=own_key, verifynode=node,
+                              publickeydir=paths["publickeys"])
+ parsed_result = json.loads(result)
+ if parsed_result.get(u"result") == u"ok":
+ return parsed_result[u"entries"]
+ print >>sys.stderr, "ERROR: missingentriesforbackup", parsed_result
+ sys.exit(1)
+ except urllib2.HTTPError, e:
+ print >>sys.stderr, "ERROR: missingentriesforbackup", e.read()
+ sys.exit(1)
+
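+# Split the list l into chunks of at most n items.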
+def chunks(l, n):
+ return [l[i:i+n] for i in range(0, len(l), n)]
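
For context, a minimal sketch of how one of the pieces split out of
merge.py might drive these helpers. The node, baseurl, own_key, paths
and directory values below are hypothetical placeholders, not part of
this commit, and the sketch assumes fetchnewentries returns the leaf
hashes of pending entries, matching the getentry call above:

    # Hypothetical merge-fetch loop; every name below is a placeholder.
    node = "storage-1"                               # assumed node name
    baseurl = "https://storage-1.example.net/"       # assumed base URL
    own_key = ("merge-1", "privatekeys/merge-1.pem") # assumed key pair
    paths = {"publickeys": "publickeys"}             # assumed key dir

    new_hashes = get_new_entries(node, baseurl, own_key, paths)
    for chunk in chunks(new_hashes, 100):            # arbitrary chunk size
        entries = get_entries(node, baseurl, own_key, paths, chunk)
        for hash in chunk:
            write_chain(hash, entries[hash], "chains")
            add_to_logorder("logorder", hash)
    fsync_logorder("logorder")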