-rwxr-xr-x  tools/compileconfig.py   |  6
-rwxr-xr-x  tools/merge.py           | 82
-rw-r--r--  tools/mergetools.py      | 57
-rwxr-xr-x  tools/testcase1.py       | 20
-rwxr-xr-x  tools/verifysecondary.py | 36
-rwxr-xr-x  tools/verifysecondary.sh |  4
6 files changed, 142 insertions(+), 63 deletions(-)
diff --git a/tools/compileconfig.py b/tools/compileconfig.py
index 95c71be..a8fe408 100755
--- a/tools/compileconfig.py
+++ b/tools/compileconfig.py
@@ -242,10 +242,14 @@ def gen_config(nodename, config, localconfig):
         (Symbol("services"), services),
         ]
     if nodetype == "signingnodes":
-        plopconfig.append((Symbol("log_private_key"), paths["logprivatekey"]))
         hsm = localconfig.get("hsm")
+        if "logprivatekey" in paths:
+            plopconfig.append((Symbol("log_private_key"), paths["logprivatekey"]))
         if hsm:
             plopconfig.append((Symbol("hsm"), [hsm.get("library"), str(hsm.get("slot")), "ecdsa", hsm.get("label"), hsm.get("pin")]))
+        if not ("logprivatekey" in paths or hsm):
+            print >>sys.stderr, "Neither logprivatekey nor hsm configured for signing node", nodename
+            sys.exit(1)
     plopconfig += [
         (Symbol("log_public_key"), paths["logpublickey"]),
         (Symbol("own_key"), (nodename, "%s/%s-private.pem" % (paths["privatekeys"], nodename))),
diff --git a/tools/merge.py b/tools/merge.py
index 76ffede..b426039 100755
--- a/tools/merge.py
+++ b/tools/merge.py
@@ -22,6 +22,8 @@ from certtools import build_merkle_tree, create_sth_signature, \
     check_sth_signature, get_eckey_from_file, timing_point, http_request, \
     get_public_key_from_file, get_leaf_hash, decode_certificate_chain, \
     create_ssl_context
+from mergetools import parselogrow, get_logorder, read_chain, unpack_entry, \
+    verify_entry
 
 parser = argparse.ArgumentParser(description="")
 parser.add_argument('--config', help="System configuration", required=True)
@@ -36,6 +38,7 @@ localconfig = yaml.load(open(args.localconfig))
 ctbaseurl = config["baseurl"]
 frontendnodes = config["frontendnodes"]
 storagenodes = config["storagenodes"]
+secondaries = localconfig.get("secondary", [])
 
 paths = localconfig["paths"]
 mergedb = paths["mergedb"]
@@ -51,13 +54,6 @@ logpublickey = get_public_key_from_file(paths["logpublickey"])
 
 hashed_dir = True
 
-def parselogrow(row):
-    return base64.b16decode(row)
-
-def get_logorder():
-    f = open(logorderfile, "r")
-    return [parselogrow(row.rstrip()) for row in f]
-
 def write_chain(key, value):
     filename = base64.b16encode(key)
     if hashed_dir:
@@ -72,17 +68,6 @@
     f.write(value)
     f.close()
 
-def read_chain(key):
-    filename = base64.b16encode(key)
-    path = chainsdir + "/" + filename[0:2] + "/" + filename[2:4] + "/" + filename[4:6]
-    try:
-        f = open(path + "/" + filename, "r")
-    except IOError, e:
-        f = open(chainsdir + "/" + filename, "r")
-    value = f.read()
-    f.close()
-    return value
-
 def add_to_logorder(key):
     f = open(logorderfile, "a")
     f.write(base64.b16encode(key) + "\n")
@@ -194,7 +179,7 @@ def chunks(l, n):
 
 timing = timing_point()
 
-logorder = get_logorder()
+logorder = get_logorder(logorderfile)
 
 timing_point(timing, "get logorder")
 
@@ -210,41 +195,8 @@ for storagenode in storagenodes:
     new_entries.update(new_entries_per_node[storagenode["name"]])
     entries_to_fetch[storagenode["name"]] = []
 
-def unpack_entry(entry):
-    pieces = []
-    while len(entry):
-        (length,) = struct.unpack(">I", entry[0:4])
-        data = entry[4:4+length]
-        entry = entry[4+length:]
-        pieces.append(data)
-    return pieces
-
 import subprocess
 
-def verify_entry(verifycert, entry, hash):
-    unpacked = unpack_entry(entry)
-    mtl = unpacked[0]
-    assert hash == get_leaf_hash(mtl)
-    s = struct.pack(">I", len(entry)) + entry
-    try:
-        verifycert.stdin.write(s)
-    except IOError, e:
-        sys.stderr.write("merge: unable to write to verifycert process: ")
-        while 1:
-            line = verifycert.stdout.readline()
-            if line:
-                sys.stderr.write(line)
-            else:
-                sys.exit(1)
-    result_length_packed = verifycert.stdout.read(4)
-    (result_length,) = struct.unpack(">I", result_length_packed)
-    result = verifycert.stdout.read(result_length)
-    assert len(result) == result_length
-    (error_code,) = struct.unpack("B", result[0:1])
-    if error_code != 0:
-        print >>sys.stderr, result[1:]
-        sys.exit(1)
-
 timing_point(timing, "get new entries")
 
 new_entries -= certsinlog
@@ -286,6 +238,30 @@ tree_size = len(logorder)
 root_hash = tree[-1][0]
 timestamp = int(time.time() * 1000)
 
+for secondary in secondaries:
+    remotehost = secondary["host"]
+    remotedir = remotehost + ":" + secondary["mergedir"]
+    localdir = mergedb
+    if localdir[-1] != '/':
+        localdir = localdir + "/"
+
+    print >>sys.stderr, "copying database to secondary:", remotehost
+    rsyncstatus = subprocess.call(["rsync", "-r", "--append", "--rsh=ssh", localdir, remotedir])
+    if rsyncstatus:
+        print >>sys.stderr, "rsync failed:", rsyncstatus
+        sys.exit(1)
+
+    print >>sys.stderr, "verifying database at secondary:", remotehost
+    verifysecondary = subprocess.Popen(["ssh", remotehost, secondary["verifycommand"], secondary["mergedir"]],
+                                       stdout=subprocess.PIPE)
+
+    (verifysecondaryresult, _) = verifysecondary.communicate()
+
+    if root_hash != base64.b16decode(verifysecondaryresult.strip()):
+        print >>sys.stderr, "secondary root hash was", verifysecondaryresult.strip()
+        print >>sys.stderr, "  expected", base64.b16encode(root_hash)
+        sys.exit(1)
+
 tree_head_signature = None
 for signingnode in signingnodes:
     try:
@@ -343,7 +319,7 @@ for frontendnode in frontendnodes:
     print "missing entries:", len(missingentries)
     for missingentry in missingentries:
         hash = base64.b64decode(missingentry)
-        sendentryresult = sendentry(nodename, nodeaddress, read_chain(hash), hash)
+        sendentryresult = sendentry(nodename, nodeaddress, read_chain(chainsdir, hash), hash)
        if sendentryresult["result"] != "ok":
             print "send sth:", sendentryresult
             sys.exit(1)
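The helpers removed from merge.py above reappear in the new tools/mergetools.py below, now taking chainsdir and the logorder filename as explicit parameters instead of reading module globals. The other substantive change is the block that, after the Merkle tree is built, pushes the merge database to each configured secondary with rsync --append (reasonable here because the database files only ever grow) and then asks the remote side to recompute and report its root hash. The "secondary" list comes from the local YAML configuration; a minimal sketch of the shape the code expects, with key names taken from merge.py above and all values hypothetical:

    # What localconfig.get("secondary", []) should yield after yaml.load();
    # hostnames and paths are examples only.
    secondaries = [
        {"host": "merge-2.example.com",                # ssh/rsync target
         "mergedir": "/var/local/db/mergedb",          # remote merge database
         "verifycommand": "tools/verifysecondary.sh"}  # run via ssh on the host
    ]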
diff --git a/tools/mergetools.py b/tools/mergetools.py
new file mode 100644
index 0000000..5cb36c4
--- /dev/null
+++ b/tools/mergetools.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2015, NORDUnet A/S.
+# See LICENSE for licensing information.
+import base64
+import sys
+import struct
+from certtools import get_leaf_hash
+
+def parselogrow(row):
+    return base64.b16decode(row)
+
+def get_logorder(filename):
+    f = open(filename, "r")
+    return [parselogrow(row.rstrip()) for row in f]
+
+def read_chain(chainsdir, key):
+    filename = base64.b16encode(key)
+    path = chainsdir + "/" + filename[0:2] + "/" + filename[2:4] + "/" + filename[4:6]
+    try:
+        f = open(path + "/" + filename, "r")
+    except IOError, e:
+        f = open(chainsdir + "/" + filename, "r")
+    value = f.read()
+    f.close()
+    return value
+
+def unpack_entry(entry):
+    pieces = []
+    while len(entry):
+        (length,) = struct.unpack(">I", entry[0:4])
+        data = entry[4:4+length]
+        entry = entry[4+length:]
+        pieces.append(data)
+    return pieces
+
+def verify_entry(verifycert, entry, hash):
+    unpacked = unpack_entry(entry)
+    mtl = unpacked[0]
+    assert hash == get_leaf_hash(mtl)
+    s = struct.pack(">I", len(entry)) + entry
+    try:
+        verifycert.stdin.write(s)
+    except IOError, e:
+        sys.stderr.write("merge: unable to write to verifycert process: ")
+        while 1:
+            line = verifycert.stdout.readline()
+            if line:
+                sys.stderr.write(line)
+            else:
+                sys.exit(1)
+    result_length_packed = verifycert.stdout.read(4)
+    (result_length,) = struct.unpack(">I", result_length_packed)
+    result = verifycert.stdout.read(result_length)
+    assert len(result) == result_length
+    (error_code,) = struct.unpack("B", result[0:1])
+    if error_code != 0:
+        print >>sys.stderr, result[1:]
+        sys.exit(1)
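The protocol verify_entry speaks with the verifycert subprocess is a simple length-prefixed framing: each request is a 4-byte big-endian length followed by the packed entry, and each response is a 4-byte length followed by a one-byte status code (0 means the entry verified) and an optional error message. A self-contained sketch of that framing, independent of any verifycert process:

    # Length-prefixed framing used on the verifycert pipe, as implemented
    # by verify_entry above; ">I" is a 4-byte big-endian unsigned length.
    import struct

    def frame_request(entry):
        return struct.pack(">I", len(entry)) + entry

    def parse_response(body):
        # body is the response payload with its length prefix already stripped
        (status,) = struct.unpack("B", body[0:1])
        return status, body[1:]    # (0, "") on success, else error text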
diff --git a/tools/testcase1.py b/tools/testcase1.py
index c1100ea..c66d976 100755
--- a/tools/testcase1.py
+++ b/tools/testcase1.py
@@ -48,13 +48,15 @@ def print_error(message, *args):
 def print_success(message, *args):
     print indentation + message % args
 
-def assert_equal(actual, expected, name, quiet=False, nodata=False):
+def assert_equal(actual, expected, name, quiet=False, nodata=False, fatal=False):
     global failures
     if actual != expected:
         if nodata:
             print_error("%s differs", name)
         else:
             print_error("%s expected %s got %s", name, expected, actual)
+        if fatal:
+            sys.exit(1)
     elif not quiet:
         print_success("%s was correct", name)
 
@@ -149,7 +151,7 @@ def merge():
                              "--localconfig", "../test/catlfish-test-local-merge.cfg"])
 
 mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True)
+assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
 
 for baseurl in baseurls:
     print_and_check_tree_size(0, baseurl)
@@ -159,7 +161,7 @@ testgroup("cert1")
 result1 = do_add_chain(cc1, baseurls[0])
 
 mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True)
+assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
 
 size_sth = {}
 
@@ -172,7 +174,7 @@ result2 = do_add_chain(cc1, baseurls[0])
 assert_equal(result2["timestamp"], result1["timestamp"], "timestamp")
 
 mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True)
+assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
 
 for baseurl in baseurls:
     print_and_check_tree_size(1, baseurl)
@@ -190,7 +192,7 @@ testgroup("cert2")
 result3 = do_add_chain(cc2, baseurls[0])
 
 mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True)
+assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
 
 for baseurl in baseurls:
     print_and_check_tree_size(2, baseurl)
@@ -204,7 +206,7 @@ testgroup("cert3")
 result4 = do_add_chain(cc3, baseurls[0])
 
 mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True)
+assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
 
 for baseurl in baseurls:
     print_and_check_tree_size(3, baseurl)
@@ -219,7 +221,7 @@ testgroup("cert4")
 result5 = do_add_chain(cc4, baseurls[0])
 
 mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True)
+assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
 
 for baseurl in baseurls:
     print_and_check_tree_size(4, baseurl)
@@ -235,7 +237,7 @@ testgroup("cert5")
 result6 = do_add_chain(cc5, baseurls[0])
 
 mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True)
+assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
 
 for baseurl in baseurls:
     print_and_check_tree_size(5, baseurl)
@@ -248,7 +250,7 @@ get_and_validate_proof(result5["timestamp"], cc4, 3, 3, baseurls[0])
 get_and_validate_proof(result6["timestamp"], cc5, 4, 1, baseurls[0])
 
 mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True)
+assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
 
 for first_size in range(1, 5):
     for second_size in range(first_size + 1, 6):
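merge() wraps subprocess.call, so mergeresult is the exit status of an entire merge run, which now includes copying to and verifying any configured secondaries. The new fatal=True on these checks makes the harness exit at the first failed merge instead of letting every later tree-size and proof assertion fail against a log that was never merged:

    mergeresult = merge()    # exit status of tools/merge.py; 0 means success
    assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)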
testgroup("cert4") result5 = do_add_chain(cc4, baseurls[0]) mergeresult = merge() -assert_equal(mergeresult, 0, "merge", quiet=True) +assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True) for baseurl in baseurls: print_and_check_tree_size(4, baseurl) @@ -235,7 +237,7 @@ testgroup("cert5") result6 = do_add_chain(cc5, baseurls[0]) mergeresult = merge() -assert_equal(mergeresult, 0, "merge", quiet=True) +assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True) for baseurl in baseurls: print_and_check_tree_size(5, baseurl) @@ -248,7 +250,7 @@ get_and_validate_proof(result5["timestamp"], cc4, 3, 3, baseurls[0]) get_and_validate_proof(result6["timestamp"], cc5, 4, 1, baseurls[0]) mergeresult = merge() -assert_equal(mergeresult, 0, "merge", quiet=True) +assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True) for first_size in range(1, 5): for second_size in range(first_size + 1, 6): diff --git a/tools/verifysecondary.py b/tools/verifysecondary.py new file mode 100755 index 0000000..9a36b32 --- /dev/null +++ b/tools/verifysecondary.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2015, NORDUnet A/S. +# See LICENSE for licensing information. + +import argparse +import base64 +import sys +import subprocess +from certtools import build_merkle_tree +from mergetools import * + +parser = argparse.ArgumentParser(description="") +parser.add_argument('--mergedb', help="Merge database", required=True) +parser.add_argument('--verifycert', help="Path to verifycert program", required=True) +parser.add_argument('--knownroots', help="Path to knownroots directory", required=True) +args = parser.parse_args() + +mergedb = args.mergedb +chainsdir = mergedb + "/chains" +logorderfile = mergedb + "/logorder" + +verifycert = subprocess.Popen([args.verifycert, args.knownroots], + stdin=subprocess.PIPE, stdout=subprocess.PIPE) + +logorder = get_logorder(logorderfile) + +for hash in logorder: + entry = read_chain(chainsdir, hash) + verify_entry(verifycert, entry, hash) + +tree = build_merkle_tree(logorder) +root_hash = tree[-1][0] + +print base64.b16encode(root_hash) diff --git a/tools/verifysecondary.sh b/tools/verifysecondary.sh new file mode 100755 index 0000000..4a90543 --- /dev/null +++ b/tools/verifysecondary.sh @@ -0,0 +1,4 @@ +#!/bin/sh + +cd $(dirname $0)/../catlfish +../tools/verifysecondary.py --mergedb="$1" --verifycert=../verifycert.erl --knownroots=../tools/testcerts/roots/ |