Diffstat (limited to 'tools')
 -rw-r--r--  tools/certtools.py      |  43
 -rwxr-xr-x  tools/compileconfig.py  | 283
 -rwxr-xr-x  tools/merge.py          |  92
 -rwxr-xr-x  tools/testcase1.py      |  12
 4 files changed, 372 insertions(+), 58 deletions(-)
diff --git a/tools/certtools.py b/tools/certtools.py
index cc423af..939d9f1 100644
--- a/tools/certtools.py
+++ b/tools/certtools.py
@@ -70,6 +70,11 @@ def get_eckey_from_file(keyfile):
     assert len(keys) == 1
     return keys[0]
 
+def get_public_key_from_file(keyfile):
+    keys = get_pemlike(keyfile, "PUBLIC KEY")
+    assert len(keys) == 1
+    return keys[0]
+
 def get_root_cert(issuer):
     accepted_certs = \
         json.loads(open("googlelog-accepted-certs.txt").read())["certificates"]
@@ -84,7 +89,7 @@ def get_root_cert(issuer):
     return root_cert
 
 def get_sth(baseurl):
-    result = urllib2.urlopen(baseurl + "ct/v1/get-sth", context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+    result = urllib2.urlopen(baseurl + "ct/v1/get-sth").read()
     return json.loads(result)
 
 def get_proof_by_hash(baseurl, hash, tree_size):
@@ -92,7 +97,7 @@ def get_proof_by_hash(baseurl, hash, tree_size):
         params = urllib.urlencode({"hash":base64.b64encode(hash),
                                    "tree_size":tree_size})
         result = \
-            urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+            urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params).read()
         return json.loads(result)
     except urllib2.HTTPError, e:
         print "ERROR:", e.read()
@@ -103,7 +108,7 @@ def get_consistency_proof(baseurl, tree_size1, tree_size2):
         params = urllib.urlencode({"first":tree_size1,
                                    "second":tree_size2})
         result = \
-            urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+            urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params).read()
         return json.loads(result)["consistency"]
     except urllib2.HTTPError, e:
         print "ERROR:", e.read()
@@ -126,7 +131,7 @@ def unpack_tls_array(packed_data, length_len):
 
 def add_chain(baseurl, submission):
     try:
-        result = urllib2.urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission), context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+        result = urllib2.urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission)).read()
         return json.loads(result)
     except urllib2.HTTPError, e:
         print "ERROR", e.code,":", e.read()
@@ -162,7 +167,7 @@ def add_prechain(baseurl, submission):
 def get_entries(baseurl, start, end):
     try:
         params = urllib.urlencode({"start":start, "end":end})
-        result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+        result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params).read()
         return json.loads(result)
     except urllib2.HTTPError, e:
         print "ERROR:", e.read()
@@ -205,7 +210,26 @@ def check_signature(baseurl, signature, data):
     vk.verify(unpacked_signature, data, hashfunc=hashlib.sha256,
               sigdecode=ecdsa.util.sigdecode_der)
 
-def http_request(url, data=None, key=None):
+def parse_auth_header(authheader):
+    splittedheader = authheader.split(";")
+    (signature, rawoptions) = (splittedheader[0], splittedheader[1:])
+    options = dict([(e.partition("=")[0], e.partition("=")[2]) for e in rawoptions])
+    return (base64.b64decode(signature), options)
+
+def check_auth_header(authheader, expected_key, publickeydir, data, path):
+    if expected_key == None:
+        return True
+    (signature, options) = parse_auth_header(authheader)
+    keyname = options.get("key")
+    if keyname != expected_key:
+        raise Exception("Response claimed to come from %s, expected %s" % (keyname, expected_key))
+    publickey = get_public_key_from_file(publickeydir + "/" + keyname + ".pem")
+    vk = ecdsa.VerifyingKey.from_der(publickey)
+    vk.verify(signature, "%s\0%s\0%s" % ("REPLY", path, data), hashfunc=hashlib.sha256,
+              sigdecode=ecdsa.util.sigdecode_der)
+    return True
+
+def http_request(url, data=None, key=None, verifynode=None, publickeydir="."):
     req = urllib2.Request(url, data)
     (keyname, keyfile) = key
     privatekey = get_eckey_from_file(keyfile)
@@ -219,8 +243,11 @@ def http_request(url, data=None, key=None):
     signature = sk.sign("%s\0%s\0%s" % (method, parsed_url.path, data), hashfunc=hashlib.sha256,
                         sigencode=ecdsa.util.sigencode_der)
     req.add_header('X-Catlfish-Auth', base64.b64encode(signature) + ";key=" + keyname)
-    result = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
-    return result
+    result = urllib2.urlopen(req)
+    authheader = result.info().get('X-Catlfish-Auth')
+    data = result.read()
+    check_auth_header(authheader, verifynode, publickeydir, data, parsed_url.path)
+    return data
 
 def get_signature(baseurl, data, key=None):
     try:
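
Note on the certtools.py changes: the explicit ssl.SSLContext arguments are dropped and replaced with per-message authentication. http_request() signs METHOD\0path\0data with the node's private key and sends it in the X-Catlfish-Auth header; check_auth_header() verifies the response the same way over REPLY\0path\0body, using the claimed node's public key from publickeydir. A minimal round-trip sketch of that header format, assuming the python-ecdsa package with an in-memory NIST256p key pair instead of the PEM files read by get_eckey_from_file()/get_public_key_from_file() (key name, path and body below are made up):

    import base64
    import hashlib
    import ecdsa

    sk = ecdsa.SigningKey.generate(curve=ecdsa.NIST256p)
    vk = sk.get_verifying_key()
    keyname, path, body = "merge-1", "/ct/frontend/sendsth", '{"tree_size": 1}'

    # Request side, as in http_request(): sign METHOD\0path\0data and build
    # the X-Catlfish-Auth header value.
    signature = sk.sign("%s\0%s\0%s" % ("POST", path, body),
                        hashfunc=hashlib.sha256,
                        sigencode=ecdsa.util.sigencode_der)
    authheader = base64.b64encode(signature) + ";key=" + keyname

    # Verification side, simplified from parse_auth_header()/
    # check_auth_header() to the single-option case; a real reply is signed
    # over "REPLY\0path\0body" instead.
    (sig_b64, _, option) = authheader.partition(";")
    assert option == "key=" + keyname
    vk.verify(base64.b64decode(sig_b64), "%s\0%s\0%s" % ("POST", path, body),
              hashfunc=hashlib.sha256, sigdecode=ecdsa.util.sigdecode_der)
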
diff --git a/tools/compileconfig.py b/tools/compileconfig.py
new file mode 100755
index 0000000..30424c5
--- /dev/null
+++ b/tools/compileconfig.py
@@ -0,0 +1,283 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014, NORDUnet A/S.
+# See LICENSE for licensing information.
+
+import argparse
+import sys
+import yaml
+import re
+
+class Symbol(str):
+    pass
+
+clean_string = re.compile(r'^[-.:_/A-Za-z0-9 ]*$')
+clean_symbol = re.compile(r'^[_A-Za-z0-9]*$')
+
+def quote_erlang_string(s):
+    if clean_string.match(s):
+        return '"' + s + '"'
+    else:
+        return "[" + ",".join([str(ord(c)) for c in s]) + "]"
+
+def quote_erlang_symbol(s):
+    if clean_symbol.match(s):
+        return s
+    elif clean_string.match(s):
+        return "'" + s + "'"
+    else:
+        print >>sys.stderr, "Cannot generate symbol", s
+        sys.exit(1)
+
+def gen_erlang(term, level=1):
+    indent = " " * level
+    separator = ",\n" + indent
+    if isinstance(term, Symbol):
+        return quote_erlang_symbol(term)
+    elif isinstance(term, basestring):
+        return quote_erlang_string(term)
+    elif isinstance(term, int):
+        return str(term)
+    elif isinstance(term, tuple):
+        tuplecontents = [gen_erlang(e, level=level+1) for e in term]
+        if "\n" not in "".join(tuplecontents):
+            separator = ", "
+        return "{" + separator.join(tuplecontents) + "}"
+    elif isinstance(term, list):
+        listcontents = [gen_erlang(e, level=level+1) for e in term]
+        return "[" + separator.join(listcontents) + "]"
+    else:
+        print "unknown type", type(term)
+        sys.exit(1)
+
+saslconfig = [(Symbol("sasl_error_logger"), Symbol("false")),
+              (Symbol("errlog_type"), Symbol("error")),
+              (Symbol("error_logger_mf_dir"), "log"),
+              (Symbol("error_logger_mf_maxbytes"), 10485760),
+              (Symbol("error_logger_mf_maxfiles"), 10),
+              ]
+
+def parse_address(address):
+    parsed_address = address.split(":")
+    if len(parsed_address) != 2:
+        print >>sys.stderr, "Invalid address format", address
+        sys.exit(1)
+    return (parsed_address[0], int(parsed_address[1]))
+
+def get_node_config(nodename, config):
+    nodetype = None
+    nodeconfig = None
+    for t in ["frontendnodes", "storagenodes", "signingnodes"]:
+        for node in config[t]:
+            if node["name"] == nodename:
+                nodetype = t
+                nodeconfig = node
+    if nodeconfig == None:
+        print >>sys.stderr, "Cannot find config for node", nodename
+        sys.exit(1)
+    return (nodetype, nodeconfig)
+
+def gen_https_servers(nodetype, nodeconfig):
+    if nodetype == "frontendnodes":
+        (publichost, publicport) = parse_address(nodeconfig["publicaddress"])
+        (host, port) = parse_address(nodeconfig["address"])
+        return [(Symbol("external_https_api"), publichost, publicport, Symbol("v1")),
+                (Symbol("frontend_https_api"), host, port, Symbol("frontend"))]
+    elif nodetype == "storagenodes":
+        (host, port) = parse_address(nodeconfig["address"])
+        return [(Symbol("storage_https_api"), host, port, Symbol("storage"))]
+    elif nodetype == "signingnodes":
+        (host, port) = parse_address(nodeconfig["address"])
+        return [(Symbol("signing_https_api"), host, port, Symbol("signing"))]
+
+def allowed_clients_frontend(mergenodenames):
+    return [
+        ("/ct/frontend/sendentry", mergenodenames),
+        ("/ct/frontend/sendlog", mergenodenames),
+        ("/ct/frontend/sendsth", mergenodenames),
+        ("/ct/frontend/currentposition", mergenodenames),
+        ("/ct/frontend/missingentries", mergenodenames),
+    ]
+
+def allowed_clients_public():
+    noauth = Symbol("noauth")
+    return [
+        ("/ct/v1/add-chain", noauth),
+        ("/ct/v1/add-pre-chain", noauth),
+        ("/ct/v1/get-sth", noauth),
+        ("/ct/v1/get-sth-consistency", noauth),
+        ("/ct/v1/get-proof-by-hash", noauth),
+        ("/ct/v1/get-entries", noauth),
+        ("/ct/v1/get-entry-and-proof", noauth),
+        ("/ct/v1/get-roots", noauth),
+    ]
+
+def allowed_clients_signing(frontendnodenames, mergenodenames):
+    return [
+        ("/ct/signing/sct", frontendnodenames),
+        ("/ct/signing/sth", mergenodenames),
+    ]
+
+def allowed_clients_storage(frontendnodenames, mergenodenames):
+    return [
+        ("/ct/storage/sendentry", frontendnodenames),
+        ("/ct/storage/entrycommitted", frontendnodenames),
+        ("/ct/storage/fetchnewentries", mergenodenames),
+        ("/ct/storage/getentry", mergenodenames),
+    ]
+
+def allowed_servers_frontend(signingnodenames, storagenodenames):
+    return [
+        ("/ct/storage/sendentry", storagenodenames),
+        ("/ct/storage/entrycommitted", storagenodenames),
+        ("/ct/signing/sct", signingnodenames),
+    ]
+
+def gen_config(nodename, config, localconfig):
+    print "generating config for", nodename
+    paths = localconfig["paths"]
+    options = localconfig.get("options", [])
+
+    configfile = open(paths["configdir"] + nodename + ".config", "w")
+    print >>configfile, "%% catlfish configuration file (-*- erlang -*-)"
+
+    (nodetype, nodeconfig) = get_node_config(nodename, config)
+    https_servers = gen_https_servers(nodetype, nodeconfig)
+
+    catlfishconfig = []
+    plopconfig = []
+
+    if nodetype == "frontendnodes":
+        catlfishconfig.append((Symbol("known_roots_path"), localconfig["paths"]["knownroots"]))
+        if "sctcaching" in options:
+            catlfishconfig.append((Symbol("sctcache_root_path"), paths["db"] + "sctcache/"))
+
+    catlfishconfig += [
+        (Symbol("https_servers"), https_servers),
+        (Symbol("https_certfile"), paths["https_certfile"]),
+        (Symbol("https_keyfile"), paths["https_keyfile"]),
+        (Symbol("https_cacertfile"), paths["https_cacertfile"]),
+    ]
+
+    lagerconfig = [
+        (Symbol("handlers"), [
+            (Symbol("lager_console_backend"), Symbol("info")),
+            (Symbol("lager_file_backend"), [(Symbol("file"), nodename + "-error.log"), (Symbol("level"), Symbol("error"))]),
+            (Symbol("lager_file_backend"), [(Symbol("file"), nodename + "-debug.log"), (Symbol("level"), Symbol("debug"))]),
+            (Symbol("lager_file_backend"), [(Symbol("file"), nodename + "-console.log"), (Symbol("level"), Symbol("info"))]),
+        ])
+    ]
+
+    if nodetype in ("frontendnodes", "storagenodes"):
+        plopconfig += [
+            (Symbol("entry_root_path"), paths["db"] + "certentries/"),
+        ]
+    if nodetype == "frontendnodes":
+        plopconfig += [
+            (Symbol("index_path"), paths["db"] + "index"),
+        ]
+    elif nodetype == "storagenodes":
+        plopconfig += [
+            (Symbol("newentries_path"), paths["db"] + "newentries"),
+        ]
+    if nodetype in ("frontendnodes", "storagenodes"):
+        plopconfig += [
+            (Symbol("entryhash_root_path"), paths["db"] + "entryhash/"),
+            (Symbol("indexforhash_root_path"), paths["db"] + "certindex/"),
+        ]
+    if nodetype == "frontendnodes":
+        plopconfig += [
+            (Symbol("sth_path"), paths["db"] + "sth"),
+        ]
+
+    signingnode = config["signingnodes"][0]
+    mergenodenames = [node["name"] for node in config["mergenodes"]]
+    storagenodeaddresses = ["https://%s/ct/storage/" % node["address"] for node in config["storagenodes"]]
+    frontendnodenames = [node["name"] for node in config["frontendnodes"]]
+
+    allowed_clients = []
+    allowed_servers = []
+
+    if nodetype == "frontendnodes":
+        storagenodenames = [node["name"] for node in config["storagenodes"]]
+        plopconfig.append((Symbol("storage_nodes"), storagenodeaddresses))
+        plopconfig.append((Symbol("storage_nodes_quorum"), config["storage-quorum-size"]))
+        services = [Symbol("ht")]
+        allowed_clients += allowed_clients_frontend(mergenodenames)
+        allowed_clients += allowed_clients_public()
+        allowed_servers += allowed_servers_frontend([signingnode["name"]], storagenodenames)
+    elif nodetype == "storagenodes":
+        allowed_clients += allowed_clients_storage(frontendnodenames, mergenodenames)
+        services = []
+    elif nodetype == "signingnodes":
+        allowed_clients += allowed_clients_signing(frontendnodenames, mergenodenames)
+        services = [Symbol("sign")]
+
+    plopconfig += [
+        (Symbol("publickey_path"), paths["publickeys"]),
+        (Symbol("services"), services),
+    ]
+    if nodetype == "signingnodes":
+        plopconfig.append((Symbol("log_private_key"), paths["logprivatekey"]))
+    plopconfig += [
+        (Symbol("log_public_key"), paths["logpublickey"]),
+        (Symbol("own_key"), (nodename, "%s/%s-private.pem" % (paths["privatekeys"], nodename))),
+    ]
+    if nodetype == "frontendnodes":
+        plopconfig.append((Symbol("signing_node"), "https://%s/ct/signing/" % signingnode["address"]))
+    plopconfig += [
+        (Symbol("allowed_clients"), allowed_clients),
+        (Symbol("allowed_servers"), allowed_servers),
+    ]
+
+    erlangconfig = [
+        (Symbol("sasl"), saslconfig),
+        (Symbol("catlfish"), catlfishconfig),
+        (Symbol("lager"), lagerconfig),
+        (Symbol("plop"), plopconfig),
+    ]
+
+    print >>configfile, gen_erlang(erlangconfig) + ".\n"
+
+    configfile.close()
+
+
+def gen_testmakefile(config, testmakefile, machines):
+    configfile = open(testmakefile, "w")
+    frontendnodenames = [node["name"] for node in config["frontendnodes"]]
+    storagenodenames = [node["name"] for node in config["storagenodes"]]
+    signingnodename = [node["name"] for node in config["signingnodes"]]
+
+    frontendnodeaddresses = [node["publicaddress"] for node in config["frontendnodes"]]
+    storagenodeaddresses = [node["address"] for node in config["storagenodes"]]
+    signingnodeaddresses = [node["address"] for node in config["signingnodes"]]
+
+    print >>configfile, "NODES=" + " ".join(frontendnodenames+storagenodenames+signingnodename)
+    print >>configfile, "MACHINES=" + " ".join([str(e) for e in range(1, machines+1)])
+    print >>configfile, "TESTURLS=" + " ".join(frontendnodeaddresses+storagenodeaddresses+signingnodeaddresses)
+    print >>configfile, "BASEURL=" + config["baseurl"]
+
+    configfile.close()
+
+
+def main():
+    parser = argparse.ArgumentParser(description="")
+    parser.add_argument('--config', help="System configuration", required=True)
+    parser.add_argument('--localconfig', help="Local configuration")
+    parser.add_argument("--testmakefile", metavar="file", help="Generate makefile variables for test")
+    parser.add_argument("--machines", type=int, metavar="n", help="Number of machines")
+    args = parser.parse_args()
+
+    config = yaml.load(open(args.config))
+    if args.testmakefile and args.machines:
+        gen_testmakefile(config, args.testmakefile, args.machines)
+    elif args.localconfig:
+        localconfig = yaml.load(open(args.localconfig))
+        localnodes = localconfig["localnodes"]
+        for localnode in localnodes:
+            gen_config(localnode, config, localconfig)
+    else:
+        print >>sys.stderr, "Nothing to do"
+        sys.exit(1)
+
+main()
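
Note on compileconfig.py: gen_erlang() serializes nested Python terms into an Erlang config. A Symbol becomes an atom (quoted when it fails clean_symbol), strings matching clean_string become Erlang strings while anything else is emitted as a list of character codes, and a tuple collapses onto one line when its rendering contains no newline. A hypothetical transcript using Symbol and gen_erlang from the file above (it has no importable entry point, since it calls main() at load time):

    term = [(Symbol("sasl"),
             [(Symbol("errlog_type"), Symbol("error")),
              (Symbol("error_logger_mf_dir"), "log")]),
            (Symbol("plop"), [(Symbol("storage_nodes_quorum"), 1)])]
    print gen_erlang(term) + ".\n"
    # [{sasl,
    #   [{errlog_type, error},
    #    {error_logger_mf_dir, "log"}]},
    #  {plop, [{storage_nodes_quorum, 1}]}].
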
diff --git a/tools/merge.py b/tools/merge.py
index dd8de07..75e72ae 100755
--- a/tools/merge.py
+++ b/tools/merge.py
@@ -15,28 +15,31 @@ import ecdsa
 import hashlib
 import urlparse
 import os
+import yaml
 from certtools import build_merkle_tree, create_sth_signature, check_sth_signature, get_eckey_from_file, timing_point, http_request
 
 parser = argparse.ArgumentParser(description="")
-parser.add_argument("--baseurl", metavar="url", help="Base URL for CT server", required=True)
-parser.add_argument("--frontend", action="append", metavar="url", help="Base URL for frontend server", required=True)
-parser.add_argument("--storage", action="append", metavar="url", help="Base URL for storage server", required=True)
-parser.add_argument("--mergedb", metavar="dir", help="Merge database directory", required=True)
-parser.add_argument("--signing", metavar="url", help="Base URL for signing server", required=True)
-parser.add_argument("--own-keyname", metavar="keyname", help="The key name of the merge node", required=True)
-parser.add_argument("--own-keyfile", metavar="keyfile", help="The file containing the private key of the merge node", required=True)
+parser.add_argument('--config', help="System configuration", required=True)
+parser.add_argument('--localconfig', help="Local configuration", required=True)
 parser.add_argument("--nomerge", action='store_true', help="Don't actually do merge")
 parser.add_argument("--timing", action='store_true', help="Print timing information")
 args = parser.parse_args()
 
-ctbaseurl = args.baseurl
-frontendnodes = args.frontend
-storagenodes = args.storage
+config = yaml.load(open(args.config))
+localconfig = yaml.load(open(args.localconfig))
 
-chainsdir = args.mergedb + "/chains"
-logorderfile = args.mergedb + "/logorder"
+ctbaseurl = config["baseurl"]
+frontendnodes = config["frontendnodes"]
+storagenodes = config["storagenodes"]
+paths = localconfig["paths"]
+mergedb = paths["mergedb"]
 
-own_key = (args.own_keyname, args.own_keyfile)
+signingnode = config["signingnodes"][0]
+
+chainsdir = mergedb + "/chains"
+logorderfile = mergedb + "/logorder"
+
+own_key = (localconfig["nodename"], "%s/%s-private.pem" % (paths["privatekeys"], localconfig["nodename"]))
 
 hashed_dir = True
@@ -77,9 +80,9 @@ def add_to_logorder(key):
     f.write(base64.b16encode(key) + "\n")
     f.close()
 
-def get_new_entries(baseurl):
+def get_new_entries(node, baseurl):
     try:
-        result = http_request(baseurl + "ct/storage/fetchnewentries", key=own_key)
+        result = http_request(baseurl + "ct/storage/fetchnewentries", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         parsed_result = json.loads(result)
         if parsed_result.get(u"result") == u"ok":
             return [base64.b64decode(entry) for entry in parsed_result[u"entries"]]
@@ -89,10 +92,10 @@ def get_new_entries(baseurl):
         print "ERROR: fetchnewentries", e.read()
         sys.exit(1)
 
-def get_entries(baseurl, hashes):
+def get_entries(node, baseurl, hashes):
     try:
         params = urllib.urlencode({"hash":[base64.b64encode(hash) for hash in hashes]}, doseq=True)
-        result = http_request(baseurl + "ct/storage/getentry?" + params, key=own_key)
+        result = http_request(baseurl + "ct/storage/getentry?" + params, key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         parsed_result = json.loads(result)
         if parsed_result.get(u"result") == u"ok":
             entries = dict([(base64.b64decode(entry["hash"]), base64.b64decode(entry["entry"])) for entry in parsed_result[u"entries"]])
@@ -105,9 +108,9 @@ def get_entries(baseurl, hashes):
         print "ERROR: getentry", e.read()
         sys.exit(1)
 
-def get_curpos(baseurl):
+def get_curpos(node, baseurl):
     try:
-        result = http_request(baseurl + "ct/frontend/currentposition", key=own_key)
+        result = http_request(baseurl + "ct/frontend/currentposition", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         parsed_result = json.loads(result)
         if parsed_result.get(u"result") == u"ok":
             return parsed_result[u"position"]
@@ -117,10 +120,10 @@ def get_curpos(baseurl):
         print "ERROR: currentposition", e.read()
         sys.exit(1)
 
-def sendlog(baseurl, submission):
+def sendlog(node, baseurl, submission):
     try:
         result = http_request(baseurl + "ct/frontend/sendlog",
-                              json.dumps(submission), key=own_key)
+                              json.dumps(submission), key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
     except urllib2.HTTPError, e:
         print "ERROR: sendlog", e.read()
@@ -133,10 +136,11 @@ def sendlog(baseurl, submission):
         print "========================"
         raise e
 
-def sendentry(baseurl, entry, hash):
+def sendentry(node, baseurl, entry, hash):
     try:
         result = http_request(baseurl + "ct/frontend/sendentry",
-                              json.dumps({"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)}), key=own_key)
+                              json.dumps({"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)}), key=own_key,
+                              verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
     except urllib2.HTTPError, e:
         print "ERROR: sendentry", e.read()
@@ -149,10 +153,10 @@ def sendentry(baseurl, entry, hash):
         print "========================"
         raise e
 
-def sendsth(baseurl, submission):
+def sendsth(node, baseurl, submission):
     try:
         result = http_request(baseurl + "ct/frontend/sendsth",
-                              json.dumps(submission), key=own_key)
+                              json.dumps(submission), key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
     except urllib2.HTTPError, e:
         print "ERROR: sendsth", e.read()
@@ -165,9 +169,9 @@ def sendsth(baseurl, submission):
         print "========================"
         raise e
 
-def get_missingentries(baseurl):
+def get_missingentries(node, baseurl):
     try:
-        result = http_request(baseurl + "ct/frontend/missingentries", key=own_key)
+        result = http_request(baseurl + "ct/frontend/missingentries", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         parsed_result = json.loads(result)
         if parsed_result.get(u"result") == u"ok":
             return parsed_result[u"entries"]
@@ -193,10 +197,10 @@ new_entries = set()
 entries_to_fetch = {}
 
 for storagenode in storagenodes:
-    print "getting new entries from", storagenode
-    new_entries_per_node[storagenode] = set(get_new_entries(storagenode))
-    new_entries.update(new_entries_per_node[storagenode])
-    entries_to_fetch[storagenode] = []
+    print "getting new entries from", storagenode["name"]
+    new_entries_per_node[storagenode["name"]] = set(get_new_entries(storagenode["name"], "https://%s/" % storagenode["address"]))
+    new_entries.update(new_entries_per_node[storagenode["name"]])
+    entries_to_fetch[storagenode["name"]] = []
 
 timing_point(timing, "get new entries")
@@ -209,16 +213,16 @@ if args.nomerge:
 for hash in new_entries:
     for storagenode in storagenodes:
-        if hash in new_entries_per_node[storagenode]:
-            entries_to_fetch[storagenode].append(hash)
+        if hash in new_entries_per_node[storagenode["name"]]:
+            entries_to_fetch[storagenode["name"]].append(hash)
             break
 
 added_entries = 0
 for storagenode in storagenodes:
-    print "getting", len(entries_to_fetch[storagenode]), "entries from", storagenode
-    for chunk in chunks(entries_to_fetch[storagenode], 100):
-        entries = get_entries(storagenode, chunk)
+    print "getting", len(entries_to_fetch[storagenode["name"]]), "entries from", storagenode["name"]
+    for chunk in chunks(entries_to_fetch[storagenode["name"]], 100):
+        entries = get_entries(storagenode["name"], "https://%s/" % storagenode["address"], chunk)
         for hash in chunk:
             entry = entries[hash]
             write_chain(hash, entry)
@@ -235,7 +239,7 @@ root_hash = tree[-1][0]
 timestamp = int(time.time() * 1000)
 
 tree_head_signature = create_sth_signature(tree_size, timestamp,
-                                           root_hash, args.signing, key=own_key)
+                                           root_hash, "https://%s/" % signingnode["address"], key=own_key)
 
 sth = {"tree_size": tree_size, "timestamp": timestamp,
        "sha256_root_hash": base64.b64encode(root_hash),
@@ -251,14 +255,16 @@ if args.timing:
 print "root hash", base64.b16encode(root_hash)
 
 for frontendnode in frontendnodes:
+    nodeaddress = "https://%s/" % frontendnode["address"]
+    nodename = frontendnode["name"]
     timing = timing_point()
-    print "distributing for node", frontendnode
-    curpos = get_curpos(frontendnode)
+    print "distributing for node", nodename
+    curpos = get_curpos(nodename, nodeaddress)
     timing_point(timing, "get curpos")
     print "current position", curpos
     entries = [base64.b64encode(entry) for entry in logorder[curpos:]]
     for chunk in chunks(entries, 1000):
-        sendlogresult = sendlog(frontendnode, {"start": curpos, "hashes": chunk})
+        sendlogresult = sendlog(nodename, nodeaddress, {"start": curpos, "hashes": chunk})
         if sendlogresult["result"] != "ok":
             print "sendlog:", sendlogresult
             sys.exit(1)
@@ -267,17 +273,17 @@ for frontendnode in frontendnodes:
         sys.stdout.flush()
     timing_point(timing, "sendlog")
     print "log sent"
-    missingentries = get_missingentries(frontendnode)
+    missingentries = get_missingentries(nodename, nodeaddress)
    timing_point(timing, "get missing")
     print "missing entries:", len(missingentries)
     for missingentry in missingentries:
         hash = base64.b64decode(missingentry)
-        sendentryresult = sendentry(frontendnode, read_chain(hash), hash)
+        sendentryresult = sendentry(nodename, nodeaddress, read_chain(hash), hash)
         if sendentryresult["result"] != "ok":
             print "send sth:", sendentryresult
             sys.exit(1)
     timing_point(timing, "send missing")
-    sendsthresult = sendsth(frontendnode, sth)
+    sendsthresult = sendsth(nodename, nodeaddress, sth)
     if sendsthresult["result"] != "ok":
         print "send sth:", sendsthresult
         sys.exit(1)
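
Note on merge.py: the cluster layout now comes from the shared YAML system config instead of command-line flags, and every request verifies the responding node's signature. The keys read here and in compileconfig.py imply a structure like the following, shown as the dict yaml.load() returns (only the keys are taken from the code; host names, ports and node names are placeholders):

    config = {
        "baseurl": "https://ct.example.com/",
        "frontendnodes": [{"name": "frontend-1",
                           "publicaddress": "ct.example.com:443",
                           "address": "192.0.2.1:8082"}],
        "storagenodes": [{"name": "storage-1", "address": "192.0.2.2:8081"}],
        "signingnodes": [{"name": "signing-1", "address": "192.0.2.3:8088"}],
        "mergenodes": [{"name": "merge-1"}],
        "storage-quorum-size": 1,
    }
    # merge.py builds per-node URLs from the address fields, e.g.:
    storageurl = "https://%s/" % config["storagenodes"][0]["address"]
    assert storageurl == "https://192.0.2.2:8081/"
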
diff --git a/tools/testcase1.py b/tools/testcase1.py
index 7b3229d..4502b56 100755
--- a/tools/testcase1.py
+++ b/tools/testcase1.py
@@ -15,9 +15,9 @@ import itertools
 from certtools import *
 
 baseurls = ["https://127.0.0.1:8080/"]
-certfiles = ["testcerts/cert1.txt", "testcerts/cert2.txt",
-             "testcerts/cert3.txt", "testcerts/cert4.txt",
-             "testcerts/cert5.txt"]
+certfiles = ["../tools/testcerts/cert1.txt", "../tools/testcerts/cert2.txt",
+             "../tools/testcerts/cert3.txt", "../tools/testcerts/cert4.txt",
+             "../tools/testcerts/cert5.txt"]
 
 cc1 = get_certs_from_file(certfiles[0])
 cc2 = get_certs_from_file(certfiles[1])
@@ -138,10 +138,8 @@ def get_and_check_entry(timestamp, chain, leaf_index, baseurl):
                  len(submittedcertchain))
 
 def merge():
-    return subprocess.call(["./merge.py", "--baseurl", "https://127.0.0.1:8080/",
-                            "--frontend", "https://127.0.0.1:8082/", "--storage", "https://127.0.0.1:8081/",
-                            "--mergedb", "../rel/mergedb", "--signing", "https://127.0.0.1:8088/",
-                            "--own-keyname", "merge-1", "--own-keyfile", "../rel/privatekeys/merge-1-private.pem"])
+    return subprocess.call(["../tools/merge.py", "--config", "../test/catlfish-test.cfg",
+                            "--localconfig", "../test/catlfish-test-local-merge.cfg"])
 
 mergeresult = merge()
 assert_equal(mergeresult, 0, "merge", quiet=True)
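
Note on testcase1.py: the test now drives merge.py through the shared test configs instead of the deleted flag list. The two .cfg files themselves are not part of this diff; for the old and new invocations to be equivalent, catlfish-test-local-merge.cfg would need to carry at least the following (hypothetical contents, with the values taken from the removed flags):

    # Hypothetical yaml.load() result for ../test/catlfish-test-local-merge.cfg.
    localconfig = {
        "nodename": "merge-1",               # was --own-keyname merge-1
        "paths": {
            "mergedb": "../rel/mergedb",     # was --mergedb ../rel/mergedb
            "privatekeys": "../rel/privatekeys",
            "publickeys": "../rel/publickeys",
        },
    }
    # merge.py derives the private-key path that --own-keyfile used to pass
    # explicitly:
    own_key = (localconfig["nodename"],
               "%s/%s-private.pem" % (localconfig["paths"]["privatekeys"],
                                      localconfig["nodename"]))
    assert own_key[1] == "../rel/privatekeys/merge-1-private.pem"
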