author	Linus Nordberg <linus@nordberg.se>	2015-03-25 11:00:21 +0100
committer	Linus Nordberg <linus@nordberg.se>	2015-03-25 11:00:21 +0100
commit	54e0457f1e4c4376bdecc891a2d5ae423eb2266d (patch)
tree	8a756b221ded2625f186ff15b150a99365abe067
parent	f5ee5ed3e1b445c52efd7978357adf5552643fd4 (diff)
parent	842b07ef461483fcc297cc56e128918ddd273932 (diff)
Merge remote-tracking branch 'refs/remotes/map/compileconfig2'
Conflicts: .gitignore
-rw-r--r--   .gitignore                              1
-rw-r--r--   Makefile                               76
-rw-r--r--   test/catlfish-test-local-1.cfg         17
-rw-r--r--   test/catlfish-test-local-merge.cfg      8
-rw-r--r--   test/catlfish-test-local-signing.cfg   12
-rw-r--r--   test/catlfish-test.cfg                 19
-rw-r--r--   test/config/frontend-1.config          61
-rw-r--r--   test/config/signing-1.config           35
-rw-r--r--   test/config/storage-1.config           39
-rw-r--r--   tools/certtools.py                    102
-rwxr-xr-x   tools/compileconfig.py                283
-rwxr-xr-x   tools/merge.py                         99
-rw-r--r--   tools/precerttools.py                 102
-rw-r--r--   tools/rfc2459.py                      927
-rwxr-xr-x   tools/submitcert.py                    46
-rwxr-xr-x   tools/testcase1.py                    120
-rwxr-xr-x   tools/validatestore.py                 96
-rwxr-xr-x   tools/verifysct.py                     17
18 files changed, 1763 insertions, 297 deletions
diff --git a/.gitignore b/.gitignore
index 3684b0f..ca93487 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
*.beam
rel
+test/test.mk
diff --git a/Makefile b/Makefile
index e1a81fb..b54bc50 100644
--- a/Makefile
+++ b/Makefile
@@ -16,38 +16,34 @@ release: all
cp storage_node.config rel
cp storage_node_httpd.conf rel
mkdir rel/catlfish
- mkdir rel/db
- mkdir rel/mergedb
- mkdir rel/mergedb/chains
- touch rel/mergedb/logorder
- printf "0" > rel/db/treesize
cp -r webroot rel/catlfish
test -d rel/catlfish/webroot/log || mkdir rel/catlfish/webroot/log
+-include test/test.mk
+
tests-prepare:
+ rm -r rel/mergedb || true
+ mkdir rel/mergedb
+ mkdir rel/mergedb/chains
+ touch rel/mergedb/logorder
rm -r rel/known_roots || true
mkdir rel/known_roots
cp tools/testcerts/roots/* rel/known_roots
-
- mkdir -p test/nodes/frontend-1/log
- mkdir -p test/nodes/storage-1/log
- mkdir -p test/nodes/storage-2/log
- mkdir -p test/nodes/signing-1/log
- cp test/config/frontend-1.config rel
- cp test/config/storage-1.config rel
- cp test/config/signing-1.config rel
cp -r test/config/privatekeys rel
cp -r test/config/publickeys rel
rm -r rel/tests || true
- mkdir -p rel/tests/machine/machine-1/db
- printf "0" > rel/tests/machine/machine-1/db/treesize
- mkdir -p rel/tests/machine/machine-2/db
- printf "0" > rel/tests/machine/machine-2/db/treesize
- touch rel/tests/machine/machine-1/db/index
- touch rel/tests/machine/machine-1/db/newentries
-
-NODES=frontend-1 storage-1 signing-1
-TESTURLS=https://127.0.0.1:8080/ https://127.0.0.1:8081/ https://127.0.0.1:8082/ https://127.0.0.1:8088/
+ @for machine in $(MACHINES); do \
+ tools/compileconfig.py --config=test/catlfish-test.cfg --localconfig test/catlfish-test-local-$$machine.cfg ; \
+ mkdir -p rel/tests/machine/machine-$$machine/db ; \
+ printf "0" > rel/tests/machine/machine-$$machine/db/treesize ; \
+ touch rel/tests/machine/machine-$$machine/db/index ; \
+ touch rel/tests/machine/machine-$$machine/db/newentries ; \
+ done
+ tools/compileconfig.py --config=test/catlfish-test.cfg --localconfig test/catlfish-test-local-signing.cfg
+ @for node in $(NODES); do \
+ mkdir -p test/nodes/$$node/log ; \
+ cp test/config/$$node.config rel ; \
+ done
tests-start:
@for node in $(NODES); do \
@@ -57,45 +53,49 @@ tests-start:
echo "waiting for system to start" ; \
sleep 0.5 ; \
allstarted=1 ; \
+ notstarted= ; \
for testurl in $(TESTURLS); do \
- if curl -s -k $$testurl > /dev/null ; then : ; else allstarted=0 ; fi ; \
+ if curl -s -k https://$$testurl > /dev/null ; then : ; else allstarted=0 ; notstarted="$$testurl $$notstarted" ; fi ; \
: ; \
done ; \
- if [ $$allstarted -eq 1 ]; then break ; fi ; \
+ if [ $$allstarted -eq 1 ]; then break ; \
+ elif [ $$i -eq 10 ]; then echo Not started: $$notstarted ; fi ; \
done
tests-run:
- @(cd tools ; python testcase1.py ) || echo "Tests failed"
- @(cd tools ; python fetchallcerts.py https://127.0.0.1:8080/) || echo "Verification failed"
- @(cd tools ; python submitcert.py --store testcerts/cert1.txt --check-sct --sct-file=../rel/submittedcerts https://127.0.0.1:8080/) || echo "Submission failed"
- @(cd tools ; python submitcert.py --store testcerts/cert2.txt --check-sct --sct-file=../rel/submittedcerts https://127.0.0.1:8080/) || echo "Submission failed"
- @(cd tools ; python submitcert.py --store testcerts/cert3.txt --check-sct --sct-file=../rel/submittedcerts https://127.0.0.1:8080/) || echo "Submission failed"
- @(cd tools ; python submitcert.py --store testcerts/cert4.txt --check-sct --sct-file=../rel/submittedcerts https://127.0.0.1:8080/) || echo "Submission failed"
- @(cd tools ; python submitcert.py --store testcerts/cert5.txt --check-sct --sct-file=../rel/submittedcerts https://127.0.0.1:8080/) || echo "Submission failed"
- @(cd tools ; python submitcert.py --store testcerts/pre1.txt --check-sct --sct-file=../rel/submittedcerts https://127.0.0.1:8080/) || echo "Submission failed"
- @(cd tools ; python submitcert.py --store testcerts/pre2.txt --check-sct --sct-file=../rel/submittedcerts https://127.0.0.1:8080/) || echo "Submission failed"
- @(cd tools ; python merge.py --baseurl https://127.0.0.1:8080/ --frontend https://127.0.0.1:8082/ --storage https://127.0.0.1:8081/ --mergedb ../rel/mergedb --signing https://127.0.0.1:8088/ --own-keyname merge-1 --own-keyfile ../rel/privatekeys/merge-1-private.pem) || echo "Merge failed"
+ @(cd rel && python ../tools/testcase1.py ) || (echo "Tests failed" ; false)
+ @(cd rel && python ../tools/fetchallcerts.py $(BASEURL)) || (echo "Verification failed" ; false)
+ @(cd rel && rm -f submittedcerts)
+ @(cd rel && python ../tools/submitcert.py --parallel=1 --store ../tools/testcerts/cert1.txt --check-sct --sct-file=submittedcerts $(BASEURL)) || (echo "Submission failed" ; false)
+ @(cd rel && python ../tools/submitcert.py --parallel=1 --store ../tools/testcerts/cert2.txt --check-sct --sct-file=submittedcerts $(BASEURL)) || (echo "Submission failed" ; false)
+ @(cd rel && python ../tools/submitcert.py --parallel=1 --store ../tools/testcerts/cert3.txt --check-sct --sct-file=submittedcerts $(BASEURL)) || (echo "Submission failed" ; false)
+ @(cd rel && python ../tools/submitcert.py --parallel=1 --store ../tools/testcerts/cert4.txt --check-sct --sct-file=submittedcerts $(BASEURL)) || (echo "Submission failed" ; false)
+ @(cd rel && python ../tools/submitcert.py --parallel=1 --store ../tools/testcerts/cert5.txt --check-sct --sct-file=submittedcerts $(BASEURL)) || (echo "Submission failed" ; false)
+ @(cd rel && python ../tools/submitcert.py --parallel=1 --store ../tools/testcerts/pre1.txt --check-sct --sct-file=submittedcerts $(BASEURL)) || (echo "Submission failed" ; false)
+ @(cd rel && python ../tools/submitcert.py --parallel=1 --store ../tools/testcerts/pre2.txt --check-sct --sct-file=submittedcerts $(BASEURL)) || (echo "Submission failed" ; false)
+ @(cd rel && python ../tools/merge.py --config ../test/catlfish-test.cfg --localconfig ../test/catlfish-test-local-merge.cfg) || (echo "Merge failed" ; false)
tests-run2:
- @(cd tools ; python verifysct.py --sct-file=../rel/submittedcerts --parallel 1 https://127.0.0.1:8080/) || echo "Verification of SCT:s failed"
+ @(cd rel ; python ../tools/verifysct.py --sct-file=submittedcerts --parallel 1 $(BASEURL)) || echo "Verification of SCT:s failed"
tests-stop:
@for node in $(NODES); do \
- ./tools/halt.py ./rel/bin/to_erl test/nodes/$$node/ ; \
+ ./tools/halt.py to_erl test/nodes/$$node/ ; \
done
tests-wait:
sleep 5
tests:
+ tools/compileconfig.py --config=test/catlfish-test.cfg --testmakefile=test/test.mk --machines 1
@make tests-prepare
@make tests-start
- @make tests-run
+ @make tests-run || (make tests-stop ; false)
@make tests-wait
@make tests-stop
@make tests-wait
@make tests-start
- @make tests-run2
+ @make tests-run2 || (make tests-stop ; false)
@make tests-wait
@make tests-stop
diff --git a/test/catlfish-test-local-1.cfg b/test/catlfish-test-local-1.cfg
new file mode 100644
index 0000000..5e9a593
--- /dev/null
+++ b/test/catlfish-test-local-1.cfg
@@ -0,0 +1,17 @@
+localnodes:
+ - frontend-1
+ - storage-1
+
+paths:
+ configdir: test/config/
+ knownroots: known_roots
+ https_certfile: catlfish/webroot/certs/webcert.pem
+ https_keyfile: catlfish/webroot/keys/webkey.pem
+ https_cacertfile: catlfish/webroot/certs/webcert.pem
+ db: tests/machine/machine-1/db/
+ publickeys: publickeys
+ logpublickey: test/eckey-public.pem
+ privatekeys: privatekeys
+
+#options:
+# - sctcaching
diff --git a/test/catlfish-test-local-merge.cfg b/test/catlfish-test-local-merge.cfg
new file mode 100644
index 0000000..b7f5009
--- /dev/null
+++ b/test/catlfish-test-local-merge.cfg
@@ -0,0 +1,8 @@
+nodename: merge-1
+
+paths:
+ mergedb: ../rel/mergedb
+ https_cacertfile: catlfish/webroot/certs/webcert.pem
+ publickeys: publickeys
+ logpublickey: test/eckey-public.pem
+ privatekeys: privatekeys
diff --git a/test/catlfish-test-local-signing.cfg b/test/catlfish-test-local-signing.cfg
new file mode 100644
index 0000000..047c02b
--- /dev/null
+++ b/test/catlfish-test-local-signing.cfg
@@ -0,0 +1,12 @@
+localnodes:
+ - signing-1
+
+paths:
+ configdir: test/config/
+ https_certfile: catlfish/webroot/certs/webcert.pem
+ https_keyfile: catlfish/webroot/keys/webkey.pem
+ https_cacertfile: catlfish/webroot/certs/webcert.pem
+ publickeys: publickeys
+ logpublickey: test/eckey-public.pem
+ logprivatekey: test/eckey.pem
+ privatekeys: privatekeys
diff --git a/test/catlfish-test.cfg b/test/catlfish-test.cfg
new file mode 100644
index 0000000..7a4bb18
--- /dev/null
+++ b/test/catlfish-test.cfg
@@ -0,0 +1,19 @@
+baseurl: https://127.0.0.1:8080/
+
+frontendnodes:
+ - name: frontend-1
+ publicaddress: 127.0.0.1:8080
+ address: 127.0.0.1:8082
+
+storagenodes:
+ - name: storage-1
+ address: 127.0.0.1:8081
+
+signingnodes:
+ - name: signing-1
+ address: 127.0.0.1:8088
+
+mergenodes:
+ - name: merge-1
+
+storage-quorum-size: 1
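
[note] catlfish-test.cfg above is the system-wide configuration that both tools/compileconfig.py and tools/merge.py read with yaml.load(). A minimal sketch (not part of the patch; assumes PyYAML and a repo-root working directory) of how the tools consume the node lists:

import yaml

config = yaml.load(open("test/catlfish-test.cfg"))
# The tools build node URLs as "https://<address>/..." from these entries.
storage_urls = ["https://%s/ct/storage/" % node["address"]
                for node in config["storagenodes"]]
print config["baseurl"]              # https://127.0.0.1:8080/
print storage_urls                   # ['https://127.0.0.1:8081/ct/storage/']
print config["storage-quorum-size"]  # 1
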
diff --git a/test/config/frontend-1.config b/test/config/frontend-1.config
deleted file mode 100644
index 8215027..0000000
--- a/test/config/frontend-1.config
+++ /dev/null
@@ -1,61 +0,0 @@
-%% catlfish configuration file (-*- erlang -*-)
-
-[{sasl,
- [{sasl_error_logger, false},
- {errlog_type, error},
- {error_logger_mf_dir, "log"},
- {error_logger_mf_maxbytes, 10485760}, % 10 MB
- {error_logger_mf_maxfiles, 10}]},
- {catlfish,
- [{known_roots_path, "known_roots"},
- {sctcache_root_path, "tests/machine/machine-1/db/sctcache/"},
- {https_servers,
- [{external_https_api, "127.0.0.1", 8080, v1},
- {frontend_https_api, "127.0.0.1", 8082, frontend}
- ]},
- {https_certfile, "catlfish/webroot/certs/webcert.pem"},
- {https_keyfile, "catlfish/webroot/keys/webkey.pem"},
- {https_cacertfile, "catlfish/webroot/certs/webcert.pem"}
- ]},
- {lager,
- [{handlers,
- [{lager_console_backend, info},
- {lager_file_backend, [{file, "frontend-1-error.log"}, {level, error}]},
- {lager_file_backend, [{file, "frontend-1-debug.log"}, {level, debug}]},
- {lager_file_backend, [{file, "frontend-1-console.log"}, {level, info}]}
- ]}
- ]},
- {plop,
- [{entry_root_path, "tests/machine/machine-1/db/certentries/"},
- {index_path, "tests/machine/machine-1/db/index"},
- {entryhash_root_path, "tests/machine/machine-1/db/entryhash/"},
- {treesize_path, "tests/machine/machine-1/db/treesize"},
- {indexforhash_root_path, "tests/machine/machine-1/db/certindex/"},
- {sth_path, "tests/machine/machine-1/db/sth"},
- {storage_nodes, ["https://127.0.0.1:8081/ct/storage/"]},
- {storage_nodes_quorum, 1},
- {publickey_path, "publickeys"},
- {services, [ht]},
- {log_public_key, "test/eckey-public.pem"},
- {own_key, {"frontend-1", "privatekeys/frontend-1-private.pem"}},
- {signing_node, "https://127.0.0.1:8088/ct/signing/"},
- {allowed_clients, [{"/ct/frontend/sendentry", ["merge-1"]},
- {"/ct/frontend/sendlog", ["merge-1"]},
- {"/ct/frontend/sendsth", ["merge-1"]},
- {"/ct/frontend/currentposition", ["merge-1"]},
- {"/ct/frontend/missingentries", ["merge-1"]},
- {"/ct/v1/add-chain", noauth},
- {"/ct/v1/add-pre-chain", noauth},
- {"/ct/v1/get-sth", noauth},
- {"/ct/v1/get-sth-consistency", noauth},
- {"/ct/v1/get-proof-by-hash", noauth},
- {"/ct/v1/get-entries", noauth},
- {"/ct/v1/get-entry-and-proof", noauth},
- {"/ct/v1/get-roots", noauth}
- ]},
- {allowed_servers, [{"/ct/storage/sendentry", ["storage-1"]},
- {"/ct/storage/entrycommitted", ["storage-1"]},
- {"/ct/signing/sct", ["signing-1"]},
- {"/ct/signing/sth", ["signing-1"]}
- ]}
- ]}].
diff --git a/test/config/signing-1.config b/test/config/signing-1.config
deleted file mode 100644
index a11bdeb..0000000
--- a/test/config/signing-1.config
+++ /dev/null
@@ -1,35 +0,0 @@
-%% catlfish configuration file (-*- erlang -*-)
-
-[{sasl,
- [{sasl_error_logger, false},
- {errlog_type, error},
- {error_logger_mf_dir, "log"},
- {error_logger_mf_maxbytes, 10485760}, % 10 MB
- {error_logger_mf_maxfiles, 10}]},
- {catlfish,
- [{known_roots_path, "known_roots"},
- {https_servers,
- [{signing_https_api, "127.0.0.1", 8088, signing}
- ]},
- {https_certfile, "catlfish/webroot/certs/webcert.pem"},
- {https_keyfile, "catlfish/webroot/keys/webkey.pem"},
- {https_cacertfile, "catlfish/webroot/certs/webcert.pem"}
- ]},
- {lager,
- [{handlers,
- [{lager_console_backend, info},
- {lager_file_backend, [{file, "signing-1-error.log"}, {level, error}]},
- {lager_file_backend, [{file, "signing-1-debug.log"}, {level, debug}]},
- {lager_file_backend, [{file, "signing-1-console.log"}, {level, info}]}
- ]}
- ]},
- {plop,
- [{publickey_path, "publickeys"},
- {services, [sign]},
- {log_private_key, "test/eckey.pem"},
- {log_public_key, "test/eckey-public.pem"},
- {own_key, {"signing-1", "privatekeys/signing-1-private.pem"}},
- {allowed_clients, [{"/ct/signing/sct", ["frontend-1"]},
- {"/ct/signing/sth", ["merge-1"]}
- ]}
- ]}].
diff --git a/test/config/storage-1.config b/test/config/storage-1.config
deleted file mode 100644
index 005a8ad..0000000
--- a/test/config/storage-1.config
+++ /dev/null
@@ -1,39 +0,0 @@
-%% catlfish configuration file (-*- erlang -*-)
-
-[{sasl,
- [{sasl_error_logger, false},
- {errlog_type, error},
- {error_logger_mf_dir, "log"},
- {error_logger_mf_maxbytes, 10485760}, % 10 MB
- {error_logger_mf_maxfiles, 10}]},
- {catlfish,
- [{https_servers,
- [{storage_https_api, "127.0.0.1", 8081, storage}
- ]},
- {https_certfile, "catlfish/webroot/certs/webcert.pem"},
- {https_keyfile, "catlfish/webroot/keys/webkey.pem"},
- {https_cacertfile, "catlfish/webroot/certs/webcert.pem"}
- ]},
- {lager,
- [{handlers,
- [{lager_console_backend, info},
- {lager_file_backend, [{file, "storage-1-error.log"}, {level, error}]},
- {lager_file_backend, [{file, "storage-1-debug.log"}, {level, debug}]},
- {lager_file_backend, [{file, "storage-1-console.log"}, {level, info}]}
- ]}
- ]},
- {plop,
- [{entry_root_path, "tests/machine/machine-1/db/certentries/"},
- {index_path, "tests/machine/machine-1/db/index"},
- {newentries_path, "tests/machine/machine-1/db/newentries"},
- {entryhash_root_path, "tests/machine/machine-1/db/entryhash/"},
- {treesize_path, "tests/machine/machine-1/db/treesize"},
- {indexforhash_root_path, "tests/machine/machine-1/db/certindex/"},
- {publickey_path, "publickeys"},
- {own_key, {"storage-1", "privatekeys/storage-1-private.pem"}},
- {allowed_clients, [{"/ct/storage/sendentry", ["frontend-1"]},
- {"/ct/storage/entrycommitted", ["frontend-1"]},
- {"/ct/storage/fetchnewentries", ["merge-1"]},
- {"/ct/storage/getentry", noauth}
- ]}
-]}].
diff --git a/tools/certtools.py b/tools/certtools.py
index ffd631c..2c97dfb 100644
--- a/tools/certtools.py
+++ b/tools/certtools.py
@@ -61,11 +61,20 @@ def get_certs_from_string(s):
f = cStringIO.StringIO(s)
return get_pemlike_from_file(f, "CERTIFICATE")
+def get_precerts_from_string(s):
+ f = cStringIO.StringIO(s)
+ return get_pemlike_from_file(f, "PRECERTIFICATE")
+
def get_eckey_from_file(keyfile):
keys = get_pemlike(keyfile, "EC PRIVATE KEY")
assert len(keys) == 1
return keys[0]
+def get_public_key_from_file(keyfile):
+ keys = get_pemlike(keyfile, "PUBLIC KEY")
+ assert len(keys) == 1
+ return keys[0]
+
def get_root_cert(issuer):
accepted_certs = \
json.loads(open("googlelog-accepted-certs.txt").read())["certificates"]
@@ -80,7 +89,7 @@ def get_root_cert(issuer):
return root_cert
def get_sth(baseurl):
- result = urllib2.urlopen(baseurl + "ct/v1/get-sth", context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+ result = urllib2.urlopen(baseurl + "ct/v1/get-sth").read()
return json.loads(result)
def get_proof_by_hash(baseurl, hash, tree_size):
@@ -88,7 +97,7 @@ def get_proof_by_hash(baseurl, hash, tree_size):
params = urllib.urlencode({"hash":base64.b64encode(hash),
"tree_size":tree_size})
result = \
- urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+ urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params).read()
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR:", e.read()
@@ -99,7 +108,7 @@ def get_consistency_proof(baseurl, tree_size1, tree_size2):
params = urllib.urlencode({"first":tree_size1,
"second":tree_size2})
result = \
- urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+ urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params).read()
return json.loads(result)["consistency"]
except urllib2.HTTPError, e:
print "ERROR:", e.read()
@@ -122,7 +131,25 @@ def unpack_tls_array(packed_data, length_len):
def add_chain(baseurl, submission):
try:
- result = urllib2.urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission), context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+ result = urllib2.urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission)).read()
+ return json.loads(result)
+ except urllib2.HTTPError, e:
+ print "ERROR", e.code,":", e.read()
+ if e.code == 400:
+ return None
+ sys.exit(1)
+ except ValueError, e:
+ print "==== FAILED REQUEST ===="
+ print submission
+ print "======= RESPONSE ======="
+ print result
+ print "========================"
+ raise e
+
+def add_prechain(baseurl, submission):
+ try:
+ result = urllib2.urlopen(baseurl + "ct/v1/add-pre-chain",
+ json.dumps(submission)).read()
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR", e.code,":", e.read()
@@ -140,7 +167,7 @@ def add_chain(baseurl, submission):
def get_entries(baseurl, start, end):
try:
params = urllib.urlencode({"start":start, "end":end})
- result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
+ result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params).read()
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR:", e.read()
@@ -183,7 +210,26 @@ def check_signature(baseurl, signature, data):
vk.verify(unpacked_signature, data, hashfunc=hashlib.sha256,
sigdecode=ecdsa.util.sigdecode_der)
-def http_request(url, data=None, key=None):
+def parse_auth_header(authheader):
+ splittedheader = authheader.split(";")
+ (signature, rawoptions) = (splittedheader[0], splittedheader[1:])
+ options = dict([(e.partition("=")[0], e.partition("=")[2]) for e in rawoptions])
+ return (base64.b64decode(signature), options)
+
+def check_auth_header(authheader, expected_key, publickeydir, data, path):
+ if expected_key == None:
+ return True
+ (signature, options) = parse_auth_header(authheader)
+ keyname = options.get("key")
+ if keyname != expected_key:
+ raise Exception("Response claimed to come from %s, expected %s" % (keyname, expected_key))
+ publickey = get_public_key_from_file(publickeydir + "/" + keyname + ".pem")
+ vk = ecdsa.VerifyingKey.from_der(publickey)
+ vk.verify(signature, "%s\0%s\0%s" % ("REPLY", path, data), hashfunc=hashlib.sha256,
+ sigdecode=ecdsa.util.sigdecode_der)
+ return True
+
+def http_request(url, data=None, key=None, verifynode=None, publickeydir="."):
req = urllib2.Request(url, data)
(keyname, keyfile) = key
privatekey = get_eckey_from_file(keyfile)
@@ -197,8 +243,11 @@ def http_request(url, data=None, key=None):
signature = sk.sign("%s\0%s\0%s" % (method, parsed_url.path, data), hashfunc=hashlib.sha256,
sigencode=ecdsa.util.sigencode_der)
req.add_header('X-Catlfish-Auth', base64.b64encode(signature) + ";key=" + keyname)
- result = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
- return result
+ result = urllib2.urlopen(req)
+ authheader = result.info().get('X-Catlfish-Auth')
+ data = result.read()
+ check_auth_header(authheader, verifynode, publickeydir, data, parsed_url.path)
+ return data
def get_signature(baseurl, data, key=None):
try:
@@ -235,7 +284,7 @@ def create_sth_signature(tree_size, timestamp, root_hash, baseurl, key=None):
return create_signature(baseurl, tree_head, key=key)
-def check_sct_signature(baseurl, leafcert, sct):
+def check_sct_signature(baseurl, signed_entry, sct, precert=False):
publickey = base64.decodestring(publickeys[baseurl])
calculated_logid = hashlib.sha256(publickey).digest()
received_logid = base64.decodestring(sct["id"])
@@ -249,9 +298,12 @@ def check_sct_signature(baseurl, leafcert, sct):
version = struct.pack(">b", sct["sct_version"])
signature_type = struct.pack(">b", 0)
timestamp = struct.pack(">Q", sct["timestamp"])
- entry_type = struct.pack(">H", 0)
+ if precert:
+ entry_type = struct.pack(">H", 1)
+ else:
+ entry_type = struct.pack(">H", 0)
signed_struct = version + signature_type + timestamp + \
- entry_type + tls_array(leafcert, 3) + \
+ entry_type + signed_entry + \
tls_array(base64.decodestring(sct["extensions"]), 2)
check_signature(baseurl, signature, signed_struct)
@@ -267,6 +319,25 @@ def pack_mtl(timestamp, leafcert):
merkle_tree_leaf = version + leaf_type + timestamped_entry
return merkle_tree_leaf
+def pack_mtl_precert(timestamp, cleanedcert, issuer_key_hash):
+ entry_type = struct.pack(">H", 1)
+ extensions = ""
+
+ timestamped_entry = struct.pack(">Q", timestamp) + entry_type + \
+ pack_precert(cleanedcert, issuer_key_hash) + tls_array(extensions, 2)
+ version = struct.pack(">b", 0)
+ leaf_type = struct.pack(">b", 0)
+ merkle_tree_leaf = version + leaf_type + timestamped_entry
+ return merkle_tree_leaf
+
+def pack_precert(cleanedcert, issuer_key_hash):
+ assert len(issuer_key_hash) == 32
+
+ return issuer_key_hash + tls_array(cleanedcert, 3)
+
+def pack_cert(cert):
+ return tls_array(cert, 3)
+
def unpack_mtl(merkle_tree_leaf):
version = merkle_tree_leaf[0:1]
leaf_type = merkle_tree_leaf[1:2]
@@ -353,6 +424,14 @@ def get_hash_from_certfile(cert):
return base64.b16decode(line[len("Leafhash: "):])
return None
+def get_timestamp_from_certfile(cert):
+ for line in cert.split("\n"):
+ if line.startswith("-----"):
+ return None
+ if line.startswith("Timestamp: "):
+ return int(line[len("Timestamp: "):])
+ return None
+
def get_proof(store, tree_size, n):
hash = get_hash_from_certfile(get_one_cert(store, n))
return get_proof_by_hash(args.baseurl, hash, tree_size)
@@ -586,6 +665,7 @@ def verify_consistency_proof(consistency_proof, first, second, oldhash_input):
def verify_inclusion_proof(inclusion_proof, index, treesize, leafhash):
chain = zip([(index, 0)] + nodes_for_index(index, treesize), [leafhash] + inclusion_proof)
+ assert len(nodes_for_index(index, treesize)) == len(inclusion_proof)
(_, hash) = reduce(lambda e1, e2: combine_two_hashes(e1, e2, treesize), chain)
return hash
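
[note] The certtools.py changes above drop the per-request TLS context and add signed response authentication: http_request() now also verifies the X-Catlfish-Auth header of the reply via check_auth_header(). A minimal sketch (not part of the patch) of the header format that parse_auth_header() expects, with "storage-1" as an assumed node name:

import base64
from certtools import parse_auth_header

# The header is "<base64 DER signature>;key=<nodename>[;...]".
header = base64.b64encode("dummy-der-signature") + ";key=storage-1"
(signature, options) = parse_auth_header(header)
assert options["key"] == "storage-1"
# check_auth_header() then loads <publickeydir>/<key>.pem and verifies the
# ECDSA/SHA-256 signature over "REPLY\0<path>\0<data>".
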
diff --git a/tools/compileconfig.py b/tools/compileconfig.py
new file mode 100755
index 0000000..30424c5
--- /dev/null
+++ b/tools/compileconfig.py
@@ -0,0 +1,283 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014, NORDUnet A/S.
+# See LICENSE for licensing information.
+
+import argparse
+import sys
+import yaml
+import re
+
+class Symbol(str):
+ pass
+
+clean_string = re.compile(r'^[-.:_/A-Za-z0-9 ]*$')
+clean_symbol = re.compile(r'^[_A-Za-z0-9]*$')
+
+def quote_erlang_string(s):
+ if clean_string.match(s):
+ return '"' + s + '"'
+ else:
+ return "[" + ",".join([str(ord(c)) for c in s]) + "]"
+
+def quote_erlang_symbol(s):
+ if clean_symbol.match(s):
+ return s
+ elif clean_string.match(s):
+ return "'" + s + "'"
+ else:
+ print >>sys.stderr, "Cannot generate symbol", s
+ sys.exit(1)
+
+def gen_erlang(term, level=1):
+ indent = " " * level
+ separator = ",\n" + indent
+ if isinstance(term, Symbol):
+ return quote_erlang_symbol(term)
+ elif isinstance(term, basestring):
+ return quote_erlang_string(term)
+ elif isinstance(term, int):
+ return str(term)
+ elif isinstance(term, tuple):
+ tuplecontents = [gen_erlang(e, level=level+1) for e in term]
+ if "\n" not in "".join(tuplecontents):
+ separator = ", "
+ return "{" + separator.join(tuplecontents) + "}"
+ elif isinstance(term, list):
+ listcontents = [gen_erlang(e, level=level+1) for e in term]
+ return "[" + separator.join(listcontents) + "]"
+ else:
+ print "unknown type", type(term)
+ sys.exit(1)
+
+saslconfig = [(Symbol("sasl_error_logger"), Symbol("false")),
+ (Symbol("errlog_type"), Symbol("error")),
+ (Symbol("error_logger_mf_dir"), "log"),
+ (Symbol("error_logger_mf_maxbytes"), 10485760),
+ (Symbol("error_logger_mf_maxfiles"), 10),
+ ]
+
+def parse_address(address):
+ parsed_address = address.split(":")
+ if len(parsed_address) != 2:
+ print >>sys.stderr, "Invalid address format", address
+ sys.exit(1)
+ return (parsed_address[0], int(parsed_address[1]))
+
+def get_node_config(nodename, config):
+ nodetype = None
+ nodeconfig = None
+ for t in ["frontendnodes", "storagenodes", "signingnodes"]:
+ for node in config[t]:
+ if node["name"] == nodename:
+ nodetype = t
+ nodeconfig = node
+ if nodeconfig == None:
+ print >>sys.stderr, "Cannot find config for node", nodename
+ sys.exit(1)
+ return (nodetype, nodeconfig)
+
+def gen_https_servers(nodetype, nodeconfig):
+ if nodetype == "frontendnodes":
+ (publichost, publicport) = parse_address(nodeconfig["publicaddress"])
+ (host, port) = parse_address(nodeconfig["address"])
+ return [(Symbol("external_https_api"), publichost, publicport, Symbol("v1")),
+ (Symbol("frontend_https_api"), host, port, Symbol("frontend"))]
+ elif nodetype == "storagenodes":
+ (host, port) = parse_address(nodeconfig["address"])
+ return [(Symbol("storage_https_api"), host, port, Symbol("storage"))]
+ elif nodetype == "signingnodes":
+ (host, port) = parse_address(nodeconfig["address"])
+ return [(Symbol("signing_https_api"), host, port, Symbol("signing"))]
+
+def allowed_clients_frontend(mergenodenames):
+ return [
+ ("/ct/frontend/sendentry", mergenodenames),
+ ("/ct/frontend/sendlog", mergenodenames),
+ ("/ct/frontend/sendsth", mergenodenames),
+ ("/ct/frontend/currentposition", mergenodenames),
+ ("/ct/frontend/missingentries", mergenodenames),
+ ]
+
+def allowed_clients_public():
+ noauth = Symbol("noauth")
+ return [
+ ("/ct/v1/add-chain", noauth),
+ ("/ct/v1/add-pre-chain", noauth),
+ ("/ct/v1/get-sth", noauth),
+ ("/ct/v1/get-sth-consistency", noauth),
+ ("/ct/v1/get-proof-by-hash", noauth),
+ ("/ct/v1/get-entries", noauth),
+ ("/ct/v1/get-entry-and-proof", noauth),
+ ("/ct/v1/get-roots", noauth),
+ ]
+
+def allowed_clients_signing(frontendnodenames, mergenodenames):
+ return [
+ ("/ct/signing/sct", frontendnodenames),
+ ("/ct/signing/sth", mergenodenames),
+ ]
+
+def allowed_clients_storage(frontendnodenames, mergenodenames):
+ return [
+ ("/ct/storage/sendentry", frontendnodenames),
+ ("/ct/storage/entrycommitted", frontendnodenames),
+ ("/ct/storage/fetchnewentries", mergenodenames),
+ ("/ct/storage/getentry", mergenodenames),
+ ]
+
+def allowed_servers_frontend(signingnodenames, storagenodenames):
+ return [
+ ("/ct/storage/sendentry", storagenodenames),
+ ("/ct/storage/entrycommitted", storagenodenames),
+ ("/ct/signing/sct", signingnodenames),
+ ]
+
+def gen_config(nodename, config, localconfig):
+ print "generating config for", nodename
+ paths = localconfig["paths"]
+ options = localconfig.get("options", [])
+
+ configfile = open(paths["configdir"] + nodename + ".config", "w")
+ print >>configfile, "%% catlfish configuration file (-*- erlang -*-)"
+
+ (nodetype, nodeconfig) = get_node_config(nodename, config)
+ https_servers = gen_https_servers(nodetype, nodeconfig)
+
+ catlfishconfig = []
+ plopconfig = []
+
+ if nodetype == "frontendnodes":
+ catlfishconfig.append((Symbol("known_roots_path"), localconfig["paths"]["knownroots"]))
+ if "sctcaching" in options:
+ catlfishconfig.append((Symbol("sctcache_root_path"), paths["db"] + "sctcache/"))
+
+ catlfishconfig += [
+ (Symbol("https_servers"), https_servers),
+ (Symbol("https_certfile"), paths["https_certfile"]),
+ (Symbol("https_keyfile"), paths["https_keyfile"]),
+ (Symbol("https_cacertfile"), paths["https_cacertfile"]),
+ ]
+
+ lagerconfig = [
+ (Symbol("handlers"), [
+ (Symbol("lager_console_backend"), Symbol("info")),
+ (Symbol("lager_file_backend"), [(Symbol("file"), nodename + "-error.log"), (Symbol("level"), Symbol("error"))]),
+ (Symbol("lager_file_backend"), [(Symbol("file"), nodename + "-debug.log"), (Symbol("level"), Symbol("debug"))]),
+ (Symbol("lager_file_backend"), [(Symbol("file"), nodename + "-console.log"), (Symbol("level"), Symbol("info"))]),
+ ])
+ ]
+
+ if nodetype in ("frontendnodes", "storagenodes"):
+ plopconfig += [
+ (Symbol("entry_root_path"), paths["db"] + "certentries/"),
+ ]
+ if nodetype == "frontendnodes":
+ plopconfig += [
+ (Symbol("index_path"), paths["db"] + "index"),
+ ]
+ elif nodetype == "storagenodes":
+ plopconfig += [
+ (Symbol("newentries_path"), paths["db"] + "newentries"),
+ ]
+ if nodetype in ("frontendnodes", "storagenodes"):
+ plopconfig += [
+ (Symbol("entryhash_root_path"), paths["db"] + "entryhash/"),
+ (Symbol("indexforhash_root_path"), paths["db"] + "certindex/"),
+ ]
+ if nodetype == "frontendnodes":
+ plopconfig += [
+ (Symbol("sth_path"), paths["db"] + "sth"),
+ ]
+
+ signingnode = config["signingnodes"][0]
+ mergenodenames = [node["name"] for node in config["mergenodes"]]
+ storagenodeaddresses = ["https://%s/ct/storage/" % node["address"] for node in config["storagenodes"]]
+ frontendnodenames = [node["name"] for node in config["frontendnodes"]]
+
+ allowed_clients = []
+ allowed_servers = []
+
+ if nodetype == "frontendnodes":
+ storagenodenames = [node["name"] for node in config["storagenodes"]]
+ plopconfig.append((Symbol("storage_nodes"), storagenodeaddresses))
+ plopconfig.append((Symbol("storage_nodes_quorum"), config["storage-quorum-size"]))
+ services = [Symbol("ht")]
+ allowed_clients += allowed_clients_frontend(mergenodenames)
+ allowed_clients += allowed_clients_public()
+ allowed_servers += allowed_servers_frontend([signingnode["name"]], storagenodenames)
+ elif nodetype == "storagenodes":
+ allowed_clients += allowed_clients_storage(frontendnodenames, mergenodenames)
+ services = []
+ elif nodetype == "signingnodes":
+ allowed_clients += allowed_clients_signing(frontendnodenames, mergenodenames)
+ services = [Symbol("sign")]
+
+ plopconfig += [
+ (Symbol("publickey_path"), paths["publickeys"]),
+ (Symbol("services"), services),
+ ]
+ if nodetype == "signingnodes":
+ plopconfig.append((Symbol("log_private_key"), paths["logprivatekey"]))
+ plopconfig += [
+ (Symbol("log_public_key"), paths["logpublickey"]),
+ (Symbol("own_key"), (nodename, "%s/%s-private.pem" % (paths["privatekeys"], nodename))),
+ ]
+ if nodetype == "frontendnodes":
+ plopconfig.append((Symbol("signing_node"), "https://%s/ct/signing/" % signingnode["address"]))
+ plopconfig += [
+ (Symbol("allowed_clients"), allowed_clients),
+ (Symbol("allowed_servers"), allowed_servers),
+ ]
+
+ erlangconfig = [
+ (Symbol("sasl"), saslconfig),
+ (Symbol("catlfish"), catlfishconfig),
+ (Symbol("lager"), lagerconfig),
+ (Symbol("plop"), plopconfig),
+ ]
+
+ print >>configfile, gen_erlang(erlangconfig) + ".\n"
+
+ configfile.close()
+
+
+def gen_testmakefile(config, testmakefile, machines):
+ configfile = open(testmakefile, "w")
+ frontendnodenames = [node["name"] for node in config["frontendnodes"]]
+ storagenodenames = [node["name"] for node in config["storagenodes"]]
+ signingnodename = [node["name"] for node in config["signingnodes"]]
+
+ frontendnodeaddresses = [node["publicaddress"] for node in config["frontendnodes"]]
+ storagenodeaddresses = [node["address"] for node in config["storagenodes"]]
+ signingnodeaddresses = [node["address"] for node in config["signingnodes"]]
+
+ print >>configfile, "NODES=" + " ".join(frontendnodenames+storagenodenames+signingnodename)
+ print >>configfile, "MACHINES=" + " ".join([str(e) for e in range(1, machines+1)])
+ print >>configfile, "TESTURLS=" + " ".join(frontendnodeaddresses+storagenodeaddresses+signingnodeaddresses)
+ print >>configfile, "BASEURL=" + config["baseurl"]
+
+ configfile.close()
+
+
+def main():
+ parser = argparse.ArgumentParser(description="")
+ parser.add_argument('--config', help="System configuration", required=True)
+ parser.add_argument('--localconfig', help="Local configuration")
+ parser.add_argument("--testmakefile", metavar="file", help="Generate makefile variables for test")
+ parser.add_argument("--machines", type=int, metavar="n", help="Number of machines")
+ args = parser.parse_args()
+
+ config = yaml.load(open(args.config))
+ if args.testmakefile and args.machines:
+ gen_testmakefile(config, args.testmakefile, args.machines)
+ elif args.localconfig:
+ localconfig = yaml.load(open(args.localconfig))
+ localnodes = localconfig["localnodes"]
+ for localnode in localnodes:
+ gen_config(localnode, config, localconfig)
+ else:
+ print >>sys.stderr, "Nothing to do"
+ sys.exit(1)
+
+main()
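
[note] A rough standalone paraphrase of the term rendering in gen_erlang() above (compileconfig.py calls main() at load time, so it is not convenient to import for a quick test): Symbols become bare atoms, tuples become {...}, and clean strings are double-quoted:

import re

clean_symbol = re.compile(r'^[_A-Za-z0-9]*$')

def render_pair(symbol, value):
    # Mirrors quote_erlang_symbol()/quote_erlang_string() for the simple case.
    assert clean_symbol.match(symbol)
    return '{%s, "%s"}' % (symbol, value)

print render_pair("https_certfile", "catlfish/webroot/certs/webcert.pem")
# {https_certfile, "catlfish/webroot/certs/webcert.pem"}
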
diff --git a/tools/merge.py b/tools/merge.py
index 1b94581..75e72ae 100755
--- a/tools/merge.py
+++ b/tools/merge.py
@@ -15,27 +15,31 @@ import ecdsa
import hashlib
import urlparse
import os
+import yaml
from certtools import build_merkle_tree, create_sth_signature, check_sth_signature, get_eckey_from_file, timing_point, http_request
parser = argparse.ArgumentParser(description="")
-parser.add_argument("--baseurl", metavar="url", help="Base URL for CT server", required=True)
-parser.add_argument("--frontend", action="append", metavar="url", help="Base URL for frontend server", required=True)
-parser.add_argument("--storage", action="append", metavar="url", help="Base URL for storage server", required=True)
-parser.add_argument("--mergedb", metavar="dir", help="Merge database directory", required=True)
-parser.add_argument("--signing", metavar="url", help="Base URL for signing server", required=True)
-parser.add_argument("--own-keyname", metavar="keyname", help="The key name of the merge node", required=True)
-parser.add_argument("--own-keyfile", metavar="keyfile", help="The file containing the private key of the merge node", required=True)
+parser.add_argument('--config', help="System configuration", required=True)
+parser.add_argument('--localconfig', help="Local configuration", required=True)
parser.add_argument("--nomerge", action='store_true', help="Don't actually do merge")
+parser.add_argument("--timing", action='store_true', help="Print timing information")
args = parser.parse_args()
-ctbaseurl = args.baseurl
-frontendnodes = args.frontend
-storagenodes = args.storage
+config = yaml.load(open(args.config))
+localconfig = yaml.load(open(args.localconfig))
-chainsdir = args.mergedb + "/chains"
-logorderfile = args.mergedb + "/logorder"
+ctbaseurl = config["baseurl"]
+frontendnodes = config["frontendnodes"]
+storagenodes = config["storagenodes"]
+paths = localconfig["paths"]
+mergedb = paths["mergedb"]
-own_key = (args.own_keyname, args.own_keyfile)
+signingnode = config["signingnodes"][0]
+
+chainsdir = mergedb + "/chains"
+logorderfile = mergedb + "/logorder"
+
+own_key = (localconfig["nodename"], "%s/%s-private.pem" % (paths["privatekeys"], localconfig["nodename"]))
hashed_dir = True
@@ -76,9 +80,9 @@ def add_to_logorder(key):
f.write(base64.b16encode(key) + "\n")
f.close()
-def get_new_entries(baseurl):
+def get_new_entries(node, baseurl):
try:
- result = http_request(baseurl + "ct/storage/fetchnewentries", key=own_key)
+ result = http_request(baseurl + "ct/storage/fetchnewentries", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return [base64.b64decode(entry) for entry in parsed_result[u"entries"]]
@@ -88,10 +92,10 @@ def get_new_entries(baseurl):
print "ERROR: fetchnewentries", e.read()
sys.exit(1)
-def get_entries(baseurl, hashes):
+def get_entries(node, baseurl, hashes):
try:
params = urllib.urlencode({"hash":[base64.b64encode(hash) for hash in hashes]}, doseq=True)
- result = http_request(baseurl + "ct/storage/getentry?" + params, key=own_key)
+ result = http_request(baseurl + "ct/storage/getentry?" + params, key=own_key, verifynode=node, publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
entries = dict([(base64.b64decode(entry["hash"]), base64.b64decode(entry["entry"])) for entry in parsed_result[u"entries"]])
@@ -104,9 +108,9 @@ def get_entries(baseurl, hashes):
print "ERROR: getentry", e.read()
sys.exit(1)
-def get_curpos(baseurl):
+def get_curpos(node, baseurl):
try:
- result = http_request(baseurl + "ct/frontend/currentposition", key=own_key)
+ result = http_request(baseurl + "ct/frontend/currentposition", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return parsed_result[u"position"]
@@ -116,10 +120,10 @@ def get_curpos(baseurl):
print "ERROR: currentposition", e.read()
sys.exit(1)
-def sendlog(baseurl, submission):
+def sendlog(node, baseurl, submission):
try:
result = http_request(baseurl + "ct/frontend/sendlog",
- json.dumps(submission), key=own_key)
+ json.dumps(submission), key=own_key, verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR: sendlog", e.read()
@@ -132,10 +136,11 @@ def sendlog(baseurl, submission):
print "========================"
raise e
-def sendentry(baseurl, entry, hash):
+def sendentry(node, baseurl, entry, hash):
try:
result = http_request(baseurl + "ct/frontend/sendentry",
- json.dumps({"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)}), key=own_key)
+ json.dumps({"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)}), key=own_key,
+ verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR: sendentry", e.read()
@@ -148,10 +153,10 @@ def sendentry(baseurl, entry, hash):
print "========================"
raise e
-def sendsth(baseurl, submission):
+def sendsth(node, baseurl, submission):
try:
result = http_request(baseurl + "ct/frontend/sendsth",
- json.dumps(submission), key=own_key)
+ json.dumps(submission), key=own_key, verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR: sendsth", e.read()
@@ -164,9 +169,9 @@ def sendsth(baseurl, submission):
print "========================"
raise e
-def get_missingentries(baseurl):
+def get_missingentries(node, baseurl):
try:
- result = http_request(baseurl + "ct/frontend/missingentries", key=own_key)
+ result = http_request(baseurl + "ct/frontend/missingentries", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return parsed_result[u"entries"]
@@ -192,10 +197,10 @@ new_entries = set()
entries_to_fetch = {}
for storagenode in storagenodes:
- print "getting new entries from", storagenode
- new_entries_per_node[storagenode] = set(get_new_entries(storagenode))
- new_entries.update(new_entries_per_node[storagenode])
- entries_to_fetch[storagenode] = []
+ print "getting new entries from", storagenode["name"]
+ new_entries_per_node[storagenode["name"]] = set(get_new_entries(storagenode["name"], "https://%s/" % storagenode["address"]))
+ new_entries.update(new_entries_per_node[storagenode["name"]])
+ entries_to_fetch[storagenode["name"]] = []
timing_point(timing, "get new entries")
@@ -208,16 +213,16 @@ if args.nomerge:
for hash in new_entries:
for storagenode in storagenodes:
- if hash in new_entries_per_node[storagenode]:
- entries_to_fetch[storagenode].append(hash)
+ if hash in new_entries_per_node[storagenode["name"]]:
+ entries_to_fetch[storagenode["name"]].append(hash)
break
added_entries = 0
for storagenode in storagenodes:
- print "getting", len(entries_to_fetch[storagenode]), "entries from", storagenode
- for chunk in chunks(entries_to_fetch[storagenode], 100):
- entries = get_entries(storagenode, chunk)
+ print "getting", len(entries_to_fetch[storagenode["name"]]), "entries from", storagenode["name"]
+ for chunk in chunks(entries_to_fetch[storagenode["name"]], 100):
+ entries = get_entries(storagenode["name"], "https://%s/" % storagenode["address"], chunk)
for hash in chunk:
entry = entries[hash]
write_chain(hash, entry)
@@ -234,7 +239,7 @@ root_hash = tree[-1][0]
timestamp = int(time.time() * 1000)
tree_head_signature = create_sth_signature(tree_size, timestamp,
- root_hash, args.signing, key=own_key)
+ root_hash, "https://%s/" % signingnode["address"], key=own_key)
sth = {"tree_size": tree_size, "timestamp": timestamp,
"sha256_root_hash": base64.b64encode(root_hash),
@@ -244,19 +249,22 @@ check_sth_signature(ctbaseurl, sth)
timing_point(timing, "build sth")
-print timing["deltatimes"]
+if args.timing:
+ print timing["deltatimes"]
print "root hash", base64.b16encode(root_hash)
for frontendnode in frontendnodes:
+ nodeaddress = "https://%s/" % frontendnode["address"]
+ nodename = frontendnode["name"]
timing = timing_point()
- print "distributing for node", frontendnode
- curpos = get_curpos(frontendnode)
+ print "distributing for node", nodename
+ curpos = get_curpos(nodename, nodeaddress)
timing_point(timing, "get curpos")
print "current position", curpos
entries = [base64.b64encode(entry) for entry in logorder[curpos:]]
for chunk in chunks(entries, 1000):
- sendlogresult = sendlog(frontendnode, {"start": curpos, "hashes": chunk})
+ sendlogresult = sendlog(nodename, nodeaddress, {"start": curpos, "hashes": chunk})
if sendlogresult["result"] != "ok":
print "sendlog:", sendlogresult
sys.exit(1)
@@ -265,19 +273,20 @@ for frontendnode in frontendnodes:
sys.stdout.flush()
timing_point(timing, "sendlog")
print "log sent"
- missingentries = get_missingentries(frontendnode)
+ missingentries = get_missingentries(nodename, nodeaddress)
timing_point(timing, "get missing")
print "missing entries:", len(missingentries)
for missingentry in missingentries:
hash = base64.b64decode(missingentry)
- sendentryresult = sendentry(frontendnode, read_chain(hash), hash)
+ sendentryresult = sendentry(nodename, nodeaddress, read_chain(hash), hash)
if sendentryresult["result"] != "ok":
print "send sth:", sendentryresult
sys.exit(1)
timing_point(timing, "send missing")
- sendsthresult = sendsth(frontendnode, sth)
+ sendsthresult = sendsth(nodename, nodeaddress, sth)
if sendsthresult["result"] != "ok":
print "send sth:", sendsthresult
sys.exit(1)
timing_point(timing, "send sth")
- print timing["deltatimes"]
+ if args.timing:
+ print timing["deltatimes"]
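
[note] With the merge.py changes above, the old --baseurl/--frontend/--storage/--signing/--own-key* flags are replaced by the two YAML files. A sketch (not part of the patch; paths relative to the repo root) of the values merge.py now derives from them:

import yaml

config = yaml.load(open("test/catlfish-test.cfg"))
localconfig = yaml.load(open("test/catlfish-test-local-merge.cfg"))
paths = localconfig["paths"]

own_key = (localconfig["nodename"],
           "%s/%s-private.pem" % (paths["privatekeys"], localconfig["nodename"]))
signing_url = "https://%s/" % config["signingnodes"][0]["address"]
print own_key      # ('merge-1', 'privatekeys/merge-1-private.pem')
print signing_url  # https://127.0.0.1:8088/
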
diff --git a/tools/precerttools.py b/tools/precerttools.py
new file mode 100644
index 0000000..13ac572
--- /dev/null
+++ b/tools/precerttools.py
@@ -0,0 +1,102 @@
+# Copyright (c) 2014, NORDUnet A/S.
+# See LICENSE for licensing information.
+
+import sys
+import hashlib
+import rfc2459
+from pyasn1.type import univ, tag
+from pyasn1.codec.der import encoder, decoder
+
+def cleanextensions(extensions):
+ result = rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))
+ for idx in range(len(extensions)):
+ extension = extensions.getComponentByPosition(idx)
+ if extension.getComponentByName("extnID") == univ.ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3"):
+ pass
+ else:
+ result.setComponentByPosition(len(result), extension)
+ return result
+
+def decode_any(anydata, asn1Spec=None):
+ (wrapper, _) = decoder.decode(anydata)
+ (data, _) = decoder.decode(wrapper, asn1Spec=asn1Spec)
+ return data
+
+def get_subject(cert):
+ (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.Certificate())
+ assert rest == ''
+ tbsCertificate = asn1.getComponentByName("tbsCertificate")
+ subject = tbsCertificate.getComponentByName("subject")
+ extensions = tbsCertificate.getComponentByName("extensions")
+ keyid_wrapper = get_extension(extensions, rfc2459.id_ce_subjectKeyIdentifier)
+ keyid = decode_any(keyid_wrapper, asn1Spec=rfc2459.KeyIdentifier())
+ return (subject, keyid)
+
+def cleanprecert(precert, issuer=None):
+ (asn1,rest) = decoder.decode(precert, asn1Spec=rfc2459.Certificate())
+ assert rest == ''
+ tbsCertificate = asn1.getComponentByName("tbsCertificate")
+
+ extensions = tbsCertificate.getComponentByName("extensions")
+ tbsCertificate.setComponentByName("extensions", cleanextensions(extensions))
+
+ if issuer:
+ (issuer_subject, keyid) = get_subject(issuer)
+ tbsCertificate.setComponentByName("issuer", issuer_subject)
+ authkeyid = rfc2459.AuthorityKeyIdentifier()
+ authkeyid.setComponentByName("keyIdentifier",
+ rfc2459.KeyIdentifier(str(keyid)).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ authkeyid_wrapper = univ.OctetString(encoder.encode(authkeyid))
+ authkeyid_wrapper2 = encoder.encode(authkeyid_wrapper)
+ set_extension(extensions, rfc2459.id_ce_authorityKeyIdentifier, authkeyid_wrapper2)
+ return encoder.encode(tbsCertificate)
+
+def get_extension(extensions, id):
+ for idx in range(len(extensions)):
+ extension = extensions.getComponentByPosition(idx)
+ if extension.getComponentByName("extnID") == id:
+ return extension.getComponentByName("extnValue")
+ return None
+
+def set_extension(extensions, id, value):
+ result = rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))
+ for idx in range(len(extensions)):
+ extension = extensions.getComponentByPosition(idx)
+ if extension.getComponentByName("extnID") == id:
+ extension.setComponentByName("extnValue", value)
+
+def get_cert_key_hash(cert):
+ (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.Certificate())
+ assert rest == ''
+ tbsCertificate = asn1.getComponentByName("tbsCertificate")
+ key = encoder.encode(tbsCertificate.getComponentByName("subjectPublicKeyInfo"))
+ hash = hashlib.sha256()
+ hash.update(key)
+ return hash.digest()
+
+def printcert(cert, outfile=sys.stdout):
+ (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.Certificate())
+ assert rest == ''
+ print >>outfile, asn1.prettyPrint()
+
+def printtbscert(cert, outfile=sys.stdout):
+ (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.TBSCertificate())
+ assert rest == ''
+ print >>outfile, asn1.prettyPrint()
+
+ext_key_usage_precert_signing_cert = univ.ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4")
+
+def get_ext_key_usage(cert):
+ (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.Certificate())
+ assert rest == ''
+ tbsCertificate = asn1.getComponentByName("tbsCertificate")
+ extensions = tbsCertificate.getComponentByName("extensions")
+ for idx in range(len(extensions)):
+ extension = extensions.getComponentByPosition(idx)
+ if extension.getComponentByName("extnID") == rfc2459.id_ce_extKeyUsage:
+ ext_key_usage_wrapper_binary = extension.getComponentByName("extnValue")
+ (ext_key_usage_wrapper, _) = decoder.decode(ext_key_usage_wrapper_binary)
+ (ext_key_usage, _) = decoder.decode(ext_key_usage_wrapper)#, asn1Spec=rfc2459.ExtKeyUsageSyntax())
+ return list(ext_key_usage)
+ return []
+
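
[note] A hedged sketch (not from the patch; assumes pyasn1, which tools/rfc2459.py depends on) of how the new precert helpers in precerttools.py and certtools.py fit together, wrapped in a function so the DER inputs stay abstract:

from precerttools import cleanprecert, get_cert_key_hash
from certtools import pack_mtl_precert

def precert_leaf(precert_der, issuer_der, timestamp):
    # Strip the CT poison extension and rewrite the issuer (cleanprecert),
    # hash the issuer's SubjectPublicKeyInfo, and pack the MerkleTreeLeaf
    # for a precert entry (entry_type 1).
    cleaned = cleanprecert(precert_der, issuer=issuer_der)
    issuer_key_hash = get_cert_key_hash(issuer_der)
    return pack_mtl_precert(timestamp, cleaned, issuer_key_hash)
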
diff --git a/tools/rfc2459.py b/tools/rfc2459.py
new file mode 100644
index 0000000..0ce9c6d
--- /dev/null
+++ b/tools/rfc2459.py
@@ -0,0 +1,927 @@
+# Copyright (c) 2005-2013, Ilya Etingof <ilya@glas.net>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+# X.509 message syntax
+#
+# ASN.1 source from:
+# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/x509.asn
+# http://www.ietf.org/rfc/rfc2459.txt
+#
+# Sample captures from:
+# http://wiki.wireshark.org/SampleCaptures/
+#
+from pyasn1.type import tag,namedtype,namedval,univ,constraint,char,useful
+
+MAX = 64 # XXX ?
+
+#
+# PKIX1Explicit88
+#
+
+# Upper Bounds
+ub_name = univ.Integer(32768)
+ub_common_name = univ.Integer(64)
+ub_locality_name = univ.Integer(128)
+ub_state_name = univ.Integer(128)
+ub_organization_name = univ.Integer(64)
+ub_organizational_unit_name = univ.Integer(64)
+ub_title = univ.Integer(64)
+ub_match = univ.Integer(128)
+ub_emailaddress_length = univ.Integer(128)
+ub_common_name_length = univ.Integer(64)
+ub_country_name_alpha_length = univ.Integer(2)
+ub_country_name_numeric_length = univ.Integer(3)
+ub_domain_defined_attributes = univ.Integer(4)
+ub_domain_defined_attribute_type_length = univ.Integer(8)
+ub_domain_defined_attribute_value_length = univ.Integer(128)
+ub_domain_name_length = univ.Integer(16)
+ub_extension_attributes = univ.Integer(256)
+ub_e163_4_number_length = univ.Integer(15)
+ub_e163_4_sub_address_length = univ.Integer(40)
+ub_generation_qualifier_length = univ.Integer(3)
+ub_given_name_length = univ.Integer(16)
+ub_initials_length = univ.Integer(5)
+ub_integer_options = univ.Integer(256)
+ub_numeric_user_id_length = univ.Integer(32)
+ub_organization_name_length = univ.Integer(64)
+ub_organizational_unit_name_length = univ.Integer(32)
+ub_organizational_units = univ.Integer(4)
+ub_pds_name_length = univ.Integer(16)
+ub_pds_parameter_length = univ.Integer(30)
+ub_pds_physical_address_lines = univ.Integer(6)
+ub_postal_code_length = univ.Integer(16)
+ub_surname_length = univ.Integer(40)
+ub_terminal_id_length = univ.Integer(24)
+ub_unformatted_address_length = univ.Integer(180)
+ub_x121_address_length = univ.Integer(16)
+
+class UniversalString(char.UniversalString): pass
+class BMPString(char.BMPString): pass
+class UTF8String(char.UTF8String): pass
+
+id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7')
+id_pe = univ.ObjectIdentifier('1.3.6.1.5.5.7.1')
+id_qt = univ.ObjectIdentifier('1.3.6.1.5.5.7.2')
+id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3')
+id_ad = univ.ObjectIdentifier('1.3.6.1.5.5.7.48')
+
+id_qt_cps = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.1')
+id_qt_unotice = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.2')
+
+id_ad_ocsp = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.1')
+id_ad_caIssuers = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.2')
+
+class AttributeValue(univ.Any): pass
+
+class AttributeType(univ.ObjectIdentifier): pass
+
+class AttributeTypeAndValue(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('value', AttributeValue())
+ )
+
+class Attribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue()))
+ )
+
+id_at = univ.ObjectIdentifier('2.5.4')
+id_at_name = univ.ObjectIdentifier('2.5.4.41')
+id_at_sutname = univ.ObjectIdentifier('2.5.4.4')
+id_at_givenName = univ.ObjectIdentifier('2.5.4.42')
+id_at_initials = univ.ObjectIdentifier('2.5.4.43')
+id_at_generationQualifier = univ.ObjectIdentifier('2.5.4.44')
+
+class X520name(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name)))
+ )
+
+id_at_commonName = univ.ObjectIdentifier('2.5.4.3')
+
+class X520CommonName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name)))
+ )
+
+id_at_localityName = univ.ObjectIdentifier('2.5.4.7')
+
+class X520LocalityName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name)))
+ )
+
+id_at_stateOrProvinceName = univ.ObjectIdentifier('2.5.4.8')
+
+class X520StateOrProvinceName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name)))
+ )
+
+id_at_organizationName = univ.ObjectIdentifier('2.5.4.10')
+
+class X520OrganizationName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name)))
+ )
+
+id_at_organizationalUnitName = univ.ObjectIdentifier('2.5.4.11')
+
+class X520OrganizationalUnitName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name)))
+ )
+
+id_at_title = univ.ObjectIdentifier('2.5.4.12')
+
+class X520Title(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title)))
+ )
+
+id_at_dnQualifier = univ.ObjectIdentifier('2.5.4.46')
+
+class X520dnQualifier(char.PrintableString): pass
+
+id_at_countryName = univ.ObjectIdentifier('2.5.4.6')
+
+class X520countryName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(2, 2)
+
+pkcs_9 = univ.ObjectIdentifier('1.2.840.113549.1.9')
+
+emailAddress = univ.ObjectIdentifier('1.2.840.113549.1.9.1')
+
+class Pkcs9email(char.IA5String):
+ subtypeSpec = char.IA5String.subtypeSpec + constraint.ValueSizeConstraint(1, ub_emailaddress_length)
+
+# ----
+
+class DSAPrivateKey(univ.Sequence):
+ """PKIX compliant DSA private key structure"""
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('v1', 0)))),
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer()),
+ namedtype.NamedType('public', univ.Integer()),
+ namedtype.NamedType('private', univ.Integer())
+ )
+
+# ----
+
+class RelativeDistinguishedName(univ.SetOf):
+ componentType = AttributeTypeAndValue()
+
+class RDNSequence(univ.SequenceOf):
+ componentType = RelativeDistinguishedName()
+
+class Name(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('', RDNSequence())
+ )
+
+class DirectoryString(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+        namedtype.NamedType('ia5String', char.IA5String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) # XXX: ia5String is not an alternative of DirectoryString in RFC 2459 and arguably should not be here
+ )
+
+# certificate and CRL specific structures begin here
+
+class AlgorithmIdentifier(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', univ.ObjectIdentifier()),
+ namedtype.OptionalNamedType('parameters', univ.Any())
+ )
+
+class Extension(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extnID', univ.ObjectIdentifier()),
+ namedtype.DefaultedNamedType('critical', univ.Boolean('False')),
+ namedtype.NamedType('extnValue', univ.Any())
+ )
+
+class Extensions(univ.SequenceOf):
+ componentType = Extension()
+ sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class SubjectPublicKeyInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('algorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('subjectPublicKey', univ.BitString())
+ )
+
+class UniqueIdentifier(univ.BitString): pass
+
+class Time(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('utcTime', useful.UTCTime()),
+ namedtype.NamedType('generalTime', useful.GeneralizedTime())
+ )
+
+class Validity(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('notBefore', Time()),
+ namedtype.NamedType('notAfter', Time())
+ )
+
+class CertificateSerialNumber(univ.Integer): pass
+
+class Version(univ.Integer):
+ namedValues = namedval.NamedValues(
+ ('v1', 0), ('v2', 1), ('v3', 2)
+ )
+
+class TBSCertificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.DefaultedNamedType('version', Version('v1').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('serialNumber', CertificateSerialNumber()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('validity', Validity()),
+ namedtype.NamedType('subject', Name()),
+ namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
+ namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('extensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+class Certificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertificate', TBSCertificate()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signatureValue', univ.BitString())
+ )
+
+# CRL structures
+
+class RevokedCertificate(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('userCertificate', CertificateSerialNumber()),
+ namedtype.NamedType('revocationDate', Time()),
+ namedtype.OptionalNamedType('crlEntryExtensions', Extensions())
+ )
+
+class TBSCertList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('version', Version()),
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
+ namedtype.NamedType('issuer', Name()),
+ namedtype.NamedType('thisUpdate', Time()),
+ namedtype.OptionalNamedType('nextUpdate', Time()),
+ namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=RevokedCertificate())),
+ namedtype.OptionalNamedType('crlExtensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
+ )
+
+class CertificateList(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('tbsCertList', TBSCertList()),
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
+ namedtype.NamedType('signature', univ.BitString())
+ )
+
+# Algorithm OIDs and parameter structures
+
+pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1')
+rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1')
+md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2')
+md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4')
+sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5')
+id_dsa_with_sha1 = univ.ObjectIdentifier('1.2.840.10040.4.3')
+
+class Dss_Sig_Value(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('r', univ.Integer()),
+ namedtype.NamedType('s', univ.Integer())
+ )
+
+dhpublicnumber = univ.ObjectIdentifier('1.2.840.10046.2.1')
+
+class ValidationParms(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('seed', univ.BitString()),
+ namedtype.NamedType('pgenCounter', univ.Integer())
+ )
+
+class DomainParameters(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.NamedType('j', univ.Integer()),
+ namedtype.OptionalNamedType('validationParms', ValidationParms())
+ )
+
+id_dsa = univ.ObjectIdentifier('1.2.840.10040.4.1')
+
+class Dss_Parms(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('p', univ.Integer()),
+ namedtype.NamedType('q', univ.Integer()),
+ namedtype.NamedType('g', univ.Integer())
+ )
+
+# x400 address syntax starts here
+
+teletex_domain_defined_attributes = univ.Integer(6)
+
+class TeletexDomainDefinedAttribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.TeletexString())
+ )
+
+class TeletexDomainDefinedAttributes(univ.SequenceOf):
+ componentType = TeletexDomainDefinedAttribute()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+terminal_type = univ.Integer(23)
+
+class TerminalType(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, ub_integer_options)
+ namedValues = namedval.NamedValues(
+ ('telex', 3),
+ ('teletelex', 4),
+ ('g3-facsimile', 5),
+ ('g4-facsimile', 6),
+ ('ia5-terminal', 7),
+ ('videotex', 8)
+ )
+
+class PresentationAddress(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('pSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('tSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('nAddresses', univ.SetOf(componentType=univ.OctetString()).subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3), subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ )
+
+extended_network_address = univ.Integer(22)
+
+class E163_4_address(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('number', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_number_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('sub-address', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_sub_address_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class ExtendedNetworkAddress(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('e163-4-address', E163_4_address()),
+ namedtype.NamedType('psap-address', PresentationAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class PDSParameter(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('printable-string', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)))
+ )
+
+local_postal_attributes = univ.Integer(21)
+
+class LocalPostalAttributes(PDSParameter): pass
+
+class UniquePostalName(PDSParameter): pass
+
+unique_postal_name = univ.Integer(20)
+
+poste_restante_address = univ.Integer(19)
+
+class PosteRestanteAddress(PDSParameter): pass
+
+post_office_box_address = univ.Integer(18)
+
+class PostOfficeBoxAddress(PDSParameter): pass
+
+street_address = univ.Integer(17)
+
+class StreetAddress(PDSParameter): pass
+
+class UnformattedPostalAddress(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('printable-address', univ.SequenceOf(componentType=char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_physical_address_lines)))),
+ namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_unformatted_address_length)))
+ )
+
+physical_delivery_office_name = univ.Integer(10)
+
+class PhysicalDeliveryOfficeName(PDSParameter): pass
+
+physical_delivery_office_number = univ.Integer(11)
+
+class PhysicalDeliveryOfficeNumber(PDSParameter): pass
+
+extension_OR_address_components = univ.Integer(12)
+
+class ExtensionORAddressComponents(PDSParameter): pass
+
+physical_delivery_personal_name = univ.Integer(13)
+
+class PhysicalDeliveryPersonalName(PDSParameter): pass
+
+physical_delivery_organization_name = univ.Integer(14)
+
+class PhysicalDeliveryOrganizationName(PDSParameter): pass
+
+extension_physical_delivery_address_components = univ.Integer(15)
+
+class ExtensionPhysicalDeliveryAddressComponents(PDSParameter): pass
+
+unformatted_postal_address = univ.Integer(16)
+
+postal_code = univ.Integer(9)
+
+class PostalCode(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))),
+ namedtype.NamedType('printable-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length)))
+ )
+
+class PhysicalDeliveryCountryName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+ )
+
+class PDSName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_pds_name_length)
+
+physical_delivery_country_name = univ.Integer(8)
+
+class TeletexOrganizationalUnitName(char.TeletexString):
+ subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+pds_name = univ.Integer(7)
+
+teletex_organizational_unit_names = univ.Integer(5)
+
+class TeletexOrganizationalUnitNames(univ.SequenceOf):
+ componentType = TeletexOrganizationalUnitName()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+teletex_personal_name = univ.Integer(4)
+
+class TeletexPersonalName(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+teletex_organization_name = univ.Integer(3)
+
+class TeletexOrganizationName(char.TeletexString):
+ subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+teletex_common_name = univ.Integer(2)
+
+class TeletexCommonName(char.TeletexString):
+ subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+class CommonName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length)
+
+common_name = univ.Integer(1)
+
+class ExtensionAttribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extension-attribute-type', univ.Integer().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_extension_attributes), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('extension-attribute-value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class ExtensionAttributes(univ.SetOf):
+ componentType = ExtensionAttribute()
+ subtypeSpec = univ.SetOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_extension_attributes)
+
+class BuiltInDomainDefinedAttribute(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))),
+ namedtype.NamedType('value', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length)))
+ )
+
+class BuiltInDomainDefinedAttributes(univ.SequenceOf):
+ componentType = BuiltInDomainDefinedAttribute()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes)
+
+class OrganizationalUnitName(char.PrintableString):
+    subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length)
+
+class OrganizationalUnitNames(univ.SequenceOf):
+ componentType = OrganizationalUnitName()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units)
+
+class PersonalName(univ.Set):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('surname', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('given-name', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('initials', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('generation-qualifier', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)))
+ )
+
+class NumericUserIdentifier(char.NumericString):
+ subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_numeric_user_id_length)
+
+class OrganizationName(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length)
+
+class PrivateDomainName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length)))
+ )
+
+class TerminalIdentifier(char.PrintableString):
+ subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_terminal_id_length)
+
+class X121Address(char.NumericString):
+ subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_x121_address_length)
+
+class NetworkAddress(X121Address): pass
+
+class AdministrationDomainName(univ.Choice):
+ tagSet = univ.Choice.tagSet.tagExplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 2)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))),
+ namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length)))
+ )
+
+class CountryName(univ.Choice):
+ tagSet = univ.Choice.tagSet.tagExplicitly(
+ tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1)
+ )
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))),
+ namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length)))
+ )
+
+class BuiltInStandardAttributes(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('country-name', CountryName()),
+ namedtype.OptionalNamedType('administration-domain-name', AdministrationDomainName()),
+ namedtype.OptionalNamedType('network-address', NetworkAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('terminal-identifier', TerminalIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('private-domain-name', PrivateDomainName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('organization-name', OrganizationName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.OptionalNamedType('numeric-user-identifier', NumericUserIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.OptionalNamedType('personal-name', PersonalName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
+ namedtype.OptionalNamedType('organizational-unit-names', OrganizationalUnitNames().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6)))
+ )
+
+class ORAddress(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('built-in-standard-attributes', BuiltInStandardAttributes()),
+ namedtype.OptionalNamedType('built-in-domain-defined-attributes', BuiltInDomainDefinedAttributes()),
+ namedtype.OptionalNamedType('extension-attributes', ExtensionAttributes())
+ )
+
+#
+# PKIX1Implicit88
+#
+
+id_ce_invalidityDate = univ.ObjectIdentifier('2.5.29.24')
+
+class InvalidityDate(useful.GeneralizedTime): pass
+
+id_holdinstruction_none = univ.ObjectIdentifier('2.2.840.10040.2.1')
+id_holdinstruction_callissuer = univ.ObjectIdentifier('2.2.840.10040.2.2')
+id_holdinstruction_reject = univ.ObjectIdentifier('2.2.840.10040.2.3')
+
+holdInstruction = univ.ObjectIdentifier('2.2.840.10040.2')
+
+id_ce_holdInstructionCode = univ.ObjectIdentifier('2.5.29.23')
+
+class HoldInstructionCode(univ.ObjectIdentifier): pass
+
+id_ce_cRLReasons = univ.ObjectIdentifier('2.5.29.21')
+
+class CRLReason(univ.Enumerated):
+ namedValues = namedval.NamedValues(
+ ('unspecified', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6),
+ ('removeFromCRL', 8)
+ )
+
+id_ce_cRLNumber = univ.ObjectIdentifier('2.5.29.20')
+
+class CRLNumber(univ.Integer):
+    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX)
+
+class BaseCRLNumber(CRLNumber): pass
+
+id_kp_serverAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.1')
+id_kp_clientAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.2')
+id_kp_codeSigning = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.3')
+id_kp_emailProtection = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.4')
+id_kp_ipsecEndSystem = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.5')
+id_kp_ipsecTunnel = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.6')
+id_kp_ipsecUser = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.7')
+id_kp_timeStamping = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.8')
+id_pe_authorityInfoAccess = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.1')
+id_ce_extKeyUsage = univ.ObjectIdentifier('2.5.29.37')
+
+class KeyPurposeId(univ.ObjectIdentifier): pass
+
+class ExtKeyUsageSyntax(univ.SequenceOf):
+ componentType = KeyPurposeId()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class ReasonFlags(univ.BitString):
+ namedValues = namedval.NamedValues(
+ ('unused', 0),
+ ('keyCompromise', 1),
+ ('cACompromise', 2),
+ ('affiliationChanged', 3),
+ ('superseded', 4),
+ ('cessationOfOperation', 5),
+ ('certificateHold', 6)
+ )
+
+
+class SkipCerts(univ.Integer):
+    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX)
+
+id_ce_policyConstraints = univ.ObjectIdentifier('2.5.29.36')
+
+class PolicyConstraints(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('requireExplicitPolicy', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('inhibitPolicyMapping', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+id_ce_basicConstraints = univ.ObjectIdentifier('2.5.29.19')
+
+class BasicConstraints(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+        namedtype.DefaultedNamedType('cA', univ.Boolean(False)),
+ namedtype.OptionalNamedType('pathLenConstraint', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX)))
+ )
+
+id_ce_subjectDirectoryAttributes = univ.ObjectIdentifier('2.5.29.9')
+
+class SubjectDirectoryAttributes(univ.SequenceOf):
+ componentType = Attribute()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class EDIPartyName(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('partyName', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+class AnotherName(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type-id', univ.ObjectIdentifier()),
+ namedtype.NamedType('value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
+ )
+
+class GeneralName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherName', AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.NamedType('rfc822Name', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('dNSName', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.NamedType('x400Address', ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.NamedType('directoryName', Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
+ namedtype.NamedType('ediPartyName', EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
+ namedtype.NamedType('uniformResourceIdentifier', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
+ namedtype.NamedType('iPAddress', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))),
+ namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)))
+ )
+
+class GeneralNames(univ.SequenceOf):
+ componentType = GeneralName()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+class AccessDescription(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('accessMethod', univ.ObjectIdentifier()),
+ namedtype.NamedType('accessLocation', GeneralName())
+ )
+
+class AuthorityInfoAccessSyntax(univ.SequenceOf):
+ componentType = AccessDescription()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_deltaCRLIndicator = univ.ObjectIdentifier('2.5.29.27')
+
+class DistributionPointName(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('fullName', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+class DistributionPoint(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('reasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
+    )
+
+class BaseDistance(univ.Integer):
+ subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX)
+
+id_ce_cRLDistributionPoints = univ.ObjectIdentifier('2.5.29.31')
+
+class CRLDistPointsSyntax(univ.SequenceOf):
+    componentType = DistributionPoint()
+    subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_issuingDistributionPoint = univ.ObjectIdentifier('2.5.29.28')
+
+class IssuingDistributionPoint(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.NamedType('onlyContainsUserCerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.NamedType('onlyContainsCACerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
+ namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
+ namedtype.NamedType('indirectCRL', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
+ )
+
+class GeneralSubtree(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('base', GeneralName()),
+ namedtype.NamedType('minimum', BaseDistance(0).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+ namedtype.OptionalNamedType('maximum', BaseDistance().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+class GeneralSubtrees(univ.SequenceOf):
+ componentType = GeneralSubtree()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_nameConstraints = univ.ObjectIdentifier('2.5.29.30')
+
+class NameConstraints(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
+        namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
+ )
+
+
+class DisplayText(univ.Choice):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('visibleString', char.VisibleString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
+ namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200)))
+ )
+
+class NoticeReference(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('organization', DisplayText()),
+ namedtype.NamedType('noticeNumbers', univ.SequenceOf(componentType=univ.Integer()))
+ )
+
+class UserNotice(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('noticeRef', NoticeReference()),
+ namedtype.OptionalNamedType('explicitText', DisplayText())
+ )
+
+class CPSuri(char.IA5String): pass
+
+class PolicyQualifierId(univ.ObjectIdentifier):
+ subtypeSpec = univ.ObjectIdentifier.subtypeSpec + constraint.SingleValueConstraint(id_qt_cps, id_qt_unotice)
+
+class CertPolicyId(univ.ObjectIdentifier): pass
+
+class PolicyQualifierInfo(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyQualifierId', PolicyQualifierId()),
+ namedtype.NamedType('qualifier', univ.Any())
+ )
+
+id_ce_certificatePolicies = univ.ObjectIdentifier('2.5.29.32')
+
+class PolicyInformation(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('policyIdentifier', CertPolicyId()),
+ namedtype.OptionalNamedType('policyQualifiers', univ.SequenceOf(componentType=PolicyQualifierInfo()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
+ )
+
+class CertificatePolicies(univ.SequenceOf):
+ componentType = PolicyInformation()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_policyMappings = univ.ObjectIdentifier('2.5.29.33')
+
+class PolicyMapping(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('issuerDomainPolicy', CertPolicyId()),
+ namedtype.NamedType('subjectDomainPolicy', CertPolicyId())
+ )
+
+class PolicyMappings(univ.SequenceOf):
+ componentType = PolicyMapping()
+ subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+id_ce_privateKeyUsagePeriod = univ.ObjectIdentifier('2.5.29.16')
+
+class PrivateKeyUsagePeriod(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('notBefore', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('notAfter', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
+ )
+
+id_ce_keyUsage = univ.ObjectIdentifier('2.5.29.15')
+
+class KeyUsage(univ.BitString):
+ namedValues = namedval.NamedValues(
+ ('digitalSignature', 0),
+ ('nonRepudiation', 1),
+ ('keyEncipherment', 2),
+ ('dataEncipherment', 3),
+ ('keyAgreement', 4),
+ ('keyCertSign', 5),
+ ('cRLSign', 6),
+ ('encipherOnly', 7),
+ ('decipherOnly', 8)
+ )
+
+id_ce = univ.ObjectIdentifier('2.5.29')
+
+id_ce_authorityKeyIdentifier = univ.ObjectIdentifier('2.5.29.35')
+
+class KeyIdentifier(univ.OctetString): pass
+
+id_ce_subjectKeyIdentifier = univ.ObjectIdentifier('2.5.29.14')
+
+class SubjectKeyIdentifier(KeyIdentifier): pass
+
+class AuthorityKeyIdentifier(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
+ namedtype.OptionalNamedType('authorityCertIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
+ namedtype.OptionalNamedType('authorityCertSerialNumber', CertificateSerialNumber().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
+ )
+
+id_ce_certificateIssuer = univ.ObjectIdentifier('2.5.29.29')
+
+class CertificateIssuer(GeneralNames): pass
+
+id_ce_subjectAltName = univ.ObjectIdentifier('2.5.29.17')
+
+class SubjectAltName(GeneralNames): pass
+
+id_ce_issuerAltName = univ.ObjectIdentifier('2.5.29.18')
+
+class IssuerAltName(GeneralNames): pass
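The classes above mirror pyasn1's RFC 2459 definitions. As a minimal usage sketch (not part of the patch; der_bytes and the helper name are assumptions), a DER-encoded certificate can be decoded against the Certificate spec defined above and its fields read with getComponentByName:

    from pyasn1.codec.der import decoder
    import rfc2459

    def cert_serial_number(der_bytes):
        # Decode raw DER against the Certificate spec from tools/rfc2459.py.
        cert, rest = decoder.decode(der_bytes, asn1Spec=rfc2459.Certificate())
        assert rest == ''
        tbs = cert.getComponentByName('tbsCertificate')
        return int(tbs.getComponentByName('serialNumber'))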
diff --git a/tools/submitcert.py b/tools/submitcert.py
index 9f0be67..2e8cc33 100755
--- a/tools/submitcert.py
+++ b/tools/submitcert.py
@@ -13,6 +13,10 @@ import struct
import hashlib
import itertools
from certtools import *
+try:
+ from precerttools import *
+except ImportError:
+ pass
import os
import signal
import select
@@ -44,10 +49,28 @@ sth = get_sth(baseurl)
def submitcert((certfile, cert)):
timing = timing_point()
certchain = get_certs_from_string(cert)
+ precerts = get_precerts_from_string(cert)
+ assert len(precerts) == 0 or len(precerts) == 1
+ precert = precerts[0] if precerts else None
timing_point(timing, "readcerts")
try:
- result = add_chain(baseurl, {"chain":map(base64.b64encode, certchain)})
+ if precert:
+ if ext_key_usage_precert_signing_cert in get_ext_key_usage(certchain[0]):
+ issuer_key_hash = get_cert_key_hash(certchain[1])
+ issuer = certchain[1]
+ else:
+ issuer_key_hash = get_cert_key_hash(certchain[0])
+ issuer = None
+ cleanedcert = cleanprecert(precert, issuer=issuer)
+ signed_entry = pack_precert(cleanedcert, issuer_key_hash)
+ leafcert = cleanedcert
+ result = add_prechain(baseurl, {"chain":map(base64.b64encode, [precert] + certchain)})
+ else:
+ signed_entry = pack_cert(certchain[0])
+ leafcert = certchain[0]
+ issuer_key_hash = None
+ result = add_chain(baseurl, {"chain":map(base64.b64encode, certchain)})
except SystemExit:
print "EXIT:", certfile
select.select([], [], [], 1.0)
@@ -61,7 +84,7 @@ def submitcert((certfile, cert)):
try:
if args.check_sct:
- check_sct_signature(baseurl, certchain[0], result)
+ check_sct_signature(baseurl, signed_entry, result, precert=precert)
timing_point(timing, "checksig")
except AssertionError, e:
print "ERROR:", certfile, e
@@ -75,7 +98,7 @@ def submitcert((certfile, cert)):
if lookup_in_log:
- merkle_tree_leaf = pack_mtl(result["timestamp"], certchain[0])
+ merkle_tree_leaf = pack_mtl(result["timestamp"], leafcert)
leaf_hash = get_leaf_hash(merkle_tree_leaf)
@@ -113,7 +136,7 @@ def submitcert((certfile, cert)):
print "and submitted chain has length", len(submittedcertchain)
timing_point(timing, "lookup")
- return ((certchain[0], result), timing["deltatimes"])
+ return ((leafcert, issuer_key_hash, result), timing["deltatimes"])
def get_ncerts(certfiles):
n = 0
@@ -136,9 +159,12 @@ def get_all_certificates(certfiles):
else:
yield (certfile, open(certfile).read())
-def save_sct(sct, sth):
+def save_sct(sct, sth, leafcert, issuer_key_hash):
sctlog = open(args.sct_file, "a")
- json.dump({"leafcert": base64.b64encode(leafcert), "sct": sct, "sth": sth}, sctlog)
+ sctentry = {"leafcert": base64.b64encode(leafcert), "sct": sct, "sth": sth}
+ if issuer_key_hash:
+ sctentry["issuer_key_hash"] = base64.b64encode(issuer_key_hash)
+ json.dump(sctentry, sctlog)
sctlog.write("\n")
sctlog.close()
@@ -157,8 +183,8 @@ certs = get_all_certificates(certfiles)
(result, timing) = submitcert(certs.next())
if result != None:
nsubmitted += 1
- (leafcert, sct) = result
- save_sct(sct, sth)
+ (leafcert, issuer_key_hash, sct) = result
+ save_sct(sct, sth, leafcert, issuer_key_hash)
if args.pre_warm:
select.select([], [], [], 3.0)
@@ -175,8 +201,8 @@ try:
sys.exit(1)
if result != None:
nsubmitted += 1
- (leafcert, sct) = result
- save_sct(sct, sth)
+ (leafcert, issuer_key_hash, sct) = result
+ save_sct(sct, sth, leafcert, issuer_key_hash)
deltatime = datetime.datetime.now() - starttime
deltatime_f = deltatime.seconds + deltatime.microseconds / 1000000.0
rate = nsubmitted / deltatime_f
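With this change save_sct() writes one JSON object per line containing leafcert, sct, sth and, for precertificates, issuer_key_hash; verifysct.py below consumes exactly these fields. A small reader for that format, as a sketch only (the path handling is an assumption):

    import base64
    import json

    def read_sct_log(path):
        # One JSON object per line, as written by save_sct().
        for line in open(path):
            entry = json.loads(line)
            leafcert = base64.b64decode(entry["leafcert"])
            issuer_key_hash = None
            if "issuer_key_hash" in entry:
                issuer_key_hash = base64.b64decode(entry["issuer_key_hash"])
            yield (leafcert, issuer_key_hash, entry["sct"], entry["sth"])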
diff --git a/tools/testcase1.py b/tools/testcase1.py
index 73613fb..4502b56 100755
--- a/tools/testcase1.py
+++ b/tools/testcase1.py
@@ -14,10 +14,10 @@ import hashlib
import itertools
from certtools import *
-baseurl = "https://127.0.0.1:8080/"
-certfiles = ["testcerts/cert1.txt", "testcerts/cert2.txt",
- "testcerts/cert3.txt", "testcerts/cert4.txt",
- "testcerts/cert5.txt"]
+baseurls = ["https://127.0.0.1:8080/"]
+certfiles = ["../tools/testcerts/cert1.txt", "../tools/testcerts/cert2.txt",
+ "../tools/testcerts/cert3.txt", "../tools/testcerts/cert4.txt",
+ "../tools/testcerts/cert5.txt"]
cc1 = get_certs_from_file(certfiles[0])
cc2 = get_certs_from_file(certfiles[1])
@@ -51,7 +51,7 @@ def assert_equal(actual, expected, name, quiet=False, nodata=False):
elif not quiet:
print_success("%s was correct", name)
-def print_and_check_tree_size(expected):
+def print_and_check_tree_size(expected, baseurl):
global failures
sth = get_sth(baseurl)
try:
@@ -61,24 +61,26 @@ def print_and_check_tree_size(expected):
except ecdsa.keys.BadSignatureError, e:
print_error("bad STH signature")
tree_size = sth["tree_size"]
- assert_equal(tree_size, expected, "tree size")
+ assert_equal(tree_size, expected, "tree size", quiet=True)
-def do_add_chain(chain):
+def do_add_chain(chain, baseurl):
global failures
try:
result = add_chain(baseurl, {"chain":map(base64.b64encode, chain)})
except ValueError, e:
print_error("%s", e)
try:
- check_sct_signature(baseurl, chain[0], result)
+ signed_entry = pack_cert(chain[0])
+ check_sct_signature(baseurl, signed_entry, result)
except AssertionError, e:
print_error("%s", e)
except ecdsa.keys.BadSignatureError, e:
+ print e
print_error("bad SCT signature")
print_success("signature check succeeded")
return result
-def get_and_validate_proof(timestamp, chain, leaf_index, nentries):
+def get_and_validate_proof(timestamp, chain, leaf_index, nentries, baseurl):
cert = chain[0]
merkle_tree_leaf = pack_mtl(timestamp, cert)
leaf_hash = get_leaf_hash(merkle_tree_leaf)
@@ -86,31 +88,31 @@ def get_and_validate_proof(timestamp, chain, leaf_index, nentries):
proof = get_proof_by_hash(baseurl, leaf_hash, sth["tree_size"])
leaf_index = proof["leaf_index"]
inclusion_proof = [base64.b64decode(e) for e in proof["audit_path"]]
- assert_equal(leaf_index, leaf_index, "leaf_index")
- assert_equal(len(inclusion_proof), nentries, "audit_path length")
+ assert_equal(leaf_index, leaf_index, "leaf_index", quiet=True)
+ assert_equal(len(inclusion_proof), nentries, "audit_path length", quiet=True)
calc_root_hash = verify_inclusion_proof(inclusion_proof, leaf_index, sth["tree_size"], leaf_hash)
root_hash = base64.b64decode(sth["sha256_root_hash"])
- assert_equal(root_hash, calc_root_hash, "verified root hash", nodata=True)
- get_and_check_entry(timestamp, chain, leaf_index)
+ assert_equal(root_hash, calc_root_hash, "verified root hash", nodata=True, quiet=True)
+ get_and_check_entry(timestamp, chain, leaf_index, baseurl)
-def get_and_validate_consistency_proof(sth1, sth2, size1, size2):
+def get_and_validate_consistency_proof(sth1, sth2, size1, size2, baseurl):
consistency_proof = [base64.decodestring(entry) for entry in get_consistency_proof(baseurl, size1, size2)]
(old_treehead, new_treehead) = verify_consistency_proof(consistency_proof, size1, size2, sth1)
#print repr(sth1), repr(old_treehead)
#print repr(sth2), repr(new_treehead)
- assert_equal(old_treehead, sth1, "sth1", nodata=True)
- assert_equal(new_treehead, sth2, "sth2", nodata=True)
+ assert_equal(old_treehead, sth1, "sth1", nodata=True, quiet=True)
+ assert_equal(new_treehead, sth2, "sth2", nodata=True, quiet=True)
-def get_and_check_entry(timestamp, chain, leaf_index):
+def get_and_check_entry(timestamp, chain, leaf_index, baseurl):
entries = get_entries(baseurl, leaf_index, leaf_index)
assert_equal(len(entries), 1, "get_entries", quiet=True)
fetched_entry = entries["entries"][0]
merkle_tree_leaf = pack_mtl(timestamp, chain[0])
leaf_input = base64.decodestring(fetched_entry["leaf_input"])
- assert_equal(leaf_input, merkle_tree_leaf, "entry", nodata=True)
+ assert_equal(leaf_input, merkle_tree_leaf, "entry", nodata=True, quiet=True)
extra_data = base64.decodestring(fetched_entry["extra_data"])
certchain = decode_certificate_chain(extra_data)
@@ -118,7 +120,7 @@ def get_and_check_entry(timestamp, chain, leaf_index):
for (submittedcert, fetchedcert, i) in zip(submittedcertchain,
certchain, itertools.count(1)):
- assert_equal(fetchedcert, submittedcert, "cert %d in chain" % (i,))
+ assert_equal(fetchedcert, submittedcert, "cert %d in chain" % (i,), quiet=True)
if len(certchain) == len(submittedcertchain) + 1:
last_issuer = get_cert_info(submittedcertchain[-1])["issuer"]
@@ -136,106 +138,114 @@ def get_and_check_entry(timestamp, chain, leaf_index):
len(submittedcertchain))
def merge():
- return subprocess.call(["./merge.py", "--baseurl", "https://127.0.0.1:8080/", "--frontend", "https://127.0.0.1:8082/", "--storage", "https://127.0.0.1:8081/", "--mergedb", "../rel/mergedb", "--signing", "https://127.0.0.1:8088/", "--own-keyname", "merge-1", "--own-keyfile", "../rel/privatekeys/merge-1-private.pem"])
+ return subprocess.call(["../tools/merge.py", "--config", "../test/catlfish-test.cfg",
+ "--localconfig", "../test/catlfish-test-local-merge.cfg"])
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True)
-print_and_check_tree_size(0)
+for baseurl in baseurls:
+ print_and_check_tree_size(0, baseurl)
testgroup("cert1")
-result1 = do_add_chain(cc1)
+result1 = do_add_chain(cc1, baseurls[0])
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True)
size_sth = {}
-print_and_check_tree_size(1)
-size_sth[1] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"])
+for baseurl in baseurls:
+ print_and_check_tree_size(1, baseurl)
+size_sth[1] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
-result2 = do_add_chain(cc1)
+result2 = do_add_chain(cc1, baseurls[0])
assert_equal(result2["timestamp"], result1["timestamp"], "timestamp")
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True)
-print_and_check_tree_size(1)
-size1_v2_sth = base64.b64decode(get_sth(baseurl)["sha256_root_hash"])
+for baseurl in baseurls:
+ print_and_check_tree_size(1, baseurl)
+size1_v2_sth = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
assert_equal(size_sth[1], size1_v2_sth, "sth", nodata=True)
# TODO: add invalid cert and check that it generates an error
# and that treesize still is 1
-get_and_validate_proof(result1["timestamp"], cc1, 0, 0)
+get_and_validate_proof(result1["timestamp"], cc1, 0, 0, baseurls[0])
testgroup("cert2")
-result3 = do_add_chain(cc2)
+result3 = do_add_chain(cc2, baseurls[0])
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True)
-print_and_check_tree_size(2)
-size_sth[2] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"])
+for baseurl in baseurls:
+ print_and_check_tree_size(2, baseurl)
+size_sth[2] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
-get_and_validate_proof(result1["timestamp"], cc1, 0, 1)
-get_and_validate_proof(result3["timestamp"], cc2, 1, 1)
+get_and_validate_proof(result1["timestamp"], cc1, 0, 1, baseurls[0])
+get_and_validate_proof(result3["timestamp"], cc2, 1, 1, baseurls[0])
testgroup("cert3")
-result4 = do_add_chain(cc3)
+result4 = do_add_chain(cc3, baseurls[0])
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True)
-print_and_check_tree_size(3)
-size_sth[3] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"])
+for baseurl in baseurls:
+ print_and_check_tree_size(3, baseurl)
+size_sth[3] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
-get_and_validate_proof(result1["timestamp"], cc1, 0, 2)
-get_and_validate_proof(result3["timestamp"], cc2, 1, 2)
-get_and_validate_proof(result4["timestamp"], cc3, 2, 1)
+get_and_validate_proof(result1["timestamp"], cc1, 0, 2, baseurls[0])
+get_and_validate_proof(result3["timestamp"], cc2, 1, 2, baseurls[0])
+get_and_validate_proof(result4["timestamp"], cc3, 2, 1, baseurls[0])
testgroup("cert4")
-result5 = do_add_chain(cc4)
+result5 = do_add_chain(cc4, baseurls[0])
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True)
-print_and_check_tree_size(4)
-size_sth[4] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"])
+for baseurl in baseurls:
+ print_and_check_tree_size(4, baseurl)
+size_sth[4] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
-get_and_validate_proof(result1["timestamp"], cc1, 0, 2)
-get_and_validate_proof(result3["timestamp"], cc2, 1, 2)
-get_and_validate_proof(result4["timestamp"], cc3, 2, 2)
-get_and_validate_proof(result5["timestamp"], cc4, 3, 2)
+get_and_validate_proof(result1["timestamp"], cc1, 0, 2, baseurls[0])
+get_and_validate_proof(result3["timestamp"], cc2, 1, 2, baseurls[0])
+get_and_validate_proof(result4["timestamp"], cc3, 2, 2, baseurls[0])
+get_and_validate_proof(result5["timestamp"], cc4, 3, 2, baseurls[0])
testgroup("cert5")
-result6 = do_add_chain(cc5)
+result6 = do_add_chain(cc5, baseurls[0])
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True)
-print_and_check_tree_size(5)
-size_sth[5] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"])
+for baseurl in baseurls:
+ print_and_check_tree_size(5, baseurl)
+size_sth[5] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
-get_and_validate_proof(result1["timestamp"], cc1, 0, 3)
-get_and_validate_proof(result3["timestamp"], cc2, 1, 3)
-get_and_validate_proof(result4["timestamp"], cc3, 2, 3)
-get_and_validate_proof(result5["timestamp"], cc4, 3, 3)
-get_and_validate_proof(result6["timestamp"], cc5, 4, 1)
+get_and_validate_proof(result1["timestamp"], cc1, 0, 3, baseurls[0])
+get_and_validate_proof(result3["timestamp"], cc2, 1, 3, baseurls[0])
+get_and_validate_proof(result4["timestamp"], cc3, 2, 3, baseurls[0])
+get_and_validate_proof(result5["timestamp"], cc4, 3, 3, baseurls[0])
+get_and_validate_proof(result6["timestamp"], cc5, 4, 1, baseurls[0])
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True)
for first_size in range(1, 5):
for second_size in range(first_size + 1, 6):
- get_and_validate_consistency_proof(size_sth[first_size], size_sth[second_size], first_size, second_size)
+ get_and_validate_consistency_proof(size_sth[first_size], size_sth[second_size], first_size, second_size, baseurls[0])
print "-------"
if failures:
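get_and_validate_proof() relies on certtools.verify_inclusion_proof to check the audit path against the signed tree head. For reference, a generic RFC 6962-style audit-path check looks roughly like the sketch below; this illustrates the algorithm and is not the certtools implementation:

    import hashlib

    def node_hash(left, right):
        # Interior node per RFC 6962: SHA-256 over 0x01 || left || right.
        return hashlib.sha256("\x01" + left + right).digest()

    def verify_inclusion(leaf_hash, leaf_index, tree_size, audit_path, root_hash):
        fn, sn, r = leaf_index, tree_size - 1, leaf_hash
        for p in audit_path:
            if sn == 0:
                return False
            if fn % 2 == 1 or fn == sn:
                r = node_hash(p, r)
                if fn % 2 == 0:
                    while fn % 2 == 0 and fn != 0:
                        fn >>= 1
                        sn >>= 1
            else:
                r = node_hash(r, p)
            fn >>= 1
            sn >>= 1
        return sn == 0 and r == root_hash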
diff --git a/tools/validatestore.py b/tools/validatestore.py
new file mode 100755
index 0000000..74963e0
--- /dev/null
+++ b/tools/validatestore.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014, NORDUnet A/S.
+# See LICENSE for licensing information.
+
+import argparse
+import urllib2
+import urllib
+import json
+import base64
+import sys
+import struct
+import hashlib
+import itertools
+from certtools import *
+try:
+ from precerttools import *
+except ImportError:
+ pass
+import os
+import signal
+import select
+import zipfile
+import traceback
+
+parser = argparse.ArgumentParser(description='Check that stored certificates match the leaf hashes recorded for them')
+parser.add_argument('--store', default=None, metavar="dir", help='Get certificates from directory dir')
+parser.add_argument('--parallel', type=int, default=1, metavar="n", help="Number of parallel workers")
+args = parser.parse_args()
+
+from multiprocessing import Pool
+
+certfilepath = args.store
+
+if certfilepath[-1] == "/":
+ certfiles = [certfilepath + filename for filename in sorted(os.listdir(certfilepath)) if os.path.isfile(certfilepath + filename)]
+else:
+ certfiles = [certfilepath]
+
+def submitcert((certfile, cert)):
+ try:
+ certchain = get_certs_from_string(cert)
+ if len(certchain) == 0:
+ return True
+ precerts = get_precerts_from_string(cert)
+ hash = get_hash_from_certfile(cert)
+ timestamp = get_timestamp_from_certfile(cert)
+ assert len(precerts) == 0 or len(precerts) == 1
+ precert = precerts[0] if precerts else None
+ if precert:
+ if ext_key_usage_precert_signing_cert in get_ext_key_usage(certchain[0]):
+ issuer_key_hash = get_cert_key_hash(certchain[1])
+ issuer = certchain[1]
+ else:
+ issuer_key_hash = get_cert_key_hash(certchain[0])
+ issuer = None
+ cleanedcert = cleanprecert(precert, issuer=issuer)
+ mtl = pack_mtl_precert(timestamp, cleanedcert, issuer_key_hash)
+ leaf_hash = get_leaf_hash(mtl)
+ else:
+ mtl = pack_mtl(timestamp, certchain[0])
+ leaf_hash = get_leaf_hash(mtl)
+ if leaf_hash == hash:
+ return True
+ else:
+ print certfile, repr(leaf_hash), repr(hash), precert != None
+ return None
+ except Exception, e:
+ print certfile
+ traceback.print_exc()
+ raise e
+
+def get_all_certificates(certfiles):
+ for certfile in certfiles:
+ if certfile.endswith(".zip"):
+ zf = zipfile.ZipFile(certfile)
+ for name in zf.namelist():
+ yield (name, zf.read(name))
+ zf.close()
+ else:
+ yield (certfile, open(certfile).read())
+
+p = Pool(args.parallel, lambda: signal.signal(signal.SIGINT, signal.SIG_IGN))
+
+certs = get_all_certificates(certfiles)
+
+try:
+ for result in p.imap_unordered(submitcert, certs):
+ if result == None:
+ print "error"
+ p.terminate()
+ p.join()
+ sys.exit(1)
+except KeyboardInterrupt:
+ p.terminate()
+ p.join()
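validatestore.py runs submitcert() through a multiprocessing Pool whose workers ignore SIGINT, so Ctrl-C is handled once, in the parent, which then terminates the pool. The pattern in isolation, with a hypothetical check() standing in for the per-certificate work:

    import signal
    import sys
    from multiprocessing import Pool

    def check(item):
        # Stand-in for the real per-certificate validation.
        return item >= 0

    def ignore_sigint():
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    if __name__ == '__main__':
        pool = Pool(4, ignore_sigint)
        try:
            for ok in pool.imap_unordered(check, range(100)):
                if not ok:
                    print "error"
                    pool.terminate()
                    pool.join()
                    sys.exit(1)
        except KeyboardInterrupt:
            pool.terminate()
            pool.join()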
diff --git a/tools/verifysct.py b/tools/verifysct.py
index 699a0ad..27ab4c9 100755
--- a/tools/verifysct.py
+++ b/tools/verifysct.py
@@ -34,8 +34,16 @@ def verifysct(sctentry):
timing = timing_point()
leafcert = base64.b64decode(sctentry["leafcert"])
+ if "issuer_key_hash" in sctentry:
+ issuer_key_hash = base64.b64decode(sctentry["issuer_key_hash"])
+ else:
+ issuer_key_hash = None
try:
- check_sct_signature(baseurl, leafcert, sctentry["sct"])
+ if issuer_key_hash:
+ signed_entry = pack_precert(leafcert, issuer_key_hash)
+ else:
+ signed_entry = pack_cert(leafcert)
+ check_sct_signature(baseurl, signed_entry, sctentry["sct"], precert=issuer_key_hash)
timing_point(timing, "checksig")
except AssertionError, e:
print "ERROR:", e
@@ -47,7 +55,10 @@ def verifysct(sctentry):
print "ERROR: bad signature"
return (None, None)
- merkle_tree_leaf = pack_mtl(sctentry["sct"]["timestamp"], leafcert)
+ if issuer_key_hash:
+ merkle_tree_leaf = pack_mtl_precert(sctentry["sct"]["timestamp"], leafcert, issuer_key_hash)
+ else:
+ merkle_tree_leaf = pack_mtl(sctentry["sct"]["timestamp"], leafcert)
leaf_hash = get_leaf_hash(merkle_tree_leaf)
@@ -76,7 +87,7 @@ def verifysct(sctentry):
p = Pool(args.parallel, lambda: signal.signal(signal.SIGINT, signal.SIG_IGN))
sctfile = open(args.sct_file)
-scts = [json.loads(row) for row in sctfile]
+scts = (json.loads(row) for row in sctfile)
nverified = 0
lastprinted = 0