From 8a7bcb91f440502b24d92842e0c89f2fcdf90c04 Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Sun, 8 Mar 2015 00:03:36 +0100 Subject: merge.py: Store certs in hashed directory structure --- tools/merge.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) (limited to 'tools') diff --git a/tools/merge.py b/tools/merge.py index c137f4b..1b94581 100755 --- a/tools/merge.py +++ b/tools/merge.py @@ -14,6 +14,7 @@ import time import ecdsa import hashlib import urlparse +import os from certtools import build_merkle_tree, create_sth_signature, check_sth_signature, get_eckey_from_file, timing_point, http_request parser = argparse.ArgumentParser(description="") @@ -36,6 +37,8 @@ logorderfile = args.mergedb + "/logorder" own_key = (args.own_keyname, args.own_keyfile) +hashed_dir = True + def parselogrow(row): return base64.b16decode(row) @@ -44,12 +47,26 @@ def get_logorder(): return [parselogrow(row.rstrip()) for row in f] def write_chain(key, value): - f = open(chainsdir + "/" + base64.b16encode(key), "w") + filename = base64.b16encode(key) + if hashed_dir: + path = chainsdir + "/" + filename[0:2] + "/" + filename[2:4] + "/" + filename[4:6] + try: + os.makedirs(path) + except Exception, e: + print e + else: + path = chainsdir + f = open(path + "/" + filename, "w") f.write(value) f.close() def read_chain(key): - f = open(chainsdir + "/" + base64.b16encode(key), "r") + filename = base64.b16encode(key) + path = chainsdir + "/" + filename[0:2] + "/" + filename[2:4] + "/" + filename[4:6] + try: + f = open(path + "/" + filename, "r") + except IOError, e: + f = open(chainsdir + "/" + filename, "r") value = f.read() f.close() return value -- cgit v1.1 From d1d2185b420d873a97bc78c5e07482accaf574fc Mon Sep 17 00:00:00 2001 From: Linus Nordberg Date: Thu, 19 Mar 2015 17:49:41 +0100 Subject: Add precert handling. --- tools/certtools.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) (limited to 'tools') diff --git a/tools/certtools.py b/tools/certtools.py index 222497f..0e639f2 100644 --- a/tools/certtools.py +++ b/tools/certtools.py @@ -6,6 +6,7 @@ import json import base64 import urllib import urllib2 +import ssl import urlparse import struct import sys @@ -79,7 +80,7 @@ def get_root_cert(issuer): return root_cert def get_sth(baseurl): - result = urllib2.urlopen(baseurl + "ct/v1/get-sth").read() + result = urllib2.urlopen(baseurl + "ct/v1/get-sth", context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() return json.loads(result) def get_proof_by_hash(baseurl, hash, tree_size): @@ -87,7 +88,7 @@ def get_proof_by_hash(baseurl, hash, tree_size): params = urllib.urlencode({"hash":base64.b64encode(hash), "tree_size":tree_size}) result = \ - urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params).read() + urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR:", e.read() @@ -98,7 +99,7 @@ def get_consistency_proof(baseurl, tree_size1, tree_size2): params = urllib.urlencode({"first":tree_size1, "second":tree_size2}) result = \ - urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params).read() + urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" 
+ params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() return json.loads(result)["consistency"] except urllib2.HTTPError, e: print "ERROR:", e.read() @@ -121,8 +122,7 @@ def unpack_tls_array(packed_data, length_len): def add_chain(baseurl, submission): try: - result = urllib2.urlopen(baseurl + "ct/v1/add-chain", - json.dumps(submission)).read() + result = urllib2.urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission), context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR", e.code,":", e.read() @@ -140,7 +140,7 @@ def add_chain(baseurl, submission): def get_entries(baseurl, start, end): try: params = urllib.urlencode({"start":start, "end":end}) - result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params).read() + result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR:", e.read() @@ -197,7 +197,7 @@ def http_request(url, data=None, key=None): signature = sk.sign("%s\0%s\0%s" % (method, parsed_url.path, data), hashfunc=hashlib.sha256, sigencode=ecdsa.util.sigencode_der) req.add_header('X-Catlfish-Auth', base64.b64encode(signature) + ";key=" + keyname) - result = urllib2.urlopen(req).read() + result = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() return result def get_signature(baseurl, data, key=None): -- cgit v1.1 From 1812b745e7a3d39b75d295edfa8b9287d2c6a479 Mon Sep 17 00:00:00 2001 From: Linus Nordberg Date: Sat, 21 Mar 2015 01:27:57 +0100 Subject: Add submission and verification of two precerts to the tests. One of them is signed by an ordinare cert, the other by a precertificate --- tools/testcerts/pre1.txt | 79 ++++++++++++++++++++++++++++++ tools/testcerts/pre2.txt | 106 ++++++++++++++++++++++++++++++++++++++++ tools/testcerts/roots/root4.pem | 19 +++++++ tools/testcerts/roots/root5.pem | 29 +++++++++++ 4 files changed, 233 insertions(+) create mode 100644 tools/testcerts/pre1.txt create mode 100644 tools/testcerts/pre2.txt create mode 100644 tools/testcerts/roots/root4.pem create mode 100644 tools/testcerts/roots/root5.pem (limited to 'tools') diff --git a/tools/testcerts/pre1.txt b/tools/testcerts/pre1.txt new file mode 100644 index 0000000..776c38e --- /dev/null +++ b/tools/testcerts/pre1.txt @@ -0,0 +1,79 @@ +Timestamp: 1383337821156 +Leafhash: A4892155FE9929177BCA785A73C15351A3EE2AF6F163DE40C15802BDE0F41302 +-----BEGIN PRECERTIFICATE----- +MIIGqDCCBZCgAwIBAgIQCxvJV1NZEuon0JIojHqH+DANBgkqhkiG9w0BAQsFADBNMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMScwJQYDVQQDEx5EaWdpQ2VydCBTSEEyIFNlY3Vy +ZSBTZXJ2ZXIgQ0EwHhcNMTMxMTAxMDAwMDAwWhcNMTQxMTA2MTIwMDAwWjBkMQswCQYDVQQGEwJV +UzENMAsGA1UECBMEVXRhaDENMAsGA1UEBxMETGVoaTEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4x +HjAcBgNVBAMTFWVtYmVkLmN0LmRpZ2ljZXJ0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBANExEGl1kTCQJNWXQuTH3m4DWx7xh9Tq+EXHlhorVtgUmHLmBPn7FGC3MH51q0MXN6K7 +huQVXa9LRmCdPRNlNPSkWUqpCVTEqBZrTPuAGEs01+XgXsyhP3uwBxWZkkKJ0FJ4tu7RVHXXgmSC ++JQkSgI4MUNuMaIHvWEpEKsmov9kcQZGUTPnwEg90PyVLlbKypRoFM0dynpslh6FUH4OEAuCx4h1 +tsAN2KHk/ajYE0ND+FN0gBf5qXuY+njUEsDaGiAVKgAb16wOk//0xWy4cTWeHnyLObrsZ3F11GVl +8cK1x0dNGxgeVfH6yTB8BJu/2wqaQSAdzf14Cie5D8YUXf0CAwEAAaOCA2swggNnMB8GA1UdIwQY +MBaAFA+AYRyCMWHVLyjnjUY4tCzhxtniMB0GA1UdDgQWBBT8yxF+UXTw/RIW5igB3ZSRrSSkFzAg +BgNVHREEGTAXghVlbWJlZC5jdC5kaWdpY2VydC5jb20wDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQW +MBQGCCsGAQUFBwMBBggrBgEFBQcDAjBrBgNVHR8EZDBiMC+gLaArhilodHRwOi8vY3JsMy5kaWdp 
+Y2VydC5jb20vc3NjYS1zaGEyLWcxLmNybDAvoC2gK4YpaHR0cDovL2NybDQuZGlnaWNlcnQuY29t +L3NzY2Etc2hhMi1nMS5jcmwwggHEBgNVHSAEggG7MIIBtzCCAbMGCWCGSAGG/WwBATCCAaQwOgYI +KwYBBQUHAgEWLmh0dHA6Ly93d3cuZGlnaWNlcnQuY29tL3NzbC1jcHMtcmVwb3NpdG9yeS5odG0w +ggFkBggrBgEFBQcCAjCCAVYeggFSAEEAbgB5ACAAdQBzAGUAIABvAGYAIAB0AGgAaQBzACAAQwBl +AHIAdABpAGYAaQBjAGEAdABlACAAYwBvAG4AcwB0AGkAdAB1AHQAZQBzACAAYQBjAGMAZQBwAHQA +YQBuAGMAZQAgAG8AZgAgAHQAaABlACAARABpAGcAaQBDAGUAcgB0ACAAQwBQAC8AQwBQAFMAIABh +AG4AZAAgAHQAaABlACAAUgBlAGwAeQBpAG4AZwAgAFAAYQByAHQAeQAgAEEAZwByAGUAZQBtAGUA +bgB0ACAAdwBoAGkAYwBoACAAbABpAG0AaQB0ACAAbABpAGEAYgBpAGwAaQB0AHkAIABhAG4AZAAg +AGEAcgBlACAAaQBuAGMAbwByAHAAbwByAGEAdABlAGQAIABoAGUAcgBlAGkAbgAgAGIAeQAgAHIA +ZQBmAGUAcgBlAG4AYwBlAC4wfAYIKwYBBQUHAQEEcDBuMCQGCCsGAQUFBzABhhhodHRwOi8vb2Nz +cC5kaWdpY2VydC5jb20wRgYIKwYBBQUHMAKGOmh0dHA6Ly9jYWNlcnRzLmRpZ2ljZXJ0LmNvbS9E +aWdpQ2VydFNIQTJTZWN1cmVTZXJ2ZXJDQS5jcnQwDAYDVR0TAQH/BAIwADATBgorBgEEAdZ5AgQD +AQH/BAIFADANBgkqhkiG9w0BAQsFAAOCAQEAbHgFxzrmkXjRdQdlHj4Ey2U8rTOetMqjddrXR1DZ +9E12vp8yWB+LkSVASutpgzxNawj/rv1w1ODdJWMTra12R1MnxqoVytSEmbE0gjgxahdWWiV8yTFB +4tMFRHvCCwmIJqhRwjufnRs1q1+9YMxZ6reCG4kg29qgtQhh8V9vCrGfQja/4cBHa6O7w407FPra +b2NIqtJB/47fOdACkVdFjbOVSWielDtTv7QNPi3OUfNwNE/Qqh1k5MOBDP1gif1AFzl5Z7plUos5 +3533VCBjrcOWp8WXUtNlIedlxjarUaTKSRpZVdRzY9ugvou9JLVF1SuDIAXQ3+tN44bjAjERug== +-----END PRECERTIFICATE----- + +-----BEGIN CERTIFICATE----- +MIIElDCCA3ygAwIBAgIQAf2j627KdciIQ4tyS8+8kTANBgkqhkiG9w0BAQsFADBhMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAw +HgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBDQTAeFw0xMzAzMDgxMjAwMDBaFw0yMzAzMDgx +MjAwMDBaME0xCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxJzAlBgNVBAMTHkRp +Z2lDZXJ0IFNIQTIgU2VjdXJlIFNlcnZlciBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBANyuWJBNwcQwFZA1W248ghX1LFy949v/cUP6ZCWA1O4Yok3wZtAKc24RmDYXZK83nf36QYSv +x6+M/hpzTc8zl5CilodTgyu5pnVILR1WN3vaMTIa16yrBvSqXUu3R0bdKpPDkC55gIDvEwRqFDu1 +m5K+wgdlTvza/P96rtxcflUxDOg5B6TXvi/TC2rSsd9f/ld0Uzs1gN2ujkSYs58O09rg1/RrKatE +p0tYhG2SS4HD2nOLEpdIkARFdRrdNzGXkujNVA075ME/OV4uuPNcfhCOhkEAjUVmR7ChZc6gqikJ +TvOX6+guqw9ypzAO+sf0/RR3w6RbKFfCs/mC/bdFWJsCAwEAAaOCAVowggFWMBIGA1UdEwEB/wQI +MAYBAf8CAQAwDgYDVR0PAQH/BAQDAgGGMDQGCCsGAQUFBwEBBCgwJjAkBggrBgEFBQcwAYYYaHR0 +cDovL29jc3AuZGlnaWNlcnQuY29tMHsGA1UdHwR0MHIwN6A1oDOGMWh0dHA6Ly9jcmwzLmRpZ2lj +ZXJ0LmNvbS9EaWdpQ2VydEdsb2JhbFJvb3RDQS5jcmwwN6A1oDOGMWh0dHA6Ly9jcmw0LmRpZ2lj +ZXJ0LmNvbS9EaWdpQ2VydEdsb2JhbFJvb3RDQS5jcmwwPQYDVR0gBDYwNDAyBgRVHSAAMCowKAYI +KwYBBQUHAgEWHGh0dHBzOi8vd3d3LmRpZ2ljZXJ0LmNvbS9DUFMwHQYDVR0OBBYEFA+AYRyCMWHV +LyjnjUY4tCzhxtniMB8GA1UdIwQYMBaAFAPeUDVW0Uy7ZvCj4hsbw5eyPdFVMA0GCSqGSIb3DQEB +CwUAA4IBAQAjPt9L0jFCpbZ+QlwaRMxp0Wi0XUvgBCFsS+JtzLHgl4+mUwnNqipl5TlPHoOlblyY +oiQm5vuh7ZPHLgLGTUq/sELfeNqzqPlt/yGFUzZgTHbO7Djc1lGA8MXW5dRNJ2Srm8c+cftIl7gz +bckTB+6WohsYFfZcTEDts8Ls/3HB40f/1LkAtDdC2iDJ6m6K7hQGrn2iWZiIqBtvLfTyyRRfJs8s +jX7tN8Cp1Tm5gr8ZDOo0rwAhaPitc+LJMto4JQtV05od8GiG7S5BNO98pVAdvzr508EIDObtHopY +JeS4d60tbvVS3bR0j6tJLp07kzQoH3jOlOrHvdPJbRzeXDLz +-----END CERTIFICATE----- + +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBhMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAw +HgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBDQTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAw +MDAwMDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3 +dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkq +hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsBCSDMAZOn 
+TjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97nh6Vfe63SKMI2tavegw5 +BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt43C/dxC//AH2hdmoRBBYMql1GNXRor5H +4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7PT19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y +7vrTC0LUq7dBMtoM1O/4gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQAB +o2MwYTAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbRTLtm +8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUwDQYJKoZIhvcNAQEF +BQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/EsrhMAtudXH/vTBH1jLuG2cenTnmCmr +EbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIt +tep3Sp+dWOIrWcBAI+0tKIJFPnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886 +UAb3LujEV0lsYSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + diff --git a/tools/testcerts/pre2.txt b/tools/testcerts/pre2.txt new file mode 100644 index 0000000..4c86537 --- /dev/null +++ b/tools/testcerts/pre2.txt @@ -0,0 +1,106 @@ +Timestamp: 1399629239033 +Leafhash: 758B8612DFED6A3321215C0586C0AC9F43137CD2BBF043C86301D66DC7D1205A +-----BEGIN PRECERTIFICATE----- +MIIFFzCCBAGgAwIBAgIgd+115NyVfYOnRINB2wJy2eaQRbJ6j8Zau5IdwBNpmzowCwYJKoZIhvcN +AQELMGYxLDAqBgNVBAMMI1ByZS1jZXJ0aWZpY2F0ZSBTaWduaW5nIENlcnRpZmljYXRlMRAwDgYD +VQQLDAdDQSBUZWFtMRcwFQYDVQQKDA5UQUlXQU4tQ0EgSU5DLjELMAkGA1UEBhMCVFcwHhcNMTQw +NTA5MDk1MzU3WhcNMTQwNTE2MTU1OTU5WjB0MR0wGwYDVQQDDBRjdHRlc3QwNS50d2NhLmNvbS50 +dzELMAkGA1UECwwCUkQxFzAVBgNVBAoMDlRBSVdBTi1DQSBJTkMuMQ8wDQYDVQQHDAZUYWlwZWkx +DzANBgNVBAgMBlRhaXdhbjELMAkGA1UEBhMCVFcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQDSgb3MYpsqjkNqcOJHIlEmy8ldCzXtmJfoLfvW1g8JyaGgKR6B98ceg1whThF1tPy8aqJv +fEXGivb+2El1BmxTNvSZ+bOkOT0UsD2hiIgUppD6b/ICWQrIvwrBTNnfJtrwvGD/rygpVTZQoekX +IVdapI95Cfn+36YXqjX7ixgItEx3t/nzOqBxJNI0p52m9l1sowi2/hGmvc/xqC0Cti4m177c8gP0 +u4oKQRJVF2690F748KfzIMcbS7KbDDDVhtWqwgKaRLvqD+gJAUZ1QYEyzDr5Xhhi1O0FXfhyeeCj +mRUJBENmhqElt9C1HugaBsno37JP1AQdsuVg776qQQ1PAgMBAAGjggGlMIIBoTArBgNVHSMEJDAi +gCCVnLtVYCn+QZohG69CSwl1Y2OhEQ7LbPhnh353anz2ezApBgNVHQ4EIgQgt6NL2avrK2PUt8X1 +oG0rd0Wd2ZVDVuJru2T6Z4/eJUEwPwYDVR0fBDgwNjA0oDKgMIYuaHR0cDovL2N0dGVzdC50d2Nh +LmNvbS50dy9zc2xzZXJ2ZXIvY3R0ZXN0LmNybDAMBgNVHRMBAf8EAjAAMA4GA1UdDwEB/wQEAwID +qDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwJQYDVR0RBB4wHIIUY3R0ZXN0MDUudHdj +YS5jb20udHeHBMCoAckwOgYIKwYBBQUHAQEELjAsMCoGCCsGAQUFBzABhh5odHRwOi8vY3R0ZXN0 +LnR3Y2EuY29tLnR3L29jc3AwUQYDVR0gBEowSDBGBgdggR4DAQgFMDswIgYIKwYBBQUHAgEWFmh0 +dHA6Ly93d3cudHdjYS5jb20udHcwFQYIKwYBBQUHAgIwCRoHMC4xLjEuMzATBgorBgEEAdZ5AgQD +AQH/BAIFADALBgkqhkiG9w0BAQsDggEBAIkN6er89ss6KAZOH/ZpTPbXhO/J8NNq7vJBxhD4z56R +aRTJpr7Fla9zr8K3aD7bbBUpVeMqER3YA7eeOR8ITBqzMN9SpjdpDlBLcI/6S+7iUVRw4+UvEVqL +0xlCfdxftGLX+T77y7/qqLxyH+QVuSS4sKzTCfspqAaxteK32A5nWKAiJFUI/ise67o3T9f015hR +7rHj+U2AomLQwnyiMg4u3D2mYzK9q7VDGJfKIW6wrFYS/lQsFKyb4sUTyGG9VuzgSDIjCXJag7fs +MZ+/shgsVOTzHUVeHGuKsPcpps0Yvu2W3DybsVoBwtS/vePPnfNfCrDqM9vZCTurvG4KaS4= +-----END PRECERTIFICATE----- + +-----BEGIN CERTIFICATE----- +MIIEUTCCAzugAwIBAgIEATNR3TALBgkqhkiG9w0BAQswVDELMAkGA1UEBhMCVFcxFzAVBgNVBAoT +DlRBSVdBTi1DQSBJTkMuMRAwDgYDVQQLEwdDQSBUZWFtMRowGAYDVQQDExFSRCBUV0NBIENUVEVT +VCBDQTAeFw0xNDA1MDkwOTQzMjZaFw0xNTA1MDkxNTU5NTlaMGYxLDAqBgNVBAMMI1ByZS1jZXJ0 +aWZpY2F0ZSBTaWduaW5nIENlcnRpZmljYXRlMRAwDgYDVQQLDAdDQSBUZWFtMRcwFQYDVQQKDA5U +QUlXQU4tQ0EgSU5DLjELMAkGA1UEBhMCVFcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCtFIow0xs7VQ42AEck0o+D8pDDOvIclTPJG7j5+wc7lz1wOwbqP8w06Qa/18tg3sdk16dYFg9k 
+pIeOU7suaWgeHifBjjj9iXTELH4U0RP3HwxlM23WArt9a5OKM5KJlA2T9obppnfsN9fm6ZGX4TTY +JqV8x2vgXSkHhVwxl8wnZoywHlHlgThvVVi+/DzZUD8FIXz2/dPeMtSTfHQ6LqIhee9YMIVgqg/f +tPb5lOhrJEmAl56mJWi1haVYmxZDSa4+1XCJkOxEzQDPpAvIrXVgAQzr6A5jIHZ7VucTEQ5U/9lx +Gckzv6CFDRxYyjSpBZsxML/d4A1P9nKdWcABqO9PAgMBAAGjggEbMIIBFzArBgNVHSMEJDAigCCE +xPSrbrwoBcYxPScQhJ7WOGJB5N3Efkav81dvue7NsjApBgNVHQ4EIgQglZy7VWAp/kGaIRuvQksJ +dWNjoREOy2z4Z4d+d2p89nswPwYDVR0fBDgwNjA0oDKgMIYuaHR0cDovL2N0dGVzdC50d2NhLmNv +bS50dy9zc2xzZXJ2ZXIvY3R0ZXN0LmNybDASBgNVHRMBAf8ECDAGAQH/AgEAMBUGA1UdJQQOMAwG +CisGAQQB1nkCBAQwUQYDVR0gBEowSDBGBgdggR4DAQgFMDswIgYIKwYBBQUHAgEWFmh0dHA6Ly93 +d3cudHdjYS5jb20udHcwFQYIKwYBBQUHAgIwCRoHMC4xLjEuMzALBgkqhkiG9w0BAQsDggEBAN8v +hr/zNUNSSikqAtRVZVgcJTuN3yTlaXX4hMJFAWrbBqJuN++cE6A2BBTkaLpEZajVqPKL5AxL5KWM +dMFNkpF3i0UKbf4vnpfrQprsamDX5tKqPCAOKa8yL82CBkimOCmLx24WN+VtNitYzh/MqspApNM7 +7wCO8ncvFtS4sC1Gj5M9CjVhxKmMe15O4SZr9aZpGP7raT4CE3X95APKX5yyiAVwPcOPdPkfRRLQ +gHko60NbxaeayH5sfWa2dNPEjbOkz0SKaXurV9pzrj/2FZNhgsnRsGIJhx2BLm7FoeUC45RarDJD +YrscJ6DBR83YwJXsaFCyB7l5CP7L13Wr98E= +-----END CERTIFICATE----- + +-----BEGIN CERTIFICATE----- +MIIEvjCCAqagAwIBAgIQQAEzUd0AAAAAAAAAFzPdqzANBgkqhkiG9w0BAQUFADBiMQswCQYDVQQG +EwJUVzEbMBkGA1UEChMSVFdDQSBSRCBEZXBhcnRtZW50MRAwDgYDVQQLEwdDQSBUZWFtMSQwIgYD +VQQDExtSRCBUV0NBIFJvb3QgQ0EgNDA5NiBTaGEyNTYwHhcNMTQwNTA5MDMyMDUyWhcNMTUwNTA5 +MTU1OTU5WjBUMQswCQYDVQQGEwJUVzEXMBUGA1UEChMOVEFJV0FOLUNBIElOQy4xEDAOBgNVBAsT +B0NBIFRlYW0xGjAYBgNVBAMTEVJEIFRXQ0EgQ1RURVNUIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA6xAMprFXHtOkmXIgL63aTx4S20IzdrcdDTMQvwnyYLBhJf8lWz5qeOY37SaC +4VXILP54qVdML+YBa2BAQfgu6kS+/f73Bp3pSHx8LIHQCt5jdgkNS3OVYK8nQdbWXWeA64bCFdE/ +tlelHSTHtIKXE+v7ug+P5Q/RRBf0Dzi/M1fXTXqXeAga3LaPGPT7o6lZZJh7hp25aJxChIa/1X8x +99sPx/BqO/WHyYKBCU9Ym05yQFel8mpCgzSbqscKTbKPkvm0ejDANX/WCEziJ3IzR5G9kPoL/zYZ +ofIqYJMIYRsQRlD/n1ILnMxwdhN3EFlZ0e5xkyIm9TaCqeCZsdFJWQIDAQABo34wfDArBgNVHSME +JDAigCCwvM16BvA51cl2uO30/ohdOMPVrVBVG5BZ4teNnteYnTApBgNVHQ4EIgQghMT0q268KAXG +MT0nEISe1jhiQeTdxH5Gr/NXb7nuzbIwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8C +AQAwDQYJKoZIhvcNAQEFBQADggIBABDkaI3GMCKBfJSfnpmxmiU1ht3cfq/9/hpJSzE6k+of5esV +D3bYW9nnKScCcBy7poeOoc3C7p9fQtsLZbNfhYpG4/Aq0aVYGtZxw/FCWnXi9rUXpSLZh1yW1uV9 +KBj2D8yzGIx99mpHifjjeoCWG0TW/aaHeIolJm2DhkPTLWjd/urN1TP8YqhEiKMHZI3SFWeeH/BV +WJKE5rX8vtLW1FPnlRPE+Z/FAa52qOyN4ie0A9IhNPs3gtc+bnhdcZaHnxkySqUvWxqQxkzAGaiO +VnPlnSlnMCn5I2KOT0XVWYOyU9PP1//V/baDftv7VpT5AOtIaz8mQ6Lp4AIcoPFeU8cgJNZhXgmp +NOv/dW8lWXH6RYxdM7NFmv98Wk3rKLCzOzR6kuXnARKOJghZf4FV+6Fvjgjf/8wLnzhSdCEbyL7A +znkOSKc9wzUcZCxF8aTWtRT8HYIu8diZo0CzPxN8OyDl5mPsYexhguPHOXyLv/EljZ8yCdy/SsgQ +JPzuqKu2a3RD4des15EzbnJOxn4DSeqoUfSfaU/KVfmUKpBEJ3ouD2SLAZ7L+4F6NPCte3HEE2kN +tOmQIwe65htXmLJxDB+dwMjE4dkA2sETaN2dQ9DqpCNkpNxuNdis/uacAAXnDNddPIlR2moCtUx8 ++Y7wlcqBHdmmg5rbFBuBN+esL8J8 +-----END CERTIFICATE----- + +-----BEGIN CERTIFICATE----- +MIIFyTCCA7GgAwIBAgIQQAEzK0EAAAAAAAAAFSWxNjANBgkqhkiG9w0BAQsFADBiMQswCQYDVQQG +EwJUVzEbMBkGA1UEChMSVFdDQSBSRCBEZXBhcnRtZW50MRAwDgYDVQQLEwdDQSBUZWFtMSQwIgYD +VQQDExtSRCBUV0NBIFJvb3QgQ0EgNDA5NiBTaGEyNTYwHhcNMTMwNjI1MDMwNzIyWhcNMzMwNjI1 +MDMwNzI2WjBiMQswCQYDVQQGEwJUVzEbMBkGA1UEChMSVFdDQSBSRCBEZXBhcnRtZW50MRAwDgYD +VQQLEwdDQSBUZWFtMSQwIgYDVQQDExtSRCBUV0NBIFJvb3QgQ0EgNDA5NiBTaGEyNTYwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2Saqg7eNGISFsG+pQfB/e+VTbpg+KmAAydUrJyCPj +74Gl/MKNeMW6AqUUSiKQq+HTnrHI+I2e85cgAxbSbhXp6utJuOjfsZE5lr7KDkfok9hdMA7YvKuk +y5dLK9Qcvhj4olt3DU0GKdWgKKtMWg4WOx+Wgu50C/TGyeiMx754O09a0YXlDLji84aQbxUWCP+X +hq+LXyGqilcTe+wSVjUHWfJJz8ZeVNCz/WXBn2Sljf614T1AkeU9pTnEkJRd/S+eVNVE8gLiAJSF 
+/ffHTHGRZoPCTDS26hzSpBAC+va0T4IWvgGJtPNInReXGPeydxHJbsJjwyPQ9n5iclUZmAeKcG7a +Wow/xrU36euBDIp877djj5lbtb0Rq35slDAGLVy/ouLkcrurPZdJGkhcpACMi4sKK98cx/XnzP9o +wV+bDYyYlXSl3tv88CidywHI6VPN6Aio4ipsAOmol1AxbkJ+W9INiQzbdmYXD2v3c0Kvcq4/bZMw +wofoGWGBALF3VYd6aYUnaCHD9gYTPrMHVsMrYDbvlIDkORVL950xvi1SfbRRo36LtYLjupFiJOlP +xS0DxWN6tVarS+1SyHsdEJYKw+b2ty5Sko5JkCedgSXHPhkL2ap3OfHtegSDpIgWL7ydpaoTyD3y +Fev6doCPC6cnHitwBCDpoEqNIm+JK2JZYQIDAQABo3sweTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zArBgNVHSMEJDAigCCwvM16BvA51cl2uO30/ohdOMPVrVBVG5BZ4teNnteYnTAp +BgNVHQ4EIgQgsLzNegbwOdXJdrjt9P6IXTjD1a1QVRuQWeLXjZ7XmJ0wDQYJKoZIhvcNAQELBQAD +ggIBAGSVKdVIynZnTpFaK3F2jjaC2oaJ1L8CA6e5OjWi6WUshKG4kJzLASD/U8XQXA8rVSuWShmz +B7ccm4fy1SmnSvsi4JA5mSDqcIOmwZmIYBAd/+8QK/sijzyO2MNPpqSupuhWxAakHSG8/3Leij2Q +P2GEXejDq3ewtS/gT1ZVI/ZSlIYxChsKZ3UEkl4XhUhL8fir/5Z+g6WdTFvXUB3wc/JA/MZ+h5Nu +BsrnrTlxet0vu3UlyOELsF5pMe1WGayR2A56LRL3UKhjFrUJSCTYMBiqAMS3Fsvk+RXttPYtcpiB +uheX8M/X8g2WTLOklS9/QYy1VmIWZcrfExHrMxQ8FCrxMfQn8fNlkMADmcRbQYeVHHZGx7MQEjBw +py45jzcPudJTx8Ccz6r0YSxoumC9reS0hASQ/NdXh6vcWfT8qsqYohL/k9J0PbfgJuIExAStIs+Y +nn4N7HgNftijy+l0sS//rMhVcofUaJzhJcbUe4TX/SL8ZHFkSkhUSPdDd1DR+r1IWKDKd/2FxMn3 ++oKBVsjPdL0HBwwHFQja8TBb5E3vYo4XKKEOGIuFa7NcSq0pF7pK85K0XIypAwgJCXffWP9SynDo +eK+ZbSOZNOCvH67ZRUQnWo1nZds+6OplhSpWkYDYN834wXEU4zbHRvtymCbIeMZzAXzdsJM2i3zy +7bTu +-----END CERTIFICATE----- + diff --git a/tools/testcerts/roots/root4.pem b/tools/testcerts/roots/root4.pem new file mode 100644 index 0000000..3fdb770 --- /dev/null +++ b/tools/testcerts/roots/root4.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBhMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAw +HgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBDQTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAw +MDAwMDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3 +dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkq +hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsBCSDMAZOn +TjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97nh6Vfe63SKMI2tavegw5 +BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt43C/dxC//AH2hdmoRBBYMql1GNXRor5H +4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7PT19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y +7vrTC0LUq7dBMtoM1O/4gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQAB +o2MwYTAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbRTLtm +8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUwDQYJKoZIhvcNAQEF +BQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/EsrhMAtudXH/vTBH1jLuG2cenTnmCmr +EbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIt +tep3Sp+dWOIrWcBAI+0tKIJFPnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886 +UAb3LujEV0lsYSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- diff --git a/tools/testcerts/roots/root5.pem b/tools/testcerts/roots/root5.pem new file mode 100644 index 0000000..096fd18 --- /dev/null +++ b/tools/testcerts/roots/root5.pem @@ -0,0 +1,29 @@ +-----BEGIN CERTIFICATE----- +MIIFyTCCA7GgAwIBAgIQQAEzK0EAAAAAAAAAFSWxNjANBgkqhkiG9w0BAQsFADBiMQswCQYDVQQG +EwJUVzEbMBkGA1UEChMSVFdDQSBSRCBEZXBhcnRtZW50MRAwDgYDVQQLEwdDQSBUZWFtMSQwIgYD +VQQDExtSRCBUV0NBIFJvb3QgQ0EgNDA5NiBTaGEyNTYwHhcNMTMwNjI1MDMwNzIyWhcNMzMwNjI1 +MDMwNzI2WjBiMQswCQYDVQQGEwJUVzEbMBkGA1UEChMSVFdDQSBSRCBEZXBhcnRtZW50MRAwDgYD +VQQLEwdDQSBUZWFtMSQwIgYDVQQDExtSRCBUV0NBIFJvb3QgQ0EgNDA5NiBTaGEyNTYwggIiMA0G 
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2Saqg7eNGISFsG+pQfB/e+VTbpg+KmAAydUrJyCPj +74Gl/MKNeMW6AqUUSiKQq+HTnrHI+I2e85cgAxbSbhXp6utJuOjfsZE5lr7KDkfok9hdMA7YvKuk +y5dLK9Qcvhj4olt3DU0GKdWgKKtMWg4WOx+Wgu50C/TGyeiMx754O09a0YXlDLji84aQbxUWCP+X +hq+LXyGqilcTe+wSVjUHWfJJz8ZeVNCz/WXBn2Sljf614T1AkeU9pTnEkJRd/S+eVNVE8gLiAJSF +/ffHTHGRZoPCTDS26hzSpBAC+va0T4IWvgGJtPNInReXGPeydxHJbsJjwyPQ9n5iclUZmAeKcG7a +Wow/xrU36euBDIp877djj5lbtb0Rq35slDAGLVy/ouLkcrurPZdJGkhcpACMi4sKK98cx/XnzP9o +wV+bDYyYlXSl3tv88CidywHI6VPN6Aio4ipsAOmol1AxbkJ+W9INiQzbdmYXD2v3c0Kvcq4/bZMw +wofoGWGBALF3VYd6aYUnaCHD9gYTPrMHVsMrYDbvlIDkORVL950xvi1SfbRRo36LtYLjupFiJOlP +xS0DxWN6tVarS+1SyHsdEJYKw+b2ty5Sko5JkCedgSXHPhkL2ap3OfHtegSDpIgWL7ydpaoTyD3y +Fev6doCPC6cnHitwBCDpoEqNIm+JK2JZYQIDAQABo3sweTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zArBgNVHSMEJDAigCCwvM16BvA51cl2uO30/ohdOMPVrVBVG5BZ4teNnteYnTAp +BgNVHQ4EIgQgsLzNegbwOdXJdrjt9P6IXTjD1a1QVRuQWeLXjZ7XmJ0wDQYJKoZIhvcNAQELBQAD +ggIBAGSVKdVIynZnTpFaK3F2jjaC2oaJ1L8CA6e5OjWi6WUshKG4kJzLASD/U8XQXA8rVSuWShmz +B7ccm4fy1SmnSvsi4JA5mSDqcIOmwZmIYBAd/+8QK/sijzyO2MNPpqSupuhWxAakHSG8/3Leij2Q +P2GEXejDq3ewtS/gT1ZVI/ZSlIYxChsKZ3UEkl4XhUhL8fir/5Z+g6WdTFvXUB3wc/JA/MZ+h5Nu +BsrnrTlxet0vu3UlyOELsF5pMe1WGayR2A56LRL3UKhjFrUJSCTYMBiqAMS3Fsvk+RXttPYtcpiB +uheX8M/X8g2WTLOklS9/QYy1VmIWZcrfExHrMxQ8FCrxMfQn8fNlkMADmcRbQYeVHHZGx7MQEjBw +py45jzcPudJTx8Ccz6r0YSxoumC9reS0hASQ/NdXh6vcWfT8qsqYohL/k9J0PbfgJuIExAStIs+Y +nn4N7HgNftijy+l0sS//rMhVcofUaJzhJcbUe4TX/SL8ZHFkSkhUSPdDd1DR+r1IWKDKd/2FxMn3 ++oKBVsjPdL0HBwwHFQja8TBb5E3vYo4XKKEOGIuFa7NcSq0pF7pK85K0XIypAwgJCXffWP9SynDo +eK+ZbSOZNOCvH67ZRUQnWo1nZds+6OplhSpWkYDYN834wXEU4zbHRvtymCbIeMZzAXzdsJM2i3zy +7bTu +-----END CERTIFICATE----- -- cgit v1.1 From 1075b68f287e957cd73c8cdb9517293b4c920eec Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Mon, 16 Mar 2015 10:06:42 +0100 Subject: Add submission of precerts --- tools/certtools.py | 23 +++++++++++++++++++++++ tools/submitcert.py | 8 +++++++- 2 files changed, 30 insertions(+), 1 deletion(-) (limited to 'tools') diff --git a/tools/certtools.py b/tools/certtools.py index 0e639f2..b0a1c97 100644 --- a/tools/certtools.py +++ b/tools/certtools.py @@ -61,6 +61,10 @@ def get_certs_from_string(s): f = cStringIO.StringIO(s) return get_pemlike_from_file(f, "CERTIFICATE") +def get_precerts_from_string(s): + f = cStringIO.StringIO(s) + return get_pemlike_from_file(f, "PRECERTIFICATE") + def get_eckey_from_file(keyfile): keys = get_pemlike(keyfile, "EC PRIVATE KEY") assert len(keys) == 1 @@ -137,6 +141,24 @@ def add_chain(baseurl, submission): print "========================" raise e +def add_prechain(baseurl, submission): + try: + result = urllib2.urlopen(baseurl + "ct/v1/add-pre-chain", + json.dumps(submission)).read() + return json.loads(result) + except urllib2.HTTPError, e: + print "ERROR", e.code,":", e.read() + if e.code == 400: + return None + sys.exit(1) + except ValueError, e: + print "==== FAILED REQUEST ====" + print submission + print "======= RESPONSE =======" + print result + print "========================" + raise e + def get_entries(baseurl, start, end): try: params = urllib.urlencode({"start":start, "end":end}) @@ -586,5 +608,6 @@ def verify_consistency_proof(consistency_proof, first, second, oldhash_input): def verify_inclusion_proof(inclusion_proof, index, treesize, leafhash): chain = zip([(index, 0)] + nodes_for_index(index, treesize), [leafhash] + inclusion_proof) + assert len(nodes_for_index(index, treesize)) == len(inclusion_proof) (_, hash) = reduce(lambda e1, e2: combine_two_hashes(e1, e2, treesize), chain) return 
hash diff --git a/tools/submitcert.py b/tools/submitcert.py index 9f0be67..1c79544 100755 --- a/tools/submitcert.py +++ b/tools/submitcert.py @@ -44,10 +44,16 @@ sth = get_sth(baseurl) def submitcert((certfile, cert)): timing = timing_point() certchain = get_certs_from_string(cert) + precerts = get_precerts_from_string(cert) + assert len(precerts) == 0 or len(precerts) == 1 + precert = precerts[0] if precerts else None timing_point(timing, "readcerts") try: - result = add_chain(baseurl, {"chain":map(base64.b64encode, certchain)}) + if precert: + result = add_prechain(baseurl, {"chain":map(base64.b64encode, [precert] + certchain)}) + else: + result = add_chain(baseurl, {"chain":map(base64.b64encode, certchain)}) except SystemExit: print "EXIT:", certfile select.select([], [], [], 1.0) -- cgit v1.1 From 15d5d6fd5cffdea185d18fbd4feb62afa23b9d12 Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Wed, 18 Mar 2015 01:03:18 +0100 Subject: Added validatestore.py --- tools/certtools.py | 20 ++ tools/precerttools.py | 102 ++++++ tools/rfc2459.py | 927 +++++++++++++++++++++++++++++++++++++++++++++++++ tools/validatestore.py | 96 +++++ 4 files changed, 1145 insertions(+) create mode 100644 tools/precerttools.py create mode 100644 tools/rfc2459.py create mode 100755 tools/validatestore.py (limited to 'tools') diff --git a/tools/certtools.py b/tools/certtools.py index b0a1c97..1436863 100644 --- a/tools/certtools.py +++ b/tools/certtools.py @@ -289,6 +289,18 @@ def pack_mtl(timestamp, leafcert): merkle_tree_leaf = version + leaf_type + timestamped_entry return merkle_tree_leaf +def pack_mtl_precert(timestamp, cleanedcert, issuer_key_hash): + entry_type = struct.pack(">H", 1) + extensions = "" + assert len(issuer_key_hash) == 32 + + timestamped_entry = struct.pack(">Q", timestamp) + entry_type + \ + issuer_key_hash + tls_array(cleanedcert, 3) + tls_array(extensions, 2) + version = struct.pack(">b", 0) + leaf_type = struct.pack(">b", 0) + merkle_tree_leaf = version + leaf_type + timestamped_entry + return merkle_tree_leaf + def unpack_mtl(merkle_tree_leaf): version = merkle_tree_leaf[0:1] leaf_type = merkle_tree_leaf[1:2] @@ -375,6 +387,14 @@ def get_hash_from_certfile(cert): return base64.b16decode(line[len("Leafhash: "):]) return None +def get_timestamp_from_certfile(cert): + for line in cert.split("\n"): + if line.startswith("-----"): + return None + if line.startswith("Timestamp: "): + return int(line[len("Timestamp: "):]) + return None + def get_proof(store, tree_size, n): hash = get_hash_from_certfile(get_one_cert(store, n)) return get_proof_by_hash(args.baseurl, hash, tree_size) diff --git a/tools/precerttools.py b/tools/precerttools.py new file mode 100644 index 0000000..13ac572 --- /dev/null +++ b/tools/precerttools.py @@ -0,0 +1,102 @@ +# Copyright (c) 2014, NORDUnet A/S. +# See LICENSE for licensing information. 
+ +import sys +import hashlib +import rfc2459 +from pyasn1.type import univ, tag +from pyasn1.codec.der import encoder, decoder + +def cleanextensions(extensions): + result = rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)) + for idx in range(len(extensions)): + extension = extensions.getComponentByPosition(idx) + if extension.getComponentByName("extnID") == univ.ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3"): + pass + else: + result.setComponentByPosition(len(result), extension) + return result + +def decode_any(anydata, asn1Spec=None): + (wrapper, _) = decoder.decode(anydata) + (data, _) = decoder.decode(wrapper, asn1Spec=asn1Spec) + return data + +def get_subject(cert): + (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.Certificate()) + assert rest == '' + tbsCertificate = asn1.getComponentByName("tbsCertificate") + subject = tbsCertificate.getComponentByName("subject") + extensions = tbsCertificate.getComponentByName("extensions") + keyid_wrapper = get_extension(extensions, rfc2459.id_ce_subjectKeyIdentifier) + keyid = decode_any(keyid_wrapper, asn1Spec=rfc2459.KeyIdentifier()) + return (subject, keyid) + +def cleanprecert(precert, issuer=None): + (asn1,rest) = decoder.decode(precert, asn1Spec=rfc2459.Certificate()) + assert rest == '' + tbsCertificate = asn1.getComponentByName("tbsCertificate") + + extensions = tbsCertificate.getComponentByName("extensions") + tbsCertificate.setComponentByName("extensions", cleanextensions(extensions)) + + if issuer: + (issuer_subject, keyid) = get_subject(issuer) + tbsCertificate.setComponentByName("issuer", issuer_subject) + authkeyid = rfc2459.AuthorityKeyIdentifier() + authkeyid.setComponentByName("keyIdentifier", + rfc2459.KeyIdentifier(str(keyid)).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + authkeyid_wrapper = univ.OctetString(encoder.encode(authkeyid)) + authkeyid_wrapper2 = encoder.encode(authkeyid_wrapper) + set_extension(extensions, rfc2459.id_ce_authorityKeyIdentifier, authkeyid_wrapper2) + return encoder.encode(tbsCertificate) + +def get_extension(extensions, id): + for idx in range(len(extensions)): + extension = extensions.getComponentByPosition(idx) + if extension.getComponentByName("extnID") == id: + return extension.getComponentByName("extnValue") + return None + +def set_extension(extensions, id, value): + result = rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3)) + for idx in range(len(extensions)): + extension = extensions.getComponentByPosition(idx) + if extension.getComponentByName("extnID") == id: + extension.setComponentByName("extnValue", value) + +def get_cert_key_hash(cert): + (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.Certificate()) + assert rest == '' + tbsCertificate = asn1.getComponentByName("tbsCertificate") + key = encoder.encode(tbsCertificate.getComponentByName("subjectPublicKeyInfo")) + hash = hashlib.sha256() + hash.update(key) + return hash.digest() + +def printcert(cert, outfile=sys.stdout): + (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.Certificate()) + assert rest == '' + print >>outfile, asn1.prettyPrint() + +def printtbscert(cert, outfile=sys.stdout): + (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.TBSCertificate()) + assert rest == '' + print >>outfile, asn1.prettyPrint() + +ext_key_usage_precert_signing_cert = univ.ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4") + +def get_ext_key_usage(cert): + (asn1,rest) = decoder.decode(cert, asn1Spec=rfc2459.Certificate()) + 
assert rest == '' + tbsCertificate = asn1.getComponentByName("tbsCertificate") + extensions = tbsCertificate.getComponentByName("extensions") + for idx in range(len(extensions)): + extension = extensions.getComponentByPosition(idx) + if extension.getComponentByName("extnID") == rfc2459.id_ce_extKeyUsage: + ext_key_usage_wrapper_binary = extension.getComponentByName("extnValue") + (ext_key_usage_wrapper, _) = decoder.decode(ext_key_usage_wrapper_binary) + (ext_key_usage, _) = decoder.decode(ext_key_usage_wrapper)#, asn1Spec=rfc2459.ExtKeyUsageSyntax()) + return list(ext_key_usage) + return [] + diff --git a/tools/rfc2459.py b/tools/rfc2459.py new file mode 100644 index 0000000..0ce9c6d --- /dev/null +++ b/tools/rfc2459.py @@ -0,0 +1,927 @@ +# Copyright (c) 2005-2013, Ilya Etingof +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# X.509 message syntax +# +# ASN.1 source from: +# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/x509.asn +# http://www.ietf.org/rfc/rfc2459.txt +# +# Sample captures from: +# http://wiki.wireshark.org/SampleCaptures/ +# +from pyasn1.type import tag,namedtype,namedval,univ,constraint,char,useful + +MAX = 64 # XXX ? 
+ +# +# PKIX1Explicit88 +# + +# Upper Bounds +ub_name = univ.Integer(32768) +ub_common_name = univ.Integer(64) +ub_locality_name = univ.Integer(128) +ub_state_name = univ.Integer(128) +ub_organization_name = univ.Integer(64) +ub_organizational_unit_name = univ.Integer(64) +ub_title = univ.Integer(64) +ub_match = univ.Integer(128) +ub_emailaddress_length = univ.Integer(128) +ub_common_name_length = univ.Integer(64) +ub_country_name_alpha_length = univ.Integer(2) +ub_country_name_numeric_length = univ.Integer(3) +ub_domain_defined_attributes = univ.Integer(4) +ub_domain_defined_attribute_type_length = univ.Integer(8) +ub_domain_defined_attribute_value_length = univ.Integer(128) +ub_domain_name_length = univ.Integer(16) +ub_extension_attributes = univ.Integer(256) +ub_e163_4_number_length = univ.Integer(15) +ub_e163_4_sub_address_length = univ.Integer(40) +ub_generation_qualifier_length = univ.Integer(3) +ub_given_name_length = univ.Integer(16) +ub_initials_length = univ.Integer(5) +ub_integer_options = univ.Integer(256) +ub_numeric_user_id_length = univ.Integer(32) +ub_organization_name_length = univ.Integer(64) +ub_organizational_unit_name_length = univ.Integer(32) +ub_organizational_units = univ.Integer(4) +ub_pds_name_length = univ.Integer(16) +ub_pds_parameter_length = univ.Integer(30) +ub_pds_physical_address_lines = univ.Integer(6) +ub_postal_code_length = univ.Integer(16) +ub_surname_length = univ.Integer(40) +ub_terminal_id_length = univ.Integer(24) +ub_unformatted_address_length = univ.Integer(180) +ub_x121_address_length = univ.Integer(16) + +class UniversalString(char.UniversalString): pass +class BMPString(char.BMPString): pass +class UTF8String(char.UTF8String): pass + +id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7') +id_pe = univ.ObjectIdentifier('1.3.6.1.5.5.7.1') +id_qt = univ.ObjectIdentifier('1.3.6.1.5.5.7.2') +id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3') +id_ad = univ.ObjectIdentifier('1.3.6.1.5.5.7.48') + +id_qt_cps = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.1') +id_qt_unotice = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.2') + +id_ad_ocsp = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.1') +id_ad_caIssuers = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.2') + +class AttributeValue(univ.Any): pass + +class AttributeType(univ.ObjectIdentifier): pass + +class AttributeTypeAndValue(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeType()), + namedtype.NamedType('value', AttributeValue()) + ) + +class Attribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeType()), + namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue())) + ) + +id_at = univ.ObjectIdentifier('2.5.4') +id_at_name = univ.ObjectIdentifier('2.5.4.41') +id_at_sutname = univ.ObjectIdentifier('2.5.4.4') +id_at_givenName = univ.ObjectIdentifier('2.5.4.42') +id_at_initials = univ.ObjectIdentifier('2.5.4.43') +id_at_generationQualifier = univ.ObjectIdentifier('2.5.4.44') + +class X520name(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))), + namedtype.NamedType('utf8String', 
char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))) + ) + +id_at_commonName = univ.ObjectIdentifier('2.5.4.3') + +class X520CommonName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))) + ) + +id_at_localityName = univ.ObjectIdentifier('2.5.4.7') + +class X520LocalityName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))) + ) + +id_at_stateOrProvinceName = univ.ObjectIdentifier('2.5.4.8') + +class X520StateOrProvinceName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))) + ) + +id_at_organizationName = univ.ObjectIdentifier('2.5.4.10') + +class X520OrganizationName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))) + ) + +id_at_organizationalUnitName = univ.ObjectIdentifier('2.5.4.11') + +class 
X520OrganizationalUnitName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))) + ) + +id_at_title = univ.ObjectIdentifier('2.5.4.12') + +class X520Title(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))) + ) + +id_at_dnQualifier = univ.ObjectIdentifier('2.5.4.46') + +class X520dnQualifier(char.PrintableString): pass + +id_at_countryName = univ.ObjectIdentifier('2.5.4.6') + +class X520countryName(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(2, 2) + +pkcs_9 = univ.ObjectIdentifier('1.2.840.113549.1.9') + +emailAddress = univ.ObjectIdentifier('1.2.840.113549.1.9.1') + +class Pkcs9email(char.IA5String): + subtypeSpec = char.IA5String.subtypeSpec + constraint.ValueSizeConstraint(1, ub_emailaddress_length) + +# ---- + +class DSAPrivateKey(univ.Sequence): + """PKIX compliant DSA private key structure""" + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('v1', 0)))), + namedtype.NamedType('p', univ.Integer()), + namedtype.NamedType('q', univ.Integer()), + namedtype.NamedType('g', univ.Integer()), + namedtype.NamedType('public', univ.Integer()), + namedtype.NamedType('private', univ.Integer()) + ) + +# ---- + +class RelativeDistinguishedName(univ.SetOf): + componentType = AttributeTypeAndValue() + +class RDNSequence(univ.SequenceOf): + componentType = RelativeDistinguishedName() + +class Name(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('', RDNSequence()) + ) + +class DirectoryString(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('bmpString', 
char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('ia5String', char.IA5String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) # hm, this should not be here!? XXX + ) + +# certificate and CRL specific structures begin here + +class AlgorithmIdentifier(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('algorithm', univ.ObjectIdentifier()), + namedtype.OptionalNamedType('parameters', univ.Any()) + ) + +class Extension(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('extnID', univ.ObjectIdentifier()), + namedtype.DefaultedNamedType('critical', univ.Boolean('False')), + namedtype.NamedType('extnValue', univ.Any()) + ) + +class Extensions(univ.SequenceOf): + componentType = Extension() + sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + +class SubjectPublicKeyInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('algorithm', AlgorithmIdentifier()), + namedtype.NamedType('subjectPublicKey', univ.BitString()) + ) + +class UniqueIdentifier(univ.BitString): pass + +class Time(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('utcTime', useful.UTCTime()), + namedtype.NamedType('generalTime', useful.GeneralizedTime()) + ) + +class Validity(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('notBefore', Time()), + namedtype.NamedType('notAfter', Time()) + ) + +class CertificateSerialNumber(univ.Integer): pass + +class Version(univ.Integer): + namedValues = namedval.NamedValues( + ('v1', 0), ('v2', 1), ('v3', 2) + ) + +class TBSCertificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.DefaultedNamedType('version', Version('v1').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('serialNumber', CertificateSerialNumber()), + namedtype.NamedType('signature', AlgorithmIdentifier()), + namedtype.NamedType('issuer', Name()), + namedtype.NamedType('validity', Validity()), + namedtype.NamedType('subject', Name()), + namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()), + namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('extensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) + ) + +class Certificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('tbsCertificate', TBSCertificate()), + namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), + namedtype.NamedType('signatureValue', univ.BitString()) + ) + +# CRL structures + +class RevokedCertificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('userCertificate', CertificateSerialNumber()), + namedtype.NamedType('revocationDate', Time()), + namedtype.OptionalNamedType('crlEntryExtensions', Extensions()) + ) + +class TBSCertList(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('version', Version()), + namedtype.NamedType('signature', AlgorithmIdentifier()), + namedtype.NamedType('issuer', Name()), + namedtype.NamedType('thisUpdate', Time()), + namedtype.OptionalNamedType('nextUpdate', Time()), + 
namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=RevokedCertificate())), + namedtype.OptionalNamedType('crlExtensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) + +class CertificateList(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('tbsCertList', TBSCertList()), + namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), + namedtype.NamedType('signature', univ.BitString()) + ) + +# Algorithm OIDs and parameter structures + +pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1') +rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1') +md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2') +md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4') +sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5') +id_dsa_with_sha1 = univ.ObjectIdentifier('1.2.840.10040.4.3') + +class Dss_Sig_Value(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('r', univ.Integer()), + namedtype.NamedType('s', univ.Integer()) + ) + +dhpublicnumber = univ.ObjectIdentifier('1.2.840.10046.2.1') + +class ValidationParms(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('seed', univ.BitString()), + namedtype.NamedType('pgenCounter', univ.Integer()) + ) + +class DomainParameters(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('p', univ.Integer()), + namedtype.NamedType('g', univ.Integer()), + namedtype.NamedType('q', univ.Integer()), + namedtype.NamedType('j', univ.Integer()), + namedtype.OptionalNamedType('validationParms', ValidationParms()) + ) + +id_dsa = univ.ObjectIdentifier('1.2.840.10040.4.1') + +class Dss_Parms(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('p', univ.Integer()), + namedtype.NamedType('q', univ.Integer()), + namedtype.NamedType('g', univ.Integer()) + ) + +# x400 address syntax starts here + +teletex_domain_defined_attributes = univ.Integer(6) + +class TeletexDomainDefinedAttribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))), + namedtype.NamedType('value', char.TeletexString()) + ) + +class TeletexDomainDefinedAttributes(univ.SequenceOf): + componentType = TeletexDomainDefinedAttribute() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) + +terminal_type = univ.Integer(23) + +class TerminalType(univ.Integer): + subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, ub_integer_options) + namedValues = namedval.NamedValues( + ('telex', 3), + ('teletelex', 4), + ('g3-facsimile', 5), + ('g4-facsimile', 6), + ('ia5-terminal', 7), + ('videotex', 8) + ) + +class PresentationAddress(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('pSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('sSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('tSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('nAddresses', univ.SetOf(componentType=univ.OctetString()).subtype(explicitTag=tag.Tag(tag.tagClassContext, 
tag.tagFormatSimple, 3), subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + ) + +extended_network_address = univ.Integer(22) + +class E163_4_address(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('number', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_number_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('sub-address', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_sub_address_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class ExtendedNetworkAddress(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('e163-4-address', E163_4_address()), + namedtype.NamedType('psap-address', PresentationAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + +class PDSParameter(univ.Set): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('printable-string', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))), + namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))) + ) + +local_postal_attributes = univ.Integer(21) + +class LocalPostalAttributes(PDSParameter): pass + +class UniquePostalName(PDSParameter): pass + +unique_postal_name = univ.Integer(20) + +poste_restante_address = univ.Integer(19) + +class PosteRestanteAddress(PDSParameter): pass + +post_office_box_address = univ.Integer(18) + +class PostOfficeBoxAddress(PDSParameter): pass + +street_address = univ.Integer(17) + +class StreetAddress(PDSParameter): pass + +class UnformattedPostalAddress(univ.Set): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('printable-address', univ.SequenceOf(componentType=char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_physical_address_lines)))), + namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_unformatted_address_length))) + ) + +physical_delivery_office_name = univ.Integer(10) + +class PhysicalDeliveryOfficeName(PDSParameter): pass + +physical_delivery_office_number = univ.Integer(11) + +class PhysicalDeliveryOfficeNumber(PDSParameter): pass + +extension_OR_address_components = univ.Integer(12) + +class ExtensionORAddressComponents(PDSParameter): pass + +physical_delivery_personal_name = univ.Integer(13) + +class PhysicalDeliveryPersonalName(PDSParameter): pass + +physical_delivery_organization_name = univ.Integer(14) + +class PhysicalDeliveryOrganizationName(PDSParameter): pass + +extension_physical_delivery_address_components = univ.Integer(15) + +class ExtensionPhysicalDeliveryAddressComponents(PDSParameter): pass + +unformatted_postal_address = univ.Integer(16) + +postal_code = univ.Integer(9) + +class PostalCode(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('numeric-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))), + namedtype.NamedType('printable-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))) + ) + +class PhysicalDeliveryCountryName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('x121-dcc-code', 
char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))), + namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length))) + ) + +class PDSName(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_pds_name_length) + +physical_delivery_country_name = univ.Integer(8) + +class TeletexOrganizationalUnitName(char.TeletexString): + subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length) + +pds_name = univ.Integer(7) + +teletex_organizational_unit_names = univ.Integer(5) + +class TeletexOrganizationalUnitNames(univ.SequenceOf): + componentType = TeletexOrganizationalUnitName() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units) + +teletex_personal_name = univ.Integer(4) + +class TeletexPersonalName(univ.Set): + componentType = namedtype.NamedTypes( + namedtype.NamedType('surname', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('given-name', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('initials', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('generation-qualifier', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) + ) + +teletex_organization_name = univ.Integer(3) + +class TeletexOrganizationName(char.TeletexString): + subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length) + +teletex_common_name = univ.Integer(2) + +class TeletexCommonName(char.TeletexString): + subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length) + +class CommonName(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length) + +common_name = univ.Integer(1) + +class ExtensionAttribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('extension-attribute-type', univ.Integer().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_extension_attributes), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('extension-attribute-value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class ExtensionAttributes(univ.SetOf): + componentType = ExtensionAttribute() + subtypeSpec = univ.SetOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_extension_attributes) + +class BuiltInDomainDefinedAttribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))), + namedtype.NamedType('value', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 
ub_domain_defined_attribute_value_length))) + ) + +class BuiltInDomainDefinedAttributes(univ.SequenceOf): + componentType = BuiltInDomainDefinedAttribute() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) + +class OrganizationalUnitName(char.PrintableString): + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length) + +class OrganizationalUnitNames(univ.SequenceOf): + componentType = OrganizationalUnitName() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units) + +class PersonalName(univ.Set): + componentType = namedtype.NamedTypes( + namedtype.NamedType('surname', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('given-name', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('initials', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('generation-qualifier', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) + ) + +class NumericUserIdentifier(char.NumericString): + subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_numeric_user_id_length) + +class OrganizationName(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length) + +class PrivateDomainName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))), + namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))) + ) + +class TerminalIdentifier(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_terminal_id_length) + +class X121Address(char.NumericString): + subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_x121_address_length) + +class NetworkAddress(X121Address): pass + +class AdministrationDomainName(univ.Choice): + tagSet = univ.Choice.tagSet.tagExplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 2) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))), + namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))) + ) + +class CountryName(univ.Choice): + tagSet = univ.Choice.tagSet.tagExplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))), + namedtype.NamedType('iso-3166-alpha2-code', 
char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length))) + ) + +class BuiltInStandardAttributes(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('country-name', CountryName()), + namedtype.OptionalNamedType('administration-domain-name', AdministrationDomainName()), + namedtype.OptionalNamedType('network-address', NetworkAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('terminal-identifier', TerminalIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('private-domain-name', PrivateDomainName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('organization-name', OrganizationName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), + namedtype.OptionalNamedType('numeric-user-identifier', NumericUserIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))), + namedtype.OptionalNamedType('personal-name', PersonalName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))), + namedtype.OptionalNamedType('organizational-unit-names', OrganizationalUnitNames().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))) + ) + +class ORAddress(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('built-in-standard-attributes', BuiltInStandardAttributes()), + namedtype.OptionalNamedType('built-in-domain-defined-attributes', BuiltInDomainDefinedAttributes()), + namedtype.OptionalNamedType('extension-attributes', ExtensionAttributes()) + ) + +# +# PKIX1Implicit88 +# + +id_ce_invalidityDate = univ.ObjectIdentifier('2.5.29.24') + +class InvalidityDate(useful.GeneralizedTime): pass + +id_holdinstruction_none = univ.ObjectIdentifier('2.2.840.10040.2.1') +id_holdinstruction_callissuer = univ.ObjectIdentifier('2.2.840.10040.2.2') +id_holdinstruction_reject = univ.ObjectIdentifier('2.2.840.10040.2.3') + +holdInstruction = univ.ObjectIdentifier('2.2.840.10040.2') + +id_ce_holdInstructionCode = univ.ObjectIdentifier('2.5.29.23') + +class HoldInstructionCode(univ.ObjectIdentifier): pass + +id_ce_cRLReasons = univ.ObjectIdentifier('2.5.29.21') + +class CRLReason(univ.Enumerated): + namedValues = namedval.NamedValues( + ('unspecified', 0), + ('keyCompromise', 1), + ('cACompromise', 2), + ('affiliationChanged', 3), + ('superseded', 4), + ('cessationOfOperation', 5), + ('certificateHold', 6), + ('removeFromCRL', 8) + ) + +id_ce_cRLNumber = univ.ObjectIdentifier('2.5.29.20') + +class CRLNumber(univ.Integer): + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(0, MAX) + +class BaseCRLNumber(CRLNumber): pass + +id_kp_serverAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.1.1') +id_kp_clientAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.2') +id_kp_codeSigning = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.3') +id_kp_emailProtection = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.4') +id_kp_ipsecEndSystem = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.5') +id_kp_ipsecTunnel = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.6') +id_kp_ipsecUser = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.7') +id_kp_timeStamping = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.8') +id_pe_authorityInfoAccess = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.1') +id_ce_extKeyUsage = univ.ObjectIdentifier('2.5.29.37') + +class 
KeyPurposeId(univ.ObjectIdentifier): pass + +class ExtKeyUsageSyntax(univ.SequenceOf): + componentType = KeyPurposeId() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +class ReasonFlags(univ.BitString): + namedValues = namedval.NamedValues( + ('unused', 0), + ('keyCompromise', 1), + ('cACompromise', 2), + ('affiliationChanged', 3), + ('superseded', 4), + ('cessationOfOperation', 5), + ('certificateHold', 6) + ) + + +class SkipCerts(univ.Integer): + subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, MAX) + +id_ce_policyConstraints = univ.ObjectIdentifier('2.5.29.36') + +class PolicyConstraints(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('requireExplicitPolicy', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('inhibitPolicyMapping', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + +id_ce_basicConstraints = univ.ObjectIdentifier('2.5.29.19') + +class BasicConstraints(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('cA', univ.Boolean(False)), + namedtype.OptionalNamedType('pathLenConstraint', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX))) + ) + +id_ce_subjectDirectoryAttributes = univ.ObjectIdentifier('2.5.29.9') + +class SubjectDirectoryAttributes(univ.SequenceOf): + componentType = Attribute() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +class EDIPartyName(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('partyName', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class AnotherName(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type-id', univ.ObjectIdentifier()), + namedtype.NamedType('value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + +class GeneralName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('otherName', AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('rfc822Name', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('dNSName', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.NamedType('x400Address', ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), + namedtype.NamedType('directoryName', Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))), + namedtype.NamedType('ediPartyName', EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))), + namedtype.NamedType('uniformResourceIdentifier', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))), + namedtype.NamedType('iPAddress', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))), + namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8))) + ) + +class GeneralNames(univ.SequenceOf): + componentType = GeneralName() + subtypeSpec = univ.SequenceOf.subtypeSpec 
+ constraint.ValueSizeConstraint(1, MAX) + +class AccessDescription(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('accessMethod', univ.ObjectIdentifier()), + namedtype.NamedType('accessLocation', GeneralName()) + ) + +class AuthorityInfoAccessSyntax(univ.SequenceOf): + componentType = AccessDescription() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +id_ce_deltaCRLIndicator = univ.ObjectIdentifier('2.5.29.27') + +class DistributionPointName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('fullName', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + +class DistributionPoint(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('reasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) + ) +class BaseDistance(univ.Integer): + subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX) + +id_ce_cRLDistributionPoints = univ.ObjectIdentifier('2.5.29.31') + +class CRLDistPointsSyntax(univ.SequenceOf): + componentType = DistributionPoint + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) +id_ce_issuingDistributionPoint = univ.ObjectIdentifier('2.5.29.28') + +class IssuingDistributionPoint(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('onlyContainsUserCerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('onlyContainsCACerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), + namedtype.NamedType('indirectCRL', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))) + ) + +class GeneralSubtree(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('base', GeneralName()), + namedtype.NamedType('minimum', BaseDistance(0).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('maximum', BaseDistance().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + +class GeneralSubtrees(univ.SequenceOf): + componentType = GeneralSubtree() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +id_ce_nameConstraints = univ.ObjectIdentifier('2.5.29.30') + +class NameConstraints(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('excludedSubtrees', 
GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) + + +class DisplayText(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('visibleString', char.VisibleString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))) + ) + +class NoticeReference(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('organization', DisplayText()), + namedtype.NamedType('noticeNumbers', univ.SequenceOf(componentType=univ.Integer())) + ) + +class UserNotice(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('noticeRef', NoticeReference()), + namedtype.OptionalNamedType('explicitText', DisplayText()) + ) + +class CPSuri(char.IA5String): pass + +class PolicyQualifierId(univ.ObjectIdentifier): + subtypeSpec = univ.ObjectIdentifier.subtypeSpec + constraint.SingleValueConstraint(id_qt_cps, id_qt_unotice) + +class CertPolicyId(univ.ObjectIdentifier): pass + +class PolicyQualifierInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('policyQualifierId', PolicyQualifierId()), + namedtype.NamedType('qualifier', univ.Any()) + ) + +id_ce_certificatePolicies = univ.ObjectIdentifier('2.5.29.32') + +class PolicyInformation(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('policyIdentifier', CertPolicyId()), + namedtype.OptionalNamedType('policyQualifiers', univ.SequenceOf(componentType=PolicyQualifierInfo()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) + ) + +class CertificatePolicies(univ.SequenceOf): + componentType = PolicyInformation() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +id_ce_policyMappings = univ.ObjectIdentifier('2.5.29.33') + +class PolicyMapping(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('issuerDomainPolicy', CertPolicyId()), + namedtype.NamedType('subjectDomainPolicy', CertPolicyId()) + ) + +class PolicyMappings(univ.SequenceOf): + componentType = PolicyMapping() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +id_ce_privateKeyUsagePeriod = univ.ObjectIdentifier('2.5.29.16') + +class PrivateKeyUsagePeriod(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('notBefore', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('notAfter', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +id_ce_keyUsage = univ.ObjectIdentifier('2.5.29.15') + +class KeyUsage(univ.BitString): + namedValues = namedval.NamedValues( + ('digitalSignature', 0), + ('nonRepudiation', 1), + ('keyEncipherment', 2), + ('dataEncipherment', 3), + ('keyAgreement', 4), + ('keyCertSign', 5), + ('cRLSign', 6), + ('encipherOnly', 7), + ('decipherOnly', 8) + ) + +id_ce = univ.ObjectIdentifier('2.5.29') + +id_ce_authorityKeyIdentifier = univ.ObjectIdentifier('2.5.29.35') + +class KeyIdentifier(univ.OctetString): pass + +id_ce_subjectKeyIdentifier = univ.ObjectIdentifier('2.5.29.14') + +class SubjectKeyIdentifier(KeyIdentifier): pass + +class AuthorityKeyIdentifier(univ.Sequence): + componentType = namedtype.NamedTypes( + 
namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('authorityCertIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('authorityCertSerialNumber', CertificateSerialNumber().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) + ) + +id_ce_certificateIssuer = univ.ObjectIdentifier('2.5.29.29') + +class CertificateIssuer(GeneralNames): pass + +id_ce_subjectAltName = univ.ObjectIdentifier('2.5.29.17') + +class SubjectAltName(GeneralNames): pass + +id_ce_issuerAltName = univ.ObjectIdentifier('2.5.29.18') + +class IssuerAltName(GeneralNames): pass diff --git a/tools/validatestore.py b/tools/validatestore.py new file mode 100755 index 0000000..74963e0 --- /dev/null +++ b/tools/validatestore.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python + +# Copyright (c) 2014, NORDUnet A/S. +# See LICENSE for licensing information. + +import argparse +import urllib2 +import urllib +import json +import base64 +import sys +import struct +import hashlib +import itertools +from certtools import * +try: + from precerttools import * +except ImportError: + pass +import os +import signal +import select +import zipfile +import traceback + +parser = argparse.ArgumentParser(description='') +parser.add_argument('--store', default=None, metavar="dir", help='Get certificates from directory dir') +parser.add_argument('--parallel', type=int, default=1, metavar="n", help="Number of parallel workers") +args = parser.parse_args() + +from multiprocessing import Pool + +certfilepath = args.store + +if certfilepath[-1] == "/": + certfiles = [certfilepath + filename for filename in sorted(os.listdir(certfilepath)) if os.path.isfile(certfilepath + filename)] +else: + certfiles = [certfilepath] + +def submitcert((certfile, cert)): + try: + certchain = get_certs_from_string(cert) + if len(certchain) == 0: + return True + precerts = get_precerts_from_string(cert) + hash = get_hash_from_certfile(cert) + timestamp = get_timestamp_from_certfile(cert) + assert len(precerts) == 0 or len(precerts) == 1 + precert = precerts[0] if precerts else None + if precert: + if ext_key_usage_precert_signing_cert in get_ext_key_usage(certchain[0]): + issuer_key_hash = get_cert_key_hash(certchain[1]) + issuer = certchain[1] + else: + issuer_key_hash = get_cert_key_hash(certchain[0]) + issuer = None + cleanedcert = cleanprecert(precert, issuer=issuer) + mtl = pack_mtl_precert(timestamp, cleanedcert, issuer_key_hash) + leaf_hash = get_leaf_hash(mtl) + else: + mtl = pack_mtl(timestamp, certchain[0]) + leaf_hash = get_leaf_hash(mtl) + if leaf_hash == hash: + return True + else: + print certfile, repr(leaf_hash), repr(hash), precert != None + return None + except Exception, e: + print certfile + traceback.print_exc() + raise e + +def get_all_certificates(certfiles): + for certfile in certfiles: + if certfile.endswith(".zip"): + zf = zipfile.ZipFile(certfile) + for name in zf.namelist(): + yield (name, zf.read(name)) + zf.close() + else: + yield (certfile, open(certfile).read()) + +p = Pool(args.parallel, lambda: signal.signal(signal.SIGINT, signal.SIG_IGN)) + +certs = get_all_certificates(certfiles) + +try: + for result in p.imap_unordered(submitcert, certs): + if result == None: + print "error" + p.terminate() + p.join() + sys.exit(1) +except KeyboardInterrupt: + p.terminate() + p.join() -- cgit v1.1 From 0a76e4d080a8349456d04434dcb2d4b381eb8ec4 Mon Sep 
17 00:00:00 2001 From: Magnus Ahltorp Date: Wed, 18 Mar 2015 14:27:18 +0100 Subject: Added precert handling for SCT calculation --- tools/certtools.py | 20 +++++++++++++++----- tools/submitcert.py | 38 +++++++++++++++++++++++++++++--------- tools/verifysct.py | 17 ++++++++++++++--- 3 files changed, 58 insertions(+), 17 deletions(-) (limited to 'tools') diff --git a/tools/certtools.py b/tools/certtools.py index 1436863..cc423af 100644 --- a/tools/certtools.py +++ b/tools/certtools.py @@ -257,7 +257,7 @@ def create_sth_signature(tree_size, timestamp, root_hash, baseurl, key=None): return create_signature(baseurl, tree_head, key=key) -def check_sct_signature(baseurl, leafcert, sct): +def check_sct_signature(baseurl, signed_entry, sct, precert=False): publickey = base64.decodestring(publickeys[baseurl]) calculated_logid = hashlib.sha256(publickey).digest() received_logid = base64.decodestring(sct["id"]) @@ -271,9 +271,12 @@ def check_sct_signature(baseurl, leafcert, sct): version = struct.pack(">b", sct["sct_version"]) signature_type = struct.pack(">b", 0) timestamp = struct.pack(">Q", sct["timestamp"]) - entry_type = struct.pack(">H", 0) + if precert: + entry_type = struct.pack(">H", 1) + else: + entry_type = struct.pack(">H", 0) signed_struct = version + signature_type + timestamp + \ - entry_type + tls_array(leafcert, 3) + \ + entry_type + signed_entry + \ tls_array(base64.decodestring(sct["extensions"]), 2) check_signature(baseurl, signature, signed_struct) @@ -292,15 +295,22 @@ def pack_mtl(timestamp, leafcert): def pack_mtl_precert(timestamp, cleanedcert, issuer_key_hash): entry_type = struct.pack(">H", 1) extensions = "" - assert len(issuer_key_hash) == 32 timestamped_entry = struct.pack(">Q", timestamp) + entry_type + \ - issuer_key_hash + tls_array(cleanedcert, 3) + tls_array(extensions, 2) + pack_precert(cleanedcert, issuer_key_hash) + tls_array(extensions, 2) version = struct.pack(">b", 0) leaf_type = struct.pack(">b", 0) merkle_tree_leaf = version + leaf_type + timestamped_entry return merkle_tree_leaf +def pack_precert(cleanedcert, issuer_key_hash): + assert len(issuer_key_hash) == 32 + + return issuer_key_hash + tls_array(cleanedcert, 3) + +def pack_cert(cert): + return tls_array(cert, 3) + def unpack_mtl(merkle_tree_leaf): version = merkle_tree_leaf[0:1] leaf_type = merkle_tree_leaf[1:2] diff --git a/tools/submitcert.py b/tools/submitcert.py index 1c79544..2e8cc33 100755 --- a/tools/submitcert.py +++ b/tools/submitcert.py @@ -13,6 +13,11 @@ import struct import hashlib import itertools from certtools import * +from certtools import * +try: + from precerttools import * +except ImportError: + pass import os import signal import select @@ -51,8 +56,20 @@ def submitcert((certfile, cert)): try: if precert: + if ext_key_usage_precert_signing_cert in get_ext_key_usage(certchain[0]): + issuer_key_hash = get_cert_key_hash(certchain[1]) + issuer = certchain[1] + else: + issuer_key_hash = get_cert_key_hash(certchain[0]) + issuer = None + cleanedcert = cleanprecert(precert, issuer=issuer) + signed_entry = pack_precert(cleanedcert, issuer_key_hash) + leafcert = cleanedcert result = add_prechain(baseurl, {"chain":map(base64.b64encode, [precert] + certchain)}) else: + signed_entry = pack_cert(certchain[0]) + leafcert = certchain[0] + issuer_key_hash = None result = add_chain(baseurl, {"chain":map(base64.b64encode, certchain)}) except SystemExit: print "EXIT:", certfile @@ -67,7 +84,7 @@ def submitcert((certfile, cert)): try: if args.check_sct: - check_sct_signature(baseurl, certchain[0], 
result) + check_sct_signature(baseurl, signed_entry, result, precert=precert) timing_point(timing, "checksig") except AssertionError, e: print "ERROR:", certfile, e @@ -81,7 +98,7 @@ def submitcert((certfile, cert)): if lookup_in_log: - merkle_tree_leaf = pack_mtl(result["timestamp"], certchain[0]) + merkle_tree_leaf = pack_mtl(result["timestamp"], leafcert) leaf_hash = get_leaf_hash(merkle_tree_leaf) @@ -119,7 +136,7 @@ def submitcert((certfile, cert)): print "and submitted chain has length", len(submittedcertchain) timing_point(timing, "lookup") - return ((certchain[0], result), timing["deltatimes"]) + return ((leafcert, issuer_key_hash, result), timing["deltatimes"]) def get_ncerts(certfiles): n = 0 @@ -142,9 +159,12 @@ def get_all_certificates(certfiles): else: yield (certfile, open(certfile).read()) -def save_sct(sct, sth): +def save_sct(sct, sth, leafcert, issuer_key_hash): sctlog = open(args.sct_file, "a") - json.dump({"leafcert": base64.b64encode(leafcert), "sct": sct, "sth": sth}, sctlog) + sctentry = {"leafcert": base64.b64encode(leafcert), "sct": sct, "sth": sth} + if issuer_key_hash: + sctentry["issuer_key_hash"] = base64.b64encode(issuer_key_hash) + json.dump(sctentry, sctlog) sctlog.write("\n") sctlog.close() @@ -163,8 +183,8 @@ certs = get_all_certificates(certfiles) (result, timing) = submitcert(certs.next()) if result != None: nsubmitted += 1 - (leafcert, sct) = result - save_sct(sct, sth) + (leafcert, issuer_key_hash, sct) = result + save_sct(sct, sth, leafcert, issuer_key_hash) if args.pre_warm: select.select([], [], [], 3.0) @@ -181,8 +201,8 @@ try: sys.exit(1) if result != None: nsubmitted += 1 - (leafcert, sct) = result - save_sct(sct, sth) + (leafcert, issuer_key_hash, sct) = result + save_sct(sct, sth, leafcert, issuer_key_hash) deltatime = datetime.datetime.now() - starttime deltatime_f = deltatime.seconds + deltatime.microseconds / 1000000.0 rate = nsubmitted / deltatime_f diff --git a/tools/verifysct.py b/tools/verifysct.py index 699a0ad..27ab4c9 100755 --- a/tools/verifysct.py +++ b/tools/verifysct.py @@ -34,8 +34,16 @@ def verifysct(sctentry): timing = timing_point() leafcert = base64.b64decode(sctentry["leafcert"]) + if "issuer_key_hash" in sctentry: + issuer_key_hash = base64.b64decode(sctentry["issuer_key_hash"]) + else: + issuer_key_hash = None try: - check_sct_signature(baseurl, leafcert, sctentry["sct"]) + if issuer_key_hash: + signed_entry = pack_precert(leafcert, issuer_key_hash) + else: + signed_entry = pack_cert(leafcert) + check_sct_signature(baseurl, signed_entry, sctentry["sct"], precert=issuer_key_hash) timing_point(timing, "checksig") except AssertionError, e: print "ERROR:", e @@ -47,7 +55,10 @@ def verifysct(sctentry): print "ERROR: bad signature" return (None, None) - merkle_tree_leaf = pack_mtl(sctentry["sct"]["timestamp"], leafcert) + if issuer_key_hash: + merkle_tree_leaf = pack_mtl_precert(sctentry["sct"]["timestamp"], leafcert, issuer_key_hash) + else: + merkle_tree_leaf = pack_mtl(sctentry["sct"]["timestamp"], leafcert) leaf_hash = get_leaf_hash(merkle_tree_leaf) @@ -76,7 +87,7 @@ def verifysct(sctentry): p = Pool(args.parallel, lambda: signal.signal(signal.SIGINT, signal.SIG_IGN)) sctfile = open(args.sct_file) -scts = [json.loads(row) for row in sctfile] +scts = (json.loads(row) for row in sctfile) nverified = 0 lastprinted = 0 -- cgit v1.1 From 47e16395eeb7c1825d507a31df9bc89f9b24c10d Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Thu, 19 Mar 2015 10:57:11 +0100 Subject: Fix signature calculation in test --- tools/testcase1.py | 3 
++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'tools') diff --git a/tools/testcase1.py b/tools/testcase1.py index 73613fb..dc62d17 100755 --- a/tools/testcase1.py +++ b/tools/testcase1.py @@ -70,7 +70,8 @@ def do_add_chain(chain): except ValueError, e: print_error("%s", e) try: - check_sct_signature(baseurl, chain[0], result) + signed_entry = pack_cert(chain[0]) + check_sct_signature(baseurl, signed_entry, result) except AssertionError, e: print_error("%s", e) except ecdsa.keys.BadSignatureError, e: -- cgit v1.1 From 26c5e8f248454f62329361a4504c78820e24b649 Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Mon, 23 Mar 2015 09:32:55 +0100 Subject: Support multiple baseurls in test --- tools/testcase1.py | 113 +++++++++++++++++++++++++++++------------------------ 1 file changed, 62 insertions(+), 51 deletions(-) (limited to 'tools') diff --git a/tools/testcase1.py b/tools/testcase1.py index dc62d17..7b3229d 100755 --- a/tools/testcase1.py +++ b/tools/testcase1.py @@ -14,7 +14,7 @@ import hashlib import itertools from certtools import * -baseurl = "https://127.0.0.1:8080/" +baseurls = ["https://127.0.0.1:8080/"] certfiles = ["testcerts/cert1.txt", "testcerts/cert2.txt", "testcerts/cert3.txt", "testcerts/cert4.txt", "testcerts/cert5.txt"] @@ -51,7 +51,7 @@ def assert_equal(actual, expected, name, quiet=False, nodata=False): elif not quiet: print_success("%s was correct", name) -def print_and_check_tree_size(expected): +def print_and_check_tree_size(expected, baseurl): global failures sth = get_sth(baseurl) try: @@ -61,9 +61,9 @@ def print_and_check_tree_size(expected): except ecdsa.keys.BadSignatureError, e: print_error("bad STH signature") tree_size = sth["tree_size"] - assert_equal(tree_size, expected, "tree size") + assert_equal(tree_size, expected, "tree size", quiet=True) -def do_add_chain(chain): +def do_add_chain(chain, baseurl): global failures try: result = add_chain(baseurl, {"chain":map(base64.b64encode, chain)}) @@ -75,11 +75,12 @@ def do_add_chain(chain): except AssertionError, e: print_error("%s", e) except ecdsa.keys.BadSignatureError, e: + print e print_error("bad SCT signature") print_success("signature check succeeded") return result -def get_and_validate_proof(timestamp, chain, leaf_index, nentries): +def get_and_validate_proof(timestamp, chain, leaf_index, nentries, baseurl): cert = chain[0] merkle_tree_leaf = pack_mtl(timestamp, cert) leaf_hash = get_leaf_hash(merkle_tree_leaf) @@ -87,31 +88,31 @@ def get_and_validate_proof(timestamp, chain, leaf_index, nentries): proof = get_proof_by_hash(baseurl, leaf_hash, sth["tree_size"]) leaf_index = proof["leaf_index"] inclusion_proof = [base64.b64decode(e) for e in proof["audit_path"]] - assert_equal(leaf_index, leaf_index, "leaf_index") - assert_equal(len(inclusion_proof), nentries, "audit_path length") + assert_equal(leaf_index, leaf_index, "leaf_index", quiet=True) + assert_equal(len(inclusion_proof), nentries, "audit_path length", quiet=True) calc_root_hash = verify_inclusion_proof(inclusion_proof, leaf_index, sth["tree_size"], leaf_hash) root_hash = base64.b64decode(sth["sha256_root_hash"]) - assert_equal(root_hash, calc_root_hash, "verified root hash", nodata=True) - get_and_check_entry(timestamp, chain, leaf_index) + assert_equal(root_hash, calc_root_hash, "verified root hash", nodata=True, quiet=True) + get_and_check_entry(timestamp, chain, leaf_index, baseurl) -def get_and_validate_consistency_proof(sth1, sth2, size1, size2): +def get_and_validate_consistency_proof(sth1, sth2, size1, size2, baseurl): 
consistency_proof = [base64.decodestring(entry) for entry in get_consistency_proof(baseurl, size1, size2)] (old_treehead, new_treehead) = verify_consistency_proof(consistency_proof, size1, size2, sth1) #print repr(sth1), repr(old_treehead) #print repr(sth2), repr(new_treehead) - assert_equal(old_treehead, sth1, "sth1", nodata=True) - assert_equal(new_treehead, sth2, "sth2", nodata=True) + assert_equal(old_treehead, sth1, "sth1", nodata=True, quiet=True) + assert_equal(new_treehead, sth2, "sth2", nodata=True, quiet=True) -def get_and_check_entry(timestamp, chain, leaf_index): +def get_and_check_entry(timestamp, chain, leaf_index, baseurl): entries = get_entries(baseurl, leaf_index, leaf_index) assert_equal(len(entries), 1, "get_entries", quiet=True) fetched_entry = entries["entries"][0] merkle_tree_leaf = pack_mtl(timestamp, chain[0]) leaf_input = base64.decodestring(fetched_entry["leaf_input"]) - assert_equal(leaf_input, merkle_tree_leaf, "entry", nodata=True) + assert_equal(leaf_input, merkle_tree_leaf, "entry", nodata=True, quiet=True) extra_data = base64.decodestring(fetched_entry["extra_data"]) certchain = decode_certificate_chain(extra_data) @@ -119,7 +120,7 @@ def get_and_check_entry(timestamp, chain, leaf_index): for (submittedcert, fetchedcert, i) in zip(submittedcertchain, certchain, itertools.count(1)): - assert_equal(fetchedcert, submittedcert, "cert %d in chain" % (i,)) + assert_equal(fetchedcert, submittedcert, "cert %d in chain" % (i,), quiet=True) if len(certchain) == len(submittedcertchain) + 1: last_issuer = get_cert_info(submittedcertchain[-1])["issuer"] @@ -137,106 +138,116 @@ def get_and_check_entry(timestamp, chain, leaf_index): len(submittedcertchain)) def merge(): - return subprocess.call(["./merge.py", "--baseurl", "https://127.0.0.1:8080/", "--frontend", "https://127.0.0.1:8082/", "--storage", "https://127.0.0.1:8081/", "--mergedb", "../rel/mergedb", "--signing", "https://127.0.0.1:8088/", "--own-keyname", "merge-1", "--own-keyfile", "../rel/privatekeys/merge-1-private.pem"]) + return subprocess.call(["./merge.py", "--baseurl", "https://127.0.0.1:8080/", + "--frontend", "https://127.0.0.1:8082/", "--storage", "https://127.0.0.1:8081/", + "--mergedb", "../rel/mergedb", "--signing", "https://127.0.0.1:8088/", + "--own-keyname", "merge-1", "--own-keyfile", "../rel/privatekeys/merge-1-private.pem"]) mergeresult = merge() assert_equal(mergeresult, 0, "merge", quiet=True) -print_and_check_tree_size(0) +for baseurl in baseurls: + print_and_check_tree_size(0, baseurl) testgroup("cert1") -result1 = do_add_chain(cc1) +result1 = do_add_chain(cc1, baseurls[0]) mergeresult = merge() assert_equal(mergeresult, 0, "merge", quiet=True) size_sth = {} -print_and_check_tree_size(1) -size_sth[1] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"]) +for baseurl in baseurls: + print_and_check_tree_size(1, baseurl) +size_sth[1] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"]) -result2 = do_add_chain(cc1) +result2 = do_add_chain(cc1, baseurls[0]) assert_equal(result2["timestamp"], result1["timestamp"], "timestamp") mergeresult = merge() assert_equal(mergeresult, 0, "merge", quiet=True) -print_and_check_tree_size(1) -size1_v2_sth = base64.b64decode(get_sth(baseurl)["sha256_root_hash"]) +for baseurl in baseurls: + print_and_check_tree_size(1, baseurl) +size1_v2_sth = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"]) assert_equal(size_sth[1], size1_v2_sth, "sth", nodata=True) # TODO: add invalid cert and check that it generates an error # and that treesize still is 1 
-get_and_validate_proof(result1["timestamp"], cc1, 0, 0) +get_and_validate_proof(result1["timestamp"], cc1, 0, 0, baseurls[0]) testgroup("cert2") -result3 = do_add_chain(cc2) +result3 = do_add_chain(cc2, baseurls[0]) mergeresult = merge() assert_equal(mergeresult, 0, "merge", quiet=True) -print_and_check_tree_size(2) -size_sth[2] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"]) +for baseurl in baseurls: + print_and_check_tree_size(2, baseurl) +size_sth[2] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"]) -get_and_validate_proof(result1["timestamp"], cc1, 0, 1) -get_and_validate_proof(result3["timestamp"], cc2, 1, 1) +get_and_validate_proof(result1["timestamp"], cc1, 0, 1, baseurls[0]) +get_and_validate_proof(result3["timestamp"], cc2, 1, 1, baseurls[0]) testgroup("cert3") -result4 = do_add_chain(cc3) +result4 = do_add_chain(cc3, baseurls[0]) mergeresult = merge() assert_equal(mergeresult, 0, "merge", quiet=True) -print_and_check_tree_size(3) -size_sth[3] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"]) +for baseurl in baseurls: + print_and_check_tree_size(3, baseurl) +size_sth[3] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"]) -get_and_validate_proof(result1["timestamp"], cc1, 0, 2) -get_and_validate_proof(result3["timestamp"], cc2, 1, 2) -get_and_validate_proof(result4["timestamp"], cc3, 2, 1) +get_and_validate_proof(result1["timestamp"], cc1, 0, 2, baseurls[0]) +get_and_validate_proof(result3["timestamp"], cc2, 1, 2, baseurls[0]) +get_and_validate_proof(result4["timestamp"], cc3, 2, 1, baseurls[0]) testgroup("cert4") -result5 = do_add_chain(cc4) +result5 = do_add_chain(cc4, baseurls[0]) mergeresult = merge() assert_equal(mergeresult, 0, "merge", quiet=True) -print_and_check_tree_size(4) -size_sth[4] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"]) +for baseurl in baseurls: + print_and_check_tree_size(4, baseurl) +size_sth[4] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"]) -get_and_validate_proof(result1["timestamp"], cc1, 0, 2) -get_and_validate_proof(result3["timestamp"], cc2, 1, 2) -get_and_validate_proof(result4["timestamp"], cc3, 2, 2) -get_and_validate_proof(result5["timestamp"], cc4, 3, 2) +get_and_validate_proof(result1["timestamp"], cc1, 0, 2, baseurls[0]) +get_and_validate_proof(result3["timestamp"], cc2, 1, 2, baseurls[0]) +get_and_validate_proof(result4["timestamp"], cc3, 2, 2, baseurls[0]) +get_and_validate_proof(result5["timestamp"], cc4, 3, 2, baseurls[0]) testgroup("cert5") -result6 = do_add_chain(cc5) +result6 = do_add_chain(cc5, baseurls[0]) mergeresult = merge() assert_equal(mergeresult, 0, "merge", quiet=True) -print_and_check_tree_size(5) -size_sth[5] = base64.b64decode(get_sth(baseurl)["sha256_root_hash"]) +for baseurl in baseurls: + print_and_check_tree_size(5, baseurl) +size_sth[5] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"]) -get_and_validate_proof(result1["timestamp"], cc1, 0, 3) -get_and_validate_proof(result3["timestamp"], cc2, 1, 3) -get_and_validate_proof(result4["timestamp"], cc3, 2, 3) -get_and_validate_proof(result5["timestamp"], cc4, 3, 3) -get_and_validate_proof(result6["timestamp"], cc5, 4, 1) +get_and_validate_proof(result1["timestamp"], cc1, 0, 3, baseurls[0]) +get_and_validate_proof(result3["timestamp"], cc2, 1, 3, baseurls[0]) +get_and_validate_proof(result4["timestamp"], cc3, 2, 3, baseurls[0]) +get_and_validate_proof(result5["timestamp"], cc4, 3, 3, baseurls[0]) +get_and_validate_proof(result6["timestamp"], cc5, 4, 1, baseurls[0]) mergeresult = merge() 
assert_equal(mergeresult, 0, "merge", quiet=True) for first_size in range(1, 5): for second_size in range(first_size + 1, 6): - get_and_validate_consistency_proof(size_sth[first_size], size_sth[second_size], first_size, second_size) + get_and_validate_consistency_proof(size_sth[first_size], size_sth[second_size], first_size, second_size, baseurls[0]) print "-------" if failures: -- cgit v1.1 From 510ecc5702ea36395addc733789756ce8075517e Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Mon, 23 Mar 2015 09:36:24 +0100 Subject: Make timing printouts optional in merge.py --- tools/merge.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) (limited to 'tools') diff --git a/tools/merge.py b/tools/merge.py index 1b94581..dd8de07 100755 --- a/tools/merge.py +++ b/tools/merge.py @@ -26,6 +26,7 @@ parser.add_argument("--signing", metavar="url", help="Base URL for signing serve parser.add_argument("--own-keyname", metavar="keyname", help="The key name of the merge node", required=True) parser.add_argument("--own-keyfile", metavar="keyfile", help="The file containing the private key of the merge node", required=True) parser.add_argument("--nomerge", action='store_true', help="Don't actually do merge") +parser.add_argument("--timing", action='store_true', help="Print timing information") args = parser.parse_args() ctbaseurl = args.baseurl @@ -244,7 +245,8 @@ check_sth_signature(ctbaseurl, sth) timing_point(timing, "build sth") -print timing["deltatimes"] +if args.timing: + print timing["deltatimes"] print "root hash", base64.b16encode(root_hash) @@ -280,4 +282,5 @@ for frontendnode in frontendnodes: print "send sth:", sendsthresult sys.exit(1) timing_point(timing, "send sth") - print timing["deltatimes"] + if args.timing: + print timing["deltatimes"] -- cgit v1.1 From d645194006bf3c81372073af9784f7d993096444 Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Mon, 23 Mar 2015 16:12:13 +0100 Subject: Generate config from master config. Verify responses in merge.py. --- tools/certtools.py | 43 ++++++-- tools/compileconfig.py | 283 +++++++++++++++++++++++++++++++++++++++++++++++++ tools/merge.py | 92 ++++++++-------- tools/testcase1.py | 12 +-- 4 files changed, 372 insertions(+), 58 deletions(-) create mode 100755 tools/compileconfig.py (limited to 'tools') diff --git a/tools/certtools.py b/tools/certtools.py index cc423af..939d9f1 100644 --- a/tools/certtools.py +++ b/tools/certtools.py @@ -70,6 +70,11 @@ def get_eckey_from_file(keyfile): assert len(keys) == 1 return keys[0] +def get_public_key_from_file(keyfile): + keys = get_pemlike(keyfile, "PUBLIC KEY") + assert len(keys) == 1 + return keys[0] + def get_root_cert(issuer): accepted_certs = \ json.loads(open("googlelog-accepted-certs.txt").read())["certificates"] @@ -84,7 +89,7 @@ def get_root_cert(issuer): return root_cert def get_sth(baseurl): - result = urllib2.urlopen(baseurl + "ct/v1/get-sth", context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() + result = urllib2.urlopen(baseurl + "ct/v1/get-sth").read() return json.loads(result) def get_proof_by_hash(baseurl, hash, tree_size): @@ -92,7 +97,7 @@ def get_proof_by_hash(baseurl, hash, tree_size): params = urllib.urlencode({"hash":base64.b64encode(hash), "tree_size":tree_size}) result = \ - urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() + urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" 
+ params).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR:", e.read() @@ -103,7 +108,7 @@ def get_consistency_proof(baseurl, tree_size1, tree_size2): params = urllib.urlencode({"first":tree_size1, "second":tree_size2}) result = \ - urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() + urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params).read() return json.loads(result)["consistency"] except urllib2.HTTPError, e: print "ERROR:", e.read() @@ -126,7 +131,7 @@ def unpack_tls_array(packed_data, length_len): def add_chain(baseurl, submission): try: - result = urllib2.urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission), context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() + result = urllib2.urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission)).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR", e.code,":", e.read() @@ -162,7 +167,7 @@ def add_prechain(baseurl, submission): def get_entries(baseurl, start, end): try: params = urllib.urlencode({"start":start, "end":end}) - result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() + result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR:", e.read() @@ -205,7 +210,26 @@ def check_signature(baseurl, signature, data): vk.verify(unpacked_signature, data, hashfunc=hashlib.sha256, sigdecode=ecdsa.util.sigdecode_der) -def http_request(url, data=None, key=None): +def parse_auth_header(authheader): + splittedheader = authheader.split(";") + (signature, rawoptions) = (splittedheader[0], splittedheader[1:]) + options = dict([(e.partition("=")[0], e.partition("=")[2]) for e in rawoptions]) + return (base64.b64decode(signature), options) + +def check_auth_header(authheader, expected_key, publickeydir, data, path): + if expected_key == None: + return True + (signature, options) = parse_auth_header(authheader) + keyname = options.get("key") + if keyname != expected_key: + raise Exception("Response claimed to come from %s, expected %s" % (keyname, expected_key)) + publickey = get_public_key_from_file(publickeydir + "/" + keyname + ".pem") + vk = ecdsa.VerifyingKey.from_der(publickey) + vk.verify(signature, "%s\0%s\0%s" % ("REPLY", path, data), hashfunc=hashlib.sha256, + sigdecode=ecdsa.util.sigdecode_der) + return True + +def http_request(url, data=None, key=None, verifynode=None, publickeydir="."): req = urllib2.Request(url, data) (keyname, keyfile) = key privatekey = get_eckey_from_file(keyfile) @@ -219,8 +243,11 @@ def http_request(url, data=None, key=None): signature = sk.sign("%s\0%s\0%s" % (method, parsed_url.path, data), hashfunc=hashlib.sha256, sigencode=ecdsa.util.sigencode_der) req.add_header('X-Catlfish-Auth', base64.b64encode(signature) + ";key=" + keyname) - result = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read() - return result + result = urllib2.urlopen(req) + authheader = result.info().get('X-Catlfish-Auth') + data = result.read() + check_auth_header(authheader, verifynode, publickeydir, data, parsed_url.path) + return data def get_signature(baseurl, data, key=None): try: diff --git a/tools/compileconfig.py b/tools/compileconfig.py new file mode 100755 index 0000000..30424c5 --- /dev/null +++ b/tools/compileconfig.py @@ -0,0 +1,283 @@ +#!/usr/bin/env python + +# Copyright (c) 2014, NORDUnet A/S. +# See LICENSE for licensing information. 
+ +import argparse +import sys +import yaml +import re + +class Symbol(str): + pass + +clean_string = re.compile(r'^[-.:_/A-Za-z0-9 ]*$') +clean_symbol = re.compile(r'^[_A-Za-z0-9]*$') + +def quote_erlang_string(s): + if clean_string.match(s): + return '"' + s + '"' + else: + return "[" + ",".join([str(ord(c)) for c in s]) + "]" + +def quote_erlang_symbol(s): + if clean_symbol.match(s): + return s + elif clean_string.match(s): + return "'" + s + "'" + else: + print >>sys.stderr, "Cannot generate symbol", s + sys.exit(1) + +def gen_erlang(term, level=1): + indent = " " * level + separator = ",\n" + indent + if isinstance(term, Symbol): + return quote_erlang_symbol(term) + elif isinstance(term, basestring): + return quote_erlang_string(term) + elif isinstance(term, int): + return str(term) + elif isinstance(term, tuple): + tuplecontents = [gen_erlang(e, level=level+1) for e in term] + if "\n" not in "".join(tuplecontents): + separator = ", " + return "{" + separator.join(tuplecontents) + "}" + elif isinstance(term, list): + listcontents = [gen_erlang(e, level=level+1) for e in term] + return "[" + separator.join(listcontents) + "]" + else: + print "unknown type", type(term) + sys.exit(1) + +saslconfig = [(Symbol("sasl_error_logger"), Symbol("false")), + (Symbol("errlog_type"), Symbol("error")), + (Symbol("error_logger_mf_dir"), "log"), + (Symbol("error_logger_mf_maxbytes"), 10485760), + (Symbol("error_logger_mf_maxfiles"), 10), + ] + +def parse_address(address): + parsed_address = address.split(":") + if len(parsed_address) != 2: + print >>sys.stderr, "Invalid address format", address + sys.exit(1) + return (parsed_address[0], int(parsed_address[1])) + +def get_node_config(nodename, config): + nodetype = None + nodeconfig = None + for t in ["frontendnodes", "storagenodes", "signingnodes"]: + for node in config[t]: + if node["name"] == nodename: + nodetype = t + nodeconfig = node + if nodeconfig == None: + print >>sys.stderr, "Cannot find config for node", nodename + sys.exit(1) + return (nodetype, nodeconfig) + +def gen_https_servers(nodetype, nodeconfig): + if nodetype == "frontendnodes": + (publichost, publicport) = parse_address(nodeconfig["publicaddress"]) + (host, port) = parse_address(nodeconfig["address"]) + return [(Symbol("external_https_api"), publichost, publicport, Symbol("v1")), + (Symbol("frontend_https_api"), host, port, Symbol("frontend"))] + elif nodetype == "storagenodes": + (host, port) = parse_address(nodeconfig["address"]) + return [(Symbol("storage_https_api"), host, port, Symbol("storage"))] + elif nodetype == "signingnodes": + (host, port) = parse_address(nodeconfig["address"]) + return [(Symbol("signing_https_api"), host, port, Symbol("signing"))] + +def allowed_clients_frontend(mergenodenames): + return [ + ("/ct/frontend/sendentry", mergenodenames), + ("/ct/frontend/sendlog", mergenodenames), + ("/ct/frontend/sendsth", mergenodenames), + ("/ct/frontend/currentposition", mergenodenames), + ("/ct/frontend/missingentries", mergenodenames), + ] + +def allowed_clients_public(): + noauth = Symbol("noauth") + return [ + ("/ct/v1/add-chain", noauth), + ("/ct/v1/add-pre-chain", noauth), + ("/ct/v1/get-sth", noauth), + ("/ct/v1/get-sth-consistency", noauth), + ("/ct/v1/get-proof-by-hash", noauth), + ("/ct/v1/get-entries", noauth), + ("/ct/v1/get-entry-and-proof", noauth), + ("/ct/v1/get-roots", noauth), + ] + +def allowed_clients_signing(frontendnodenames, mergenodenames): + return [ + ("/ct/signing/sct", frontendnodenames), + ("/ct/signing/sth", mergenodenames), + ] + +def 
allowed_clients_storage(frontendnodenames, mergenodenames): + return [ + ("/ct/storage/sendentry", frontendnodenames), + ("/ct/storage/entrycommitted", frontendnodenames), + ("/ct/storage/fetchnewentries", mergenodenames), + ("/ct/storage/getentry", mergenodenames), + ] + +def allowed_servers_frontend(signingnodenames, storagenodenames): + return [ + ("/ct/storage/sendentry", storagenodenames), + ("/ct/storage/entrycommitted", storagenodenames), + ("/ct/signing/sct", signingnodenames), + ] + +def gen_config(nodename, config, localconfig): + print "generating config for", nodename + paths = localconfig["paths"] + options = localconfig.get("options", []) + + configfile = open(paths["configdir"] + nodename + ".config", "w") + print >>configfile, "%% catlfish configuration file (-*- erlang -*-)" + + (nodetype, nodeconfig) = get_node_config(nodename, config) + https_servers = gen_https_servers(nodetype, nodeconfig) + + catlfishconfig = [] + plopconfig = [] + + if nodetype == "frontendnodes": + catlfishconfig.append((Symbol("known_roots_path"), localconfig["paths"]["knownroots"])) + if "sctcaching" in options: + catlfishconfig.append((Symbol("sctcache_root_path"), paths["db"] + "sctcache/")) + + catlfishconfig += [ + (Symbol("https_servers"), https_servers), + (Symbol("https_certfile"), paths["https_certfile"]), + (Symbol("https_keyfile"), paths["https_keyfile"]), + (Symbol("https_cacertfile"), paths["https_cacertfile"]), + ] + + lagerconfig = [ + (Symbol("handlers"), [ + (Symbol("lager_console_backend"), Symbol("info")), + (Symbol("lager_file_backend"), [(Symbol("file"), nodename + "-error.log"), (Symbol("level"), Symbol("error"))]), + (Symbol("lager_file_backend"), [(Symbol("file"), nodename + "-debug.log"), (Symbol("level"), Symbol("debug"))]), + (Symbol("lager_file_backend"), [(Symbol("file"), nodename + "-console.log"), (Symbol("level"), Symbol("info"))]), + ]) + ] + + if nodetype in ("frontendnodes", "storagenodes"): + plopconfig += [ + (Symbol("entry_root_path"), paths["db"] + "certentries/"), + ] + if nodetype == "frontendnodes": + plopconfig += [ + (Symbol("index_path"), paths["db"] + "index"), + ] + elif nodetype == "storagenodes": + plopconfig += [ + (Symbol("newentries_path"), paths["db"] + "newentries"), + ] + if nodetype in ("frontendnodes", "storagenodes"): + plopconfig += [ + (Symbol("entryhash_root_path"), paths["db"] + "entryhash/"), + (Symbol("indexforhash_root_path"), paths["db"] + "certindex/"), + ] + if nodetype == "frontendnodes": + plopconfig += [ + (Symbol("sth_path"), paths["db"] + "sth"), + ] + + signingnode = config["signingnodes"][0] + mergenodenames = [node["name"] for node in config["mergenodes"]] + storagenodeaddresses = ["https://%s/ct/storage/" % node["address"] for node in config["storagenodes"]] + frontendnodenames = [node["name"] for node in config["frontendnodes"]] + + allowed_clients = [] + allowed_servers = [] + + if nodetype == "frontendnodes": + storagenodenames = [node["name"] for node in config["storagenodes"]] + plopconfig.append((Symbol("storage_nodes"), storagenodeaddresses)) + plopconfig.append((Symbol("storage_nodes_quorum"), config["storage-quorum-size"])) + services = [Symbol("ht")] + allowed_clients += allowed_clients_frontend(mergenodenames) + allowed_clients += allowed_clients_public() + allowed_servers += allowed_servers_frontend([signingnode["name"]], storagenodenames) + elif nodetype == "storagenodes": + allowed_clients += allowed_clients_storage(frontendnodenames, mergenodenames) + services = [] + elif nodetype == "signingnodes": + 
allowed_clients += allowed_clients_signing(frontendnodenames, mergenodenames) + services = [Symbol("sign")] + + plopconfig += [ + (Symbol("publickey_path"), paths["publickeys"]), + (Symbol("services"), services), + ] + if nodetype == "signingnodes": + plopconfig.append((Symbol("log_private_key"), paths["logprivatekey"])) + plopconfig += [ + (Symbol("log_public_key"), paths["logpublickey"]), + (Symbol("own_key"), (nodename, "%s/%s-private.pem" % (paths["privatekeys"], nodename))), + ] + if nodetype == "frontendnodes": + plopconfig.append((Symbol("signing_node"), "https://%s/ct/signing/" % signingnode["address"])) + plopconfig += [ + (Symbol("allowed_clients"), allowed_clients), + (Symbol("allowed_servers"), allowed_servers), + ] + + erlangconfig = [ + (Symbol("sasl"), saslconfig), + (Symbol("catlfish"), catlfishconfig), + (Symbol("lager"), lagerconfig), + (Symbol("plop"), plopconfig), + ] + + print >>configfile, gen_erlang(erlangconfig) + ".\n" + + configfile.close() + + +def gen_testmakefile(config, testmakefile, machines): + configfile = open(testmakefile, "w") + frontendnodenames = [node["name"] for node in config["frontendnodes"]] + storagenodenames = [node["name"] for node in config["storagenodes"]] + signingnodename = [node["name"] for node in config["signingnodes"]] + + frontendnodeaddresses = [node["publicaddress"] for node in config["frontendnodes"]] + storagenodeaddresses = [node["address"] for node in config["storagenodes"]] + signingnodeaddresses = [node["address"] for node in config["signingnodes"]] + + print >>configfile, "NODES=" + " ".join(frontendnodenames+storagenodenames+signingnodename) + print >>configfile, "MACHINES=" + " ".join([str(e) for e in range(1, machines+1)]) + print >>configfile, "TESTURLS=" + " ".join(frontendnodeaddresses+storagenodeaddresses+signingnodeaddresses) + print >>configfile, "BASEURL=" + config["baseurl"] + + configfile.close() + + +def main(): + parser = argparse.ArgumentParser(description="") + parser.add_argument('--config', help="System configuration", required=True) + parser.add_argument('--localconfig', help="Local configuration") + parser.add_argument("--testmakefile", metavar="file", help="Generate makefile variables for test") + parser.add_argument("--machines", type=int, metavar="n", help="Number of machines") + args = parser.parse_args() + + config = yaml.load(open(args.config)) + if args.testmakefile and args.machines: + gen_testmakefile(config, args.testmakefile, args.machines) + elif args.localconfig: + localconfig = yaml.load(open(args.localconfig)) + localnodes = localconfig["localnodes"] + for localnode in localnodes: + gen_config(localnode, config, localconfig) + else: + print >>sys.stderr, "Nothing to do" + sys.exit(1) + +main() diff --git a/tools/merge.py b/tools/merge.py index dd8de07..75e72ae 100755 --- a/tools/merge.py +++ b/tools/merge.py @@ -15,28 +15,31 @@ import ecdsa import hashlib import urlparse import os +import yaml from certtools import build_merkle_tree, create_sth_signature, check_sth_signature, get_eckey_from_file, timing_point, http_request parser = argparse.ArgumentParser(description="") -parser.add_argument("--baseurl", metavar="url", help="Base URL for CT server", required=True) -parser.add_argument("--frontend", action="append", metavar="url", help="Base URL for frontend server", required=True) -parser.add_argument("--storage", action="append", metavar="url", help="Base URL for storage server", required=True) -parser.add_argument("--mergedb", metavar="dir", help="Merge database directory", required=True) 
-parser.add_argument("--signing", metavar="url", help="Base URL for signing server", required=True) -parser.add_argument("--own-keyname", metavar="keyname", help="The key name of the merge node", required=True) -parser.add_argument("--own-keyfile", metavar="keyfile", help="The file containing the private key of the merge node", required=True) +parser.add_argument('--config', help="System configuration", required=True) +parser.add_argument('--localconfig', help="Local configuration", required=True) parser.add_argument("--nomerge", action='store_true', help="Don't actually do merge") parser.add_argument("--timing", action='store_true', help="Print timing information") args = parser.parse_args() -ctbaseurl = args.baseurl -frontendnodes = args.frontend -storagenodes = args.storage +config = yaml.load(open(args.config)) +localconfig = yaml.load(open(args.localconfig)) -chainsdir = args.mergedb + "/chains" -logorderfile = args.mergedb + "/logorder" +ctbaseurl = config["baseurl"] +frontendnodes = config["frontendnodes"] +storagenodes = config["storagenodes"] +paths = localconfig["paths"] +mergedb = paths["mergedb"] -own_key = (args.own_keyname, args.own_keyfile) +signingnode = config["signingnodes"][0] + +chainsdir = mergedb + "/chains" +logorderfile = mergedb + "/logorder" + +own_key = (localconfig["nodename"], "%s/%s-private.pem" % (paths["privatekeys"], localconfig["nodename"])) hashed_dir = True @@ -77,9 +80,9 @@ def add_to_logorder(key): f.write(base64.b16encode(key) + "\n") f.close() -def get_new_entries(baseurl): +def get_new_entries(node, baseurl): try: - result = http_request(baseurl + "ct/storage/fetchnewentries", key=own_key) + result = http_request(baseurl + "ct/storage/fetchnewentries", key=own_key, verifynode=node, publickeydir=paths["publickeys"]) parsed_result = json.loads(result) if parsed_result.get(u"result") == u"ok": return [base64.b64decode(entry) for entry in parsed_result[u"entries"]] @@ -89,10 +92,10 @@ def get_new_entries(baseurl): print "ERROR: fetchnewentries", e.read() sys.exit(1) -def get_entries(baseurl, hashes): +def get_entries(node, baseurl, hashes): try: params = urllib.urlencode({"hash":[base64.b64encode(hash) for hash in hashes]}, doseq=True) - result = http_request(baseurl + "ct/storage/getentry?" + params, key=own_key) + result = http_request(baseurl + "ct/storage/getentry?" 
+ params, key=own_key, verifynode=node, publickeydir=paths["publickeys"]) parsed_result = json.loads(result) if parsed_result.get(u"result") == u"ok": entries = dict([(base64.b64decode(entry["hash"]), base64.b64decode(entry["entry"])) for entry in parsed_result[u"entries"]]) @@ -105,9 +108,9 @@ def get_entries(baseurl, hashes): print "ERROR: getentry", e.read() sys.exit(1) -def get_curpos(baseurl): +def get_curpos(node, baseurl): try: - result = http_request(baseurl + "ct/frontend/currentposition", key=own_key) + result = http_request(baseurl + "ct/frontend/currentposition", key=own_key, verifynode=node, publickeydir=paths["publickeys"]) parsed_result = json.loads(result) if parsed_result.get(u"result") == u"ok": return parsed_result[u"position"] @@ -117,10 +120,10 @@ def get_curpos(baseurl): print "ERROR: currentposition", e.read() sys.exit(1) -def sendlog(baseurl, submission): +def sendlog(node, baseurl, submission): try: result = http_request(baseurl + "ct/frontend/sendlog", - json.dumps(submission), key=own_key) + json.dumps(submission), key=own_key, verifynode=node, publickeydir=paths["publickeys"]) return json.loads(result) except urllib2.HTTPError, e: print "ERROR: sendlog", e.read() @@ -133,10 +136,11 @@ def sendlog(baseurl, submission): print "========================" raise e -def sendentry(baseurl, entry, hash): +def sendentry(node, baseurl, entry, hash): try: result = http_request(baseurl + "ct/frontend/sendentry", - json.dumps({"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)}), key=own_key) + json.dumps({"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)}), key=own_key, + verifynode=node, publickeydir=paths["publickeys"]) return json.loads(result) except urllib2.HTTPError, e: print "ERROR: sendentry", e.read() @@ -149,10 +153,10 @@ def sendentry(baseurl, entry, hash): print "========================" raise e -def sendsth(baseurl, submission): +def sendsth(node, baseurl, submission): try: result = http_request(baseurl + "ct/frontend/sendsth", - json.dumps(submission), key=own_key) + json.dumps(submission), key=own_key, verifynode=node, publickeydir=paths["publickeys"]) return json.loads(result) except urllib2.HTTPError, e: print "ERROR: sendsth", e.read() @@ -165,9 +169,9 @@ def sendsth(baseurl, submission): print "========================" raise e -def get_missingentries(baseurl): +def get_missingentries(node, baseurl): try: - result = http_request(baseurl + "ct/frontend/missingentries", key=own_key) + result = http_request(baseurl + "ct/frontend/missingentries", key=own_key, verifynode=node, publickeydir=paths["publickeys"]) parsed_result = json.loads(result) if parsed_result.get(u"result") == u"ok": return parsed_result[u"entries"] @@ -193,10 +197,10 @@ new_entries = set() entries_to_fetch = {} for storagenode in storagenodes: - print "getting new entries from", storagenode - new_entries_per_node[storagenode] = set(get_new_entries(storagenode)) - new_entries.update(new_entries_per_node[storagenode]) - entries_to_fetch[storagenode] = [] + print "getting new entries from", storagenode["name"] + new_entries_per_node[storagenode["name"]] = set(get_new_entries(storagenode["name"], "https://%s/" % storagenode["address"])) + new_entries.update(new_entries_per_node[storagenode["name"]]) + entries_to_fetch[storagenode["name"]] = [] timing_point(timing, "get new entries") @@ -209,16 +213,16 @@ if args.nomerge: for hash in new_entries: for storagenode in storagenodes: - if hash in new_entries_per_node[storagenode]: - 
entries_to_fetch[storagenode].append(hash) + if hash in new_entries_per_node[storagenode["name"]]: + entries_to_fetch[storagenode["name"]].append(hash) break added_entries = 0 for storagenode in storagenodes: - print "getting", len(entries_to_fetch[storagenode]), "entries from", storagenode - for chunk in chunks(entries_to_fetch[storagenode], 100): - entries = get_entries(storagenode, chunk) + print "getting", len(entries_to_fetch[storagenode["name"]]), "entries from", storagenode["name"] + for chunk in chunks(entries_to_fetch[storagenode["name"]], 100): + entries = get_entries(storagenode["name"], "https://%s/" % storagenode["address"], chunk) for hash in chunk: entry = entries[hash] write_chain(hash, entry) @@ -235,7 +239,7 @@ root_hash = tree[-1][0] timestamp = int(time.time() * 1000) tree_head_signature = create_sth_signature(tree_size, timestamp, - root_hash, args.signing, key=own_key) + root_hash, "https://%s/" % signingnode["address"], key=own_key) sth = {"tree_size": tree_size, "timestamp": timestamp, "sha256_root_hash": base64.b64encode(root_hash), @@ -251,14 +255,16 @@ if args.timing: print "root hash", base64.b16encode(root_hash) for frontendnode in frontendnodes: + nodeaddress = "https://%s/" % frontendnode["address"] + nodename = frontendnode["name"] timing = timing_point() - print "distributing for node", frontendnode - curpos = get_curpos(frontendnode) + print "distributing for node", nodename + curpos = get_curpos(nodename, nodeaddress) timing_point(timing, "get curpos") print "current position", curpos entries = [base64.b64encode(entry) for entry in logorder[curpos:]] for chunk in chunks(entries, 1000): - sendlogresult = sendlog(frontendnode, {"start": curpos, "hashes": chunk}) + sendlogresult = sendlog(nodename, nodeaddress, {"start": curpos, "hashes": chunk}) if sendlogresult["result"] != "ok": print "sendlog:", sendlogresult sys.exit(1) @@ -267,17 +273,17 @@ for frontendnode in frontendnodes: sys.stdout.flush() timing_point(timing, "sendlog") print "log sent" - missingentries = get_missingentries(frontendnode) + missingentries = get_missingentries(nodename, nodeaddress) timing_point(timing, "get missing") print "missing entries:", len(missingentries) for missingentry in missingentries: hash = base64.b64decode(missingentry) - sendentryresult = sendentry(frontendnode, read_chain(hash), hash) + sendentryresult = sendentry(nodename, nodeaddress, read_chain(hash), hash) if sendentryresult["result"] != "ok": print "send sth:", sendentryresult sys.exit(1) timing_point(timing, "send missing") - sendsthresult = sendsth(frontendnode, sth) + sendsthresult = sendsth(nodename, nodeaddress, sth) if sendsthresult["result"] != "ok": print "send sth:", sendsthresult sys.exit(1) diff --git a/tools/testcase1.py b/tools/testcase1.py index 7b3229d..4502b56 100755 --- a/tools/testcase1.py +++ b/tools/testcase1.py @@ -15,9 +15,9 @@ import itertools from certtools import * baseurls = ["https://127.0.0.1:8080/"] -certfiles = ["testcerts/cert1.txt", "testcerts/cert2.txt", - "testcerts/cert3.txt", "testcerts/cert4.txt", - "testcerts/cert5.txt"] +certfiles = ["../tools/testcerts/cert1.txt", "../tools/testcerts/cert2.txt", + "../tools/testcerts/cert3.txt", "../tools/testcerts/cert4.txt", + "../tools/testcerts/cert5.txt"] cc1 = get_certs_from_file(certfiles[0]) cc2 = get_certs_from_file(certfiles[1]) @@ -138,10 +138,8 @@ def get_and_check_entry(timestamp, chain, leaf_index, baseurl): len(submittedcertchain)) def merge(): - return subprocess.call(["./merge.py", "--baseurl", "https://127.0.0.1:8080/", 
- "--frontend", "https://127.0.0.1:8082/", "--storage", "https://127.0.0.1:8081/", - "--mergedb", "../rel/mergedb", "--signing", "https://127.0.0.1:8088/", - "--own-keyname", "merge-1", "--own-keyfile", "../rel/privatekeys/merge-1-private.pem"]) + return subprocess.call(["../tools/merge.py", "--config", "../test/catlfish-test.cfg", + "--localconfig", "../test/catlfish-test-local-merge.cfg"]) mergeresult = merge() assert_equal(mergeresult, 0, "merge", quiet=True) -- cgit v1.1 From 94282d502072f894f3168ef8c2c7527fe4a69e52 Mon Sep 17 00:00:00 2001 From: Linus Nordberg Date: Wed, 25 Mar 2015 09:55:03 +0100 Subject: Add tools/fetchacert.py, fetching exactly one chain from a log. Also move a piece of common code from fetchallcerts.py to certtools.py. --- tools/certtools.py | 10 ++++++++++ tools/fetchacert.py | 22 ++++++++++++++++++++++ tools/fetchallcerts.py | 10 ---------- 3 files changed, 32 insertions(+), 10 deletions(-) create mode 100755 tools/fetchacert.py (limited to 'tools') diff --git a/tools/certtools.py b/tools/certtools.py index 0e639f2..ffd631c 100644 --- a/tools/certtools.py +++ b/tools/certtools.py @@ -588,3 +588,13 @@ def verify_inclusion_proof(inclusion_proof, index, treesize, leafhash): chain = zip([(index, 0)] + nodes_for_index(index, treesize), [leafhash] + inclusion_proof) (_, hash) = reduce(lambda e1, e2: combine_two_hashes(e1, e2, treesize), chain) return hash + +def extract_original_entry(entry): + leaf_input = base64.decodestring(entry["leaf_input"]) + (leaf_cert, timestamp, issuer_key_hash) = unpack_mtl(leaf_input) + extra_data = base64.decodestring(entry["extra_data"]) + if issuer_key_hash != None: + (precert, extra_data) = extract_precertificate(extra_data) + leaf_cert = precert + certchain = decode_certificate_chain(extra_data) + return ([leaf_cert] + certchain, timestamp, issuer_key_hash) diff --git a/tools/fetchacert.py b/tools/fetchacert.py new file mode 100755 index 0000000..82ea7c1 --- /dev/null +++ b/tools/fetchacert.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import argparse +import base64 +from certtools import * + +parser = argparse.ArgumentParser(description='') +parser.add_argument('baseurl', help="Base URL for CT server") +parser.add_argument('index', type=int, help="Index for entry to fetch") +args = parser.parse_args() + +rawentries = get_entries(args.baseurl, args.index, args.index)["entries"] +entry = extract_original_entry(rawentries[0]) +(chain, _timestamp, _issuer_key_hash) = entry +s = "" +for cert in chain: + s += "-----BEGIN CERTIFICATE-----\n" + s += base64.encodestring(cert).rstrip() + "\n" + s += "-----END CERTIFICATE-----\n" + s += "\n" +print s diff --git a/tools/fetchallcerts.py b/tools/fetchallcerts.py index 398c563..e0ea92f 100755 --- a/tools/fetchallcerts.py +++ b/tools/fetchallcerts.py @@ -24,16 +24,6 @@ parser.add_argument('--store', default=None, metavar="dir", help='Store certific parser.add_argument('--write-sth', action='store_true', help='Write STH') args = parser.parse_args() -def extract_original_entry(entry): - leaf_input = base64.decodestring(entry["leaf_input"]) - (leaf_cert, timestamp, issuer_key_hash) = unpack_mtl(leaf_input) - extra_data = base64.decodestring(entry["extra_data"]) - if issuer_key_hash != None: - (precert, extra_data) = extract_precertificate(extra_data) - leaf_cert = precert - certchain = decode_certificate_chain(extra_data) - return ([leaf_cert] + certchain, timestamp, issuer_key_hash) - def get_entries_wrapper(baseurl, start, end): fetched_entries = 0 while start + 
fetched_entries < (end + 1): -- cgit v1.1 From db1c5cf0eeba9b47a3f89bebc1abd9985ee9fef7 Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Thu, 26 Mar 2015 23:59:30 +0100 Subject: Provide function for calculating entryhash from entry --- tools/compileconfig.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'tools') diff --git a/tools/compileconfig.py b/tools/compileconfig.py index 30424c5..e3d9ee0 100755 --- a/tools/compileconfig.py +++ b/tools/compileconfig.py @@ -188,6 +188,8 @@ def gen_config(nodename, config, localconfig): if nodetype == "frontendnodes": plopconfig += [ (Symbol("sth_path"), paths["db"] + "sth"), + (Symbol("entryhash_from_entry"), + (Symbol("catlfish"), Symbol("entryhash_from_entry"))), ] signingnode = config["signingnodes"][0] -- cgit v1.1 From 6cd3c80c61711cabf9e308da1963b56f6dd49cfd Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Fri, 27 Mar 2015 03:08:06 +0100 Subject: Handle multiple signing nodes --- tools/compileconfig.py | 7 ++++--- tools/merge.py | 11 ++++++++--- 2 files changed, 12 insertions(+), 6 deletions(-) (limited to 'tools') diff --git a/tools/compileconfig.py b/tools/compileconfig.py index e3d9ee0..52f10e8 100755 --- a/tools/compileconfig.py +++ b/tools/compileconfig.py @@ -192,7 +192,8 @@ def gen_config(nodename, config, localconfig): (Symbol("catlfish"), Symbol("entryhash_from_entry"))), ] - signingnode = config["signingnodes"][0] + signingnodes = config["signingnodes"] + signingnodeaddresses = ["https://%s/ct/signing/" % node["address"] for node in config["signingnodes"]] mergenodenames = [node["name"] for node in config["mergenodes"]] storagenodeaddresses = ["https://%s/ct/storage/" % node["address"] for node in config["storagenodes"]] frontendnodenames = [node["name"] for node in config["frontendnodes"]] @@ -207,7 +208,7 @@ def gen_config(nodename, config, localconfig): services = [Symbol("ht")] allowed_clients += allowed_clients_frontend(mergenodenames) allowed_clients += allowed_clients_public() - allowed_servers += allowed_servers_frontend([signingnode["name"]], storagenodenames) + allowed_servers += allowed_servers_frontend([node["name"] for node in signingnodes], storagenodenames) elif nodetype == "storagenodes": allowed_clients += allowed_clients_storage(frontendnodenames, mergenodenames) services = [] @@ -226,7 +227,7 @@ def gen_config(nodename, config, localconfig): (Symbol("own_key"), (nodename, "%s/%s-private.pem" % (paths["privatekeys"], nodename))), ] if nodetype == "frontendnodes": - plopconfig.append((Symbol("signing_node"), "https://%s/ct/signing/" % signingnode["address"])) + plopconfig.append((Symbol("signing_nodes"), signingnodeaddresses)) plopconfig += [ (Symbol("allowed_clients"), allowed_clients), (Symbol("allowed_servers"), allowed_servers), diff --git a/tools/merge.py b/tools/merge.py index 75e72ae..e6fae24 100755 --- a/tools/merge.py +++ b/tools/merge.py @@ -34,7 +34,7 @@ storagenodes = config["storagenodes"] paths = localconfig["paths"] mergedb = paths["mergedb"] -signingnode = config["signingnodes"][0] +signingnodes = config["signingnodes"] chainsdir = mergedb + "/chains" logorderfile = mergedb + "/logorder" @@ -238,8 +238,13 @@ tree_size = len(logorder) root_hash = tree[-1][0] timestamp = int(time.time() * 1000) -tree_head_signature = create_sth_signature(tree_size, timestamp, - root_hash, "https://%s/" % signingnode["address"], key=own_key) +for signingnode in signingnodes: + try: + tree_head_signature = create_sth_signature(tree_size, timestamp, + root_hash, "https://%s/" % signingnode["address"], key=own_key) 
+ break + except urllib2.URLError: + pass sth = {"tree_size": tree_size, "timestamp": timestamp, "sha256_root_hash": base64.b64encode(root_hash), -- cgit v1.1 From 2d8d55bb9b6672ebe829b185beb05d4a399167f5 Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Fri, 27 Mar 2015 17:33:35 +0100 Subject: Allow local administrator to override IP address and port for web server Closes CATLFISH-30 --- tools/certkeys.py | 4 ++++ tools/compileconfig.py | 21 +++++++++++++++------ 2 files changed, 19 insertions(+), 6 deletions(-) (limited to 'tools') diff --git a/tools/certkeys.py b/tools/certkeys.py index 3c459e9..52d61be 100644 --- a/tools/certkeys.py +++ b/tools/certkeys.py @@ -8,6 +8,10 @@ publickeys = { "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4qWq6afhBUi0OdcWUYhyJLNXTkGqQ9" "PMS5lqoCgkV2h1ZvpNjBH2u8UbgcOQwqDo66z6BWQJGolozZYmNHE2kQ==", + "https://localhost:8080/": + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4qWq6afhBUi0OdcWUYhyJLNXTkGqQ9" + "PMS5lqoCgkV2h1ZvpNjBH2u8UbgcOQwqDo66z6BWQJGolozZYmNHE2kQ==", + "https://flimsy.ct.nordu.net/": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4qWq6afhBUi0OdcWUYhyJLNXTkGqQ9" "PMS5lqoCgkV2h1ZvpNjBH2u8UbgcOQwqDo66z6BWQJGolozZYmNHE2kQ==", diff --git a/tools/compileconfig.py b/tools/compileconfig.py index 52f10e8..88d6b51 100755 --- a/tools/compileconfig.py +++ b/tools/compileconfig.py @@ -77,17 +77,24 @@ def get_node_config(nodename, config): sys.exit(1) return (nodetype, nodeconfig) -def gen_https_servers(nodetype, nodeconfig): +def gen_https_servers(nodetype, nodeconfig, bind_address, bind_publicaddress): + if bind_address: + (host, port) = parse_address(bind_address) + else: + (_, port) = parse_address(nodeconfig["address"]) + host = "0.0.0.0" if nodetype == "frontendnodes": - (publichost, publicport) = parse_address(nodeconfig["publicaddress"]) - (host, port) = parse_address(nodeconfig["address"]) + if bind_publicaddress: + (publichost, publicport) = parse_address(bind_publicaddress) + else: + (_, publicport) = parse_address(nodeconfig["publicaddress"]) + publichost = "0.0.0.0" + return [(Symbol("external_https_api"), publichost, publicport, Symbol("v1")), (Symbol("frontend_https_api"), host, port, Symbol("frontend"))] elif nodetype == "storagenodes": - (host, port) = parse_address(nodeconfig["address"]) return [(Symbol("storage_https_api"), host, port, Symbol("storage"))] elif nodetype == "signingnodes": - (host, port) = parse_address(nodeconfig["address"]) return [(Symbol("signing_https_api"), host, port, Symbol("signing"))] def allowed_clients_frontend(mergenodenames): @@ -136,13 +143,15 @@ def allowed_servers_frontend(signingnodenames, storagenodenames): def gen_config(nodename, config, localconfig): print "generating config for", nodename paths = localconfig["paths"] + bind_address = localconfig.get("addresses", {}).get(nodename) + bind_publicaddress = localconfig.get("publicaddresses", {}).get(nodename) options = localconfig.get("options", []) configfile = open(paths["configdir"] + nodename + ".config", "w") print >>configfile, "%% catlfish configuration file (-*- erlang -*-)" (nodetype, nodeconfig) = get_node_config(nodename, config) - https_servers = gen_https_servers(nodetype, nodeconfig) + https_servers = gen_https_servers(nodetype, nodeconfig, bind_address, bind_publicaddress) catlfishconfig = [] plopconfig = [] -- cgit v1.1 From 22cefc84254cae1f57195da819eba69dbacb5a6e Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Mon, 30 Mar 2015 15:41:13 +0200 Subject: Allow non-TLS http Closes CATLFISH-31 --- tools/compileconfig.py | 24 ++++++++++++++++++------ 1 file 
changed, 18 insertions(+), 6 deletions(-) (limited to 'tools') diff --git a/tools/compileconfig.py b/tools/compileconfig.py index 88d6b51..4996994 100755 --- a/tools/compileconfig.py +++ b/tools/compileconfig.py @@ -77,7 +77,7 @@ def get_node_config(nodename, config): sys.exit(1) return (nodetype, nodeconfig) -def gen_https_servers(nodetype, nodeconfig, bind_address, bind_publicaddress): +def gen_http_servers(nodetype, nodeconfig, bind_address, bind_publicaddress, bind_publichttpaddress): if bind_address: (host, port) = parse_address(bind_address) else: @@ -90,12 +90,22 @@ def gen_https_servers(nodetype, nodeconfig, bind_address, bind_publicaddress): (_, publicport) = parse_address(nodeconfig["publicaddress"]) publichost = "0.0.0.0" - return [(Symbol("external_https_api"), publichost, publicport, Symbol("v1")), - (Symbol("frontend_https_api"), host, port, Symbol("frontend"))] + http_servers = [] + https_servers = [] + if bind_publichttpaddress: + (publichttphost, publichttpport) = parse_address(bind_publichttpaddress) + http_servers.append((Symbol("external_http_api"), publichttphost, publichttpport, Symbol("v1"))) + https_servers.append((Symbol("external_https_api"), publichost, publicport, Symbol("v1"))) + https_servers.append((Symbol("frontend_https_api"), host, port, Symbol("frontend"))) + return (http_servers, + https_servers) + elif nodetype == "storagenodes": - return [(Symbol("storage_https_api"), host, port, Symbol("storage"))] + return ([], + [(Symbol("storage_https_api"), host, port, Symbol("storage"))]) elif nodetype == "signingnodes": - return [(Symbol("signing_https_api"), host, port, Symbol("signing"))] + return ([], + [(Symbol("signing_https_api"), host, port, Symbol("signing"))]) def allowed_clients_frontend(mergenodenames): return [ @@ -145,13 +155,14 @@ def gen_config(nodename, config, localconfig): paths = localconfig["paths"] bind_address = localconfig.get("addresses", {}).get(nodename) bind_publicaddress = localconfig.get("publicaddresses", {}).get(nodename) + bind_publichttpaddress = localconfig.get("publichttpaddresses", {}).get(nodename) options = localconfig.get("options", []) configfile = open(paths["configdir"] + nodename + ".config", "w") print >>configfile, "%% catlfish configuration file (-*- erlang -*-)" (nodetype, nodeconfig) = get_node_config(nodename, config) - https_servers = gen_https_servers(nodetype, nodeconfig, bind_address, bind_publicaddress) + (http_servers, https_servers) = gen_http_servers(nodetype, nodeconfig, bind_address, bind_publicaddress, bind_publichttpaddress=bind_publichttpaddress) catlfishconfig = [] plopconfig = [] @@ -163,6 +174,7 @@ def gen_config(nodename, config, localconfig): catlfishconfig += [ (Symbol("https_servers"), https_servers), + (Symbol("http_servers"), http_servers), (Symbol("https_certfile"), paths["https_certfile"]), (Symbol("https_keyfile"), paths["https_keyfile"]), (Symbol("https_cacertfile"), paths["https_cacertfile"]), -- cgit v1.1 From 6b62ebbf1de5b9e55b04e9cfafd0620f1374c2d4 Mon Sep 17 00:00:00 2001 From: Magnus Ahltorp Date: Tue, 31 Mar 2015 14:27:23 +0200 Subject: Cleanup tests and use urllib2.build_opener Remove unused files Generate test config files directly in release directory Move test database files to "tests" directory Generate log key when preparing tests Report error when STH not found in v1.erl Make merge, fetchallcerts, submitcert, verifysct, and testcase1 take log key as argument --- tools/certkeys.py | 8 -------- tools/certtools.py | 50 ++++++++++++++++++++++++++++++++------------------ 
tools/compileconfig.py | 2 +- tools/create-key.sh | 4 ++++ tools/fetchallcerts.py | 5 ++++- tools/merge.py | 16 ++++++++++++---- tools/submitcert.py | 5 ++++- tools/testcase1.py | 12 ++++++++---- tools/verifysct.py | 5 ++++- 9 files changed, 69 insertions(+), 38 deletions(-) create mode 100755 tools/create-key.sh (limited to 'tools') diff --git a/tools/certkeys.py b/tools/certkeys.py index 52d61be..43646ef 100644 --- a/tools/certkeys.py +++ b/tools/certkeys.py @@ -4,14 +4,6 @@ publickeys = { "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfahLEimAoz2t01p3uMziiLOl/fHTD" "M0YDOhBRuiBARsV4UvxG2LdNgoIGLrtCzWE0J5APC2em4JlvR8EEEFMoA==", - "https://127.0.0.1:8080/": - "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4qWq6afhBUi0OdcWUYhyJLNXTkGqQ9" - "PMS5lqoCgkV2h1ZvpNjBH2u8UbgcOQwqDo66z6BWQJGolozZYmNHE2kQ==", - - "https://localhost:8080/": - "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4qWq6afhBUi0OdcWUYhyJLNXTkGqQ9" - "PMS5lqoCgkV2h1ZvpNjBH2u8UbgcOQwqDo66z6BWQJGolozZYmNHE2kQ==", - "https://flimsy.ct.nordu.net/": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4qWq6afhBUi0OdcWUYhyJLNXTkGqQ9" "PMS5lqoCgkV2h1ZvpNjBH2u8UbgcOQwqDo66z6BWQJGolozZYmNHE2kQ==", diff --git a/tools/certtools.py b/tools/certtools.py index 2c97dfb..da5021a 100644 --- a/tools/certtools.py +++ b/tools/certtools.py @@ -88,8 +88,15 @@ def get_root_cert(issuer): return root_cert +def urlopen(url, data=None): + try: + opener = urllib2.build_opener(urllib2.HTTPSHandler(context=None)) + except TypeError: + opener = urllib2.build_opener(urllib2.HTTPSHandler()) + return opener.open(url, data) + def get_sth(baseurl): - result = urllib2.urlopen(baseurl + "ct/v1/get-sth").read() + result = urlopen(baseurl + "ct/v1/get-sth").read() return json.loads(result) def get_proof_by_hash(baseurl, hash, tree_size): @@ -97,7 +104,7 @@ def get_proof_by_hash(baseurl, hash, tree_size): params = urllib.urlencode({"hash":base64.b64encode(hash), "tree_size":tree_size}) result = \ - urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params).read() + urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR:", e.read() @@ -108,7 +115,7 @@ def get_consistency_proof(baseurl, tree_size1, tree_size2): params = urllib.urlencode({"first":tree_size1, "second":tree_size2}) result = \ - urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params).read() + urlopen(baseurl + "ct/v1/get-sth-consistency?" + params).read() return json.loads(result)["consistency"] except urllib2.HTTPError, e: print "ERROR:", e.read() @@ -131,7 +138,7 @@ def unpack_tls_array(packed_data, length_len): def add_chain(baseurl, submission): try: - result = urllib2.urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission)).read() + result = urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission)).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR", e.code,":", e.read() @@ -148,7 +155,7 @@ def add_chain(baseurl, submission): def add_prechain(baseurl, submission): try: - result = urllib2.urlopen(baseurl + "ct/v1/add-pre-chain", + result = urlopen(baseurl + "ct/v1/add-pre-chain", json.dumps(submission)).read() return json.loads(result) except urllib2.HTTPError, e: @@ -167,7 +174,7 @@ def add_prechain(baseurl, submission): def get_entries(baseurl, start, end): try: params = urllib.urlencode({"start":start, "end":end}) - result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params).read() + result = urlopen(baseurl + "ct/v1/get-entries?" 
+ params).read() return json.loads(result) except urllib2.HTTPError, e: print "ERROR:", e.read() @@ -198,8 +205,9 @@ def encode_signature(hash_alg, signature_alg, unpacked_signature): signature += tls_array(unpacked_signature, 2) return signature -def check_signature(baseurl, signature, data): - publickey = base64.decodestring(publickeys[baseurl]) +def check_signature(baseurl, signature, data, publickey=None): + if publickey == None: + publickey = base64.decodestring(publickeys[baseurl]) (hash_alg, signature_alg, unpacked_signature) = decode_signature(signature) assert hash_alg == 4, \ "hash_alg is %d, expected 4" % (hash_alg,) # sha256 @@ -230,20 +238,25 @@ def check_auth_header(authheader, expected_key, publickeydir, data, path): return True def http_request(url, data=None, key=None, verifynode=None, publickeydir="."): - req = urllib2.Request(url, data) + try: + opener = urllib2.build_opener(urllib2.HTTPSHandler(context=None)) + except TypeError: + opener = urllib2.build_opener(urllib2.HTTPSHandler()) + (keyname, keyfile) = key privatekey = get_eckey_from_file(keyfile) sk = ecdsa.SigningKey.from_der(privatekey) parsed_url = urlparse.urlparse(url) if data == None: - data = parsed_url.query + data_to_sign = parsed_url.query method = "GET" else: + data_to_sign = data method = "POST" - signature = sk.sign("%s\0%s\0%s" % (method, parsed_url.path, data), hashfunc=hashlib.sha256, + signature = sk.sign("%s\0%s\0%s" % (method, parsed_url.path, data_to_sign), hashfunc=hashlib.sha256, sigencode=ecdsa.util.sigencode_der) - req.add_header('X-Catlfish-Auth', base64.b64encode(signature) + ";key=" + keyname) - result = urllib2.urlopen(req) + opener.addheaders = [('X-Catlfish-Auth', base64.b64encode(signature) + ";key=" + keyname)] + result = opener.open(url, data) authheader = result.info().get('X-Catlfish-Auth') data = result.read() check_auth_header(authheader, verifynode, publickeydir, data, parsed_url.path) @@ -263,7 +276,7 @@ def create_signature(baseurl, data, key=None): unpacked_signature = get_signature(baseurl, data, key) return encode_signature(4, 3, unpacked_signature) -def check_sth_signature(baseurl, sth): +def check_sth_signature(baseurl, sth, publickey=None): signature = base64.decodestring(sth["tree_head_signature"]) version = struct.pack(">b", 0) @@ -273,7 +286,7 @@ def check_sth_signature(baseurl, sth): hash = base64.decodestring(sth["sha256_root_hash"]) tree_head = version + signature_type + timestamp + tree_size + hash - check_signature(baseurl, signature, tree_head) + check_signature(baseurl, signature, tree_head, publickey=publickey) def create_sth_signature(tree_size, timestamp, root_hash, baseurl, key=None): version = struct.pack(">b", 0) @@ -284,8 +297,9 @@ def create_sth_signature(tree_size, timestamp, root_hash, baseurl, key=None): return create_signature(baseurl, tree_head, key=key) -def check_sct_signature(baseurl, signed_entry, sct, precert=False): - publickey = base64.decodestring(publickeys[baseurl]) +def check_sct_signature(baseurl, signed_entry, sct, precert=False, publickey=None): + if publickey == None: + publickey = base64.decodestring(publickeys[baseurl]) calculated_logid = hashlib.sha256(publickey).digest() received_logid = base64.decodestring(sct["id"]) assert calculated_logid == received_logid, \ @@ -306,7 +320,7 @@ def check_sct_signature(baseurl, signed_entry, sct, precert=False): entry_type + signed_entry + \ tls_array(base64.decodestring(sct["extensions"]), 2) - check_signature(baseurl, signature, signed_struct) + check_signature(baseurl, signature, 
signed_struct, publickey=publickey) def pack_mtl(timestamp, leafcert): entry_type = struct.pack(">H", 0) diff --git a/tools/compileconfig.py b/tools/compileconfig.py index 4996994..c239bd0 100755 --- a/tools/compileconfig.py +++ b/tools/compileconfig.py @@ -158,7 +158,7 @@ def gen_config(nodename, config, localconfig): bind_publichttpaddress = localconfig.get("publichttpaddresses", {}).get(nodename) options = localconfig.get("options", []) - configfile = open(paths["configdir"] + nodename + ".config", "w") + configfile = open(paths["configdir"] + "/" + nodename + ".config", "w") print >>configfile, "%% catlfish configuration file (-*- erlang -*-)" (nodetype, nodeconfig) = get_node_config(nodename, config) diff --git a/tools/create-key.sh b/tools/create-key.sh new file mode 100755 index 0000000..9d29c86 --- /dev/null +++ b/tools/create-key.sh @@ -0,0 +1,4 @@ +#!/bin/sh + +openssl ecparam -name prime256v1 -genkey -noout -out $1-private.pem +openssl ec -in $1-private.pem -pubout -out $1.pem diff --git a/tools/fetchallcerts.py b/tools/fetchallcerts.py index e0ea92f..395fe69 100755 --- a/tools/fetchallcerts.py +++ b/tools/fetchallcerts.py @@ -22,6 +22,7 @@ parser = argparse.ArgumentParser(description='') parser.add_argument('baseurl', help="Base URL for CT server") parser.add_argument('--store', default=None, metavar="dir", help='Store certificates in directory dir') parser.add_argument('--write-sth', action='store_true', help='Write STH') +parser.add_argument('--publickey', default=None, metavar="file", help='Public key for the CT log') args = parser.parse_args() def get_entries_wrapper(baseurl, start, end): @@ -39,8 +40,10 @@ def print_layer(layer): for entry in layer: print base64.b16encode(entry) +logpublickey = get_public_key_from_file(args.publickey) if args.publickey else None + sth = get_sth(args.baseurl) -check_sth_signature(args.baseurl, sth) +check_sth_signature(args.baseurl, sth, publickey=logpublickey) tree_size = sth["tree_size"] root_hash = base64.decodestring(sth["sha256_root_hash"]) diff --git a/tools/merge.py b/tools/merge.py index e6fae24..f9c93d9 100755 --- a/tools/merge.py +++ b/tools/merge.py @@ -16,7 +16,9 @@ import hashlib import urlparse import os import yaml -from certtools import build_merkle_tree, create_sth_signature, check_sth_signature, get_eckey_from_file, timing_point, http_request +from certtools import build_merkle_tree, create_sth_signature, \ + check_sth_signature, get_eckey_from_file, timing_point, http_request, \ + get_public_key_from_file parser = argparse.ArgumentParser(description="") parser.add_argument('--config', help="System configuration", required=True) @@ -41,6 +43,8 @@ logorderfile = mergedb + "/logorder" own_key = (localconfig["nodename"], "%s/%s-private.pem" % (paths["privatekeys"], localconfig["nodename"])) +logpublickey = get_public_key_from_file(paths["logpublickey"]) + hashed_dir = True def parselogrow(row): @@ -238,19 +242,23 @@ tree_size = len(logorder) root_hash = tree[-1][0] timestamp = int(time.time() * 1000) +tree_head_signature = None for signingnode in signingnodes: try: tree_head_signature = create_sth_signature(tree_size, timestamp, root_hash, "https://%s/" % signingnode["address"], key=own_key) break - except urllib2.URLError: - pass + except urllib2.URLError, e: + print e +if tree_head_signature == None: + print >>sys.stderr, "Could not contact any signing nodes" + sys.exit(1) sth = {"tree_size": tree_size, "timestamp": timestamp, "sha256_root_hash": base64.b64encode(root_hash), "tree_head_signature": 
base64.b64encode(tree_head_signature)} -check_sth_signature(ctbaseurl, sth) +check_sth_signature(ctbaseurl, sth, publickey=logpublickey) timing_point(timing, "build sth") diff --git a/tools/submitcert.py b/tools/submitcert.py index 2e8cc33..ba4b337 100755 --- a/tools/submitcert.py +++ b/tools/submitcert.py @@ -30,6 +30,7 @@ parser.add_argument('--sct-file', default=None, metavar="file", help='Store SCT: parser.add_argument('--parallel', type=int, default=16, metavar="n", help="Number of parallel submits") parser.add_argument('--check-sct', action='store_true', help="Check SCT signature") parser.add_argument('--pre-warm', action='store_true', help="Wait 3 seconds after first submit") +parser.add_argument('--publickey', default=None, metavar="file", help='Public key for the CT log') args = parser.parse_args() from multiprocessing import Pool @@ -37,6 +38,8 @@ from multiprocessing import Pool baseurl = args.baseurl certfilepath = args.store +logpublickey = get_public_key_from_file(args.publickey) if args.publickey else None + lookup_in_log = False if certfilepath[-1] == "/": @@ -84,7 +87,7 @@ def submitcert((certfile, cert)): try: if args.check_sct: - check_sct_signature(baseurl, signed_entry, result, precert=precert) + check_sct_signature(baseurl, signed_entry, result, precert=precert, publickey=logpublickey) timing_point(timing, "checksig") except AssertionError, e: print "ERROR:", certfile, e diff --git a/tools/testcase1.py b/tools/testcase1.py index 4502b56..1d46230 100755 --- a/tools/testcase1.py +++ b/tools/testcase1.py @@ -14,7 +14,9 @@ import hashlib import itertools from certtools import * -baseurls = ["https://127.0.0.1:8080/"] +baseurls = [sys.argv[1]] +logpublickeyfile = sys.argv[2] + certfiles = ["../tools/testcerts/cert1.txt", "../tools/testcerts/cert2.txt", "../tools/testcerts/cert3.txt", "../tools/testcerts/cert4.txt", "../tools/testcerts/cert5.txt"] @@ -28,6 +30,8 @@ cc5 = get_certs_from_file(certfiles[4]) failures = 0 indentation = "" +logpublickey = get_public_key_from_file(logpublickeyfile) + def testgroup(name): global indentation print name + ":" @@ -55,7 +59,7 @@ def print_and_check_tree_size(expected, baseurl): global failures sth = get_sth(baseurl) try: - check_sth_signature(baseurl, sth) + check_sth_signature(baseurl, sth, publickey=logpublickey) except AssertionError, e: print_error("%s", e) except ecdsa.keys.BadSignatureError, e: @@ -71,13 +75,13 @@ def do_add_chain(chain, baseurl): print_error("%s", e) try: signed_entry = pack_cert(chain[0]) - check_sct_signature(baseurl, signed_entry, result) + check_sct_signature(baseurl, signed_entry, result, publickey=logpublickey) + print_success("signature check succeeded") except AssertionError, e: print_error("%s", e) except ecdsa.keys.BadSignatureError, e: print e print_error("bad SCT signature") - print_success("signature check succeeded") return result def get_and_validate_proof(timestamp, chain, leaf_index, nentries, baseurl): diff --git a/tools/verifysct.py b/tools/verifysct.py index 27ab4c9..4b8e38a 100755 --- a/tools/verifysct.py +++ b/tools/verifysct.py @@ -22,12 +22,15 @@ parser = argparse.ArgumentParser(description='') parser.add_argument('baseurl', help="Base URL for CT server") parser.add_argument('--sct-file', default=None, metavar="dir", help='SCT:s to verify') parser.add_argument('--parallel', type=int, default=16, metavar="n", help="Number of parallel verifications") +parser.add_argument('--publickey', default=None, metavar="file", help='Public key for the CT log') args = parser.parse_args() from 
multiprocessing import Pool baseurl = args.baseurl +logpublickey = get_public_key_from_file(args.publickey) if args.publickey else None + sth = get_sth(baseurl) def verifysct(sctentry): @@ -43,7 +46,7 @@ def verifysct(sctentry): signed_entry = pack_precert(leafcert, issuer_key_hash) else: signed_entry = pack_cert(leafcert) - check_sct_signature(baseurl, signed_entry, sctentry["sct"], precert=issuer_key_hash) + check_sct_signature(baseurl, signed_entry, sctentry["sct"], precert=issuer_key_hash, publickey=logpublickey) timing_point(timing, "checksig") except AssertionError, e: print "ERROR:", e -- cgit v1.1 From e4476f7e71e0ebbfe5594bc90d22a9d74d638211 Mon Sep 17 00:00:00 2001 From: Linus Nordberg Date: Tue, 31 Mar 2015 16:39:22 +0200 Subject: Pass an SSL context to urrllib2. --- tools/certtools.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'tools') diff --git a/tools/certtools.py b/tools/certtools.py index da5021a..498a2e0 100644 --- a/tools/certtools.py +++ b/tools/certtools.py @@ -90,8 +90,8 @@ def get_root_cert(issuer): def urlopen(url, data=None): try: - opener = urllib2.build_opener(urllib2.HTTPSHandler(context=None)) - except TypeError: + opener = urllib2.build_opener(urllib2.HTTPSHandler(context=ssl.SSLContext(ssl.PROTOCOL_TLSv1))) + except AttributeError: opener = urllib2.build_opener(urllib2.HTTPSHandler()) return opener.open(url, data) @@ -239,8 +239,8 @@ def check_auth_header(authheader, expected_key, publickeydir, data, path): def http_request(url, data=None, key=None, verifynode=None, publickeydir="."): try: - opener = urllib2.build_opener(urllib2.HTTPSHandler(context=None)) - except TypeError: + opener = urllib2.build_opener(urllib2.HTTPSHandler(context=ssl.SSLContext(ssl.PROTOCOL_TLSv1))) + except AttributeError: opener = urllib2.build_opener(urllib2.HTTPSHandler()) (keyname, keyfile) = key -- cgit v1.1
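
Note (not part of the patch series above): the final commit replaces the bare urllib2 calls in certtools.py with an explicitly built opener so that an ssl.SSLContext can be supplied where the interpreter supports it, falling back to the default HTTPS handler otherwise. A minimal standalone sketch of that pattern, assuming Python 2 and that only the TLS protocol version — not certificate verification — is being configured:

    # Sketch of the opener-construction pattern applied by the last commit;
    # ssl.SSLContext (and the HTTPSHandler context argument) only exist on
    # newer Python 2 interpreters, so older ones fall back to the default
    # handler.
    import ssl
    import urllib2

    def urlopen(url, data=None):
        try:
            handler = urllib2.HTTPSHandler(
                context=ssl.SSLContext(ssl.PROTOCOL_TLSv1))
        except AttributeError:
            # ssl.SSLContext is unavailable: use the stock HTTPS handler.
            handler = urllib2.HTTPSHandler()
        opener = urllib2.build_opener(handler)
        return opener.open(url, data)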
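
Similarly, earlier in the series merge.py is taught to take a list of signing nodes and fail over between them when creating the tree head signature. A condensed sketch of that loop, assembled from the merge.py hunks above; the wrapping function sign_tree_head is an illustrative helper, not a name from the repository, while create_sth_signature and the node/key structures are as defined in certtools.py and the configuration:

    # Condensed sketch of the signing-node failover from merge.py; not a
    # verbatim excerpt of any single revision.
    import sys
    import urllib2
    from certtools import create_sth_signature

    def sign_tree_head(signingnodes, tree_size, timestamp, root_hash, own_key):
        tree_head_signature = None
        for signingnode in signingnodes:
            try:
                tree_head_signature = create_sth_signature(
                    tree_size, timestamp, root_hash,
                    "https://%s/" % signingnode["address"], key=own_key)
                break
            except urllib2.URLError, e:
                # This signing node is unreachable; try the next one.
                print e
        if tree_head_signature is None:
            print >>sys.stderr, "Could not contact any signing nodes"
            sys.exit(1)
        return tree_head_signature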