From d1d2185b420d873a97bc78c5e07482accaf574fc Mon Sep 17 00:00:00 2001
From: Linus Nordberg
Date: Thu, 19 Mar 2015 17:49:41 +0100
Subject: Add precert handling.

---
 src/catlfish.erl   | 226 ++++++++++++++++++++++++++++++++++-------------
 src/v1.erl         |  61 ++++++-------
 src/x509.erl       | 255 ++++++++++++++++++++++++++++++++++++++---------------
 tools/certtools.py |  14 +--
 4 files changed, 389 insertions(+), 167 deletions(-)

diff --git a/src/catlfish.erl b/src/catlfish.erl
index 2fd9dc7..3b81baa 100644
--- a/src/catlfish.erl
+++ b/src/catlfish.erl
@@ -2,7 +2,7 @@
 %%% See LICENSE for licensing information.

 -module(catlfish).
--export([add_chain/2, entries/2, entry_and_proof/2]).
+-export([add_chain/3, entries/2, entry_and_proof/2]).
 -export([known_roots/0, update_known_roots/0]).
 -export([init_cache_table/0]).
 -include_lib("eunit/include/eunit.hrl").
@@ -21,53 +21,89 @@
 -record(timestamped_entry, {timestamp :: integer(),
                             entry_type :: entry_type(),
-                            signed_entry :: binary(),
+                            signed_entry :: signed_x509_entry() |
+                                            signed_precert_entry(),
                             extensions = <<>> :: binary()}).
 -type timestamped_entry() :: #timestamped_entry{}.

--spec serialise(mtl() | timestamped_entry()) -> binary().
-serialise(#timestamped_entry{timestamp = Timestamp} = E) ->
-    list_to_binary(
-      [<<Timestamp:64>>,
-       serialise_entry_type(E#timestamped_entry.entry_type),
-       encode_tls_vector(E#timestamped_entry.signed_entry, 3),
-       encode_tls_vector(E#timestamped_entry.extensions, 2)]);
+-record(signed_x509_entry, {asn1_cert :: binary()}).
+-type signed_x509_entry() :: #signed_x509_entry{}.
+-record(signed_precert_entry, {issuer_key_hash :: binary(),
+                               tbs_certificate :: binary()}).
+-type signed_precert_entry() :: #signed_precert_entry{}.
+
+-spec serialise(mtl() | timestamped_entry() |
+                signed_x509_entry() | signed_precert_entry()) -> binary().
+%% @doc Serialise a MerkleTreeLeaf as per RFC6962 Section 3.4.
 serialise(#mtl{leaf_version = LeafVersion,
                leaf_type = LeafType,
                entry = TimestampedEntry}) ->
     list_to_binary(
       [serialise_leaf_version(LeafVersion),
        serialise_leaf_type(LeafType),
-       serialise(TimestampedEntry)]).
+       serialise(TimestampedEntry)]);
+%% @doc Serialise a TimestampedEntry as per RFC6962 Section 3.4.
+serialise(#timestamped_entry{timestamp = Timestamp,
+                             entry_type = EntryType,
+                             signed_entry = SignedEntry,
+                             extensions = Extensions}) ->
+    list_to_binary(
+      [<<Timestamp:64>>,
+       serialise_entry_type(EntryType),
+       serialise(SignedEntry),
+       encode_tls_vector(Extensions, 2)]);
+%% @doc Serialise an ASN.1Cert as per RFC6962 Section 3.1.
+serialise(#signed_x509_entry{asn1_cert = Cert}) ->
+    encode_tls_vector(Cert, 3);
+%% @doc Serialise a PreCert as per RFC6962 Section 3.2.
+serialise(#signed_precert_entry{
+             issuer_key_hash = IssuerKeyHash,
+             tbs_certificate = TBSCertificate}) when is_binary(IssuerKeyHash),
+                                                     size(IssuerKeyHash) == 32 ->
+    list_to_binary(
+      [IssuerKeyHash,
+       encode_tls_vector(TBSCertificate, 3)]).

 serialise_leaf_version(v1) ->
     <<0:8>>;
 serialise_leaf_version(v2) ->
     <<1:8>>.
+deserialise_leaf_version(<<0:8>>) ->
+    v1;
+deserialise_leaf_version(<<1:8>>) ->
+    v2.

 serialise_leaf_type(timestamped_entry) ->
     <<0:8>>.
-%% serialise_leaf_type(_) ->
-%%     <<>>.
+deserialise_leaf_type(<<0:8>>) ->
+    timestamped_entry.

 serialise_entry_type(x509_entry) ->
     <<0:16>>;
 serialise_entry_type(precert_entry) ->
     <<1:16>>.
+deserialise_entry_type(<<0:16>>) ->
+    x509_entry;
+deserialise_entry_type(<<1:16>>) ->
+    precert_entry.

 serialise_signature_type(certificate_timestamp) ->
     <<0:8>>;
 serialise_signature_type(tree_hash) ->
     <<1:8>>.
+%% deserialise_signature_type(<<0:8>>) ->
+%%     certificate_timestamp;
+%% deserialise_signature_type(<<1:8>>) ->
+%%     tree_hash.

-build_mtl(Timestamp, LeafCert) ->
-    TSE = #timestamped_entry{timestamp = Timestamp,
-                             entry_type = x509_entry,
-                             signed_entry = LeafCert},
-    MTL = #mtl{leaf_version = v1,
-               leaf_type = timestamped_entry,
-               entry = TSE},
-    serialise(MTL).
+%% build_mtl(Timestamp, LeafCert) ->
+%%     TSE = #timestamped_entry{timestamp = Timestamp,
+%%                              entry_type = x509_entry,
+%%                              signed_entry = LeafCert},
+%%     MTL = #mtl{leaf_version = v1,
+%%                leaf_type = timestamped_entry,
+%%                entry = TSE},
+%%     serialise(MTL).

 calc_sct(TimestampedEntry) ->
     plop:serialise(plop:spt(list_to_binary([<>,
@@ -86,37 +122,48 @@ get_sct(Hash, TimestampedEntry) ->
                     SCT
             end;
         _ ->
-            SCT = calc_sct(TimestampedEntry)
+            calc_sct(TimestampedEntry)
     end.

--spec add_chain(binary(), [binary()]) -> nonempty_string().
-add_chain(LeafCert, CertChain) ->
+-spec add_chain(binary(), [binary()], normal|precert) -> nonempty_string().
+add_chain(LeafCert, CertChain, Type) ->
     EntryHash = crypto:hash(sha256, [LeafCert | CertChain]),
+    EntryType = case Type of
+                    normal -> x509_entry;
+                    precert -> precert_entry
+                end,
     {TimestampedEntry, Hash} =
         case plop:get(EntryHash) of
            notfound ->
                Timestamp = plop:generate_timestamp(),
-               TSE = #timestamped_entry{timestamp = Timestamp,
-                                        entry_type = x509_entry,
-                                        signed_entry = LeafCert},
-               MTL = #mtl{leaf_version = v1,
-                          leaf_type = timestamped_entry,
-                          entry = TSE},
-               MTLHash = ht:leaf_hash(serialise(MTL)),
-               ok = plop:add(
-                      serialise_logentry(Timestamp, LeafCert, CertChain),
-                      MTLHash,
-                      EntryHash),
+               TSE = timestamped_entry(Timestamp, EntryType, LeafCert, CertChain),
+               MTLText = serialise(#mtl{leaf_version = v1,
+                                        leaf_type = timestamped_entry,
+                                        entry = TSE}),
+               MTLHash = ht:leaf_hash(MTLText),
+               ExtraData =
+                   case Type of
+                       normal -> CertChain;
+                       precert -> [LeafCert | CertChain]
+                   end,
+               LogEntry =
+                   list_to_binary(
+                     [encode_tls_vector(MTLText, 4),
+                      encode_tls_vector(
+                        encode_tls_vector(
+                          list_to_binary(
+                            [encode_tls_vector(C, 3) || C <- ExtraData]),
+                          3),
+                        4)]),
+               ok = plop:add(LogEntry, MTLHash, EntryHash),
               {TSE, MTLHash};
-           {_Index, MTLHash, Entry} ->
-               <<Timestamp:64, _/binary>> = Entry,
-               %% TODO: Perform a costly db consistency check against
-               %% unpacked LogEntry (w/ LeafCert and CertChain)
-               {#timestamped_entry{timestamp = Timestamp,
-                                   entry_type = x509_entry,
-                                   signed_entry = LeafCert},
-                MTLHash}
+           {_Index, MTLHash, DBEntry} ->
+               {MTLText, _ExtraData} = unpack_entry(DBEntry),
+               MTL = deserialise_mtl(MTLText),
+               MTLText = serialise(MTL), % verify FIXME: remove
+               {MTL#mtl.entry, MTLHash}
        end,
+
    SCT_sig = get_sct(Hash, TimestampedEntry),
    {[{sct_version, ?PROTOCOL_VERSION},
      {id, base64:encode(plop:get_logid())},
@@ -124,15 +171,73 @@ add_chain(LeafCert, CertChain) ->
      {extensions, base64:encode(<<>>)},
      {signature, base64:encode(SCT_sig)}]}.

--spec serialise_logentry(integer(), binary(), [binary()]) -> binary().
-serialise_logentry(Timestamp, LeafCert, CertChain) ->
-    list_to_binary(
-      [<<Timestamp:64>>,
-       list_to_binary(
-         [encode_tls_vector(LeafCert, 3),
-          encode_tls_vector(
-            list_to_binary(
-              [encode_tls_vector(X, 3) || X <- CertChain]), 3)])]).
+-spec timestamped_entry(integer(), entry_type(), binary(), [binary()]) ->
+                               timestamped_entry().
+timestamped_entry(Timestamp, EntryType, LeafCert, CertChain) ->
+    SignedEntry =
+        case EntryType of
+            x509_entry ->
+                #signed_x509_entry{asn1_cert = LeafCert};
+            precert_entry ->
+                {DetoxedLeafTBSCert, IssuerKeyHash} =
+                    x509:detox(LeafCert, CertChain),
+                #signed_precert_entry{
+                   issuer_key_hash = IssuerKeyHash,
+                   tbs_certificate = DetoxedLeafTBSCert}
+        end,
+    #timestamped_entry{timestamp = Timestamp,
+                       entry_type = EntryType,
+                       signed_entry = SignedEntry}.
+
+%% -spec serialise_logentry(integer(), binary(), [binary()]) -> binary().
+%% serialise_logentry(Timestamp, LeafCert, CertChain) ->
+%%     list_to_binary(
+%%       [<<Timestamp:64>>,
+%%        list_to_binary(
+%%          [encode_tls_vector(LeafCert, 3),
+%%           encode_tls_vector(
+%%             list_to_binary(
+%%               [encode_tls_vector(X, 3) || X <- CertChain]), 3)])]).
+
+-spec deserialise_mtl(binary()) -> mtl().
+deserialise_mtl(Data) ->
+    <<LeafVersionBin:1/binary, LeafTypeBin:1/binary, TimestampedEntryBin/binary>> = Data,
+    #mtl{leaf_version = deserialise_leaf_version(LeafVersionBin),
+         leaf_type = deserialise_leaf_type(LeafTypeBin),
+         entry = deserialise_timestampedentry(TimestampedEntryBin)}.
+
+-spec deserialise_timestampedentry(binary()) -> timestamped_entry().
+deserialise_timestampedentry(Data) ->
+    <<Timestamp:64, EntryTypeBin:2/binary, RestData/binary>> = Data,
+    EntryType = deserialise_entry_type(EntryTypeBin),
+    {SignedEntry, ExtensionsBin} =
+        case EntryType of
+            x509_entry ->
+                deserialise_signed_x509_entry(RestData);
+            precert_entry ->
+                deserialise_signed_precert_entry(RestData)
+        end,
+    {Extensions, <<>>} = decode_tls_vector(ExtensionsBin, 2),
+    #timestamped_entry{timestamp = Timestamp,
+                       entry_type = EntryType,
+                       signed_entry = SignedEntry,
+                       extensions = Extensions}.
+
+-spec deserialise_signed_x509_entry(binary()) -> {signed_x509_entry(), binary()}.
+deserialise_signed_x509_entry(Data) ->
+    {E, D} = decode_tls_vector(Data, 3),
+    {#signed_x509_entry{asn1_cert = E}, D}.
+
+-spec deserialise_signed_precert_entry(binary()) ->
+                                              {signed_precert_entry(), binary()}.
+deserialise_signed_precert_entry(Data) ->
+    <<IssuerKeyHash:32/binary, RestData/binary>> = Data,
+    {TBSCertificate, RestData2} = decode_tls_vector(RestData, 3),
+    {#signed_precert_entry{issuer_key_hash = IssuerKeyHash,
+                           tbs_certificate = TBSCertificate},
+     RestData2}.

 -spec entries(non_neg_integer(), non_neg_integer()) -> list().
 entries(Start, End) ->
@@ -142,10 +247,9 @@ entries(Start, End) ->
 entry_and_proof(Index, TreeSize) ->
     case plop:inclusion_and_entry(Index, TreeSize) of
         {ok, Entry, Path} ->
-            {Timestamp, LeafCertVector, CertChainVector} = unpack_entry(Entry),
-            MTL = build_mtl(Timestamp, LeafCertVector),
+            {MTL, ExtraData} = unpack_entry(Entry),
             {[{leaf_input, base64:encode(MTL)},
-              {extra_data, base64:encode(CertChainVector)},
+              {extra_data, base64:encode(ExtraData)},
               {audit_path, [base64:encode(X) || X <- Path]}]};
         {notfound, Msg} ->
             {[{success, false},
@@ -161,20 +265,20 @@ init_cache_table() ->
     ets:new(?CACHE_TABLE, [set, public, named_table]).

 %% Private functions.
+-spec unpack_entry(binary()) -> {binary(), binary()}.
 unpack_entry(Entry) ->
-    <<Timestamp:64, LogEntry/binary>> = Entry,
-    {LeafCertVector, CertChainVector} = decode_tls_vector(LogEntry, 3),
-    {Timestamp, LeafCertVector, CertChainVector}.
+    {MTL, Rest} = decode_tls_vector(Entry, 4),
+    {ExtraData, <<>>} = decode_tls_vector(Rest, 4),
+    {MTL, ExtraData}.

 -spec x_entries([{non_neg_integer(), binary(), binary()}]) -> list().
 x_entries([]) ->
     [];
 x_entries([H|T]) ->
     {_Index, _Hash, Entry} = H,
-    {Timestamp, LeafCertVector, CertChainVector} = unpack_entry(Entry),
-    MTL = build_mtl(Timestamp, LeafCertVector),
-    [{[{leaf_input, base64:encode(MTL)}, {extra_data, base64:encode(CertChainVector)}]} |
-     x_entries(T)].
+    {MTL, ExtraData} = unpack_entry(Entry),
+    [{[{leaf_input, base64:encode(MTL)},
+       {extra_data, base64:encode(ExtraData)}]} | x_entries(T)].

 -spec encode_tls_vector(binary(), non_neg_integer()) -> binary().
 encode_tls_vector(Binary, LengthLen) ->
diff --git a/src/v1.erl b/src/v1.erl
index 006990d..e672182 100644
--- a/src/v1.erl
+++ b/src/v1.erl
@@ -9,34 +9,10 @@
 %% Public functions, i.e. part of URL.

 request(post, "ct/v1/add-chain", Input) ->
-    case (catch mochijson2:decode(Input)) of
-        {error, E} ->
-            html("add-chain: bad input:", E);
-        {struct, [{<<"chain">>, ChainBase64}]} ->
-            case (catch [base64:decode(X) || X <- ChainBase64]) of
-                {'EXIT', _} ->
-                    html("add-chain: invalid base64-encoded chain: ",
-                         [ChainBase64]);
-                [LeafCert | CertChain] ->
-                    Roots = catlfish:known_roots(),
-                    case x509:normalise_chain(Roots, [LeafCert|CertChain]) of
-                        {ok, [Leaf | Chain]} ->
-                            lager:info("adding ~p",
-                                       [x509:cert_string(LeafCert)]),
-                            success(catlfish:add_chain(Leaf, Chain));
-                        {error, Reason} ->
-                            lager:info("rejecting ~p: ~p",
-                                       [x509:cert_string(LeafCert), Reason]),
-                            html("add-chain: invalid chain", Reason)
-                    end;
-                Invalid ->
-                    html("add-chain: chain is not a list: ", [Invalid])
-            end;
-        _ -> html("add-chain: missing input: chain", Input)
-    end;
+    add_chain(Input, normal);

-request(post, "ct/v1/add-pre-chain", _Input) ->
-    niy();
+request(post, "ct/v1/add-pre-chain", Input) ->
+    add_chain(Input, precert);

 request(get, "ct/v1/get-sth", _Query) ->
     R = plop:sth(),
@@ -130,8 +106,33 @@ html(Text, Input) ->
                        "~p~n" ++
                        "~n", [Text, Input])}.
-niy() ->
-    html("NIY - Not Implemented Yet|", []).
-
 success(Data) ->
     {200, [{"Content-Type", "text/json"}],
      mochijson2:encode(Data)}.
+
+-spec add_chain(any(), normal|precert) -> any().
+add_chain(Input, Type) ->
+    case (catch mochijson2:decode(Input)) of
+        {error, E} ->
+            html("add-chain: bad input:", E);
+        {struct, [{<<"chain">>, ChainBase64}]} ->
+            case (catch [base64:decode(X) || X <- ChainBase64]) of
+                {'EXIT', _} ->
+                    html("add-chain: invalid base64-encoded chain: ",
+                         [ChainBase64]);
+                [LeafCert | CertChain] ->
+                    case x509:normalise_chain(catlfish:known_roots(),
+                                              [LeafCert|CertChain]) of
+                        {ok, [Leaf | Chain]} ->
+                            lager:info("adding ~p cert ~p",
+                                       [Type, x509:cert_string(LeafCert)]),
+                            success(catlfish:add_chain(Leaf, Chain, Type));
+                        {error, Reason} ->
+                            lager:info("rejecting ~p: ~p",
+                                       [x509:cert_string(LeafCert), Reason]),
+                            html("add-chain: invalid chain", Reason)
+                    end;
+                Invalid ->
+                    html("add-chain: chain is not a list: ", [Invalid])
+            end;
+        _ -> html("add-chain: missing input: chain", Input)
+    end.
diff --git a/src/x509.erl b/src/x509.erl
index 5a0e871..43b90b3 100644
--- a/src/x509.erl
+++ b/src/x509.erl
@@ -3,10 +3,10 @@

 -module(x509).
 -export([normalise_chain/2, cert_string/1, read_pemfiles_from_dir/1,
-         self_signed/1]).
-
+         self_signed/1, detox/2]).
 -include_lib("public_key/include/public_key.hrl").
 -include_lib("eunit/include/eunit.hrl").
+-import(lists, [nth/2, filter/2]).

 -type reason() :: {chain_too_long |
                    root_unknown |
@@ -18,15 +18,49 @@
 -spec normalise_chain([binary()], [binary()]) ->
                              {ok, [binary()]} | {error, reason()}.
 normalise_chain(AcceptableRootCerts, CertChain) ->
-    case valid_chain_p(AcceptableRootCerts, CertChain, ?MAX_CHAIN_LENGTH) of
+    case normalise_chain(AcceptableRootCerts, CertChain, ?MAX_CHAIN_LENGTH) of
         {false, Reason} ->
             {error, Reason};
         {true, Root} ->
-            [Leaf | Chain] = CertChain,
-            {ok, [detox_precert(Leaf) | Chain] ++ Root}
+            {ok, CertChain ++ Root}
     end.
-%%%%%%%%%%%%%%%%%%%%
+cert_string(Der) ->
+    mochihex:to_hex(crypto:hash(sha, Der)).
+
+-spec read_pemfiles_from_dir(file:filename()) -> [binary()].
+%% @doc Reading certificates from files. Flattening the result -- all
+%% certs in all files are returned in a single list.
+read_pemfiles_from_dir(Dir) ->
+    case file:list_dir(Dir) of
+        {error, enoent} ->
+            lager:error("directory does not exist: ~p", [Dir]),
+            [];
+        {error, Reason} ->
+            lager:error("unable to read directory ~p: ~p", [Dir, Reason]),
+            [];
+        {ok, Filenames} ->
+            Files = lists:filter(
+                      fun(F) ->
+                              string:equal(".pem", filename:extension(F))
+                      end,
+                      Filenames),
+            ders_from_pemfiles(Dir, Files)
+    end.
+
+-spec self_signed([binary()]) -> [binary()].
+%% @doc Return a list of certs in L that are self signed.
+self_signed(L) ->
+    lists:filter(fun(Cert) -> signed_by_p(Cert, Cert) end, L).
+
+%% @doc Return the detoxed cert in LeafDer and the issuer key hash.
+-spec detox(binary(), [binary()]) -> {binary(), binary()}.
+detox(LeafDer, ChainDer) ->
+    detox_precert(LeafDer, nth(1, ChainDer), nth(2, ChainDer)).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Private functions.
+
 %% @doc Verify that the leaf cert or precert has a valid chain back to
 %% an acceptable root cert. The order of certificates in the second
 %% argument is: leaf cert in head, chain in tail. Order of first
@@ -37,12 +71,12 @@ normalise_chain(AcceptableRootCerts, CertChain) ->
 %% amongst the acceptable root certs. Otherwise it contains exactly
 %% one element, a CA cert from the acceptable root certs signing the
 %% root of the chain.
--spec valid_chain_p([binary()], [binary()], integer()) ->
-                           {false, reason()} | {true, list()}.
-valid_chain_p(_, _, MaxChainLength) when MaxChainLength =< 0 ->
+-spec normalise_chain([binary()], [binary()], integer()) ->
+                             {false, reason()} | {true, list()}.
+normalise_chain(_, _, MaxChainLength) when MaxChainLength =< 0 ->
     %% Chain too long.
     {false, chain_too_long};
-valid_chain_p(AcceptableRootCerts, [TopCert], MaxChainLength) ->
+normalise_chain(AcceptableRootCerts, [TopCert], MaxChainLength) ->
     %% Check root of chain.
     case lists:member(TopCert, AcceptableRootCerts) of
         true ->
@@ -58,9 +92,9 @@ valid_chain_p(AcceptableRootCerts, [TopCert], MaxChainLength) ->
                 Root -> {true, [Root]}
             end
     end;
-valid_chain_p(AcceptableRootCerts, [BottomCert|Rest], MaxChainLength) ->
+normalise_chain(AcceptableRootCerts, [BottomCert|Rest], MaxChainLength) ->
     case signed_by_p(BottomCert, hd(Rest)) of
-        true -> valid_chain_p(AcceptableRootCerts, Rest, MaxChainLength - 1);
+        true -> normalise_chain(AcceptableRootCerts, Rest, MaxChainLength - 1);
         false -> {false, signature_mismatch}
     end.

@@ -90,9 +124,10 @@ encoded_tbs_cert(DerCert) ->
         PKIXCert,
     EncodedTBSCert.

+-spec extract_verify_data(#'Certificate'{}, binary()) -> {ok, tuple()} | error.
+%% @doc Return DER encoded TBSCertificate, digest type and signature.
 %% Code from pubkey_cert:extract_verify_data/2.
--spec verifydata_from_cert(#'Certificate'{}, binary()) -> {ok, tuple()} | error.
-verifydata_from_cert(Cert, DerCert) ->
+extract_verify_data(Cert, DerCert) ->
     PlainText = encoded_tbs_cert(DerCert),
     {_, Sig} = Cert#'Certificate'.signature,
     SigAlgRecord = Cert#'Certificate'.signatureAlgorithm,
@@ -114,7 +149,7 @@ verify_sig(Cert, DerCert, % Certificate to verify.
              tbsCertificate = #'TBSCertificate'{
                                 subjectPublicKeyInfo = IssuerSPKI}}) ->
     %% Dig out digest, digest type and signature from Cert/DerCert.
-    case verifydata_from_cert(Cert, DerCert) of
+    case extract_verify_data(Cert, DerCert) of
         error -> false;
         {ok, Tuple} -> verify_sig2(IssuerSPKI, Tuple)
     end.

@@ -159,9 +194,6 @@ signed_by_p(DerCert, IssuerDerCert) when is_binary(DerCert),
       DerCert,
       public_key:pkix_decode_cert(IssuerDerCert, plain)).

-cert_string(Der) ->
-    mochihex:to_hex(crypto:hash(sha, Der)).
-
 parsable_cert_p(Der) ->
     case (catch public_key:pkix_decode_cert(Der, plain)) of
         #'Certificate'{} ->
@@ -175,54 +207,139 @@ parsable_cert_p(Der) ->
             false
     end.

--spec self_signed([binary()]) -> [binary()].
-self_signed(L) ->
-    lists:filter(fun(Cert) -> signed_by_p(Cert, Cert) end, L).
-
-%%%%%%%%%%%%%%%%%%%%
-%% Precertificates according to draft-ietf-trans-rfc6962-bis-04.
+%% Precerts according to RFC6962.

 %% Submitted precerts have a special critical poison extension -- OID
 %% 1.3.6.1.4.1.11129.2.4.3, whose extnValue OCTET STRING contains
 %% ASN.1 NULL data (0x05 0x00).

 %% They are signed with either the CA cert that will sign the final
-%% cert or Precertificate Signing Certificate directly signed by the
+%% cert or a Precertificate Signing Certificate directly signed by the
 %% CA cert that will sign the final cert. A Precertificate Signing
 %% Certificate has CA:true and Extended Key Usage: Certificate
 %% Transparency, OID 1.3.6.1.4.1.11129.2.4.4.

-%% A PreCert in a SignedCertificateTimestamp does _not_ contain the
-%% poison extension, nor a Precertificate Signing Certificate. This
-%% means that we might have to 1) remove poison extensions in leaf
-%% certs, 2) remove "poisoned signatures", 3) change issuer and
-%% Authority Key Identifier of leaf certs.
+%% PreCert in SignedCertificateTimestamp does _not_ contain the poison
+%% extension, nor does it have an issuer which is a Precertificate
+%% Signing Certificate. This means that we have to 1) remove the
+%% poison extension and 2) potentially change issuer and Authority Key
+%% Identifier. See RFC6962 Section 3.2.
+
+%% Changes in draft-ietf-trans-rfc6962-bis-??: TODO.
+
+-spec detox_precert(binary(), binary(), binary()) -> {binary(), binary()}.
+%% @doc Return {DetoxedLeaf, IssuerPubKeyHash} where i) DetoxedLeaf is
+%% the tbsCertificate w/o poison and adjusted issuer and authkeyid;
+%% and ii) IssuerPubKeyHash is the hash over issuing cert's public
+%% key.
+detox_precert(LeafDer, ParentDer, GrandParentDer) ->
+    Leaf = public_key:pkix_decode_cert(LeafDer, plain),
+    Parent = public_key:pkix_decode_cert(ParentDer, plain),
+    GrandParent = public_key:pkix_decode_cert(GrandParentDer, plain),
+    DetoxedLeafTBS = remove_poison_ext(Leaf),
+
+    %% If parent is a precert signing cert, change issuer and
+    %% potential authority key id to refer to grandparent.
+    {C, IssuerKeyHash} =
+        case is_precert_signer(Parent) of
+            true ->
+                {set_issuer_and_authkeyid(DetoxedLeafTBS, Parent),
+                 extract_pub_key(GrandParent)};
+            false ->
+                {DetoxedLeafTBS, extract_pub_key(Parent)}
+        end,
+    {public_key:pkix_encode('TBSCertificate', C, plain),
+     crypto:hash(sha256, public_key:pkix_encode(
+                           'SubjectPublicKeyInfo', IssuerKeyHash, plain))}.
+
+-spec extract_pub_key(#'Certificate'{}) -> #'SubjectPublicKeyInfo'{}.
+extract_pub_key(#'Certificate'{
+                   tbsCertificate = #'TBSCertificate'{
+                                       subjectPublicKeyInfo = SPKI}}) ->
+    SPKI.
+
+-spec set_issuer_and_authkeyid(#'TBSCertificate'{}, #'Certificate'{}) ->
+                                      #'TBSCertificate'{}.
+%% @doc Return Cert with issuer and AuthorityKeyIdentifier from Parent.
+set_issuer_and_authkeyid(TBSCert,
+                         #'Certificate'{
+                            tbsCertificate =
+                                #'TBSCertificate'{
+                                   issuer = ParentIssuer,
+                                   extensions = ParentExtensions}}) ->
+    case pubkey_cert:select_extension(?'id-ce-authorityKeyIdentifier',
+                                      ParentExtensions) of
+        undefined ->
+            lager:debug("setting issuer only", []),
+            TBSCert#'TBSCertificate'{issuer = ParentIssuer};
+        ParentAuthKeyExt ->
+            NewExtensions =
+                lists:map(
+                  fun(E) ->
+                          case E of
+                              #'Extension'{extnID =
+                                               ?'id-ce-authorityKeyIdentifier'} ->
+                                  lager:debug("swapping auth key id to ~p",
+                                              [ParentAuthKeyExt]),
+                                  ParentAuthKeyExt;
+                              Orig ->
+                                  Orig
+                          end
+                  end,
+                  TBSCert#'TBSCertificate'.extensions),
+            TBSCert#'TBSCertificate'{issuer = ParentIssuer,
+                                     extensions = NewExtensions}
+    end.

--spec detox_precert([#'Certificate'{}]) -> [#'Certificate'{}].
-detox_precert(CertChain) ->
-    CertChain. % NYI
+-define(CA_POISON_OID, {1,3,6,1,4,1,11129,2,4,4}).
+
+-spec is_precert_signer(#'Certificate'{}) -> boolean().
+is_precert_signer(#'Certificate'{tbsCertificate = TBSCert}) ->
+    Extensions = pubkey_cert:extensions_list(TBSCert#'TBSCertificate'.extensions),
+    %% NOTE: It's OK to look at only the first extension found since
+    %% "A certificate MUST NOT include more than one instance of a
+    %% particular extension." --RFC5280 Sect 4.2
+    case pubkey_cert:select_extension(?'id-ce-extKeyUsage', Extensions) of
+        #'Extension'{extnValue = Val} ->
+            case 'OTP-PUB-KEY':decode('ExtKeyUsageSyntax', Val) of
+                %% NOTE: We require that the poisoned OID is the
+                %% _only_ extkeyusage present. RFC6962 Sect 3.1 is not
+                %% really clear.
+                {ok, [?CA_POISON_OID]} -> is_ca(TBSCert);
+                _ -> false
+            end;
+        _ -> false
+    end.

-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
--spec read_pemfiles_from_dir(file:filename()) -> [binary()].
-%% @doc Reading certificates from files. Flattening the result -- all
-%% certs in all files are returned in a single list.
-read_pemfiles_from_dir(Dir) ->
-    case file:list_dir(Dir) of
-        {error, enoent} ->
-            lager:error("directory does not exist: ~p", [Dir]),
-            [];
-        {error, Reason} ->
-            lager:error("unable to read directory ~p: ~p", [Dir, Reason]),
-            [];
-        {ok, Filenames} ->
-            Files = lists:filter(
-                      fun(F) ->
-                              string:equal(".pem", filename:extension(F))
-                      end,
-                      Filenames),
-            ders_from_pemfiles(Dir, Files)
+is_ca(#'TBSCertificate'{extensions = Extensions}) ->
+    case pubkey_cert:select_extension(?'id-ce-basicConstraints', Extensions) of
+        #'Extension'{critical = true, extnValue = Val} ->
+            case 'OTP-PUB-KEY':decode('BasicConstraints', Val) of
+                {ok, {'BasicConstraints', true, _}} -> true;
+                _ -> false
+            end;
+        _ -> false
     end.

+-spec remove_poison_ext(#'Certificate'{}) -> #'TBSCertificate'{}.
+remove_poison_ext(#'Certificate'{tbsCertificate = TBSCert}) ->
+    Extensions = pubkey_cert:extensions_list(TBSCert#'TBSCertificate'.extensions),
+    SanitisedExtensions =
+        filter(fun(E) -> not poisoned_leaf_p(E) end, Extensions),
+    NewTBSCert = TBSCert#'TBSCertificate'{extensions = SanitisedExtensions},
+    NewTBSCert.
+
+-define(LEAF_POISON_OID, {1,3,6,1,4,1,11129,2,4,3}).
+-define(LEAF_POISON_VAL, [5,0]).
+
+poisoned_leaf_p(#'Extension'{extnID = ?LEAF_POISON_OID,
+                             critical = true,
+                             extnValue = ?LEAF_POISON_VAL}) ->
+    true;
+poisoned_leaf_p(_) ->
+    false.
+
+%%%% PEM files.
 ders_from_pemfiles(Dir, Filenames) ->
     lists:flatten(
       [ders_from_pemfile(filename:join(Dir, X)) || X <- Filenames]).
@@ -297,21 +414,21 @@ valid_cert_test_() ->
      %% 'OTP-PUB-KEY':Func('OTP-X520countryname', Value0)
      %% FIXME: This error doesn't make much sense -- is my
      %% environment borked?
-     ?_assertMatch({true, _}, valid_chain_p(lists:nth(1, Chains),
-                                            lists:nth(1, Chains), 10)),
+     ?_assertMatch({true, _}, normalise_chain(lists:nth(1, Chains),
+                                              lists:nth(1, Chains), 10)),
      %% Self-signed so fail.
      ?_assertMatch({false, root_unknown},
-                   valid_chain_p(KnownRoots,
-                                 lists:nth(2, Chains), 10)),
+                   normalise_chain(KnownRoots,
+                                   lists:nth(2, Chains), 10)),
      %% Leaf signed by known CA, pass.
-     ?_assertMatch({true, _}, valid_chain_p(KnownRoots,
-                                            lists:nth(3, Chains), 10)),
+     ?_assertMatch({true, _}, normalise_chain(KnownRoots,
+                                              lists:nth(3, Chains), 10)),
      %% Proper 3-depth chain with root in KnownRoots, pass.
      %% Bug CATLFISH-19 --> [info] rejecting "3ee62cb678014c14d22ebf96f44cc899adea72f1": chain_broken
      %% leaf sha1: 3ee62cb678014c14d22ebf96f44cc899adea72f1
      %% leaf Subject: C=KR, O=Government of Korea, OU=Group of Server, OU=\xEA\xB5\x90\xEC\x9C\xA1\xEA\xB3\xBC\xED\x95\x99\xEA\xB8\xB0\xEC\x88\xA0\xEB\xB6\x80, CN=www.berea.ac.kr, CN=haksa.bits.ac.kr
-     ?_assertMatch({true, _}, valid_chain_p(KnownRoots,
-                                            lists:nth(4, Chains), 3)),
+     ?_assertMatch({true, _}, normalise_chain(KnownRoots,
+                                              lists:nth(4, Chains), 3)),
      %% Verify against self, pass.
      %% Bug CATLFISH-??, can't handle issuer keytype ECPoint.
      %% Issuer sha1: 6969562e4080f424a1e7199f14baf3ee58ab6abb
@@ -333,21 +450,21 @@ chain_test_() ->
 chain_test(C0, C1) ->
     [
      %% Root not in chain but in trust store.
-     ?_assertEqual({true, [C1]}, valid_chain_p([C1], [C0], 10)),
-     ?_assertEqual({true, [C1]}, valid_chain_p([C1], [C0], 2)),
+     ?_assertEqual({true, [C1]}, normalise_chain([C1], [C0], 10)),
+     ?_assertEqual({true, [C1]}, normalise_chain([C1], [C0], 2)),
      %% Chain too long.
-     ?_assertMatch({false, chain_too_long}, valid_chain_p([C1], [C0], 1)),
+     ?_assertMatch({false, chain_too_long}, normalise_chain([C1], [C0], 1)),
      %% Root in chain and in trust store.
-     ?_assertEqual({true, []}, valid_chain_p([C1], [C0, C1], 2)),
+     ?_assertEqual({true, []}, normalise_chain([C1], [C0, C1], 2)),
      %% Chain too long.
-     ?_assertMatch({false, chain_too_long}, valid_chain_p([C1], [C0, C1], 1)),
+     ?_assertMatch({false, chain_too_long}, normalise_chain([C1], [C0, C1], 1)),
      %% Root not in trust store.
-     ?_assertMatch({false, root_unknown}, valid_chain_p([], [C0, C1], 10)),
+     ?_assertMatch({false, root_unknown}, normalise_chain([], [C0, C1], 10)),
      %% Selfsigned. Actually OK.
-     ?_assertMatch({true, []}, valid_chain_p([C0], [C0], 10)),
-     ?_assertMatch({true, []}, valid_chain_p([C0], [C0], 1)),
+     ?_assertMatch({true, []}, normalise_chain([C0], [C0], 10)),
+     ?_assertMatch({true, []}, normalise_chain([C0], [C0], 1)),
      %% Max chain length 0 is not OK.
-     ?_assertMatch({false, chain_too_long}, valid_chain_p([C0], [C0], 0))
+     ?_assertMatch({false, chain_too_long}, normalise_chain([C0], [C0], 0))
     ].

 %%-spec read_certs(file:filename()) -> [string:string()].
diff --git a/tools/certtools.py b/tools/certtools.py
index 222497f..0e639f2 100644
--- a/tools/certtools.py
+++ b/tools/certtools.py
@@ -6,6 +6,7 @@ import json
 import base64
 import urllib
 import urllib2
+import ssl
 import urlparse
 import struct
 import sys
@@ -79,7 +80,7 @@ def get_root_cert(issuer):
     return root_cert

 def get_sth(baseurl):
-    result = urllib2.urlopen(baseurl + "ct/v1/get-sth").read()
+    result = urllib2.urlopen(baseurl + "ct/v1/get-sth", context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
     return json.loads(result)

 def get_proof_by_hash(baseurl, hash, tree_size):
@@ -87,7 +88,7 @@ def get_proof_by_hash(baseurl, hash, tree_size):
         params = urllib.urlencode({"hash":base64.b64encode(hash),
                                    "tree_size":tree_size})
         result = \
-            urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params).read()
+            urllib2.urlopen(baseurl + "ct/v1/get-proof-by-hash?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
         return json.loads(result)
     except urllib2.HTTPError, e:
         print "ERROR:", e.read()
@@ -98,7 +99,7 @@ def get_consistency_proof(baseurl, tree_size1, tree_size2):
         params = urllib.urlencode({"first":tree_size1,
                                    "second":tree_size2})
         result = \
-            urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params).read()
+            urllib2.urlopen(baseurl + "ct/v1/get-sth-consistency?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
         return json.loads(result)["consistency"]
     except urllib2.HTTPError, e:
         print "ERROR:", e.read()
@@ -121,8 +122,7 @@ def unpack_tls_array(packed_data, length_len):

 def add_chain(baseurl, submission):
     try:
-        result = urllib2.urlopen(baseurl + "ct/v1/add-chain",
-                                 json.dumps(submission)).read()
+        result = urllib2.urlopen(baseurl + "ct/v1/add-chain", json.dumps(submission), context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
         return json.loads(result)
     except urllib2.HTTPError, e:
         print "ERROR", e.code,":", e.read()
@@ -140,7 +140,7 @@ def add_chain(baseurl, submission):
 def get_entries(baseurl, start, end):
     try:
         params = urllib.urlencode({"start":start, "end":end})
-        result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params).read()
+        result = urllib2.urlopen(baseurl + "ct/v1/get-entries?" + params, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
         return json.loads(result)
     except urllib2.HTTPError, e:
         print "ERROR:", e.read()
@@ -197,7 +197,7 @@ def http_request(url, data=None, key=None):
     signature = sk.sign("%s\0%s\0%s" % (method, parsed_url.path, data),
                         hashfunc=hashlib.sha256, sigencode=ecdsa.util.sigencode_der)
     req.add_header('X-Catlfish-Auth', base64.b64encode(signature) + ";key=" + keyname)
-    result = urllib2.urlopen(req).read()
+    result = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_TLSv1)).read()
     return result

 def get_signature(baseurl, data, key=None):
--
cgit v1.1
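
Note on the wire format introduced by this patch: the leaf_input returned by get-entries and get-entry-and-proof is a serialised MerkleTreeLeaf whose TimestampedEntry carries either an ASN.1Cert (entry type 0, x509_entry) or a PreCert (entry type 1, precert_entry: a 32-byte issuer_key_hash followed by a 3-byte-length TLS vector holding the detoxed TBSCertificate), exactly as produced by catlfish:serialise/1 above. The following Python 2 sketch shows how a client could parse such a leaf. It is illustrative only: the helper names unpack_tls_vector and parse_merkle_tree_leaf are not part of tools/certtools.py.

    import struct

    def unpack_tls_vector(data, length_len):
        # length_len-byte big-endian length prefix followed by that many
        # bytes; returns (contents, remainder).
        (length,) = struct.unpack(">Q", "\x00" * (8 - length_len) + data[:length_len])
        return data[length_len:length_len + length], data[length_len + length:]

    def parse_merkle_tree_leaf(leaf_input):
        # MerkleTreeLeaf (RFC6962 3.4): version(1) || leaf_type(1) || TimestampedEntry.
        version, leaf_type = ord(leaf_input[0]), ord(leaf_input[1])
        assert version == 0 and leaf_type == 0      # v1, timestamped_entry
        (timestamp,) = struct.unpack(">Q", leaf_input[2:10])
        (entry_type,) = struct.unpack(">H", leaf_input[10:12])
        rest = leaf_input[12:]
        if entry_type == 0:     # x509_entry: ASN.1Cert
            leaf_certificate, rest = unpack_tls_vector(rest, 3)
            entry = {"entry_type": "x509_entry",
                     "leaf_certificate": leaf_certificate}
        elif entry_type == 1:   # precert_entry: issuer_key_hash(32) || TBSCertificate
            issuer_key_hash, rest = rest[:32], rest[32:]
            tbs_certificate, rest = unpack_tls_vector(rest, 3)
            entry = {"entry_type": "precert_entry",
                     "issuer_key_hash": issuer_key_hash,
                     "tbs_certificate": tbs_certificate}
        else:
            raise ValueError("unknown entry type %d" % entry_type)
        extensions, _ = unpack_tls_vector(rest, 2)  # 2-byte-length CtExtensions
        entry["timestamp"] = timestamp
        entry["extensions"] = extensions
        return entry

A leaf obtained from the log could be parsed with, for example, parse_merkle_tree_leaf(base64.b64decode(entry["leaf_input"])). For precert entries, the corresponding extra_data holds the chain as stored by add_chain above, i.e. the submitted precertificate followed by its chain ([LeafCert | CertChain]), each element a 3-byte-length TLS vector inside an outer vector.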