author     Josef Gustafsson <josef.gson@gmail.com>  2015-09-07 11:57:47 +0200
committer  Josef Gustafsson <josef.gson@gmail.com>  2015-09-07 11:57:47 +0200
commit     4e02f8c8fab56ca030c9ecf9cc834b7d9ece916e (patch)
tree       65c1c94e5dc28929c166ea67e2e476f524319337
parent     1be99e982addacb96724c22c23bc5786cd2de9cd (diff)
matching strings from cert info against domain-names of interest
-rwxr-xr-x  tools/josef_auditor.py                2
-rwxr-xr-x  tools/josef_experimental.py          67
-rwxr-xr-x  tools/josef_experimental_auditor.py  45
3 files changed, 58 insertions, 56 deletions
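The commit's core idea is a plain substring match of a watch list of domain names against the subject of each certificate pulled from the logs. A minimal, self-contained sketch of that matching step (the cert_info dict here is a made-up stand-in for what the tools' get_cert_info helper returns; only the "subject" and "issuer" keys are assumed):

monitored_domains = ["google.com", "liu.se", "nordu.net"]

# Stand-in for the dict get_cert_info() builds from a log entry's certificate.
cert_info = {"subject": "C=SE, CN=www.liu.se", "issuer": "O=Example CA"}

for md in monitored_domains:
    # Plain substring match against the subject string, as in the commit.
    if md in cert_info["subject"]:
        print(md + " certified by " + cert_info["issuer"])

Note that a bare substring test matches the name anywhere in the subject, so an entry like ".se" in the auditor's list will match any subject containing that substring, not only *.se names.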
diff --git a/tools/josef_auditor.py b/tools/josef_auditor.py
index 29032d1..0f12037 100755
--- a/tools/josef_auditor.py
+++ b/tools/josef_auditor.py
@@ -24,7 +24,7 @@ base_urls = [
"https://ct.googleapis.com/pilot/",
"https://ct.googleapis.com/rocketeer/",
"https://ct.ws.symantec.com/",
- # "https://ctlog.api.venafi.com/",
+ "https://ctlog.api.venafi.com/",
]
parser = argparse.ArgumentParser(description="")
parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
diff --git a/tools/josef_experimental.py b/tools/josef_experimental.py
index 4377b8b..4cb16f8 100755
--- a/tools/josef_experimental.py
+++ b/tools/josef_experimental.py
@@ -62,14 +62,16 @@ def get_proof_by_index(baseurl, index, tree_size):
sys.exit(1)
-base_urls = ["https://plausible.ct.nordu.net/",
- "https://ct1.digicert-ct.com/log/",
- "https://ct.izenpe.com/",
- "https://log.certly.io/",
- "https://ctlog.api.venafi.com/",
- "https://ct.googleapis.com/aviator/",
- "https://ct.googleapis.com/pilot/",
- "https://ct.googleapis.com/rocketeer/",
+base_urls = [
+ "https://plausible.ct.nordu.net/",
+ # "https://ct1.digicert-ct.com/log/",
+ # "https://ct.izenpe.com/",
+ # "https://log.certly.io/",
+ # "https://ctlog.api.venafi.com/",
+ # "https://ct.googleapis.com/aviator/",
+ # "https://ct.googleapis.com/pilot/",
+ # "https://ct.googleapis.com/rocketeer/",
+ # "https://ct.ws.symantec.com/",
]
logkeys = {}
@@ -86,43 +88,28 @@ logkeys["https://ctlog.api.venafi.com/"] = get_public_key_from_file("../../venaf
import Crypto.PublicKey.RSA as RSA
from Crypto.Hash import SHA256
-for url in base_urls:
-    sth = get_sth(url)
-    signature = base64.b64decode(sth["tree_head_signature"])
-    key = logkeys[url]
-    root_hash = base64.b64decode(sth["sha256_root_hash"])
+monitored_domains = [
+ "google.com",
+ "preishelden.de",
+ "liu.se",
+ "nordu.net",
+ "symantec.com",
+]
-    hash_alg, signature_alg, unpacked_signature = decode_signature(signature)
-    if signature_alg == 1:
+raw_entry = get_entries(base_urls[0], 1000, 1000)["entries"]
+orig_entries = []
-        # rsa_key = RSA.importKey(key)
-        # verifier = PKCS1_v1_5.new(rsa_key)
-
-        # version = struct.pack(">b", 0)
-        # signature_type = struct.pack(">b", 1)
-        # timestamp = struct.pack(">Q", sth["timestamp"])
-        # tree_size = struct.pack(">Q", sth["tree_size"])
-        # hash = base64.decodestring(sth["sha256_root_hash"])
+for item in raw_entry:
+    print item
-        # tree_head = version + signature_type + timestamp + tree_size + hash
-        # h = SHA256.new(tree_head)
+    cert_info = get_cert_info(item)
+    for md in monitored_domains:
+        if md in cert_info["subject"]:
+            print md + " certified by " + cert_info["issuer"]
-        # print verifier
-        # print verifier.verify(h, unpacked_signature)
-        print "RSA Signature from " + url
-        check_sth_signature(url, sth, key)
+    print "\n\n"
+    print item
-    elif signature_alg == 3:
-        print "ECDSA signature from " + url
-        check_sth_signature(url, sth, key)
-    else:
-        print "Unknown signature algorithm from " + url
-# print sth
-# print "\n\n" + signature
-# print "\n\n" + key
-# print rsa_key
-
-# print "\n\n" + rsa_key.verify(root_hash, signature)
\ No newline at end of file
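Net effect of this rewrite: instead of verifying STH signatures, josef_experimental.py now fetches a slice of entries from the first configured log and reports any monitored domain found in each certificate's subject. A compact sketch of that flow, assuming the script's own get_entries and get_cert_info helpers are already imported further up in the file (outside this hunk):

# Fetch one slice of entries from the first configured log and scan each subject.
entries = get_entries(base_urls[0], 1000, 1000)["entries"]
for item in entries:
    cert_info = get_cert_info(item)
    for md in monitored_domains:
        if md in cert_info["subject"]:
            print(md + " certified by " + cert_info["issuer"])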
diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py
index 96a84ac..690f883 100755
--- a/tools/josef_experimental_auditor.py
+++ b/tools/josef_experimental_auditor.py
@@ -16,16 +16,17 @@ NAGIOS_UNKNOWN = 3
DEFAULT_CUR_FILE = 'all-sth.json'
base_urls = [
- # "https://plausible.ct.nordu.net/",
- # "https://ct1.digicert-ct.com/log/",
- "https://ct.izenpe.com/",
- # "https://log.certly.io/",
- # "https://ct.googleapis.com/aviator/",
- # "https://ct.googleapis.com/pilot/",
- # "https://ct.googleapis.com/rocketeer/",
- "https://ct.ws.symantec.com/",
- # "https://ctlog.api.venafi.com/",
- ]
+ # "https://plausible.ct.nordu.net/",
+ # "https://ct1.digicert-ct.com/log/",
+ "https://ct.izenpe.com/",
+ # "https://log.certly.io/",
+ # "https://ct.googleapis.com/aviator/",
+ # "https://ct.googleapis.com/pilot/",
+ # "https://ct.googleapis.com/rocketeer/",
+ "https://ct.ws.symantec.com/",
+ "https://ctlog.api.venafi.com/",
+]
+
parser = argparse.ArgumentParser(description="")
parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
parser.add_argument('--monitor', action='store_true', help="run full monitoring for all logs")
@@ -44,6 +45,15 @@ parser.add_argument('--cur-sth',
timings = {}
errors = []
+monitored_domains = [
+ # "google.com",
+ "preishelden.de",
+ "liu.se",
+ "nordu.net",
+ "symantec.com",
+ ".se",
+]
+
class UTC(datetime.tzinfo):
    def utcoffset(self, dt):
        return datetime.timedelta(hours=0)
@@ -186,6 +196,14 @@ def verify_inclusion_all(old, new):
print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
+def check_domain(raw_entry):
+    orig_entry = extract_original_entry(raw_entry)
+    cert_info = get_cert_info(orig_entry[0][0])
+    for md in monitored_domains:
+        if md in cert_info["subject"]:
+            print md + " (" + cert_info["subject"].split("CN=")[1] + ") certified by " + cert_info["issuer"]
+
+
def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
    try:
        sth = old_sth[base_url]
@@ -199,6 +217,7 @@ def fetch_and_increment_subtree(old_sth, new_sth_in, subtree, base_url):
        new_leafs = []
        for item in entries:
+            check_domain(item)
            new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
        idx += len(new_leafs)
        print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
@@ -215,15 +234,12 @@ def fetch_and_build_subtree(old_sth, base_url):
    subtree = [[]]
    idx = 0
-    # print time.strftime('%H:%M:%S') + " Getting all entries from " + base_url
    while idx < sth["tree_size"]:
        pre_size = idx
-        ### DEBUG!!
-        # entries = get_entries(base_url, idx, sth["tree_size"]-100)["entries"]
        entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
-
        new_leafs = []
        for item in entries:
+            check_domain(item)
            new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
        idx += len(new_leafs)
        print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
@@ -427,7 +443,6 @@ def main(args):
    if args.monitor:
        # Run for one log only
-        # url = base_urls[0]
        all_subtrees = {}
        print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..."
        for url in base_urls:
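The check_domain helper added to the auditor splits the subject on "CN=" to show the matched common name; a subject without a CN component would make split("CN=")[1] raise an IndexError. A hedged sketch of just the matching/printing step with that guard added (check_domain_safe and its arguments are illustrative names, not part of the repo):

def check_domain_safe(cert_info, domains):
    # Same substring match as check_domain, but tolerate subjects without a CN
    # (split("CN=")[1] raises IndexError when "CN=" is absent).
    subject = cert_info["subject"]
    for md in domains:
        if md in subject:
            cn = subject.split("CN=")[1] if "CN=" in subject else subject
            print(md + " (" + cn + ") certified by " + cert_info["issuer"])

# Example with a made-up cert_info dict:
check_domain_safe({"subject": "C=SE, CN=www.liu.se", "issuer": "O=Example CA"}, ["liu.se"])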