#!/usr/bin/python
# -*- coding: utf-8 -*-
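"""Lightweight auditor for Certificate Transparency (CT) logs.

Fetches signed tree heads (STHs) from a fixed set of logs, verifies
their signatures, checks consistency between successive STHs, and can
optionally rebuild a log's Merkle tree from all of its entries.
"""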

import time
import base64
import hashlib
import argparse
# from pympler.asizeof import asizeof
from certtools import *


# The CT logs to monitor.
base_urls = ["https://plausible.ct.nordu.net/",
			"https://ct1.digicert-ct.com/log/",
			"https://ct.izenpe.com/",
			"https://log.certly.io/",
			"https://ct.googleapis.com/aviator/",
			"https://ct.googleapis.com/pilot/",
			"https://ct.googleapis.com/rocketeer/",
			]

# Public key for each log, used to verify STH signatures.
logkeys = {}
logkeys["https://plausible.ct.nordu.net/"] = get_public_key_from_file("../../plausible-logkey.pem")
logkeys["https://ct.googleapis.com/rocketeer/"] = get_public_key_from_file("../../rocketeer-logkey.pem")
logkeys["https://ct.googleapis.com/aviator/"] = get_public_key_from_file("../../aviator-logkey.pem")
logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pilot-logkey.pem")
logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem")
logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem")
logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem")

parser = argparse.ArgumentParser(description="Lightweight Certificate Transparency log auditor")
parser.add_argument('--audit', action='store_true', help="run lightweight auditor ensuring consistency in STH")
parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH")


def reduce_layer(layer):
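	"""Hash adjacent pairs in a layer, returning the layer above it.

	Entries are popped pairwise from the front of layer; if the layer
	has odd length, the last unpaired hash is left in layer so the
	caller can carry it into the next batch.
	"""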
	new_layer = []
	while len(layer) > 1:
		e1 = layer.pop(0)
		e2 = layer.pop(0)
		new_layer.append(internal_hash((e1,e2)))
	return new_layer

def reduce_tree(entries, layers):
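	"""Fold a batch of leaf hashes into the partially built tree.

	layers[0] collects leaf hashes; every full pair of nodes is hashed
	upward, so each layer keeps at most one pending (unpaired) node
	between batches.
	"""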
	# The root of an empty tree is the SHA-256 hash of the empty string.
	if len(entries) == 0 and layers == [[]]:
		return [[hashlib.sha256().digest()]]

	layer_idx = 0
	layers[layer_idx] += entries

	while len(layers[layer_idx]) > 1:
		if len(layers) == layer_idx + 1:
			layers.append([])

		layers[layer_idx + 1] += reduce_layer(layers[layer_idx])
		layer_idx += 1
	return layers

def reduce_subtree_to_root(layers):
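	"""Collapse the pending nodes of all layers into a single root hash,
	promoting each lower layer with next_merkle_layer until one remains.
	"""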
	while len(layers) > 1:
		layers[1] += next_merkle_layer(layers[0])
		del layers[0]

	if len(layers[0]) > 1:
		return next_merkle_layer(layers[0])
	return layers[0]


# Get STH and verify signature
def fetch_all_sth():
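	"""Fetch the current STH from every log in base_urls.

	Returns a dict mapping base_url to the STH, or to None when the
	STH could not be retrieved or its signature did not verify.
	"""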
	sths = {}
	for base_url in base_urls:
		try:
			sths[base_url] = get_sth(base_url)
		except Exception:
			print "Failed to retrieve STH from " + base_url
			sths[base_url] = None
			continue

		try:
			check_sth_signature(base_url, sths[base_url], logkeys[base_url])
		except Exception:
			print "Could not verify signature from " + base_url + "!!!"
			# An STH whose signature does not verify cannot be trusted.
			sths[base_url] = None
	return sths


def verify_consistency(old, new):
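	"""Verify that each log's new STH is consistent with its old STH.

	For every log whose tree size has changed, fetch a consistency
	proof and check that it links the old root hash to the new one.
	"""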
	for url in old:
		if old[url] is not None and new[url] is not None:
			if old[url]["tree_size"] != new[url]["tree_size"]:
				consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"])
				decoded_consistency_proof = []
				for item in consistency_proof:
					decoded_consistency_proof.append(base64.b64decode(item))
				res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], old[url]["sha256_root_hash"])

				if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])):
					print "Verification of old hash failed!!!"
					print old[url]["sha256_root_hash"], str(base64.b64encode(res[0]))
				elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])):
					print "Verification of new hash failed!!!"
					print new[url]["sha256_root_hash"], str(base64.b64encode(res[1]))
				else:
					print time.strftime("%H:%M:%S", time.gmtime()) + " New STH from " + url + ", timestamp: " + str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK."



def fetch_and_build_tree(old_sth, base_url):
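	"""Download all entries from base_url and rebuild the Merkle tree.

	Entries are fetched in batches, hashed into leaf hashes, and folded
	into a running subtree; the computed root is then compared against
	the sha256_root_hash in the log's STH.
	"""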
	sth = old_sth[base_url]
	subtree = [[]]
	idx = 0

	print "Getting all entries from " + base_url
	while idx < sth["tree_size"]:
		pre_size = idx
		entries = get_entries(base_url, idx, sth["tree_size"])["entries"]

		new_leafs = []
		for item in entries:
			new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
		idx += len(new_leafs)
		print "Got entries " + str(pre_size) + " to " + str(idx) #+ " (tree size: " + str(asizeof(subtree)) + " B)"
		subtree = reduce_tree(new_leafs, subtree)

	root = base64.b64encode(reduce_subtree_to_root(subtree)[0])

	if root == sth["sha256_root_hash"]:
		print "Verifying root hashes...OK."
	else:
		print "ERROR: Failed to verify root hashes!"
		print "STH root: " + sth["sha256_root_hash"]
		print "Tree root: " + root


def main(args):
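	"""Fetch initial STHs, then build trees and/or run the audit loop."""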
	print "Started " + time.strftime("%H:%M:%S", time.gmtime())
	old_sth = fetch_all_sth()

	if args.build_sth:
		print "Building trees from entries. This may take a while, go get coffee or something..."
		# for url in base_urls:
		# 	fetch_and_build_tree(old_sth, url)
		fetch_and_build_tree(old_sth, base_urls[0])

	if args.audit:
		print "Running auditor for " +str(len(base_urls)) + " logs..."

		while True:
			# Poll roughly once a minute for fresh STHs.
			time.sleep(1*60-4)
			new_sth = fetch_all_sth()
			verify_consistency(old_sth, new_sth)
			old_sth = new_sth

if __name__ == '__main__':
	main(parser.parse_args())