path: root/monitor/josef_monitor.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Selected dependencies
# python-dev (apt)
# pycrypto (pip)
# leveldb (pip)

import time
import datetime
import base64
import argparse
import errno
import sys
import json
import urllib
import urllib2
from urllib2 import urlopen
from copy import deepcopy
# get_sth, get_entries, get_leaf_hash, reduce_tree, check_sth_signature and
# the other CT helpers used below are expected to come from josef_lib.
from josef_lib import *
from josef_reader import monitored_domain
from josef_leveldb import db_add_certs, db_open
import os.path

# Import from config file
if os.path.isfile("monitor_conf.py"):
    from monitor_conf import ctlogs, OUTPUT_DIR, INTERVAL, DEFAULT_CERT_FILE, DB_PATH, MONITORED_DOMAINS, DOMAINS_FILE
else:
    print "Config file not found!"
    sys.exit()
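
# For reference, monitor_conf.py is expected to define the names imported
# above. Illustrative sketch only -- the values below are placeholders, not
# taken from any real deployment:
#
#   ctlogs = [{"name": "examplelog",
#              "url": "https://ct.example.com/",
#              "key": "<base64-encoded log key>",
#              "id": "<base64-encoded log id>"}]
#   OUTPUT_DIR = "output/"
#   DB_PATH = "cert_db/"
#   INTERVAL = 60                                # seconds between monitor rounds
#   DEFAULT_CERT_FILE = "output/cert_data.json"
#   MONITORED_DOMAINS = ["example.com"]
#   DOMAINS_FILE = "output/domains_data.json"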

if not os.path.exists(OUTPUT_DIR):
    os.makedirs(OUTPUT_DIR)

if not os.path.exists(DB_PATH):
    os.makedirs(DB_PATH)


parser = argparse.ArgumentParser(description="")

# Error messages collected by the inclusion checking helpers below.
errors = []

class ctlog:
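    # Represents one monitored CT log. Keeps the fetched Merkle subtree, the
    # latest STH and the number of fetched entries in memory, and writes
    # progress messages to a per-log logfile under OUTPUT_DIR.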
    def __init__(self, name, url, key, log_id=None):
        self.name = name
        self.url = url
        self.key = key
        self.log_id = log_id
        self.logfile = OUTPUT_DIR + name + ".log"
        self.savefile = OUTPUT_DIR + name + "-state-info.json"
        self.subtree = [[]]
        self.sth = None
        self.entries = 0
        self.root_hash = None

        self.log("Starting monitor")


    def incremental_build(self):
        # Keeps state current during build, partial builds are possible.
        self.sth = get_sth(self.url)
        start_size = self.entries
        while self.entries < self.sth["tree_size"]:
            tmp_size = self.entries
            self.subtree, self.entries = self.fetch_and_increment_subtree(
                self.entries, self.sth["tree_size"] - 1, self.url, self.subtree)
            if tmp_size != self.entries:
                self.log("Got entries " + str(tmp_size) + " to " +
                         str(self.entries - 1) + " of " + str(self.sth["tree_size"] - 1))

        if self.entries != start_size:
            if verify_subtree(self.sth, self.subtree, self.url):
                self.log("Successfully build tree with " + str(self.entries - start_size) + \
                    " new entries. Size: " + str(self.entries))
            else:
                self.log("ERROR Failed to build tree from entries.")


    def fetch_and_increment_subtree(self, first, last, url, subtree=[[]]):
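        # Fetch entries [first, last] from the log, extract certificate data
        # from each entry (written to the leveldb database and/or the default
        # cert file), fold the new leaf hashes into the partial subtree and
        # return it together with the index of the next entry to fetch.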
        new_leafs = []
        if first <= last:
            entries = get_entries(url, first, last)["entries"]
            tmp_cert_data = []
            for item in entries:
                tmp_data = check_domain(item, url)
                entry_hash = get_leaf_hash(base64.b64decode(item["leaf_input"]))
                if tmp_data:
                    tmp_data["leaf_hash"] = base64.b64encode(entry_hash)
                    tmp_cert_data.append(tmp_data)
                new_leafs.append(entry_hash)
            if DB_PATH:
                db_add_certs(DB_PATH, tmp_cert_data)
            if DEFAULT_CERT_FILE:
                append_file(DEFAULT_CERT_FILE, tmp_cert_data)
            subtree = reduce_tree(new_leafs, subtree)
        return subtree, len(new_leafs) + first


    def to_dict(self):
        d = {}
        d["entries"] = self.entries
        d["subtree"] = encode_tree(self.subtree)
        d["sth"] = self.sth
        return d

    def save(self):
        self.log("Saving state to file")
        with open(self.savefile, 'w') as f:
            f.write(json.dumps(self.to_dict()))

    def load(self):
        self.log("Loading state from file")
        try:
            with open(self.savefile) as f:
                d = json.loads(f.read())
            self.subtree = decode_tree(d["subtree"])
            self.sth = d["sth"]
            self.entries = d["entries"]
        except IOError, e:
            if e.errno == errno.ENOENT:
                return None
            raise e

    def log(self, string):
        s = time.strftime('%Y-%m-%d, %H:%M:%S') + " " + string
        with open(self.logfile, 'a') as f:
            f.write(s + "\n")

    def update_sth(self):
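        # Fetch a fresh STH, check its signature and remember it (with a log
        # entry) if the timestamp differs from the one we already have.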
        new_sth = get_sth(self.url)

        try:
            check_sth_signature(self.url, new_sth, None)
        except:
            self.log("ERROR: Could not verify STH signature")
            print "ERROR: Could not verify STH signature from " + self.url

        sth_time = datetime.datetime.fromtimestamp(new_sth['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
        if new_sth["timestamp"] != self.sth["timestamp"]:
            self.log("STH updated. Size: " + str(new_sth["tree_size"]) + ", Time: " + sth_time)
            self.sth = new_sth


    def update_roots(self):
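        # Fetch the log's accepted root certificates and diff them against the
        # copies cached under OUTPUT_DIR/<name>-roots, reporting and storing
        # any added or removed roots.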
        roots = get_all_roots(self.url)
        new_root_hash = str(hash(str(roots)))
        
        if new_root_hash != self.root_hash:
            self.root_hash = new_root_hash
            cert_dir = OUTPUT_DIR + self.name + "-roots"
            if not os.path.exists(cert_dir):
                os.makedirs(cert_dir)

            hash_list = []
            for cert in roots:
                h = str(hash(str(cert)))
                hash_list.append(h)

            loaded_list = os.listdir(cert_dir)

            added, removed = compare_lists(hash_list, loaded_list)

            if len(added) != 0:
                print str(len(added)) + " new roots found for " + self.name
            if len(removed) != 0:
                print str(len(removed)) + " roots removed for " + self.name
            
            for item in removed:
                data = open(cert_dir + "/" + item).read()

                root_cert = base64.decodestring(data)
                subject = get_cert_info(root_cert)["subject"]
                issuer = get_cert_info(root_cert)["issuer"]
                if subject == issuer:
                    print "Removed Root: " + item + ", " + subject
                    self.log("Removed Root: " + item + ", " + subject)
                else:
                    print "WARNING: removed certificate " + item + " is not self-signed"

            for item in added:
                root_cert = base64.decodestring(roots[hash_list.index(item)])
                subject = get_cert_info(root_cert)["subject"]
                issuer = get_cert_info(root_cert)["issuer"]
                if subject == issuer:
                    print "New Root: " + item + ", " + subject
                    self.log("New Root: " + item + ", " + subject)
                else:
                    print "WARNING: new certificate " + item + " is not self-signed"

                fn = cert_dir + "/" + item
                tempname = fn + ".new"
                data = roots[hash_list.index(item)]
                with open(tempname, 'w') as f:
                    f.write(data)
                mv_file(tempname, fn)


    def verify_progress(self, old):
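        # Sanity-check the new STH against the previous one: the root hash
        # must not change for an unchanged tree size, the tree must never
        # shrink, and the STH should not be older than 6/12/24 hours.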
        new = self.sth
        try:
            if new["tree_size"] == old["tree_size"]:
                if old["sha256_root_hash"] != new["sha256_root_hash"]:
                    s = time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size"
                    self.log(s)
                    print s
            elif new["tree_size"] < old["tree_size"]:
                s = time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \
                  (new["tree_size"], old["tree_size"])
                self.log(s)
                print s
            else:
                age = time.time() - new["timestamp"]/1000
                sth_time = datetime.datetime.fromtimestamp(new['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
                roothash = new['sha256_root_hash']
                if age > 24 * 3600:
                    s = "CRITICAL: STH is older than 24h: %s UTC" % (sth_time)
                    self.log(s)
                    print s
                elif age > 12 * 3600:
                    s = "WARNING: STH is older than 12h: %s UTC" % (sth_time)
                    self.log(s)
                    # print s
                elif age > 6 * 3600:
                    s = "WARNING: STH is older than 6h: %s UTC" % (sth_time)
                    self.log(s)
                    # print s
        except:
            s = " ERROR: Failed to verify progress"
            self.log(s)
            print s


    def verify_consistency(self, old):
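        # When the tree has grown, fetch a consistency proof between the old
        # and new tree sizes and verify that it reproduces both root hashes.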
        new = self.sth
        # for url in old:
        try:
            if old["tree_size"]!= new["tree_size"]:
                consistency_proof = get_consistency_proof(self.url, old["tree_size"], new["tree_size"])
                decoded_consistency_proof = []
                for item in consistency_proof:
                    decoded_consistency_proof.append(base64.b64decode(item))
                res = verify_consistency_proof(decoded_consistency_proof, old["tree_size"], new["tree_size"], old["sha256_root_hash"])
                
                if old["sha256_root_hash"] != str(base64.b64encode(res[0])):
                    s = " Verification of old hash failed! " + \
                        old["sha256_root_hash"], str(base64.b64encode(res[0]))
                    self.log(s)
                    print s
                elif new["sha256_root_hash"] != str(base64.b64encode(res[1])):
                    s = " Verification of new hash failed! " + \
                        new["sha256_root_hash"], str(base64.b64encode(res[1]))
                    self.log(s)
                    print s
                # else:
                #     s = "New STH, timestamp: " + str(new["timestamp"]) + \
                #         ", size: " + str(new["tree_size"]) + "...OK."
                #     self.log(s)

        except:
            self.log("ERROR: Could not verify consistency!")
            print "ERROR: Could not verify consistency for " + self.url


def verify_inclusion_all(old, new):
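    # For every log whose STH has changed, fetch the entries added between the
    # old and new tree sizes and verify an inclusion proof for each of them.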
    for url in old:
        try:
            if old[url] and new[url]:
                if old[url]["tree_size"]!= new[url]["tree_size"]:
                    entries = []

                    while len(entries) + old[url]["tree_size"]!= new[url]["tree_size"]:
                        entries += get_entries(url, str(int(old[url]["tree_size"]) + len(entries)), new[url]["tree_size"] -1)["entries"]
                        print "Got " + str(len(entries)) + " entries..."

                    success = True
                    for i in entries:
                        h = get_leaf_hash(base64.b64decode(i["leaf_input"]))
                        if not verify_inclusion_by_hash(url, h):
                            success = False

                    if success:
                        print time.strftime("%H:%M:%S") + " Verifying inclusion for " + str(len(entries)) + " new entries in " + url + " ...OK"
                    else:
                        print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
                        errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
        except:
            print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
            errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)

def check_domain(raw_entry, log=None):
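    # Extract certificate information from a raw log entry, tagging it with
    # the log it came from; returns None if the entry holds no certificate.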
    orig_entry = extract_original_entry(raw_entry)
    try:
        cert_info = my_get_cert_info(orig_entry[0][0])
        if log:
            cert_info["log"] = log[8:-1] # strip generic URL stuff
        return cert_info
    except IndexError:
        return None

def verify_subtree(sth, subtree, base_url):
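    # Reduce a copy of the accumulated subtree to its root hash and compare it
    # against the root hash in the given STH.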
    try:
        tmp = deepcopy(subtree)
        root = base64.b64encode(reduce_subtree_to_root(tmp)[0])

        if root == sth["sha256_root_hash"]:
            # print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
            return True
        else:
            print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " \
                + sth["sha256_root_hash"] + ", Tree root: " + root
            return False
    except:
        print time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url
        return False

def verify_inclusion_by_hash(base_url, leaf_hash):
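    # Fetch an inclusion proof for the given leaf hash against the log's
    # current STH and verify that it reproduces the STH root hash.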
    try: 
        tmp_sth = get_sth(base_url)
        proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"])

        decoded_inclusion_proof = []
        for item in proof["audit_path"]:
            decoded_inclusion_proof.append(base64.b64decode(item))
        
        root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash))

        if tmp_sth["sha256_root_hash"] == root:
            return True
        else:
            print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url
            return False
    except:
        print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url
        return False

def verify_inclusion_by_index(base_url, index):
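    # Fetch the entry and its inclusion proof by index and verify that the
    # proof reproduces the root hash of the log's current STH.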
    try: 
        tmp_sth = get_sth(base_url)
        proof = get_proof_by_index(base_url, index, tmp_sth["tree_size"])

        decoded_inclusion_proof = []
        for item in proof["audit_path"]:
            decoded_inclusion_proof.append(base64.b64decode(item))

        root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, index, tmp_sth["tree_size"], get_leaf_hash(base64.b64decode(proof["leaf_input"]))))

        if tmp_sth["sha256_root_hash"] == root:
            print time.strftime('%H:%M:%S') + " Verifying inclusion for entry " + str(index) + " in " + base_url + "...OK."
        else:
            print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url
            errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url)
    except:
        print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url
        errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url)

def get_proof_by_index(baseurl, index, tree_size):
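    # GET ct/v1/get-entry-and-proof for one entry; exits on HTTP errors.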
    try:
        params = urllib.urlencode({"leaf_index":index,
                                   "tree_size":tree_size})
        result = \
          urlopen(baseurl + "ct/v1/get-entry-and-proof?" + params).read()
        return json.loads(result)
    except urllib2.HTTPError, e:
        print "ERROR:", e.read()
        sys.exit(0)

def get_all_roots(base_url):
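    # GET ct/v1/get-roots and return the list of base64-encoded certificates.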
    result = urlopen(base_url + "ct/v1/get-roots").read()
    certs = json.loads(result)["certificates"]
    print time.strftime('%H:%M:%S') + " Received " + str(len(certs)) + " certs from " + base_url
    return certs


def setup_domain_monitoring():
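    # Restore monitored domains from DOMAINS_FILE (if present) and add any
    # domains listed in MONITORED_DOMAINS that are not in the file yet.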
    monitored_domains = []
    try:
        with open(DOMAINS_FILE) as fp:
            for line in fp:
                tmp = json.loads(line)
                for domain in tmp:
                    md = monitored_domain(domain["url"])
                    md.load_entries(domain["entries"])
                    monitored_domains.append(md)
    except IOError:
        pass

    for md in MONITORED_DOMAINS: 
        tmp = monitored_domain(md)
        if tmp not in monitored_domains:
            print "New domain (not in file) " + md
            tmp.set()
            monitored_domains.append(tmp)
    return monitored_domains

def main(args):
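    # Set up the monitored domains and one ctlog object per configured log,
    # restore any saved state, catch up on missing entries, then loop:
    # every INTERVAL seconds fetch a new STH and, if it changed, verify
    # progress and consistency and fetch the new entries. State is saved
    # on exit (KeyboardInterrupt) or on an unexpected error.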
    global DB
    monitored_domains = setup_domain_monitoring()

    # Create logs
    logs = []
    try:
        for item in ctlogs:
            logs.append(ctlog(item["name"], item["url"], item["key"], item["id"]))

        print time.strftime('%H:%M:%S') + " Setting up monitor for " + str(len(logs)) + " logs..."
        # Set up state 
        for log in logs:
            if os.path.isfile(log.savefile):
                log.load()
        # Build what was not loaded
        # try:
        for log in logs:
            log.incremental_build()

        # Main loop: Monitor
        print time.strftime('%H:%M:%S') + " Running... (see logfiles for output)"
        while True:
            time.sleep(INTERVAL)
            for log in logs:
                old_sth = log.sth
                log.update_sth() # Should this be done if later checks fail? (reorder?)
                if old_sth["timestamp"] != log.sth["timestamp"]:
                    log.verify_progress(old_sth)
                    log.verify_consistency(old_sth)
                    log.incremental_build()

            for md in monitored_domains:
                md.update()

    # Normal exit of the program
    except KeyboardInterrupt:
        print time.strftime('%H:%M:%S') + ' Received interrupt from user. Saving and exiting....'
        for log in logs:
            log.save()

        # Save info about monitored domains
        domain_dict = []
        for md in monitored_domains:
            domain_dict.append(md.to_dict())
        with open(DOMAINS_FILE, 'w') as f:
            f.write(json.dumps(domain_dict))

    # Something went horribly wrong!
    except Exception, err:
        print "ERROR:", err
        for log in logs:
            log.save()

        # Save info about monitored domains
        domain_dict = []
        for md in monitored_domains:
            domain_dict.append(md.to_dict())
        with open(DOMAINS_FILE, 'w') as f:
            f.write(json.dumps(domain_dict))



if __name__ == '__main__':
    main(parser.parse_args())