# monitor/josef_experimental.py
#!/usr/bin/python
# -*- coding: utf-8 -*-     

import sys
from josef_lib import *
import leveldb
import argparse
import json
from josef_leveldb import *
from datetime import datetime as dt



# parser = argparse.ArgumentParser(description="")
# parser.add_argument('--domain', default=None, help="RTFM")
# parser.add_argument('--log', default=None, help="RTFM")
# parser.add_argument('--exclude-expired', action='store_true', help="RTFM")

# args = parser.parse_args()

# Domains this experimental monitor is intended to watch.
# NOTE(review): not referenced by any live code in this file — only the
# commented-out lookup logic below would plausibly consume it; confirm
# before removing.
monitored_domains = [
    "google.com",
    "preishelden.de",
    "liu.se",
    "nordu.net",
    "symantec.com",
]


db = db_open("./tmpdatabase")

db_add_domain(db, "www.cox.a.com", "{dummydata}")
print db_lookup_domain(db, "www.cox.a.com")
db_add_domain(db, "www.cox.b.com", "{dummydata}")
print db_lookup_domain(db, "www.cox.b.com")
db_add_domain(db, "www.cox.a.com", "{dummydata3}")
print db_lookup_domain(db, "www.cox.a.com")
# print db.Get("com")
# print db.Get("a.com")
# print db.Get("cox.a.com")
# print db.Get("www.cox.a.com")


# if args.domain:
#     db = db_open()
#     raw = db_lookup_domain(db, args.domain)
# else:
#     print "No domain selected!"
#     sys.exit()

# cur_time = dt.now()
# count_valid = 0
# count_all = 0
# for item in raw:
#     # print item + '}', type(item)
#     try:
#         entry = json.loads((item + '}').replace("'", '"'))
#     except:
#         print (item + '}').replace("'", '"')
#     # print entry, type(entry)
#     success = True
#     not_after_time = dt.strptime(entry["not_after"], "%b %d %H:%M:%S %Y GMT")
#     not_before_time = dt.strptime(entry["not_before"], "%b %d %H:%M:%S %Y GMT")


#     if args.log:
#         if args.log in entry["log"]:
#             pass
#         else:
#             success = False

#     if cur_time > not_after_time:
#         expired = True
#     elif cur_time < not_before_time:
#         expired = True
#     else:
#         expired = False

#     # Exclude expired
#     if args.exclude_expired and expired:
#         success = False
        
    
#     # Set count matches
#     if success:
#         count_all += 1
#         if not expired:
#             count_valid += 1

#     # Print matching
#     if success:
#         s = entry["subject"].split("CN=")[1] + \
#         " certified by " + entry["issuer"].split("CN=")[1] + \
#         " (" + entry["log"] + ") "
#         if expired:
#             print "(NOT VALID) " + s
#         else:
#             print "(VALID) " + s


# print str(count_all) + " matches found."


# print res
# print "Found " + str(len(res)) + " results"
# print db.Get("wush.net")
# print db.Get("wush.net")

# f = open("output/cert_data.json")
# max_count  = 1
# for line in f:
#     # print max_count
#     # try:
#     tmp = json.loads(line)
#     # print tmp
#     # d  = tmp["subject"].split("CN=")[1]
#     db_add_cert(tmp)
#     # print d

#     max_count -= 1
#     if max_count == 0:
#         break