-rwxr-xr-x  monitor/josef_experimental.py                             38
-rwxr-xr-x  monitor/josef_leveldb.py                                 175
-rwxr-xr-x  monitor/josef_monitor.py                                  94
-rwxr-xr-x  monitor/josef_reader.py                                   67
-rw-r--r--  monitor/monitor.cfg                                        7
-rw-r--r--  monitor/monitor_conf.py (renamed from monitor/logs.py)    20
-rwxr-xr-x  tools/josef_experimental.py                              179
7 files changed, 333 insertions, 247 deletions
diff --git a/monitor/josef_experimental.py b/monitor/josef_experimental.py
new file mode 100755
index 0000000..10d48bb
--- /dev/null
+++ b/monitor/josef_experimental.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import sys
+from josef_lib import *
+import leveldb
+from josef_leveldb import *
+
+SEP = ";"
+
+
+db = db_open()
+res = db_lookup_domain(db, "*.google.com")
+print res
+print "Found " + str(len(res)) + " results"
+# print db.Get("wush.net")
+# print db.Get("wush.net")
+
+# f = open("output/cert_data.json")
+# max_count = 1
+# for line in f:
+# # print max_count
+# # try:
+# tmp = json.loads(line)
+# # print tmp
+# # d = tmp["subject"].split("CN=")[1]
+# db_add_cert(tmp)
+# # print d
+
+# max_count -= 1
+# if max_count == 0:
+# break
+
+
+
+
+
+
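Review note: the experimental script above is the consumer side of the new LevelDB index. One practical caveat, since LevelDB only allows a single process to hold a database: opening ./cert_db while josef_monitor.py is running will fail with a lock error. A defensive variant of the same lookup, as a sketch assuming the py-leveldb binding used by this commit:

#!/usr/bin/python
# Sketch: same lookup as josef_experimental.py, but tolerating a locked DB.
# Assumes the py-leveldb binding; LevelDBError is raised when another
# process (e.g. a running josef_monitor.py) holds the ./cert_db lock.
import sys
import leveldb
from josef_leveldb import db_open, db_lookup_domain

try:
    db = db_open('./cert_db')
except leveldb.LevelDBError, e:
    print "ERROR: could not open cert_db:", str(e)
    sys.exit(1)

res = db_lookup_domain(db, "*.google.com")
print "Found " + str(len(res)) + " results"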
diff --git a/monitor/josef_leveldb.py b/monitor/josef_leveldb.py
new file mode 100755
index 0000000..e985e8d
--- /dev/null
+++ b/monitor/josef_leveldb.py
@@ -0,0 +1,175 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import sys
+from josef_lib import *
+import leveldb
+
+SEP = ";"
+# db = None
+
+
+def match_domain(d1, d2):
+ # Exact match
+ if d1 == d2:
+ return True
+
+ # Wildcard match
+ d1l = d1.split('.')
+ d2l = d2.split('.')
+
+ if d1l[0] == '*':
+ # print d1l[1:], d2l[-(len(d1l)-1):]
+ if d1l[1:] == d2l[-(len(d1l)-1):]:
+ return True
+
+ if d2l[0] == '*':
+ # print d2l[1:], d1l[-(len(d2l)-1):]
+ if d2l[1:] == d1l[-(len(d2l)-1):]:
+ return True
+
+ # No match
+ return False
+
+
+def db_open(fn='./cert_db'):
+ db = leveldb.LevelDB(fn)
+ return db
+
+def db_append(db, key, val):
+ if db is None:
+ print "ERROR: NO DATABASE SET!"
+ return
+
+ try:
+ tmp = db.Get(key)
+ except KeyError:
+ tmp = ""
+ tmpl = tmp.split(SEP)
+ if val in tmpl:
+ pass
+ else:
+ tmpl.append(val)
+ db.Put(key,SEP.join(tmpl))
+
+def db_add_domain(db, domain, data):
+ if db is None:
+ print "ERROR: NO DATABASE SET!"
+ return
+
+ tmpl = domain.split('.')
+ k = ""
+ for item in reversed(tmpl):
+ next_k = item + '.' + k
+ if k != "":
+ db_append(db, k[:-1], next_k[:-1])
+ k = next_k
+ db.Delete(k[:-1])
+ db_append(db, k[:-1], data)
+
+
+def db_add_certs(db, data):
+ if db is None:
+ print "ERROR: NO DATABASE SET!"
+ return
+ # print data, type(data)
+ for cert in data:
+ try:
+ db_add_domain(db, cert["subject"].split("CN=")[1], str(cert))
+ except:
+ # print "Failed adding Subject in " + str(cert)
+ pass
+ try:
+ for line in cert["SAN"].split("DNS:")[1:]:
+ db_add_domain(db, line, str(cert))
+ except:
+ # print "Failed adding SAN in " + str(cert)
+ pass
+
+
+
+def db_lookup_domain(db, domain):
+ domain_list = domain.split('.')
+ res = []
+
+ cur_domain = domain_list.pop()
+ intermediate = db.Get(cur_domain).split(SEP)
+
+ while True:
+ try:
+ cur_domain = domain_list.pop() + "." + cur_domain
+ except IndexError:
+ return res
+ # Prune
+ next_level = []
+ for item in intermediate:
+ if match_domain(cur_domain, item):
+ # print item
+ try:
+ tmp = db.Get(item)
+ if tmp[1] == '{':
+ res.append(tmp[1:-1])
+ next_level += tmp.split(SEP)
+ except KeyError:
+ # print "Could not find " + item
+ pass
+
+ else:
+ intermediate.remove(item)
+ intermediate = next_level
+ try:
+ intermediate.remove("")
+ except ValueError:
+ pass
+
+ return res
+
+
+# db_open()
+# # print db_lookup_domain("*.cox.com")
+# print db.Get("wush.net")
+
+# f = open("output/cert_data.json")
+# max_count = 1
+# for line in f:
+# # print max_count
+# # try:
+# tmp = json.loads(line)
+# # print tmp
+# # d = tmp["subject"].split("CN=")[1]
+# db_add_cert(tmp)
+# # print d
+
+# max_count -= 1
+# if max_count == 0:
+# break
+ # except:
+ # pass
+
+ # tmp_res = ""
+ # # print domain_list
+ # # print tmp_res[:-1]
+ # last = False
+
+ # for i in range(3):
+ # try:
+ # except:
+ # last = True
+ # new_res_list = []
+ # print len(tmp_res_list)
+ # print tmp_res
+ # for item in tmp_res_list:
+ # if not last:
+ # if match_domain(tmp_res, item):
+ # new_res_list.append(item)
+ # else:
+ # res.append(item)
+ # # print item
+ # tmp_res_list = new_res_list
+ # return res
+
+
+
+
+
+
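Review note: db_add_domain lays domains out as a suffix tree in flat key space. Adding www.google.com writes a child pointer "com" → "google.com", another "google.com" → "www.google.com", and finally the record itself under the full name; the leaf key is deleted first, so it only ever holds the most recent record. A minimal in-memory sketch of the same layout, with a plain dict standing in for LevelDB:

# Sketch: the suffix-tree key layout of db_add_domain, on a plain dict.
SEP = ";"
store = {}  # stands in for the LevelDB handle

def append(key, val):
    vals = store.get(key, "").split(SEP)
    if val not in vals:
        vals.append(val)
        store[key] = SEP.join(vals)  # values always start with SEP

def add_domain(domain, data):
    key = ""
    for label in reversed(domain.split('.')):
        next_key = label + '.' + key
        if key != "":
            append(key[:-1], next_key[:-1])  # parent key lists its children
        key = next_key
    store.pop(key[:-1], None)  # leaf key keeps only the latest record
    append(key[:-1], data)

add_domain("www.google.com", "{'subject': 'CN=www.google.com'}")
print store["com"]             # ;google.com
print store["google.com"]      # ;www.google.com
print store["www.google.com"]  # ;{'subject': 'CN=www.google.com'}

Every stored value starts with SEP because db_append seeds the list from the empty string; that leading ';' is what lets the tmp[1] == '{' test in db_lookup_domain distinguish a leaf record from a list of children. Note also that db_lookup_domain removes items from intermediate while iterating over it, which skips elements; pruning into a fresh list would be safer.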
diff --git a/monitor/josef_monitor.py b/monitor/josef_monitor.py
index 4fb99ee..2812c37 100755
--- a/monitor/josef_monitor.py
+++ b/monitor/josef_monitor.py
@@ -8,23 +8,19 @@ import argparse
import errno
from copy import deepcopy
from josef_lib import *
-from logs import ctlogs
+from josef_leveldb import db_add_certs, db_open
import os.path
-# NAGIOS_OK = 0
-# NAGIOS_WARN = 1
-# NAGIOS_CRIT = 2
-# NAGIOS_UNKNOWN = 3
+# Import from config file
+if os.path.isfile("monitor_conf.py"):
+ from monitor_conf import ctlogs, OUTPUT_DIR, INTERVAL, DEFAULT_CERT_FILE, DB_PATH
+else:
+ print "Config file not found!"
+ sys.exit()
-INTERVAL = 30 # interval (in seconds) between updates
-
-OUTPUT_DIR = "output/"
-DEFAULT_CERT_FILE = OUTPUT_DIR + "cert_data.json"
+DB = None
parser = argparse.ArgumentParser(description="")
-# TODO implement silent mode
-# parser.add_argument('--silent', action='store_true', help="Dont output to stdout. logging only")
-
class ctlog:
def __init__(self, name, url, key):
@@ -43,10 +39,14 @@ class ctlog:
def incremental_build(self):
# Keeps state current during build, partial builds are possible.
self.sth = get_sth(self.url)
- self.log("Building....")
+ # self.log("Building....")
start_size = self.entries
while self.entries < self.sth["tree_size"]:
+ tmp_size = self.entries
self.subtree, self.entries = fetch_and_increment_subtree(self.entries, self.sth["tree_size"] -1, self.url, self.subtree)
+ if tmp_size != self.entries:
+ self.log("Got entries " + str(tmp_size) + " to " \
+ + str(self.entries -1 ) + " of " + str(self.sth["tree_size"]-1))
if self.entries != start_size:
if verify_subtree(self.sth, self.subtree, self.url):
@@ -54,8 +54,8 @@ class ctlog:
" new entries. Size: " + str(self.entries))
else:
self.log("ERROR Failed to build tree from entries.")
- else:
- self.log("No new entries.")
+ # else:
+ # self.log("No new entries.")
@@ -85,7 +85,8 @@ class ctlog:
raise e
def log(self, string):
- s = time.strftime('%H:%M:%S') + " " + string
+ # TODO change to UTC?
+ s = time.strftime('%Y-%m-%d, %H:%M:%S') + " " + string
with open(self.logfile, 'a') as f:
f.write(s + "\n")
f.close()
@@ -140,28 +141,35 @@ class ctlog:
print s
-def verify_consistency(old, new):
- for url in old:
+ def verify_consistency(self, old):
+ new = self.sth
+ # for url in old:
try:
- if old[url] and new[url] and old[url]["tree_size"]!= new[url]["tree_size"]:
- consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"])
+ if old["tree_size"]!= new["tree_size"]:
+ consistency_proof = get_consistency_proof(self.url, old["tree_size"], new["tree_size"])
decoded_consistency_proof = []
for item in consistency_proof:
decoded_consistency_proof.append(base64.b64decode(item))
- res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], old[url]["sha256_root_hash"])
+ res = verify_consistency_proof(decoded_consistency_proof, old["tree_size"], new["tree_size"], old["sha256_root_hash"])
- if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])):
- print time.strftime('%H:%M:%S') + " Verification of old hash failed! " + old[url]["sha256_root_hash"], str(base64.b64encode(res[0]))
- errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + old[url]["tree_size"])
- elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])):
- print time.strftime('%H:%M:%S') + " Verification of new hash failed! " + new[url]["sha256_root_hash"], str(base64.b64encode(res[1]))
- errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + new[url]["tree_size"])
- else:
- print time.strftime("%H:%M:%S") + " New STH from " + url + ", timestamp: " + \
- str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK."
+ if old["sha256_root_hash"] != str(base64.b64encode(res[0])):
+ s = " Verification of old hash failed! " + \
+ old["sha256_root_hash"], str(base64.b64encode(res[0]))
+ self.log(s)
+ print s
+ elif new["sha256_root_hash"] != str(base64.b64encode(res[1])):
+ s = " Verification of new hash failed! " + \
+ new["sha256_root_hash"], str(base64.b64encode(res[1]))
+ self.log(s)
+ print s
+ # else:
+ # s = "New STH, timestamp: " + str(new["timestamp"]) + \
+ # ", size: " + str(new["tree_size"]) + "...OK."
+ # self.log(s)
except:
- print "ERROR: Could not verify consistency for " + url
+ self.log("ERROR: Could not verify consistency!")
+ print "ERROR: Could not verify consistency for " + self.url
def verify_inclusion_all(old, new):
for url in old:
@@ -197,6 +205,7 @@ def check_domain(raw_entry, log=None):
return cert_info
def fetch_and_increment_subtree(first, last, url, subtree =[[]]):
+ global DB
# try:
new_leafs = []
if first <= last:
@@ -205,10 +214,11 @@ def fetch_and_increment_subtree(first, last, url, subtree =[[]]):
for item in entries:
tmp_cert_data.append(check_domain(item, url))
new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
- append_file(DEFAULT_CERT_FILE, tmp_cert_data)
- print time.strftime('%H:%M:%S') + " Got entries " + str(first) + " to " \
- + str(first + len(new_leafs) -1 ) + " of " + str(last) +" entries from " + url
-
+ if DEFAULT_CERT_FILE:
+ if DB is None:
+ append_file(DEFAULT_CERT_FILE, tmp_cert_data)
+ else:
+ db_add_certs(DB, tmp_cert_data)
subtree = reduce_tree(new_leafs, subtree)
# except:
# print "Failed to build subtree :("
@@ -220,7 +230,7 @@ def verify_subtree(sth, subtree, base_url):
root = base64.b64encode(reduce_subtree_to_root(tmp)[0])
if root == sth["sha256_root_hash"]:
- print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
+ # print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
return True
else:
print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " \
@@ -297,14 +307,16 @@ def get_all_roots(base_url):
def main(args):
- # TODO cleanup files
-
+ global DB
# Create logs
logs = []
+ if DB_PATH:
+ DB = db_open(DB_PATH)
try:
for item in ctlogs:
logs.append(ctlog(item, ctlogs[item][0], ctlogs[item][1]))
+ print time.strftime('%H:%M:%S') + " Setting up monitor for " + str(len(logs)) + " logs..."
# Set up state
for log in logs:
if os.path.isfile(log.savefile):
@@ -314,6 +326,7 @@ def main(args):
log.incremental_build()
# Main loop: Monitor
+ print time.strftime('%H:%M:%S') + " Running... (see logfiles for output)"
while True:
time.sleep(INTERVAL)
for log in logs:
@@ -321,9 +334,8 @@ def main(args):
log.update_sth() # Should this be done if later checks fail? (reorder?)
if old_sth["timestamp"] != log.sth["timestamp"]:
log.verify_progress(old_sth)
+ log.verify_consistency(old_sth)
log.incremental_build()
- # TODO check consistency proof
- pass
# Unreachable... usually.
for log in logs:
@@ -331,7 +343,7 @@ def main(args):
except KeyboardInterrupt:
- print 'Received interrupt from user. Saving and exiting....'
+ print time.strftime('%H:%M:%S') + ' Received interrupt from user. Saving and exiting....'
for log in logs:
log.save()
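Review notes on josef_monitor.py: in the new per-log verify_consistency, both failure branches build s with a comma after the concatenation, so s becomes a tuple (the comma binds looser than +) and the log line is a tuple repr. A self-contained sketch of the intended construction, with hypothetical stand-in values for the STH field and proof roots:

# Sketch: the failure message built as one string. The committed version,
#   s = "..." + old["sha256_root_hash"], str(base64.b64encode(res[0]))
# binds s to a (string, string) tuple instead.
import base64

old = {"sha256_root_hash": "expected=="}  # hypothetical STH field
res = ["\x00" * 32, "\x01" * 32]          # hypothetical computed roots
s = (" Verification of old hash failed! " + old["sha256_root_hash"] +
     " " + str(base64.b64encode(res[0])))
print s

Separately, fetch_and_increment_subtree now gates both sinks behind if DEFAULT_CERT_FILE:, so setting DEFAULT_CERT_FILE = None in monitor_conf.py silently disables the database path as well; testing the two sinks independently would match the config file's "set to None to disable" intent.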
diff --git a/monitor/josef_reader.py b/monitor/josef_reader.py
index bf415f7..c2653c1 100755
--- a/monitor/josef_reader.py
+++ b/monitor/josef_reader.py
@@ -2,13 +2,15 @@
# -*- coding: utf-8 -*-
import sys
-from certtools import *
+from josef_lib import *
import argparse
+from datetime import datetime as dt
parser = argparse.ArgumentParser(description="")
parser.add_argument('--domain', default=None, help="RTFM")
+parser.add_argument('--log', default=None, help="RTFM")
parser.add_argument('--exclude-expired', action='store_true', help="RTFM")
args = parser.parse_args()
@@ -23,12 +25,16 @@ monitored_domains = [
-# data = []
-f = open("plausible_cert_data.json")
+cur_time = dt.now()
+count_valid = 0
+count_all = 0
+f = open("output/cert_data.json")
for line in f:
tmp = json.loads(line)
try:
success = True
+ not_after_time = dt.strptime(tmp["not_after"], "%b %d %H:%M:%S %Y GMT")
+ not_before_time = dt.strptime(tmp["not_before"], "%b %d %H:%M:%S %Y GMT")
if args.domain:
if args.domain in tmp["subject"].split("CN=")[1] or \
@@ -36,25 +42,52 @@ for line in f:
pass
else:
success = False
+ else:
+ print "No domain selected!"
+ sys.exit()
- if args.exclude_expired:
- print "EXCLUDE EXPIRED NOT IMPLEMENTED YET"
+ if args.log:
+ if args.log in tmp["log"]:
+ pass
+ else:
+ success = False
+ if cur_time > not_after_time:
+ expired = True
+ elif cur_time < not_before_time:
+ expired = True
+ else:
+ expired = False
+
+ # Exclude expired
+ if args.exclude_expired and expired:
+ success = False
+
+
+ # Set count matches
+ if success:
+ count_all += 1
+ if not expired:
+ count_valid += 1
+ # Print matching
if success:
- print tmp["subject"].split("CN=")[1] + " certified by " + tmp["issuer"].split("CN=")[1]
+ s = tmp["subject"].split("CN=")[1] + \
+ " certified by " + tmp["issuer"].split("CN=")[1] + \
+ " (" + tmp["log"] + ") "
+ if expired:
+ print "(NOT VALID) " + s
+ else:
+ print "(VALID) " + s
+
+
+
except:
pass
f.close()
-
-# for item in data[10000:]:
-# try:
-# s = item["subject"].split("CN=")[1]
-# print "\n" + s
-# print item["SAN"]
-# except:
-# pass
-
-# print "\nTotal entries: " + str(len(data))
-
+print str(count_all) + " matches found."
+# if count_valid == 0:
+# print "No matching certificates found."
+# else:
+# print str(count_valid) + " of " + str(count_all) + " certs valid. (" + str(int(float(count_valid)/float(count_all)*100)) + "%)"
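Review note: the reader's new expiry check parses OpenSSL's validity format ("Jun  4 11:04:38 2015 GMT"). A standalone sketch of the same window test, on literal timestamps:

# Sketch: the validity window test used above.
from datetime import datetime as dt

FMT = "%b %d %H:%M:%S %Y GMT"
not_before = dt.strptime("Jun  4 11:04:38 2015 GMT", FMT)
not_after = dt.strptime("Sep  2 11:04:38 2015 GMT", FMT)

now = dt.now()  # caveat: local time compared against GMT timestamps;
                # dt.utcnow() would be stricter, echoing the monitor's
                # own "TODO change to UTC?" comment
expired = now > not_after or now < not_before
if expired:
    print "(NOT VALID)"
else:
    print "(VALID)"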
diff --git a/monitor/monitor.cfg b/monitor/monitor.cfg
deleted file mode 100644
index f686809..0000000
--- a/monitor/monitor.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# CONFIG FILE FOR MONITOR
-# LOGS
-
-
-# CHECKS
-
-# OUTPUT
diff --git a/monitor/logs.py b/monitor/monitor_conf.py
index 2925017..69fc5ba 100644
--- a/monitor/logs.py
+++ b/monitor/monitor_conf.py
@@ -1,12 +1,26 @@
+# All configuration for the CT monitor is done from this file!
+# interval (in seconds) between updates
+INTERVAL = 30
+
+# Directories for various output files
+OUTPUT_DIR = "output/"
+
+# Output file for certificate data.
+# Set to None to disable
+DEFAULT_CERT_FILE = OUTPUT_DIR + "cert_data.json"
+
+DB_PATH = './cert_db'
+
+# CT logs and associated keys
ctlogs = {
# "pilot":
# ["https://ct.googleapis.com/pilot/",
# "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfahLEimAoz2t01p3uMziiLOl/fHTDM0YDOhBRuiBARsV4UvxG2LdNgoIGLrtCzWE0J5APC2em4JlvR8EEEFMoA=="],
- # "plausible":
- # ["https://plausible.ct.nordu.net/",
- # "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9UV9+jO2MCTzkabodO2F7LM03MUBc8MrdAtkcW6v6GA9taTTw9QJqofm0BbdAsbtJL/unyEf0zIkRgXjjzaYqQ=="],
+ "plausible":
+ ["https://plausible.ct.nordu.net/",
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9UV9+jO2MCTzkabodO2F7LM03MUBc8MrdAtkcW6v6GA9taTTw9QJqofm0BbdAsbtJL/unyEf0zIkRgXjjzaYqQ=="],
"digicert":
["https://ct1.digicert-ct.com/log/",
diff --git a/tools/josef_experimental.py b/tools/josef_experimental.py
deleted file mode 100755
index 7f27945..0000000
--- a/tools/josef_experimental.py
+++ /dev/null
@@ -1,179 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-import time
-import base64
-import urllib
-import urllib2
-import sys
-import subprocess
-
-# from pympler.asizeof import asizeof
-from certtools import *
-from Crypto.Signature import PKCS1_v1_5
-
-def reduce_leafs_to_root(layer0):
- if len(layer0) == 0:
- return [[hashlib.sha256().digest()]]
- current_layer = layer0
- while len(current_layer) > 1:
- current_layer = next_merkle_layer(current_layer)
- return current_layer
-
-def reduce_layer(layer):
- new_layer = []
- while len(layer) > 1:
- e1 = layer.pop(0)
- e2 = layer.pop(0)
- new_layer.append(internal_hash((e1,e2)))
- return new_layer
-
-def reduce_tree(entries, layers):
- if len(entries) == 0 and layers is []:
- return [[hashlib.sha256().digest()]]
-
- layer_idx = 0
- layers[layer_idx] += entries
-
- while len(layers[layer_idx]) > 1:
- if len(layers) == layer_idx + 1:
- layers.append([])
-
- layers[layer_idx + 1] += reduce_layer(layers[layer_idx])
- layer_idx += 1
- return layers
-
-def reduce_subtree_to_root(layers):
- while len(layers) > 1:
- layers[1] += next_merkle_layer(layers[0])
- del layers[0]
-
- if len(layers[0]) > 1:
- return next_merkle_layer(layers[0])
- return layers[0]
-
-def get_proof_by_index(baseurl, index, tree_size):
- try:
- params = urllib.urlencode({"leaf_index":index,
- "tree_size":tree_size})
- result = \
- urlopen(baseurl + "ct/v1/get-entry-and-proof?" + params).read()
- return json.loads(result)
- except urllib2.HTTPError, e:
- print "ERROR:", e.read()
- sys.exit(1)
-
-def my_get_cert_info(s):
- p = subprocess.Popen(
- ["openssl", "x509", "-fingerprint", "-text", "-noout", "-inform", "der"],
- # ["openssl", "x509", "-noout", "-subject", "-issuer", "-inform", "der"],
- stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- parsed = p.communicate(s)
- if parsed[1]:
- print "ERROR:", parsed[1]
- sys.exit(1)
- # result = []
- result = {}
- prev = ""
- for line in parsed[0].split("\n"):
- if "Subject:" in line:
- result["subject"] = line.split("Subject: ")[1]
- # print line.split("Subject: ")[1]
- if "Issuer:" in line:
- result["issuer"] = line.split("Issuer: ")[1]
- # print line.split("Issuer: ")[1]
- if "Subject Alternative Name" in prev:
- result["SAN"] = line.lstrip()
- # print line.lstrip()
- if "Not After" in line:
- result["not_after"] = line.split(": ")[1]
- if "Not Before" in line:
- result["not_before"] = line.split(": ")[1]
- prev = line
- return result
-
-def read_sth(fn):
- try:
- f = open(fn)
- except IOError, e:
- if e.errno == errno.ENOENT:
- return None
- raise e
- return json.loads(f.read())
-
-base_urls = [
- "https://plausible.ct.nordu.net/",
- # "https://ct1.digicert-ct.com/log/",
- # "https://ct.izenpe.com/",
- # "https://log.certly.io/",
- # "https://ctlog.api.venafi.com/",
- # "https://ct.googleapis.com/aviator/",
- # "https://ct.googleapis.com/pilot/",
- # "https://ct.googleapis.com/rocketeer/",
- # "https://ct.ws.symantec.com/",
- ]
-
-logkeys = {}
-logkeys["https://plausible.ct.nordu.net/"] = get_public_key_from_file("../../plausible-logkey.pem")
-logkeys["https://ct.googleapis.com/rocketeer/"] = get_public_key_from_file("../../rocketeer-logkey.pem")
-logkeys["https://ct.googleapis.com/aviator/"] = get_public_key_from_file("../../aviator-logkey.pem")
-logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pilot-logkey.pem")
-logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem")
-logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem")
-logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem")
-logkeys["https://ctlog.api.venafi.com/"] = get_public_key_from_file("../../venafi-logkey.pem")
-
-
-import Crypto.PublicKey.RSA as RSA
-from Crypto.Hash import SHA256
-
-monitored_domains = [
- "google.com",
- "preishelden.de",
- "liu.se",
- "nordu.net",
- "symantec.com",
-]
-
-data = []
-# data = read_sth("cert_data.json")
-f = open("cert_data.json")
-for line in f:
- data.append(json.loads(line))
-ss = []
-for item in data:
- try:
- s = item["subject"].split("CN=")[1]
- print s
- except:
- # if not item["subject"] in ss:
- # print item["subject"]
- # ss.append(item["subject"])
- pass
-
-print "\nTotal entries: " + str(len(data))
-
-# base_url = base_urls[0]
-
-# entries = get_entries(base_url, 11, 11)["entries"]
-# for item in entries:
-# orig_entry = extract_original_entry(item)
-# cert_info = my_get_cert_info(orig_entry[0][0])
- # prev = ""
- # res = {}
- # for line in cert_info:
- # if "Subject:" in line:
- # res["subject"] = line.split("Subject: ")[1]
- # # print line.split("Subject: ")[1]
- # if "Issuer:" in line:
- # res["issuer"] = line.split("Issuer: ")[1]
- # # print line.split("Issuer: ")[1]
- # if "Subject Alternative Name" in prev:
- # res["SAN"] = line.lstrip()
- # # print line.lstrip()
- # if "Not After" in line:
- # res["not_after"] = line.split(": ")[1]
-
- # prev = line
- # print cert_info
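Review note: the deleted scratch file duplicated Merkle-tree helpers (reduce_layer, reduce_tree, reduce_subtree_to_root) that the monitor now takes from josef_lib. The core step, reduce_layer, pairs adjacent nodes left to right and hashes each pair into the next layer. A standalone sketch, assuming the RFC 6962 hashing rules (0x00-prefixed leaf hashes, 0x01-prefixed interior hashes) that a certtools-style internal_hash implements:

# Sketch of the deleted reduce_layer, with RFC 6962-style hashing assumed:
# leaf hash = SHA-256(0x00 || data), interior = SHA-256(0x01 || left || right).
import hashlib

def internal_hash((left, right)):  # py2 tuple parameter, as called in the codebase
    return hashlib.sha256("\x01" + left + right).digest()

def reduce_layer(layer):
    # Pops pairs left to right; an odd trailing node stays in `layer`
    # for the caller (reduce_tree) to carry up to the next layer.
    new_layer = []
    while len(layer) > 1:
        e1 = layer.pop(0)
        e2 = layer.pop(0)
        new_layer.append(internal_hash((e1, e2)))
    return new_layer

leaves = [hashlib.sha256("\x00" + c).digest() for c in "abcd"]
print len(reduce_layer(leaves))  # 2 interior nodes from 4 leaves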