1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Kungliga Tekniska Högskolan
# (KTH Royal Institute of Technology, Stockholm, Sweden).
# See LICENSE for licensing information.
import json
import base64
import urllib
import urllib2
import sys
# Frontend nodes: receive the merged log order and the new STH.
frontendnodes = ["https://127.0.0.1:8082/"]
# Storage nodes: polled for newly submitted (not yet merged) entries.
storagenodes = ["https://127.0.0.1:8081/"]
# Directory holding one file per chain, named by the upper-case hex hash.
chainsdir = "../rel/mergedb/chains"
# Append-only file listing entry hashes (hex, one per line) in log order.
logorderfile = "../rel/mergedb/logorder"
def parselogrow(row):
    """Decode one logorder line (upper-case hex) into the raw hash bytes."""
    decoded = base64.b16decode(row)
    return decoded
def get_logorder():
    """Read the logorder file and return the list of entry hashes (raw bytes).

    Returns an empty list when the file is empty.
    """
    # "with" guarantees the handle is closed; the original leaked it.
    with open(logorderfile, "r") as f:
        return [parselogrow(line.rstrip()) for line in f]
def write_chain(key, value):
    """Store chain blob `value` in chainsdir under the hex encoding of `key`."""
    # "with" closes the file even if write() raises; the original close()
    # was skipped on an exception.
    with open(chainsdir + "/" + base64.b16encode(key), "w") as f:
        f.write(value)
def read_chain(key):
    """Return the chain blob previously stored under hash `key`.

    Raises IOError if no chain file exists for this hash.
    """
    # "with" closes the file even if read() raises; the original close()
    # was skipped on an exception.
    with open(chainsdir + "/" + base64.b16encode(key), "r") as f:
        return f.read()
def add_to_logorder(key):
    """Append the hex encoding of hash `key` as a new line to the logorder file."""
    # "with" closes the file even if write() raises; the original close()
    # was skipped on an exception.
    with open(logorderfile, "a") as f:
        f.write(base64.b16encode(key) + "\n")
def get_new_entries(baseurl):
try:
result = urllib2.urlopen(baseurl + "ct/storage/fetchnewentries").read()
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return parsed_result[u"entries"]
print "ERROR: fetchnewentries", parsed_result
sys.exit(1)
except urllib2.HTTPError, e:
print "ERROR: fetchnewentries", e.read()
sys.exit(1)
def get_curpos(baseurl):
try:
result = urllib2.urlopen(baseurl + "ct/frontend/currentposition").read()
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return parsed_result[u"position"]
print "ERROR: currentposition", parsed_result
sys.exit(1)
except urllib2.HTTPError, e:
print "ERROR: currentposition", e.read()
sys.exit(1)
def sendlog(baseurl, submission):
try:
result = urllib2.urlopen(baseurl + "ct/frontend/sendlog",
json.dumps(submission)).read()
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR: sendlog", e.read()
sys.exit(1)
except ValueError, e:
print "==== FAILED REQUEST ===="
print submission
print "======= RESPONSE ======="
print result
print "========================"
raise e
def sendsth(baseurl, submission):
try:
result = urllib2.urlopen(baseurl + "ct/frontend/sendsth",
json.dumps(submission)).read()
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR: sendsth", e.read()
sys.exit(1)
except ValueError, e:
print "==== FAILED REQUEST ===="
print submission
print "======= RESPONSE ======="
print result
print "========================"
raise e
def get_missingentries(baseurl):
try:
result = urllib2.urlopen(baseurl + "ct/frontend/missingentries").read()
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return parsed_result[u"entries"]
print "ERROR: missingentries", parsed_result
sys.exit(1)
except urllib2.HTTPError, e:
print "ERROR: missingentries", e.read()
sys.exit(1)
logorder = get_logorder()
certsinlog = set(logorder)
new_entries = [entry for storagenode in storagenodes for entry in get_new_entries(storagenode)]
for new_entry in new_entries:
hash = base64.b64decode(new_entry["hash"])
entry = base64.b64decode(new_entry["entry"])
if hash not in certsinlog:
write_chain(hash, entry)
add_to_logorder(hash)
logorder.append(hash)
certsinlog.add(hash)
print "added", base64.b16encode(hash)
for frontendnode in frontendnodes:
curpos = get_curpos(frontendnode)
entries = [base64.b64encode(entry) for entry in logorder[curpos:]]
sendlog(frontendnode, {"start": curpos, "hashes": entries})
missingentries = get_missingentries(frontendnode)
print "missing entries:", missingentries
# XXX: no test case for missing entries yet, waiting to implement
sendsth(frontendnode, {"tree_size": len(logorder)})
|