summaryrefslogtreecommitdiff
path: root/tools/mergetools.py
diff options
context:
space:
mode:
authorLinus Nordberg <linus@nordu.net>2015-09-24 16:47:32 +0200
committerLinus Nordberg <linus@nordu.net>2015-09-27 13:38:30 +0200
commit38722592047855cedd4ef2701854638bd50e0467 (patch)
tree229a7e5718b0d4c9750918d654484dba354f5194 /tools/mergetools.py
parent7fd70ad913ecdb2585d50d27763d00f30a1e5a6f (diff)
Merge is now run by shell script tools/merge.
tools/merge runs merge_fetch.py, merge_backup.py, merge_sth.py and merge_dist.py sequentially. TODO: test backupquorum != 0
Diffstat (limited to 'tools/mergetools.py')
-rw-r--r--tools/mergetools.py55
1 files changed, 52 insertions, 3 deletions
diff --git a/tools/mergetools.py b/tools/mergetools.py
index 7644dac..86f9255 100644
--- a/tools/mergetools.py
+++ b/tools/mergetools.py
@@ -9,14 +9,43 @@ import struct
import urllib
import urllib2
import json
+import yaml
+import argparse
from certtools import get_leaf_hash, http_request, get_leaf_hash
def parselogrow(row):
return base64.b16decode(row, casefold=True)
-def get_logorder(filename):
- f = open(filename, "r")
- return [parselogrow(row.rstrip()) for row in f]
+def get_logorder(filename, items=-1):
+ logorder = []
+ n = 0
+ for row in open(filename, "r"):
+ if n == items:
+ break
+ logorder.append(parselogrow(row.rstrip()))
+ n += 1
+ return logorder
+
+def get_nfetched(currentsizefile, logorderfile):
+ try:
+ limit = json.loads(open(currentsizefile).read())
+ except (IOError, ValueError):
+ return -1
+ if limit['index'] >= 0:
+ with open(logorderfile, 'r') as f:
+ f.seek(limit['index']*65)
+ assert f.read(64).lower() == limit['hash']
+ return limit['index'] + 1
+
+def get_sth(filename):
+ try:
+ sth = json.loads(open(filename, 'r').read())
+ except (IOError, ValueError):
+ sth = {'tree_size': -1,
+ 'timestamp': 0,
+ 'sha256_root_hash': '',
+ 'tree_head_signature': ''}
+ return sth
def read_chain_open(chainsdir, filename):
path = chainsdir + "/" + \
@@ -104,6 +133,9 @@ def verify_entry(verifycert, entry, ehash):
def hexencode(key):
return base64.b16encode(key).lower()
+def hexdecode(s):
+ return base64.b16decode(s.upper())
+
def write_chain(key, value, chainsdir, hashed_dir=True):
filename = hexencode(key)
if hashed_dir:
@@ -356,3 +388,20 @@ def get_missingentriesforbackup(node, baseurl, own_key, paths):
def chunks(l, n):
return [l[i:i+n] for i in range(0, len(l), n)]
+
+def parse_args():
+ parser = argparse.ArgumentParser(description="")
+ parser.add_argument('--config', help="System configuration",
+ required=True)
+ parser.add_argument('--localconfig', help="Local configuration",
+ required=True)
+ parser.add_argument('--interval', type=int, metavar="n",
+ help="Repeate every N seconds")
+ parser.add_argument("--timing", action='store_true',
+ help="Print timing information")
+ args = parser.parse_args()
+
+ config = yaml.load(open(args.config))
+ localconfig = yaml.load(open(args.localconfig))
+
+ return (args, config, localconfig)