From 15d65c756fe89aca6cbcc754dc648853ca334095 Mon Sep 17 00:00:00 2001
From: Magnus Ahltorp
Date: Wed, 9 Mar 2016 06:58:08 +0100
Subject: Use python requests package instead of urllib2

---
 tools/mergetools.py | 97 +++++++++++++++++------------------------------------
 1 file changed, 30 insertions(+), 67 deletions(-)

(limited to 'tools/mergetools.py')

diff --git a/tools/mergetools.py b/tools/mergetools.py
index 3dbe517..f6e8bd5 100644
--- a/tools/mergetools.py
+++ b/tools/mergetools.py
@@ -6,11 +6,10 @@ import base64
 import hashlib
 import sys
 import struct
-import urllib
-import urllib2
 import json
 import yaml
 import argparse
+import requests
 from certtools import get_leaf_hash, http_request, get_leaf_hash
 
 def parselogrow(row):
@@ -172,18 +171,15 @@ def get_new_entries(node, baseurl, own_key, paths):
                     entry in parsed_result[u"entries"]]
         print >>sys.stderr, "ERROR: fetchnewentries", parsed_result
         sys.exit(1)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: fetchnewentries", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: fetchnewentries", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: fetchnewentries", e.response
         sys.exit(1)
 
 def get_entries(node, baseurl, own_key, paths, hashes):
     try:
-        params = urllib.urlencode({"hash":[base64.b64encode(ehash) for \
-                                           ehash in hashes]}, doseq=True)
-        result = http_request(baseurl + "plop/v1/storage/getentry?" + params,
+        params = {"hash":[base64.b64encode(ehash) for ehash in hashes]}
+        result = http_request(baseurl + "plop/v1/storage/getentry",
+                              params=params,
                               key=own_key, verifynode=node,
                               publickeydir=paths["publickeys"])
         parsed_result = json.loads(result)
@@ -196,11 +192,8 @@ def get_entries(node, baseurl, own_key, paths, hashes):
             return entries
         print >>sys.stderr, "ERROR: getentry", parsed_result
         sys.exit(1)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: getentry", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: getentry", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: getentry", e.request.url, e.response
         sys.exit(1)
 
 def get_curpos(node, baseurl, own_key, paths):
@@ -213,11 +206,8 @@ def get_curpos(node, baseurl, own_key, paths):
             return parsed_result[u"position"]
         print >>sys.stderr, "ERROR: currentposition", parsed_result
         sys.exit(1)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: currentposition", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: currentposition", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: currentposition", e.response
         sys.exit(1)
 
 def get_verifiedsize(node, baseurl, own_key, paths):
@@ -230,11 +220,8 @@ def get_verifiedsize(node, baseurl, own_key, paths):
             return parsed_result[u"size"]
         print >>sys.stderr, "ERROR: verifiedsize", parsed_result
         sys.exit(1)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: verifiedsize", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: verifiedsize", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: verifiedsize", e.response
         sys.exit(1)
 
 
@@ -244,11 +231,8 @@ def sendlog(node, baseurl, own_key, paths, submission):
                               json.dumps(submission), key=own_key,
                               verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: sendlog", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: sendlog", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: sendlog", e.response
         sys.stderr.flush()
         return None
     except ValueError, e:
@@ -266,11 +250,8 @@ def backup_sendlog(node, baseurl, own_key, paths, submission):
                               json.dumps(submission), key=own_key,
                               verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: backup_sendlog", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: backup_sendlog", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: backup_sendlog", e.response
         sys.stderr.flush()
         return None
     except ValueError, e:
@@ -290,11 +271,8 @@ def sendentry(node, baseurl, own_key, paths, entry, ehash):
                                       "treeleafhash":base64.b64encode(ehash)}),
                               key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: sendentry", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: sendentry", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: sendentry", e.reponse
         sys.exit(1)
     except ValueError, e:
         print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -316,11 +294,8 @@ def sendentries_merge(node, baseurl, own_key, paths, entries):
                               json.dumps(json_entries),
                               key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: sendentry_merge", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: sendentry_merge", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: sendentry_merge", e.response
         sys.exit(1)
     except ValueError, e:
         print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -337,8 +312,8 @@ def sendsth(node, baseurl, own_key, paths, submission):
                               json.dumps(submission), key=own_key,
                               verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: sendsth", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: sendsth", e.response
         sys.exit(1)
     except ValueError, e:
         print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -355,11 +330,8 @@ def verifyroot(node, baseurl, own_key, paths, treesize):
                               json.dumps({"tree_size":treesize}), key=own_key,
                               verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: verifyroot", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: verifyroot", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: verifyroot", e.response
         sys.exit(1)
     except ValueError, e:
         print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -376,11 +348,8 @@ def setverifiedsize(node, baseurl, own_key, paths, treesize):
                               json.dumps({"size":treesize}), key=own_key,
                               verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: setverifiedsize", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: setverifiedsize", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: setverifiedsize", e.response
         sys.exit(1)
     except ValueError, e:
         print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -401,11 +370,8 @@ def get_missingentries(node, baseurl, own_key, paths):
             return parsed_result[u"entries"]
         print >>sys.stderr, "ERROR: missingentries", parsed_result
         sys.exit(1)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: missingentries", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: missingentries", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: missingentries", e.response
         sys.exit(1)
 
 def get_missingentriesforbackup(node, baseurl, own_key, paths):
@@ -418,11 +384,8 @@ def get_missingentriesforbackup(node, baseurl, own_key, paths):
             return parsed_result[u"entries"]
         print >>sys.stderr, "ERROR: missingentriesforbackup", parsed_result
         sys.exit(1)
-    except urllib2.URLError, e:
-        print >>sys.stderr, "ERROR: missingentriesforbackup", e.reason
-        sys.exit(1)
-    except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: missingentriesforbackup", e.read()
+    except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: missingentriesforbackup", e.response
         sys.exit(1)
 
 def chunks(l, n):
--
cgit v1.1

From 06c6290ac4f0507374dfbf703e6577dfe48dfae7 Mon Sep 17 00:00:00 2001
From: Magnus Ahltorp
Date: Tue, 15 Mar 2016 12:52:51 +0100
Subject: Do detection of where log ends before sending new hashes

Try to send entries until missing entries is empty
---
 tools/mergetools.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

(limited to 'tools/mergetools.py')

diff --git a/tools/mergetools.py b/tools/mergetools.py
index f6e8bd5..ec4fd2a 100644
--- a/tools/mergetools.py
+++ b/tools/mergetools.py
@@ -286,13 +286,14 @@ def sendentry(node, baseurl, own_key, paths, entry, ehash):
 
 def sendentry_merge(node, baseurl, own_key, paths, entry, ehash):
     return sendentries_merge(node, baseurl, own_key, paths, [(ehash, entry)])
 
-def sendentries_merge(node, baseurl, own_key, paths, entries):
+def sendentries_merge(node, baseurl, own_key, paths, entries, session=None):
     try:
         json_entries = [{"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)} for hash, entry in entries]
         result = http_request(
             baseurl + "plop/v1/merge/sendentry", json.dumps(json_entries),
-            key=own_key, verifynode=node, publickeydir=paths["publickeys"])
+            key=own_key, verifynode=node, publickeydir=paths["publickeys"],
+            session=session)
         return json.loads(result)
     except requests.exceptions.HTTPError, e:
         print >>sys.stderr, "ERROR: sendentry_merge", e.response
--
cgit v1.1