-rwxr-xr-x  tools/merge_fetch.py  30
-rw-r--r--  tools/mergetools.py    4
2 files changed, 18 insertions, 16 deletions
diff --git a/tools/merge_fetch.py b/tools/merge_fetch.py
index 3028b30..ddd2f06 100755
--- a/tools/merge_fetch.py
+++ b/tools/merge_fetch.py
@@ -7,6 +7,7 @@
 import sys
 import struct
 import subprocess
+import requests
 from time import sleep
 from mergetools import get_logorder, verify_entry, get_new_entries, \
     chunks, fsync_logorder, get_entries, add_to_logorder, \
@@ -63,20 +64,21 @@ def merge_fetch(args, config, localconfig):
         print >>sys.stderr, "getting %d entries from %s:" % \
           (len(entries_to_fetch[storagenode["name"]]), storagenode["name"]),
         sys.stderr.flush()
-        for chunk in chunks(entries_to_fetch[storagenode["name"]], 100):
-            entries = get_entries(storagenode["name"],
-                                  "https://%s/" % storagenode["address"],
-                                  own_key, paths, chunk)
-            for ehash in chunk:
-                entry = entries[ehash]
-                verify_entry(verifycert, entry, ehash)
-                chainsdb.add(ehash, entry)
-                add_to_logorder(logorderfile, ehash)
-                logorder.append(ehash)
-                certsinlog.add(ehash)
-                added_entries += 1
-            print >>sys.stderr, added_entries,
-            sys.stderr.flush()
+        with requests.sessions.Session() as session:
+            for chunk in chunks(entries_to_fetch[storagenode["name"]], 100):
+                entries = get_entries(storagenode["name"],
+                                      "https://%s/" % storagenode["address"],
+                                      own_key, paths, chunk, session=session)
+                for ehash in chunk:
+                    entry = entries[ehash]
+                    verify_entry(verifycert, entry, ehash)
+                    chainsdb.add(ehash, entry)
+                    add_to_logorder(logorderfile, ehash)
+                    logorder.append(ehash)
+                    certsinlog.add(ehash)
+                    added_entries += 1
+                print >>sys.stderr, added_entries,
+                sys.stderr.flush()
         print >>sys.stderr
         sys.stderr.flush()
     chainsdb.commit()
diff --git a/tools/mergetools.py b/tools/mergetools.py
index ff3d08c..94901a9 100644
--- a/tools/mergetools.py
+++ b/tools/mergetools.py
@@ -179,13 +179,13 @@ def get_new_entries(node, baseurl, own_key, paths):
         print >>sys.stderr, "ERROR: fetchnewentries", e.response
         sys.exit(1)
 
-def get_entries(node, baseurl, own_key, paths, hashes):
+def get_entries(node, baseurl, own_key, paths, hashes, session=None):
     try:
         params = {"hash":[base64.b64encode(ehash) for ehash in hashes]}
         result = http_request(baseurl + "plop/v1/storage/getentry",
                               params=params,
                               key=own_key, verifynode=node,
-                              publickeydir=paths["publickeys"])
+                              publickeydir=paths["publickeys"], session=session)
         parsed_result = json.loads(result)
         if parsed_result.get(u"result") == u"ok":
             entries = dict([(base64.b64decode(entry["hash"]),
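
The receiving side of the new session= keyword is not shown in this diff. Below is a minimal sketch of how http_request might honour it, assuming it falls back to the module-level requests API when no session is supplied; the request signing and node verification that the real mergetools.http_request performs are omitted here.

    import requests

    def http_request(url, params=None, key=None, verifynode=None,
                     publickeydir=None, session=None):
        # Hypothetical sketch: reuse the caller's Session (and its keep-alive
        # connection pool) when one is supplied; otherwise fall back to a
        # one-off request so callers that never pass session= behave as before.
        # NOTE: signing with the merge node's key and verifying the storage
        # node's response are left out of this sketch.
        requestor = session if session is not None else requests
        response = requestor.post(url, data=params)
        response.raise_for_status()
        return response.text

With that shape, the with requests.sessions.Session() as session: block in merge_fetch lets every getentry chunk fetched from one storage node reuse the same keep-alive connections instead of opening a new one per request.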