author    | Linus Nordberg <linus@nordu.net> | 2017-03-15 17:17:58 +0100
committer | Linus Nordberg <linus@nordu.net> | 2017-03-15 17:17:58 +0100
commit    | 598d6ae6d00644c7f6e318cf5a4928ee5a8eb9ca (patch)
tree      | 7653518afa13e7ccc1a3925603d36d85570cb88c /merge/src/merge_dist.erl
parent    | 7e41c7c7630c4a96567029e6b4d7688a7df6ccee (diff)
parent    | 8bb572816040a8ecda50be9687cd1ddc76436f65 (diff)
Merge branch 'map-statusserver'
Diffstat (limited to 'merge/src/merge_dist.erl')
-rw-r--r-- | merge/src/merge_dist.erl | 44
1 file changed, 25 insertions, 19 deletions
diff --git a/merge/src/merge_dist.erl b/merge/src/merge_dist.erl
index f8f0c7c..3c38401 100644
--- a/merge/src/merge_dist.erl
+++ b/merge/src/merge_dist.erl
@@ -48,7 +48,9 @@ dist(noentry, State) ->
     Timer = erlang:start_timer(1000, self(), dist),
     {noreply, State#state{timer = Timer}};
 dist({struct, PropList} = STH,
-     #state{node_address = NodeAddress, sth_timestamp = LastTimestamp} = State) ->
+     #state{node_address = NodeAddress,
+            node_name = NodeName,
+            sth_timestamp = LastTimestamp} = State) ->
     Treesize = proplists:get_value(<<"tree_size">>, PropList),
     Timestamp = proplists:get_value(<<"timestamp">>, PropList),
     RootHash = base64:decode(proplists:get_value(<<"sha256_root_hash">>, PropList)),
@@ -60,8 +62,10 @@ dist({struct, PropList} = STH,
     try
         lager:info("~p: starting dist, sth at ~B, logorder at ~B",
                    [NodeAddress, Treesize, Logordersize]),
-        ok = do_dist(NodeAddress, min(Treesize, Logordersize)),
-        ok = publish_sth(NodeAddress, STH),
+        statusreport:report("merge_dist", NodeName, "targetsth", Treesize),
+        ok = do_dist(NodeAddress, NodeName, min(Treesize, Logordersize)),
+        ok = publish_sth(NodeName, NodeAddress, STH),
+        statusreport:report("merge_dist", NodeName, "sth", Treesize),
         lager:info("~p: Published STH with size ~B and timestamp " ++
                    "~p.", [NodeAddress, Treesize, Timestamp]),
         Timestamp
@@ -82,52 +86,54 @@ dist({struct, PropList} = STH,
 
 %% @doc Has nonlocal return because of throw further down in
 %% merge_util:request/4.
-do_dist(NodeAddress, Size) ->
-    {ok, VerifiedSize} = frontend_get_verifiedsize(NodeAddress),
+do_dist(NodeAddress, NodeName, Size) ->
+    {ok, VerifiedSize} = frontend_get_verifiedsize(NodeName, NodeAddress),
     lager:debug("~p: verifiedsize ~B", [NodeAddress, VerifiedSize]),
     true = VerifiedSize =< Size,
-    do_dist(NodeAddress, VerifiedSize, Size - VerifiedSize).
+    do_dist(NodeAddress, NodeName, VerifiedSize, Size - VerifiedSize).
 
-do_dist(_, _, 0) ->
+do_dist(_, _, _, 0) ->
     ok;
-do_dist(NodeAddress, Start, NTotal) ->
+do_dist(NodeAddress, NodeName, Start, NTotal) ->
     DistMaxWindow = application:get_env(plop, merge_dist_winsize, 1000),
     N = min(DistMaxWindow, NTotal),
     Hashes = index:getrange(logorder, Start, Start + N - 1),
     SendlogChunksize = application:get_env(plop, merge_dist_sendlog_chunksize, 1000),
     SendentriesChunksize = application:get_env(plop, merge_dist_sendentries_chunksize, 100),
-    ok = merge_util:sendlog(NodeAddress, Start, Hashes, SendlogChunksize),
-    {ok, HashesMissingEncoded} = merge_util:missingentries(NodeAddress),
+    ok = merge_util:sendlog(NodeAddress, NodeName, Start, Hashes, SendlogChunksize),
+    statusreport:report("merge_dist", NodeName, "sendlog", Start + N),
+    {ok, HashesMissingEncoded} = merge_util:missingentries(NodeAddress, NodeName),
     lager:debug("number of missing entries: ~B", [length(HashesMissingEncoded)]),
    HashesMissing = lists:map(fun base64:decode/1, HashesMissingEncoded),
-    ok = merge_util:sendentries(NodeAddress, HashesMissing, SendentriesChunksize),
-    {ok, NewSize} = frontend_verify_entries(NodeAddress, Start + N),
+    ok = merge_util:sendentries(NodeAddress, NodeName, HashesMissing, SendentriesChunksize),
+    {ok, NewSize} = frontend_verify_entries(NodeName, NodeAddress, Start + N),
     lager:info("~p: Done distributing ~B out of ~B entries.",
               [NodeAddress, NewSize-Start, NTotal]),
+    statusreport:report("merge_dist", NodeName, "verified", Start + N),
     true = NTotal >= NewSize - Start,
-    do_dist(NodeAddress, NewSize, NTotal - (NewSize - Start)).
+    do_dist(NodeAddress, NodeName, NewSize, NTotal - (NewSize - Start)).
 
-frontend_get_verifiedsize(NodeAddress) ->
-    frontend_verify_entries(NodeAddress, 0).
+frontend_get_verifiedsize(NodeName, NodeAddress) ->
+    frontend_verify_entries(NodeName, NodeAddress, 0).
 
-frontend_verify_entries(NodeAddress, Size) ->
+frontend_verify_entries(NodeName, NodeAddress, Size) ->
     DebugTag = io_lib:format("verify-entries ~B", [Size]),
     URL = NodeAddress ++ "verify-entries",
     Headers = [{"Content-Type", "text/json"}],
     RequestBody = list_to_binary(mochijson2:encode({[{"verify_to", Size}]})),
-    case merge_util:request(DebugTag, URL, Headers, RequestBody) of
+    case merge_util:request(DebugTag, URL, NodeName, Headers, RequestBody) of
         {<<"ok">>, PropList} ->
             {ok, proplists:get_value(<<"verified">>, PropList)};
         Err ->
             throw({request_error, result, DebugTag, Err})
     end.
 
-publish_sth(NodeAddress, STH) ->
+publish_sth(NodeName, NodeAddress, STH) ->
     DebugTag = "publish-sth",
     URL = NodeAddress ++ "publish-sth",
     Headers = [{"Content-Type", "text/json"}],
     RequestBody = list_to_binary(mochijson2:encode(STH)),
-    case merge_util:request(DebugTag, URL, Headers, RequestBody) of
+    case merge_util:request(DebugTag, URL, NodeName, Headers, RequestBody) of
         {<<"ok">>, _} ->
             ok;
         Err ->
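The patch threads the frontend node's name through do_dist and publish_sth so that each phase of distribution can be reported to the new status server ("targetsth", "sendlog", "verified", "sth"). Below is a minimal sketch of that reporting sequence, assuming a local stub in place of the real statusreport server; the module name, node name, and sizes are illustrative and not part of the patch.

```erlang
%% Hypothetical sketch of the reporting sequence introduced by this patch.
%% report/4 stands in for plop's statusreport:report/4; only the argument
%% shape ("merge_dist", NodeName, Phase, Value) is taken from the diff above.
-module(dist_report_sketch).
-export([run/0]).

report(Service, NodeName, Phase, Value) ->
    %% The real implementation would hand this to the status server;
    %% here we just print it.
    io:format("~s ~s ~s: ~p~n", [Service, NodeName, Phase, Value]).

run() ->
    NodeName = "frontend-1",   %% illustrative node name, not from the patch
    Treesize = 4711,           %% illustrative target tree size
    report("merge_dist", NodeName, "targetsth", Treesize),
    %% do_dist would send the log and missing entries here, then report:
    report("merge_dist", NodeName, "sendlog", Treesize),
    report("merge_dist", NodeName, "verified", Treesize),
    %% publish_sth would publish the STH to the frontend node here:
    report("merge_dist", NodeName, "sth", Treesize),
    ok.
```

The ordering mirrors the diff: the target size is reported before distribution starts, progress is reported per window as entries are sent and verified, and the final "sth" report is made only after the STH has been published.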