author     Stuart Thackray <stuart.thackray@gmail.com>  2018-12-11 08:53:29 +0200
committer  Stuart Thackray <stuart.thackray@gmail.com>  2018-12-11 08:53:29 +0200
commit     ebfa797c1f5d038b99beaf658757d974412a15c7 (patch)
tree       9765880a7f0119c265d85f8bac7afea8d9542080 /src
parent     71187514dabdd94aa333495d92df84a2e750099f (diff)
parent     8e28561d4e14ea85d42d17ab5a0f17f5f1c696d2 (diff)
Update from Upstream
Diffstat (limited to 'src')
-rw-r--r--  src/cth_retry.erl | 159
-rw-r--r--  src/r3.erl | 55
-rw-r--r--  src/rebar.app.src | 12
-rw-r--r--  src/rebar.hrl | 37
-rw-r--r--  src/rebar3.erl | 186
-rw-r--r--  src/rebar_agent.erl | 256
-rw-r--r--  src/rebar_api.erl | 55
-rw-r--r--  src/rebar_app_discover.erl | 312
-rw-r--r--  src/rebar_app_info.erl | 317
-rw-r--r--  src/rebar_app_utils.erl | 211
-rw-r--r--  src/rebar_base_compiler.erl | 179
-rw-r--r--  src/rebar_compiler.erl | 315
-rw-r--r--  src/rebar_compiler_erl.erl | 359
-rw-r--r--  src/rebar_compiler_mib.erl | 101
-rw-r--r--  src/rebar_compiler_xrl.erl | 64
-rw-r--r--  src/rebar_compiler_yrl.erl | 51
-rw-r--r--  src/rebar_config.erl | 119
-rw-r--r--  src/rebar_core.erl | 48
-rw-r--r--  src/rebar_dialyzer_format.erl | 248
-rw-r--r--  src/rebar_digraph.erl | 26
-rw-r--r--  src/rebar_dir.erl | 196
-rw-r--r--  src/rebar_dist_utils.erl | 36
-rw-r--r--  src/rebar_env.erl | 86
-rw-r--r--  src/rebar_erlc_compiler.erl | 197
-rw-r--r--  src/rebar_fetch.erl | 118
-rw-r--r--  src/rebar_file_utils.erl | 247
-rw-r--r--  src/rebar_git_resource.erl | 265
-rw-r--r--  src/rebar_hex_repos.erl | 142
-rw-r--r--  src/rebar_hg_resource.erl | 143
-rw-r--r--  src/rebar_hooks.erl | 67
-rw-r--r--  src/rebar_log.erl | 17
-rw-r--r--  src/rebar_opts.erl | 252
-rw-r--r--  src/rebar_otp_app.erl | 41
-rw-r--r--  src/rebar_packages.erl | 483
-rw-r--r--  src/rebar_paths.erl | 211
-rw-r--r--  src/rebar_pkg_resource.erl | 414
-rw-r--r--  src/rebar_plugins.erl | 45
-rw-r--r--  src/rebar_prv_alias.erl | 138
-rw-r--r--  src/rebar_prv_app_discovery.erl | 12
-rw-r--r--  src/rebar_prv_as.erl | 12
-rw-r--r--  src/rebar_prv_bare_compile.erl | 12
-rw-r--r--  src/rebar_prv_clean.erl | 10
-rw-r--r--  src/rebar_prv_common_test.erl | 365
-rw-r--r--  src/rebar_prv_compile.erl | 140
-rw-r--r--  src/rebar_prv_cover.erl | 179
-rw-r--r--  src/rebar_prv_deps.erl | 28
-rw-r--r--  src/rebar_prv_deps_tree.erl | 14
-rw-r--r--  src/rebar_prv_dialyzer.erl | 190
-rw-r--r--  src/rebar_prv_do.erl | 24
-rw-r--r--  src/rebar_prv_edoc.erl | 61
-rw-r--r--  src/rebar_prv_escriptize.erl | 59
-rw-r--r--  src/rebar_prv_eunit.erl | 77
-rw-r--r--  src/rebar_prv_get_deps.erl | 37
-rw-r--r--  src/rebar_prv_help.erl | 7
-rw-r--r--  src/rebar_prv_install_deps.erl | 164
-rw-r--r--  src/rebar_prv_local_install.erl | 29
-rw-r--r--  src/rebar_prv_local_upgrade.erl | 41
-rw-r--r--  src/rebar_prv_lock.erl | 9
-rw-r--r--  src/rebar_prv_new.erl | 27
-rw-r--r--  src/rebar_prv_packages.erl | 108
-rw-r--r--  src/rebar_prv_path.erl | 24
-rw-r--r--  src/rebar_prv_plugins.erl | 24
-rw-r--r--  src/rebar_prv_plugins_upgrade.erl | 2
-rw-r--r--  src/rebar_prv_report.erl | 14
-rw-r--r--  src/rebar_prv_repos.erl | 47
-rw-r--r--  src/rebar_prv_shell.erl | 118
-rw-r--r--  src/rebar_prv_unlock.erl | 21
-rw-r--r--  src/rebar_prv_update.erl | 221
-rw-r--r--  src/rebar_prv_upgrade.erl | 112
-rw-r--r--  src/rebar_prv_xref.erl | 67
-rw-r--r--  src/rebar_relx.erl | 17
-rw-r--r--  src/rebar_resource.erl | 44
-rw-r--r--  src/rebar_resource_v2.erl | 147
-rw-r--r--  src/rebar_state.erl | 165
-rw-r--r--  src/rebar_string.erl | 44
-rw-r--r--  src/rebar_templater.erl | 56
-rw-r--r--  src/rebar_utils.erl | 499
77 files changed, 7122 insertions, 2313 deletions
diff --git a/src/cth_retry.erl b/src/cth_retry.erl
new file mode 100644
index 0000000..7056c71
--- /dev/null
+++ b/src/cth_retry.erl
@@ -0,0 +1,159 @@
+-module(cth_retry).
+
+%% Callbacks
+-export([id/1]).
+-export([init/2]).
+
+-export([pre_init_per_suite/3]).
+-export([post_init_per_suite/4]).
+-export([pre_end_per_suite/3]).
+-export([post_end_per_suite/4]).
+
+-export([pre_init_per_group/3]).
+-export([post_init_per_group/4]).
+-export([pre_end_per_group/3]).
+-export([post_end_per_group/4]).
+
+-export([pre_init_per_testcase/3]).
+-export([post_end_per_testcase/4]).
+
+-export([on_tc_fail/3]).
+-export([on_tc_skip/3, on_tc_skip/4]).
+
+-export([terminate/1]).
+
+-record(state, {id, suite, groups, acc=[]}).
+
+%% @doc Return a unique id for this CTH.
+id(_Opts) ->
+ {?MODULE, make_ref()}.
+
+%% @doc Always called before any other callback function. Use this to initiate
+%% any common state.
+init(Id, _Opts) ->
+ {ok, #state{id=Id}}.
+
+%% @doc Called before init_per_suite is called.
+pre_init_per_suite(Suite,Config,State) ->
+ {Config, State#state{suite=Suite, groups=[]}}.
+
+%% @doc Called after init_per_suite.
+post_init_per_suite(_Suite,_Config,Return,State) ->
+ {Return, State}.
+
+%% @doc Called before end_per_suite.
+pre_end_per_suite(_Suite,Config,State) ->
+ {Config, State}.
+
+%% @doc Called after end_per_suite.
+post_end_per_suite(_Suite,_Config,Return,State) ->
+ {Return, State#state{suite=undefined, groups=[]}}.
+
+%% @doc Called before each init_per_group.
+pre_init_per_group(_Group,Config,State) ->
+ {Config, State}.
+
+%% @doc Called after each init_per_group.
+post_init_per_group(Group,_Config,Return, State=#state{groups=Groups}) ->
+ {Return, State#state{groups=[Group|Groups]}}.
+
+%% @doc Called after each end_per_group.
+pre_end_per_group(_Group,Config,State) ->
+ {Config, State}.
+
+%% @doc Called after each end_per_group.
+post_end_per_group(_Group,_Config,Return, State=#state{groups=Groups}) ->
+ {Return, State#state{groups=tl(Groups)}}.
+
+%% @doc Called before each test case.
+pre_init_per_testcase(_TC,Config,State) ->
+ {Config, State}.
+
+%% @doc Called after each test case.
+post_end_per_testcase(_TC,_Config,ok,State) ->
+ {ok, State};
+post_end_per_testcase(TC,_Config,Error,State=#state{suite=Suite, groups=Groups, acc=Acc}) ->
+ Test = case TC of
+ {_Group, Case} -> Case;
+ TC -> TC
+ end,
+ {Error, State#state{acc=[{Suite, Groups, Test}|Acc]}}.
+
+%% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group,
+%% post_end_per_group and post_end_per_testcase if the suite, group or test case failed.
+on_tc_fail(_TC, _Reason, State) ->
+ State.
+
+%% @doc Called when a test case is skipped by either user action
+%% or due to an init function failing. (>= 19.3)
+on_tc_skip(Suite, TC, {tc_auto_skip, _}, State=#state{suite=Suite, groups=Groups, acc=Acc}) ->
+ NewAcc = case TC of
+ init_per_testcase -> Acc;
+ end_per_testcase -> Acc;
+ {init_per_group,_} -> Acc;
+ {end_per_group, _} -> Acc;
+ init_per_suite -> Acc;
+ end_per_suite -> Acc;
+ {_Group, Case} -> [{Suite, Groups, Case}|Acc];
+ TC -> [{Suite, Groups, TC}|Acc]
+ end,
+ State#state{suite=Suite, acc=NewAcc};
+on_tc_skip(Suite, _TC, _Reason, State) ->
+ State#state{suite=Suite}.
+
+%% @doc Called when a test case is skipped by either user action
+%% or due to an init function failing. (Pre-19.3)
+on_tc_skip(TC, {tc_auto_skip, _}, State=#state{suite=Suite, groups=Groups, acc=Acc}) ->
+ NewAcc = case TC of
+ init_per_testcase -> Acc;
+ end_per_testcase -> Acc;
+ {init_per_group,_} -> Acc;
+ {end_per_group, _} -> Acc;
+ init_per_suite -> Acc;
+ end_per_suite -> Acc;
+ {_Group, Case} -> [{Suite, Groups, Case}|Acc];
+ TC -> [{Suite, Groups, TC}|Acc]
+ end,
+ State#state{acc=NewAcc};
+on_tc_skip(_TC, _Reason, State) ->
+ State.
+
+%% @doc Called when the scope of the CTH is done
+terminate(#state{acc=[]}) ->
+ ok;
+terminate(#state{acc=Acc}) ->
+ Spec = to_spec(Acc),
+ {ok, Cwd} = file:get_cwd(),
+ Path = filename:join(lists:droplast(filename:split(Cwd))++["retry.spec"]),
+ io:format(user,
+ "EXPERIMENTAL: Writing retry specification at ~s~n"
+ " call rebar3 ct with '--retry' to re-run failing cases.~n",
+ [Path]),
+ file:write_file(Path, Spec),
+ ok.
+
+%%% Helpers
+to_spec(List) ->
+ [to_spec_entry(X) || X <- merge(List)].
+
+merge([]) -> [];
+merge([{Suite, Groups, Case}|T]) when is_atom(Case) ->
+ merge([{Suite, Groups, [Case]}|T]);
+merge([{Suite, Groups, Cases}, {Suite, Groups, Case} | T]) ->
+ merge([{Suite, Groups, [Case|Cases]}|T]);
+merge([{Suite, Groups, Cases} | T]) ->
+ [{Suite, Groups, Cases} | merge(T)].
+
+to_spec_entry({Suite, [], Cases}) ->
+ Dir = filename:dirname(proplists:get_value(source, Suite:module_info(compile))),
+ io_lib:format("~p.~n", [{cases, Dir, Suite, Cases}]);
+to_spec_entry({Suite, Groups, Cases}) ->
+ Dir = filename:dirname(proplists:get_value(source, Suite:module_info(compile))),
+ ExpandedGroups = expand_groups(lists:reverse(Groups)),
+ io_lib:format("~p.~n", [{groups, Dir, Suite, ExpandedGroups, {cases,Cases}}]).
+
+expand_groups([Group]) ->
+ {Group, []};
+expand_groups([H|T]) ->
+ {H,[],[expand_groups(T)]}.
+
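The new cth_retry hook accumulates failing suites, groups and cases, and terminate/1 writes them out as a retry.spec one level above the CT run directory. Within rebar3 the ct provider wires the hook up itself, but as a minimal sketch (assuming cth_retry is on the code path), the standard Common Test ct_hooks option would enable it by hand from rebar.config:

    %% rebar.config -- hypothetical manual setup; normally rebar3's ct provider adds the hook
    {ct_opts, [{ct_hooks, [cth_retry]}]}.

A subsequent `rebar3 ct --retry` then replays only the recorded failures, as the message printed from terminate/1 suggests.
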
diff --git a/src/r3.erl b/src/r3.erl
index 5e8b26d..a79cc3a 100644
--- a/src/r3.erl
+++ b/src/r3.erl
@@ -1,7 +1,58 @@
-%%% external alias for rebar_agent
+%%% @doc external alias for `rebar_agent' for more convenient
+%%% calls from a shell.
-module(r3).
--export([do/1, do/2]).
+-export([do/1, do/2, async_do/1, async_do/2, break/0, resume/0]).
+-export(['$handle_undefined_function'/2]).
+-include("rebar.hrl").
+%% @doc alias for `rebar_agent:do/1'
+-spec do(atom()) -> ok | {error, term()}.
do(Command) -> rebar_agent:do(Command).
+%% @doc alias for `rebar_agent:do/2'
+-spec do(atom(), atom()) -> ok | {error, term()}.
do(Namespace, Command) -> rebar_agent:do(Namespace, Command).
+
+%% @doc alias for `rebar_agent:async_do/1'
+-spec async_do(atom()) -> ok | {error, term()}.
+async_do(Command) -> rebar_agent:async_do(Command).
+
+%% @doc alias for `rebar_agent:async_do/2'
+-spec async_do(atom(), atom()) -> ok | {error, term()}.
+async_do(Namespace, Command) -> rebar_agent:async_do(Namespace, Command).
+
+break() ->
+ case whereis(rebar_agent) of % is the shell running
+ undefined ->
+ ok;
+ Pid ->
+ {dictionary, Dict} = process_info(Pid, dictionary),
+ case lists:keyfind(cmd_type, 1, Dict) of
+ {cmd_type, async} ->
+ Self = self(),
+ Ref = make_ref(),
+ spawn_link(fun() ->
+ register(r3_breakpoint_handler, self()),
+ receive
+ resume ->
+ Self ! Ref
+ end
+ end),
+ io:format(user, "~n=== BREAK ===~n", []),
+ receive
+ Ref -> ok
+ end;
+ _ ->
+ ?DEBUG("ignoring breakpoint since command is not run "
+ "in async mode", []),
+ ok
+ end
+ end.
+
+resume() ->
+ r3_breakpoint_handler ! resume,
+ ok.
+
+%% @private defer to rebar_agent
+'$handle_undefined_function'(Cmd, Args) ->
+ rebar_agent:'$handle_undefined_function'(Cmd, Args).
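With these aliases, a `rebar3 shell` session can drive the build agent directly. A small usage sketch, assuming the shell (and therefore rebar_agent) is already running:

    %% synchronous and asynchronous provider runs from the shell
    r3:do(compile).
    r3:async_do(ct).
    %% code running under an async command may call r3:break() to pause;
    %% the shell resumes it with:
    r3:resume().
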
diff --git a/src/rebar.app.src b/src/rebar.app.src
index bd0f871..6058efc 100644
--- a/src/rebar.app.src
+++ b/src/rebar.app.src
@@ -8,6 +8,7 @@
{registered, []},
{applications, [kernel,
stdlib,
+ hipe,
sasl,
compiler,
crypto,
@@ -23,12 +24,13 @@
erlware_commons,
providers,
bbmustache,
- ssl_verify_hostname,
+ ssl_verify_fun,
certifi,
cth_readable,
relx,
cf,
inets,
+ hex_core,
eunit_formatters]},
{env, [
%% Default log level
@@ -38,6 +40,9 @@
{pkg, rebar_pkg_resource},
{hg, rebar_hg_resource}]},
+ {compilers, [rebar_compiler_xrl, rebar_compiler_yrl,
+ rebar_compiler_mib, rebar_compiler_erl]},
+
{providers, [rebar_prv_app_discovery,
rebar_prv_as,
rebar_prv_bare_compile,
@@ -52,6 +57,7 @@
rebar_prv_edoc,
rebar_prv_escriptize,
rebar_prv_eunit,
+ rebar_prv_get_deps,
rebar_prv_help,
rebar_prv_install_deps,
rebar_prv_local_install,
@@ -65,6 +71,7 @@
rebar_prv_release,
rebar_prv_relup,
rebar_prv_report,
+ rebar_prv_repos,
rebar_prv_shell,
rebar_prv_state,
rebar_prv_tar,
@@ -72,6 +79,7 @@
rebar_prv_update,
rebar_prv_upgrade,
rebar_prv_version,
- rebar_prv_xref]}
+ rebar_prv_xref,
+ rebar_prv_alias]} % must run last to prevent overloads
]}
]}.
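The new `compilers` entry (and the `resources` list above it) are ordinary application environment values; later in this commit rebar3.erl reads them with application:get_env/3 and installs them into the state. A condensed sketch of that lookup, using the same calls as the rebar3.erl hunks below:

    %% sketch of how the app env feeds the state (see run_aux/2 below)
    Compilers = application:get_env(rebar, compilers, []),
    State1 = rebar_state:compilers(State, Compilers),
    Resources = application:get_env(rebar, resources, []),
    State2 = rebar_state:create_resources(Resources, State1).
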
diff --git a/src/rebar.hrl b/src/rebar.hrl
index f96ed5e..f11302d 100644
--- a/src/rebar.hrl
+++ b/src/rebar.hrl
@@ -22,17 +22,38 @@
-define(DEFAULT_PLUGINS_DIR, "plugins").
-define(DEFAULT_TEST_DEPS_DIR, "test/lib").
-define(DEFAULT_RELEASE_DIR, "rel").
--define(DEFAULT_CONFIG_FILE, "rebar.config").
-define(CONFIG_VERSION, "1.1.0").
-define(DEFAULT_CDN, "https://repo.hex.pm/").
-define(REMOTE_PACKAGE_DIR, "tarballs").
--define(REMOTE_REGISTRY_FILE, "registry.ets.gz").
-define(LOCK_FILE, "rebar.lock").
-
--define(PACKAGE_INDEX_VERSION, 3).
+-define(DEFAULT_COMPILER_SOURCE_FORMAT, relative).
+-define(PACKAGE_INDEX_VERSION, 5).
-define(PACKAGE_TABLE, package_index).
-define(INDEX_FILE, "packages.idx").
--define(REGISTRY_FILE, "registry").
+-define(HEX_AUTH_FILE, "hex.config").
+-define(PUBLIC_HEX_REPO, <<"hexpm">>).
+
+%% tell xref to ignore format_error/1 in all modules:
+%% not every module that exports it (and relies on it being called) implements a provider
+-ignore_xref([{format_error, 1}]).
+
+%% the package record is used in a select match spec which upsets dialyzer
+%% this is the suggested workaround from Tobias
+%% http://erlang.org/pipermail/erlang-questions/2009-February/041445.html
+-type ms_field() :: '$1' | '_'.
+
+%% TODO: change package and requirement keys to be required (:=) after dropping support for OTP-18
+-record(package, {key :: {unicode:unicode_binary() | ms_field(), unicode:unicode_binary() | ms_field(),
+ unicode:unicode_binary() | ms_field()},
+ checksum :: binary() | ms_field(),
+ retired :: boolean() | ms_field(),
+ dependencies :: [#{package => unicode:unicode_binary(),
+ requirement => unicode:unicode_binary()}] | ms_field()}).
+
+-record(resource, {type :: atom(),
+ module :: module(),
+ state :: term(),
+ implementation :: rebar_resource | rebar_resource_v2}).
-ifdef(namespaced_types).
-type rebar_dict() :: dict:dict().
@@ -52,6 +73,12 @@
-type rebar_set() :: set().
-endif.
+-ifdef(fun_stacktrace).
+-define(WITH_STACKTRACE(T, R, S), T:R -> S = erlang:get_stacktrace(),).
+-else.
+-define(WITH_STACKTRACE(T, R, S), T:R:S ->).
+-endif.
+
-define(GRAPH_VSN, 2).
-type v() :: {digraph:vertex(), term()} | 'false'.
-type e() :: {digraph:vertex(), digraph:vertex()}.
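The WITH_STACKTRACE macro bridges the OTP 21 change from erlang:get_stacktrace/0 to the `Class:Reason:Stacktrace` catch syntax: on newer releases it expands to `T:R:S ->`, on older ones to `T:R -> S = erlang:get_stacktrace(),`. A usage sketch with a hypothetical helper (rebar3.erl below uses the macro the same way):

    safe_call(F) ->
        try F()
        catch
            %% expands to `Class:Reason:Stacktrace ->` on OTP 21+,
            %% and binds Stacktrace via erlang:get_stacktrace() before that
            ?WITH_STACKTRACE(Class, Reason, Stacktrace)
                {error, {Class, Reason, Stacktrace}}
        end.
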
diff --git a/src/rebar3.erl b/src/rebar3.erl
index d3ea15f..a490a15 100644
--- a/src/rebar3.erl
+++ b/src/rebar3.erl
@@ -24,6 +24,16 @@
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
%% -------------------------------------------------------------------
+%%
+%% @doc Main module for rebar3. Supports two interfaces; one for escripts,
+%% and one for usage as a library (although rebar3 makes a lot of
+%% assumptions about its environment, making it a bit tricky to use as
+%% a lib).
+%%
+%% This module's job is mostly to set up the root environment for rebar3
+%% and handle global options (mostly all from the ENV) and make them
+%% accessible to the rest of the run.
+%% @end
-module(rebar3).
-export([main/0,
@@ -43,27 +53,28 @@
%% Public API
%% ====================================================================
-%% For running with:
-%% erl +sbtu +A0 -noinput -mode minimal -boot start_clean -s rebar3 main -extra "$@"
+%% @doc For running with:
+%% erl +sbtu +A1 -noinput -mode minimal -boot start_clean -s rebar3 main -extra "$@"
-spec main() -> no_return().
main() ->
List = init:get_plain_arguments(),
main(List).
-%% escript Entry point
+%% @doc escript Entry point
-spec main(list()) -> no_return().
main(Args) ->
try run(Args) of
{ok, _State} ->
erlang:halt(0);
Error ->
- handle_error(Error)
+ handle_error(Error, [])
catch
- _:Error ->
- handle_error(Error)
+ ?WITH_STACKTRACE(_,Error,Stacktrace)
+ handle_error(Error, Stacktrace)
end.
-%% Erlang-API entry point
+%% @doc Erlang-API entry point
+-spec run(rebar_state:t(), [string()]) -> {ok, rebar_state:t()} | {error, term()}.
run(BaseState, Commands) ->
start_and_load_apps(api),
BaseState1 = rebar_state:set(BaseState, task, Commands),
@@ -78,6 +89,11 @@ run(BaseState, Commands) ->
%% Internal functions
%% ====================================================================
+%% @private sets up the rebar3 environment based on the command line
+%% arguments passed, if they have any relevance; used to translate
+%% from the escript call-site into a common one with the library
+%% usage.
+-spec run([any(), ...]) -> {ok, rebar_state:t()} | {error, term()}.
run(RawArgs) ->
start_and_load_apps(command_line),
@@ -87,7 +103,7 @@ run(RawArgs) ->
case erlang:system_info(version) of
"6.1" ->
?WARN("Due to a filelib bug in Erlang 17.1 it is recommended"
- "you update to a newer release.", []);
+ "you update to a newer release.", []);
_ ->
ok
end,
@@ -95,7 +111,14 @@ run(RawArgs) ->
{BaseState2, _Args1} = set_options(BaseState1, {[], []}),
run_aux(BaseState2, RawArgs).
+%% @private Junction point between the CLI and library entry points.
+%% From here on, the CLI and library entry points share a single code
+%% path that finishes setting up the environment for the run.
+-spec run_aux(rebar_state:t(), [string()]) ->
+ {ok, rebar_state:t()} | {error, term()}.
run_aux(State, RawArgs) ->
+ io:setopts([{encoding, unicode}]),
+ %% Profile override; can only support one profile
State1 = case os:getenv("REBAR_PROFILE") of
false ->
State;
@@ -108,6 +131,7 @@ run_aux(State, RawArgs) ->
rebar_utils:check_min_otp_version(rebar_state:get(State1, minimum_otp_vsn, undefined)),
rebar_utils:check_blacklisted_otp_versions(rebar_state:get(State1, blacklisted_otp_vsns, undefined)),
+ %% Change the default hex CDN
State2 = case os:getenv("HEX_CDN") of
false ->
State1;
@@ -115,8 +139,17 @@ run_aux(State, RawArgs) ->
rebar_state:set(State1, rebar_packages_cdn, CDN)
end,
+ Compilers = application:get_env(rebar, compilers, []),
+ State0 = rebar_state:compilers(State2, Compilers),
+
+ %% TODO: this means use of REBAR_PROFILE=profile will replace the repos with
+ %% the repos defined in the profile. But it will not work with `as profile`.
+ %% Maybe it shouldn't work with either to be consistent?
+ Resources = application:get_env(rebar, resources, []),
+ State2_ = rebar_state:create_resources(Resources, State0),
+
%% bootstrap test profile
- State3 = rebar_state:add_to_profile(State2, test, test_state(State1)),
+ State3 = rebar_state:add_to_profile(State2_, test, test_state(State1)),
%% Process each command, resetting any state between each one
BaseDir = rebar_state:get(State, base_dir, ?DEFAULT_BASE_DIR),
@@ -142,30 +175,43 @@ run_aux(State, RawArgs) ->
State10 = rebar_state:code_paths(State9, default, code:get_path()),
- rebar_core:init_command(rebar_state:command_args(State10, Args), Task).
+ case rebar_core:init_command(rebar_state:command_args(State10, Args), Task) of
+ {ok, State11} ->
+ case rebar_state:get(State11, caller, command_line) of
+ api ->
+ rebar_paths:unset_paths([deps, plugins], State11),
+ {ok, State11};
+ _ ->
+ {ok, State11}
+ end;
+ Other ->
+ Other
+ end.
+
+
+%% @doc set up base configuration having to do with verbosity, where
+%% to find config files, and so on, and return an internal rebar3 state term.
+-spec init_config() -> rebar_state:t().
init_config() ->
+ rebar_utils:set_httpc_options(),
+
%% Initialize logging system
Verbosity = log_level(),
ok = rebar_log:init(command_line, Verbosity),
- Config = case os:getenv("REBAR_CONFIG") of
- false ->
- rebar_config:consult_file(?DEFAULT_CONFIG_FILE);
- ConfigFile ->
- rebar_config:consult_file(ConfigFile)
- end,
+ Config = rebar_config:consult_root(),
Config1 = rebar_config:merge_locks(Config, rebar_config:consult_lock_file(?LOCK_FILE)),
%% If $HOME/.config/rebar3/rebar.config exists load and use as global config
GlobalConfigFile = rebar_dir:global_config(),
State = case filelib:is_regular(GlobalConfigFile) of
true ->
- ?DEBUG("Load global config file ~s", [GlobalConfigFile]),
+ ?DEBUG("Load global config file ~ts", [GlobalConfigFile]),
try state_from_global_config(Config1, GlobalConfigFile)
catch
_:_ ->
- ?WARN("Global config ~s exists but can not be read. Ignoring global config values.", [GlobalConfigFile]),
+ ?WARN("Global config ~ts exists but can not be read. Ignoring global config values.", [GlobalConfigFile]),
rebar_state:new(Config1)
end;
false ->
@@ -193,6 +239,17 @@ init_config() ->
%% Initialize vsn cache
rebar_state:set(State1, vsn_cache, dict:new()).
+%% @doc Parse basic rebar3 arguments to find the top-level task
+%% to be run; this parsing is only partial from the point of view that
+%% runs done with arguments like `as $PROFILE do $TASK' will just
+%% return `as', which is then in charge of doing a more dynamic
+%% dispatch.
+%% If no arguments are given, the `help' task is returned.
+%% Special arguments such as `-h' or `-v' are translated to the `help'
+%% and `version' tasks.
+%% The unparsed parts of arguments are returned in:
+%% `{Task, Rest}'.
+-spec parse_args([string()]) -> {atom(), [string()]}.
parse_args([]) ->
parse_args(["help"]);
parse_args([H | Rest]) when H =:= "-h"
@@ -204,6 +261,8 @@ parse_args([H | Rest]) when H =:= "-v"
parse_args([Task | RawRest]) ->
{list_to_atom(Task), RawRest}.
+%% @private actually not too sure what this does anymore.
+-spec set_options(rebar_state:t(),{[any()],[any()]}) -> {rebar_state:t(),[any()]}.
set_options(State, {Options, NonOptArgs}) ->
GlobalDefines = proplists:get_all_values(defines, Options),
@@ -216,9 +275,8 @@ set_options(State, {Options, NonOptArgs}) ->
{rebar_state:set(State2, task, Task), NonOptArgs}.
-%%
-%% get log level based on getopt option
-%%
+%% @doc get log level based on getopt options and ENV
+-spec log_level() -> integer().
log_level() ->
case os:getenv("QUIET") of
Q when Q == false; Q == "" ->
@@ -233,18 +291,16 @@ log_level() ->
rebar_log:error_level()
end.
-%%
-%% show version information and halt
-%%
+%% @doc show version information
+-spec version() -> ok.
version() ->
{ok, Vsn} = application:get_key(rebar, vsn),
- ?CONSOLE("rebar ~s on Erlang/OTP ~s Erts ~s",
+ ?CONSOLE("rebar ~ts on Erlang/OTP ~ts Erts ~ts",
[Vsn, erlang:system_info(otp_release), erlang:system_info(version)]).
+%% @private set global flag based on getopt option boolean value
%% TODO: Actually make it 'global'
-%%
-%% set global flag based on getopt option boolean value
-%%
+-spec set_global_flag(rebar_state:t(), list(), term()) -> rebar_state:t().
set_global_flag(State, Options, Flag) ->
Value = case proplists:get_bool(Flag, Options) of
true ->
@@ -254,9 +310,9 @@ set_global_flag(State, Options, Flag) ->
end,
rebar_state:set(State, Flag, Value).
-%%
-%% options accepted via getopt
-%%
+
+%% @doc options accepted via getopt
+-spec global_option_spec_list() -> [{atom(), char(), string(), atom(), string()}, ...].
global_option_spec_list() ->
[
%% {Name, ShortOpt, LongOpt, ArgSpec, HelpMsg}
@@ -265,38 +321,46 @@ global_option_spec_list() ->
{task, undefined, undefined, string, "Task to run."}
].
-handle_error(rebar_abort) ->
+%% @private translate unhandled errors and internal return codes into proper
+%% erroneous program exits.
+-spec handle_error(term(), term()) -> no_return().
+handle_error(rebar_abort, _) ->
erlang:halt(1);
-handle_error({error, rebar_abort}) ->
+handle_error({error, rebar_abort}, _) ->
erlang:halt(1);
-handle_error({error, {Module, Reason}}) ->
+handle_error({error, {Module, Reason}}, Stacktrace) ->
case code:which(Module) of
non_existing ->
- ?CRASHDUMP("~p: ~p~n~p~n~n", [Module, Reason, erlang:get_stacktrace()]),
+ ?CRASHDUMP("~p: ~p~n~p~n~n", [Module, Reason, Stacktrace]),
?ERROR("Uncaught error in rebar_core. Run with DEBUG=1 to stacktrace or consult rebar3.crashdump", []),
?DEBUG("Uncaught error: ~p ~p", [Module, Reason]),
?INFO("When submitting a bug report, please include the output of `rebar3 report \"your command\"`", []);
_ ->
- ?ERROR("~s", [Module:format_error(Reason)])
+ ?ERROR("~ts", [Module:format_error(Reason)])
end,
erlang:halt(1);
-handle_error({error, Error}) when is_list(Error) ->
- ?ERROR("~s", [Error]),
+handle_error({error, Error}, _) when is_list(Error) ->
+ ?ERROR("~ts", [Error]),
erlang:halt(1);
-handle_error(Error) ->
+handle_error(Error, StackTrace) ->
%% Nothing should percolate up from rebar_core;
%% Dump this error to console
- ?CRASHDUMP("Error: ~p~n~p~n~n", [Error, erlang:get_stacktrace()]),
+ ?CRASHDUMP("Error: ~p~n~p~n~n", [Error, StackTrace]),
?ERROR("Uncaught error in rebar_core. Run with DEBUG=1 to see stacktrace or consult rebar3.crashdump", []),
?DEBUG("Uncaught error: ~p", [Error]),
- case erlang:get_stacktrace() of
+ case StackTrace of
[] -> ok;
Trace ->
- ?DEBUG("Stack trace to the error location: ~p", [Trace])
+ ?DEBUG("Stack trace to the error location:~n~p", [Trace])
end,
?INFO("When submitting a bug report, please include the output of `rebar3 report \"your command\"`", []),
erlang:halt(1).
+%% @private Boot Erlang dependencies; problem is that escripts don't auto-boot
+%% stuff the way releases do and we have to do it by hand.
+%% This also lets us detect and show nicer errors when a critical lib is
+%% not supported
+-spec start_and_load_apps(command_line|api) -> term().
start_and_load_apps(Caller) ->
_ = application:load(rebar),
%% Make sure crypto is running
@@ -304,9 +368,12 @@ start_and_load_apps(Caller) ->
ensure_running(asn1, Caller),
ensure_running(public_key, Caller),
ensure_running(ssl, Caller),
- inets:start(),
+ ensure_running(inets, Caller),
inets:start(httpc, [{profile, rebar}]).
+%% @doc Make sure a required app is running, or display an error message
+%% and abort if there's a problem.
+-spec ensure_running(atom(), command_line|api) -> ok | no_return().
ensure_running(App, Caller) ->
case application:start(App) of
ok -> ok;
@@ -323,40 +390,55 @@ ensure_running(App, Caller) ->
throw(rebar_abort)
end.
+-spec state_from_global_config([term()], file:filename()) -> rebar_state:t().
state_from_global_config(Config, GlobalConfigFile) ->
- rebar_utils:set_httpc_options(),
GlobalConfigTerms = rebar_config:consult_file(GlobalConfigFile),
GlobalConfig = rebar_state:new(GlobalConfigTerms),
%% We don't want to worry about global plugin install state effecting later
%% usage. So we throw away the global profile state used for plugin install.
- GlobalConfigThrowAway = rebar_state:current_profiles(GlobalConfig, [global]),
- GlobalState = case rebar_state:get(GlobalConfigThrowAway, plugins, []) of
+ GlobalConfigThrowAway0 = rebar_state:current_profiles(GlobalConfig, [global]),
+
+ Resources = application:get_env(rebar, resources, []),
+ GlobalConfigThrowAway = rebar_state:create_resources(Resources, GlobalConfigThrowAway0),
+
+ Compilers = application:get_env(rebar, compilers, []),
+ GlobalConfigThrowAway1 = rebar_state:compilers(GlobalConfigThrowAway, Compilers),
+
+ GlobalState = case rebar_state:get(GlobalConfigThrowAway1, plugins, []) of
[] ->
- GlobalConfigThrowAway;
+ GlobalConfigThrowAway1;
GlobalPluginsToInstall ->
rebar_plugins:handle_plugins(global,
GlobalPluginsToInstall,
- GlobalConfigThrowAway)
+ GlobalConfigThrowAway1)
end,
GlobalPlugins = rebar_state:providers(GlobalState),
GlobalConfig2 = rebar_state:set(GlobalConfig, plugins, []),
- GlobalConfig3 = rebar_state:set(GlobalConfig2, {plugins, global}, rebar_state:get(GlobalConfigThrowAway, plugins, [])),
+ GlobalConfig3 = rebar_state:set(GlobalConfig2, {plugins, global},
+ rebar_state:get(GlobalConfigThrowAway1, plugins, [])),
rebar_state:providers(rebar_state:new(GlobalConfig3, Config), GlobalPlugins).
+-spec test_state(rebar_state:t()) -> [{'extra_src_dirs',[string()]} | {'erl_opts',[any()]}].
test_state(State) ->
- ErlOpts = rebar_state:get(State, erl_opts, []),
+ %% Fetch the test profile's erl_opts only
+ Opts = rebar_state:opts(State),
+ Profiles = rebar_opts:get(Opts, profiles, []),
+ ProfileOpts = proplists:get_value(test, Profiles, []),
+ ErlOpts = proplists:get_value(erl_opts, ProfileOpts, []),
TestOpts = safe_define_test_macro(ErlOpts),
[{extra_src_dirs, ["test"]}, {erl_opts, TestOpts}].
+-spec safe_define_test_macro([any()]) -> [any()] | [{'d',atom()} | any()].
safe_define_test_macro(Opts) ->
%% defining a compile macro twice results in an exception so
%% make sure 'TEST' is only defined once
case test_defined(Opts) of
- true -> [];
- false -> [{d, 'TEST'}]
+ true -> Opts;
+ false -> [{d, 'TEST'}|Opts]
end.
+-spec test_defined([{d, atom()} | {d, atom(), term()} | term()]) -> boolean().
test_defined([{d, 'TEST'}|_]) -> true;
test_defined([{d, 'TEST', true}|_]) -> true;
test_defined([_|Rest]) -> test_defined(Rest);
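To make the dispatch documented for parse_args/1 above concrete, a few expected mappings (inferred from its clauses in this file):

    %% parse_args([])                              -> {help, []}
    %% parse_args(["-v"])                          -> {version, []}
    %% parse_args(["as", "prod", "do", "compile"]) -> {as, ["prod", "do", "compile"]}
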
diff --git a/src/rebar_agent.erl b/src/rebar_agent.erl
index 95818d8..b4734f1 100644
--- a/src/rebar_agent.erl
+++ b/src/rebar_agent.erl
@@ -1,5 +1,8 @@
+%%% @doc Runs a process that holds a rebar3 state and can be used
+%%% to statefully maintain loaded project state into a running VM.
-module(rebar_agent).
--export([start_link/1, do/1, do/2]).
+-export([start_link/1, do/1, do/2, async_do/1, async_do/2]).
+-export(['$handle_undefined_function'/2]).
-export([init/1,
handle_call/3, handle_cast/2, handle_info/2,
code_change/3, terminate/2]).
@@ -10,47 +13,125 @@
cwd,
show_warning=true}).
+%% @doc boots an agent server; requires a full rebar3 state already.
+%% By default (within rebar3), this isn't called; `rebar_prv_shell'
+%% enters and transforms into this module
+-spec start_link(rebar_state:t()) -> {ok, pid()}.
start_link(State) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, State, []).
+%% @doc runs a given command in the agent's context.
+-spec do(atom()) -> ok | {error, term()}.
do(Command) when is_atom(Command) ->
- gen_server:call(?MODULE, {cmd, Command}, infinity).
+ gen_server:call(?MODULE, {cmd, Command}, infinity);
+do(Args) when is_list(Args) ->
+ gen_server:call(?MODULE, {cmd, default, do, Args}, infinity).
+%% @doc runs a given command in the agent's context, under a given
+%% namespace.
+-spec do(atom(), atom()) -> ok | {error, term()}.
do(Namespace, Command) when is_atom(Namespace), is_atom(Command) ->
- gen_server:call(?MODULE, {cmd, Namespace, Command}, infinity).
+ gen_server:call(?MODULE, {cmd, Namespace, Command}, infinity);
+do(Namespace, Args) when is_atom(Namespace), is_list(Args) ->
+ gen_server:call(?MODULE, {cmd, Namespace, do, Args}, infinity).
+-spec async_do(atom()) -> ok | {error, term()}.
+async_do(Command) when is_atom(Command) ->
+ gen_server:cast(?MODULE, {cmd, Command});
+async_do(Args) when is_list(Args) ->
+ gen_server:cast(?MODULE, {cmd, default, do, Args}).
+
+-spec async_do(atom(), atom()) -> ok.
+async_do(Namespace, Command) when is_atom(Namespace), is_atom(Command) ->
+ gen_server:cast(?MODULE, {cmd, Namespace, Command});
+async_do(Namespace, Args) when is_atom(Namespace), is_list(Args) ->
+ gen_server:cast(?MODULE, {cmd, Namespace, do, Args}).
+
+'$handle_undefined_function'(Cmd, [Namespace, Args]) ->
+ gen_server:call(?MODULE, {cmd, Namespace, Cmd, Args}, infinity);
+'$handle_undefined_function'(Cmd, [Args]) ->
+ gen_server:call(?MODULE, {cmd, default, Cmd, Args}, infinity);
+'$handle_undefined_function'(Cmd, []) ->
+ gen_server:call(?MODULE, {cmd, default, Cmd}, infinity).
+
+%%%%%%%%%%%%%%%%%
+%%% CALLBACKS %%%
+%%%%%%%%%%%%%%%%%
+
+%% @private
init(State) ->
Cwd = rebar_dir:get_cwd(),
{ok, #state{state=State, cwd=Cwd}}.
+%% @private
handle_call({cmd, Command}, _From, State=#state{state=RState, cwd=Cwd}) ->
MidState = maybe_show_warning(State),
- {Res, NewRState} = run(default, Command, RState, Cwd),
+ put(cmd_type, sync),
+ {Res, NewRState} = run(default, Command, "", RState, Cwd),
+ put(cmd_type, undefined),
{reply, Res, MidState#state{state=NewRState}, hibernate};
handle_call({cmd, Namespace, Command}, _From, State = #state{state=RState, cwd=Cwd}) ->
MidState = maybe_show_warning(State),
- {Res, NewRState} = run(Namespace, Command, RState, Cwd),
+ put(cmd_type, sync),
+ {Res, NewRState} = run(Namespace, Command, "", RState, Cwd),
+ put(cmd_type, undefined),
+ {reply, Res, MidState#state{state=NewRState}, hibernate};
+handle_call({cmd, Namespace, Command, Args}, _From, State = #state{state=RState, cwd=Cwd}) ->
+ MidState = maybe_show_warning(State),
+ put(cmd_type, sync),
+ {Res, NewRState} = run(Namespace, Command, Args, RState, Cwd),
+ put(cmd_type, undefined),
{reply, Res, MidState#state{state=NewRState}, hibernate};
handle_call(_Call, _From, State) ->
{noreply, State}.
+%% @private
+handle_cast({cmd, Command}, State=#state{state=RState, cwd=Cwd}) ->
+ MidState = maybe_show_warning(State),
+ put(cmd_type, async),
+ {_, NewRState} = run(default, Command, "", RState, Cwd),
+ put(cmd_type, undefined),
+ {noreply, MidState#state{state=NewRState}, hibernate};
+handle_cast({cmd, Namespace, Command}, State = #state{state=RState, cwd=Cwd}) ->
+ MidState = maybe_show_warning(State),
+ put(cmd_type, async),
+ {_, NewRState} = run(Namespace, Command, "", RState, Cwd),
+ put(cmd_type, undefined),
+ {noreply, MidState#state{state=NewRState}, hibernate};
+handle_cast({cmd, Namespace, Command, Args}, State = #state{state=RState, cwd=Cwd}) ->
+ MidState = maybe_show_warning(State),
+ put(cmd_type, async),
+ {_, NewRState} = run(Namespace, Command, Args, RState, Cwd),
+ put(cmd_type, undefined),
+ {noreply, MidState#state{state=NewRState}, hibernate};
handle_cast(_Cast, State) ->
{noreply, State}.
+%% @private
handle_info(_Info, State) ->
{noreply, State}.
+%% @private
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
+%% @private
terminate(_Reason, _State) ->
ok.
-run(Namespace, Command, RState, Cwd) ->
+%%%%%%%%%%%%%%%
+%%% PRIVATE %%%
+%%%%%%%%%%%%%%%
+
+%% @private runs the actual command and maintains the state changes
+-spec run(atom(), atom(), string(), rebar_state:t(), file:filename()) ->
+ {ok, rebar_state:t()} | {{error, term()}, rebar_state:t()}.
+run(Namespace, Command, StrArgs, RState, Cwd) ->
try
case rebar_dir:get_cwd() of
Cwd ->
- Args = [atom_to_list(Namespace), atom_to_list(Command)],
+ PArgs = getopt:tokenize(StrArgs),
+ Args = [atom_to_list(Namespace), atom_to_list(Command)] ++ PArgs,
CmdState0 = refresh_state(RState, Cwd),
CmdState1 = rebar_state:set(CmdState0, task, atom_to_list(Command)),
CmdState = rebar_state:set(CmdState1, caller, api),
@@ -69,57 +150,168 @@ run(Namespace, Command, RState, Cwd) ->
{{error, cwd_changed}, RState}
end
catch
- Type:Reason ->
- ?DEBUG("Agent Stacktrace: ~p", [erlang:get_stacktrace()]),
+ ?WITH_STACKTRACE(Type, Reason, Stacktrace)
+ ?DEBUG("Agent Stacktrace: ~p", [Stacktrace]),
{{error, {Type, Reason}}, RState}
end.
+%% @private function to display a warning for the feature only once
+-spec maybe_show_warning(#state{}) -> #state{}.
maybe_show_warning(S=#state{show_warning=true}) ->
?WARN("This feature is experimental and may be modified or removed at any time.", []),
S#state{show_warning=false};
maybe_show_warning(State) ->
State.
+%% @private based on a rebar3 state term, reload paths in a way
+%% that makes sense.
+-spec refresh_paths(rebar_state:t()) -> ok.
refresh_paths(RState) ->
- ToRefresh = (rebar_state:code_paths(RState, all_deps)
- ++ [filename:join([rebar_app_info:out_dir(App), "test"])
- || App <- rebar_state:project_apps(RState)]
- %% make sure to never reload self; halt()s the VM
- ) -- [filename:dirname(code:which(?MODULE))],
+ RefreshPaths = application:get_env(rebar, refresh_paths, [all_deps, test]),
+ ToRefresh = parse_refresh_paths(RefreshPaths, RState, []),
%% Modules from apps we can't reload without breaking functionality
- Blacklist = [ec_cmd_log, providers, cf, cth_readable],
+ Blacklist = lists:usort(
+ application:get_env(rebar, refresh_paths_blacklist, [])
+ ++ [rebar, erlware_commons, providers, cf, cth_readable]),
%% Similar to rebar_utils:update_code/1, but also forces a reload
%% of used modules. Also forces to reload all of ebin/ instead
%% of just the modules in the .app file, because 'extra_src_dirs'
%% allows to load and compile files that are not to be kept
%% in the app file.
- lists:foreach(fun(Path) ->
- Name = filename:basename(Path, "/ebin"),
- Files = filelib:wildcard(filename:join([Path, "*.beam"])),
- Modules = [list_to_atom(filename:basename(F, ".beam"))
- || F <- Files],
- App = list_to_atom(Name),
+ [refresh_path(Path, Blacklist) || Path <- ToRefresh],
+ ok.
+
+refresh_path(Path, Blacklist) ->
+ Name = filename:basename(Path, "/ebin"),
+ App = list_to_atom(Name),
+ case App of
+ test -> % skip
+ code:add_patha(Path),
+ ok;
+ _ ->
application:load(App),
case application:get_key(App, modules) of
undefined ->
- code:add_patha(Path),
- ok;
- {ok, Mods} ->
- case {length(Mods), length(Mods -- Blacklist)} of
- {X,X} ->
- ?DEBUG("reloading ~p from ~s", [Modules, Path]),
- code:replace_path(App, Path),
- [begin code:purge(M), code:delete(M), code:load_file(M) end
- || M <- Modules];
- {_,_} ->
+ code:add_patha(Path);
+ {ok, _Mods} ->
+ case lists:member(App, Blacklist) of
+ false ->
+ refresh_path_do(Path, App);
+ true ->
?DEBUG("skipping app ~p, stable copy required", [App])
end
end
- end, ToRefresh).
+ end.
+refresh_path_do(Path, App) ->
+ Files = filelib:wildcard(filename:join([Path, "*.beam"])),
+ Modules = [list_to_atom(filename:basename(F, ".beam"))
+ || F <- Files],
+ ?DEBUG("reloading ~p from ~ts", [Modules, Path]),
+ code:replace_path(App, Path),
+ reload_modules(Modules).
+
+%% @private parse the refresh_paths option:
+%% all_deps     -> the code paths of all apps and dependencies,
+%% project_apps -> only the project apps' ebin paths,
+%% test         -> the project apps' test output paths,
+%% anything else is treated as an explicit directory ("ebin" is appended if missing).
+parse_refresh_paths([all_deps | RefreshPaths], RState, Acc) ->
+ Paths = rebar_state:code_paths(RState, all_deps),
+ parse_refresh_paths(RefreshPaths, RState, Paths ++ Acc);
+parse_refresh_paths([project_apps | RefreshPaths], RState, Acc) ->
+ Paths = [filename:join([rebar_app_info:out_dir(App), "ebin"])
+ || App <- rebar_state:project_apps(RState)],
+ parse_refresh_paths(RefreshPaths, RState, Paths ++ Acc);
+parse_refresh_paths([test | RefreshPaths], RState, Acc) ->
+ Paths = [filename:join([rebar_app_info:out_dir(App), "test"])
+ || App <- rebar_state:project_apps(RState)],
+ parse_refresh_paths(RefreshPaths, RState, Paths ++ Acc);
+parse_refresh_paths([RefreshPath0 | RefreshPaths], RState, Acc) when is_list(RefreshPath0) ->
+ case filelib:is_dir(RefreshPath0) of
+ true ->
+            RefreshPath1 =
+                case filename:basename(RefreshPath0) of
+                    "ebin" -> RefreshPath0;
+                    _ -> filename:join([RefreshPath0, "ebin"])
+                end,
+            parse_refresh_paths(RefreshPaths, RState, [RefreshPath1 | Acc]);
+ false ->
+ parse_refresh_paths(RefreshPaths, RState, Acc)
+ end;
+parse_refresh_paths([_ | RefreshPaths], RState, Acc) ->
+ parse_refresh_paths(RefreshPaths, RState, Acc);
+parse_refresh_paths([], _RState, Acc) ->
+ lists:usort(Acc).
+%% @private from a disk config, reload and reapply with the current
+%% profiles; used to find changes in the config from a prior run.
+-spec refresh_state(rebar_state:t(), file:filename()) -> rebar_state:t().
refresh_state(RState, _Dir) ->
lists:foldl(
fun(F, State) -> F(State) end,
rebar3:init_config(),
[fun(S) -> rebar_state:apply_profiles(S, rebar_state:current_profiles(RState)) end]
).
+
+%% @private takes a list of modules and reloads them
+-spec reload_modules([module()]) -> term().
+reload_modules([]) -> noop;
+reload_modules(Modules0) ->
+ Modules = [M || M <- Modules0, is_changed(M)],
+ reload_modules(Modules, erlang:function_exported(code, prepare_loading, 1)).
+
+%% @spec is_changed(atom()) -> boolean()
+%% @doc returns true if the loaded module's version does not match the
+%% on-disk beam file, and false otherwise.
+is_changed(M) ->
+ try
+ module_vsn(M:module_info(attributes)) =/= module_vsn(code:get_object_code(M))
+ catch _:_ ->
+ false
+ end.
+
+module_vsn({M, Beam, _Fn}) ->
+    % The vsn can be overridden with -vsn(X) in a module, so we use the
+    % beam's md5 rather than beam_lib:version/1; a module that sets
+    % -vsn(X) explicitly will therefore always be reloaded.
+ {ok, {M, <<Vsn:128>>}} = beam_lib:md5(Beam),
+ Vsn;
+module_vsn(Attrs) when is_list(Attrs) ->
+ {_, Vsn} = lists:keyfind(vsn, 1, Attrs),
+ Vsn.
+
+%% @private reloading modules, when there are modules to actually reload
+reload_modules(Modules, true) ->
+ %% OTP 19 and later -- use atomic loading and ignore unloadable mods
+ case code:prepare_loading(Modules) of
+ {ok, Prepared} ->
+ [code:purge(M) || M <- Modules],
+ code:finish_loading(Prepared);
+ {error, ModRsns} ->
+ Blacklist =
+ lists:foldr(fun({ModError, Error}, Acc) ->
+ case Error of
+ % perhaps cover other cases of failure?
+ on_load_not_allowed ->
+ reload_modules([ModError], false),
+ [ModError|Acc];
+ _ ->
+ ?DEBUG("Module ~p failed to atomic load because ~p", [ModError, Error]),
+ [ModError|Acc]
+ end
+ end,
+ [], ModRsns
+ ),
+ reload_modules(Modules -- Blacklist, true)
+ end;
+reload_modules(Modules, false) ->
+ %% Older versions, use a more ad-hoc mechanism.
+ lists:foreach(fun(M) ->
+ code:delete(M),
+ code:purge(M),
+ case code:load_file(M) of
+ {module, M} -> ok;
+ {error, Error} ->
+ ?DEBUG("Module ~p failed to load because ~p", [M, Error])
+ end
+ end, Modules
+ ).
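Both refresh_paths and refresh_paths_blacklist are read from the rebar application environment, so what gets reloaded after an agent command can be narrowed at runtime. A minimal sketch, assuming an agent/shell session is already up (my_nif_app is a hypothetical app you never want hot-reloaded):

    application:set_env(rebar, refresh_paths, [project_apps, test]),
    application:set_env(rebar, refresh_paths_blacklist, [my_nif_app]),
    r3:do(compile).
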
diff --git a/src/rebar_api.erl b/src/rebar_api.erl
index 6ebc500..00eb054 100644
--- a/src/rebar_api.erl
+++ b/src/rebar_api.erl
@@ -1,4 +1,4 @@
-%%% Packages rebar.hrl features and macros into a more generic API
+%%% @doc Packages rebar.hrl features and macros into a more generic API
%%% that can be used by plugin builders.
-module(rebar_api).
-include("rebar.hrl").
@@ -9,6 +9,8 @@
expand_env_variable/3,
get_arch/0,
wordsize/0,
+ set_paths/2,
+ unset_paths/2,
add_deps_to_path/1,
restore_code_path/1,
processing_base_dir/1,
@@ -30,42 +32,77 @@ abort() -> ?FAIL.
abort(Str, Args) -> ?ABORT(Str, Args).
%% @doc Prints to the console, including a newline
+-spec console(string(), list()) -> ok.
console(Str, Args) -> ?CONSOLE(Str, Args).
%% @doc logs with severity `debug'
+-spec debug(string(), list()) -> ok.
debug(Str, Args) -> ?DEBUG(Str, Args).
+
%% @doc logs with severity `info'
+-spec info(string(), list()) -> ok.
info(Str, Args) -> ?INFO(Str, Args).
+
%% @doc logs with severity `warn'
+-spec warn(string(), list()) -> ok.
warn(Str, Args) -> ?WARN(Str, Args).
+
%% @doc logs with severity `error'
+-spec error(string(), list()) -> ok.
error(Str, Args) -> ?ERROR(Str, Args).
-%%
-%% Given env. variable FOO we want to expand all references to
-%% it in InStr. References can have two forms: $FOO and ${FOO}
-%% The end of form $FOO is delimited with whitespace or eol
-%%
+%% @doc Given env. variable `FOO' we want to expand all references to
+%% it in `InStr'. References can have two forms: `$FOO' and `${FOO}'
+%% The end of form `$FOO' is delimited with whitespace or EOL
+-spec expand_env_variable(string(), string(), term()) -> string().
expand_env_variable(InStr, VarName, RawVarValue) ->
rebar_utils:expand_env_variable(InStr, VarName, RawVarValue).
+%% @doc returns the system architecture, as a string like
+%% `"19.0.4-x86_64-unknown-linux-gnu-64"'.
+-spec get_arch() -> string().
get_arch() ->
rebar_utils:get_arch().
+%% @doc returns the size of a word on the system, as a string
+-spec wordsize() -> string().
wordsize() ->
rebar_utils:wordsize().
+%% @doc Set code paths. Takes arguments of the form
+%% `[plugins, deps]' or `[deps, plugins]' and ensures the
+%% project's app and dependencies are set in the right order
+%% for the next bit of execution
+-spec set_paths(rebar_paths:targets(), rebar_state:t()) -> ok.
+set_paths(List, State) ->
+ rebar_paths:set_paths(List, State).
+
+%% @doc Unsets code paths. Takes arguments of the form
+%% `[plugins, deps]' or `[deps, plugins]' and ensures the
+%% paths are no longer active.
+-spec unset_paths(rebar_paths:targets(), rebar_state:t()) -> ok.
+unset_paths(List, State) ->
+ rebar_paths:unset_paths(List, State).
-%% Add deps to the code path
+%% @doc Add deps to the code path
+-spec add_deps_to_path(rebar_state:t()) -> ok.
add_deps_to_path(State) ->
code:add_pathsa(rebar_state:code_paths(State, all_deps)).
-%% Revert to only having the beams necessary for running rebar3 and plugins in the path
+%% @doc Revert to only having the beams necessary for running rebar3 and
+%% plugins in the path
+-spec restore_code_path(rebar_state:t()) -> true | {error, term()}.
restore_code_path(State) ->
rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)).
+%% @doc checks if the current working directory is the base directory
+%% for the project.
+-spec processing_base_dir(rebar_state:t()) -> boolean().
processing_base_dir(State) ->
rebar_dir:processing_base_dir(State).
+%% @doc returns the SSL options adequate for the project based on
+%% its configuration, including for validation of certs.
+-spec ssl_opts(string()) -> [term()].
ssl_opts(Url) ->
- rebar_pkg_resource:ssl_opts(Url).
+ rebar_utils:ssl_opts(Url).
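The new set_paths/2 and unset_paths/2 exports give plugins a supported way to put dependency or plugin code paths in place for the duration of their work. A sketch of a plugin provider's do/1 using them (run_my_checks/1 and the surrounding provider module are hypothetical):

    do(State) ->
        rebar_api:set_paths([plugins, deps], State),
        ok = run_my_checks(State),
        rebar_api:unset_paths([plugins, deps], State),
        {ok, State}.
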
diff --git a/src/rebar_app_discover.erl b/src/rebar_app_discover.erl
index 67acf54..21dea29 100644
--- a/src/rebar_app_discover.erl
+++ b/src/rebar_app_discover.erl
@@ -1,3 +1,5 @@
+%%% @doc utility functions to do the basic discovery of apps
+%%% and layout for the project.
-module(rebar_app_discover).
-export([do/2,
@@ -5,16 +7,23 @@
find_unbuilt_apps/1,
find_apps/1,
find_apps/2,
+ find_apps/4,
find_app/2,
find_app/3]).
-include("rebar.hrl").
-include_lib("providers/include/providers.hrl").
+%% @doc from the base directory, find all the applications
+%% at the top level and their dependencies based on the configuration
+%% and profile information.
+-spec do(rebar_state:t(), [file:filename()]) -> rebar_state:t().
do(State, LibDirs) ->
BaseDir = rebar_state:dir(State),
Dirs = [filename:join(BaseDir, LibDir) || LibDir <- LibDirs],
- Apps = find_apps(Dirs, all),
+ RebarOpts = rebar_state:opts(State),
+ SrcDirs = rebar_dir:src_dirs(RebarOpts, ["src"]),
+ Apps = find_apps(Dirs, SrcDirs, all, State),
ProjectDeps = rebar_state:deps_names(State),
DepsDir = rebar_dir:deps_dir(State),
CurrentProfiles = rebar_state:current_profiles(State),
@@ -43,18 +52,22 @@ do(State, LibDirs) ->
Name = rebar_app_info:name(AppInfo),
case enable(State, AppInfo) of
true ->
- {AppInfo1, StateAcc1} = merge_deps(AppInfo, StateAcc),
+ {AppInfo1, StateAcc1} = merge_opts(AppInfo, StateAcc),
OutDir = filename:join(DepsDir, Name),
AppInfo2 = rebar_app_info:out_dir(AppInfo1, OutDir),
ProjectDeps1 = lists:delete(Name, ProjectDeps),
rebar_state:project_apps(StateAcc1
,rebar_app_info:deps(AppInfo2, ProjectDeps1));
false ->
- ?INFO("Ignoring ~s", [Name]),
+ ?INFO("Ignoring ~ts", [Name]),
StateAcc
end
end, State1, SortedApps).
+%% @doc checks whether there is an app at the top level (and returns its
+%% name) or the 'root' atom in case we're in an umbrella project.
+-spec define_root_app([rebar_app_info:t()], rebar_state:t()) ->
+ root | binary().
define_root_app(Apps, State) ->
RootDir = rebar_dir:root_dir(State),
case ec_lists:find(fun(X) ->
@@ -67,36 +80,46 @@ define_root_app(Apps, State) ->
root
end.
+%% @doc formatting errors from the module.
+-spec format_error(term()) -> iodata().
format_error({module_list, File}) ->
io_lib:format("Error reading module list from ~p~n", [File]);
format_error({missing_module, Module}) ->
io_lib:format("Module defined in app file missing: ~p~n", [Module]).
-merge_deps(AppInfo, State) ->
+%% @doc merges configuration of a project app and the top level state
+%% some configuration like erl_opts must be merged into a subapp's opts
+%% while plugins and hooks need to be kept defined to only either the
+%% top level state or an individual application.
+-spec merge_opts(rebar_app_info:t(), rebar_state:t()) ->
+ {rebar_app_info:t(), rebar_state:t()}.
+merge_opts(AppInfo, State) ->
%% These steps make sure that hooks and artifacts are run in the context of
%% the application they are defined at. If an umbrella structure is used and
- %% they are deifned at the top level they will instead run in the context of
+ %% they are defined at the top level they will instead run in the context of
%% the State and at the top level, not as part of an application.
- Default = reset_hooks(rebar_state:default(State)),
- {C, State1} = project_app_config(AppInfo, State),
- AppInfo0 = rebar_app_info:update_opts(AppInfo, Default, C),
+ CurrentProfiles = rebar_state:current_profiles(State),
+ {AppInfo1, State1} = maybe_reset_hooks_plugins(AppInfo, State),
- CurrentProfiles = rebar_state:current_profiles(State1),
- Name = rebar_app_info:name(AppInfo0),
+ Name = rebar_app_info:name(AppInfo1),
%% We reset the opts here to default so no profiles are applied multiple times
- AppInfo1 = rebar_app_info:apply_overrides(rebar_state:get(State1, overrides, []), AppInfo0),
- AppInfo2 = rebar_app_info:apply_profiles(AppInfo1, CurrentProfiles),
+ AppInfo2 = rebar_app_info:apply_overrides(rebar_state:get(State1, overrides, []), AppInfo1),
+ AppInfo3 = rebar_app_info:apply_profiles(AppInfo2, CurrentProfiles),
%% Will throw an exception if checks fail
- rebar_app_info:verify_otp_vsn(AppInfo2),
+ rebar_app_info:verify_otp_vsn(AppInfo3),
State2 = lists:foldl(fun(Profile, StateAcc) ->
- handle_profile(Profile, Name, AppInfo2, StateAcc)
+ handle_profile(Profile, Name, AppInfo3, StateAcc)
end, State1, lists:reverse(CurrentProfiles)),
- {AppInfo2, State2}.
+ {AppInfo3, State2}.
+%% @doc Applies a given profile for an app, ensuring the deps
+%% match the context it will require.
+-spec handle_profile(atom(), binary(), rebar_app_info:t(), rebar_state:t()) ->
+ rebar_state:t().
handle_profile(Profile, Name, AppInfo, State) ->
TopParsedDeps = rebar_state:get(State, {parsed_deps, Profile}, {[], []}),
TopLevelProfileDeps = rebar_state:get(State, {deps, Profile}, []),
@@ -113,6 +136,12 @@ handle_profile(Profile, Name, AppInfo, State) ->
State2 = rebar_state:set(State1, {deps, Profile}, ProfileDeps2),
rebar_state:set(State2, {parsed_deps, Profile}, TopParsedDeps++ParsedDeps).
+%% @doc parses all the known dependencies for a given profile
+-spec parse_profile_deps(Profile, Name, Deps, Opts, rebar_state:t()) -> [rebar_app_info:t()] when
+ Profile :: atom(),
+ Name :: binary(),
+ Deps :: [term()], % TODO: refine types
+ Opts :: term(). % TODO: refine types
parse_profile_deps(Profile, Name, Deps, Opts, State) ->
DepsDir = rebar_prv_install_deps:profile_dep_dir(State, Profile),
Locks = rebar_state:get(State, {locks, Profile}, []),
@@ -123,77 +152,195 @@ parse_profile_deps(Profile, Name, Deps, Opts, State) ->
,Locks
,1).
-project_app_config(AppInfo, State) ->
- C = rebar_config:consult(rebar_app_info:dir(AppInfo)),
+%% reset the State hooks if there is a top level application
+-spec maybe_reset_hooks_plugins(AppInfo, State) -> {AppInfo, State} when
+ AppInfo :: rebar_app_info:t(),
+ State :: rebar_state:t().
+maybe_reset_hooks_plugins(AppInfo, State) ->
Dir = rebar_app_info:dir(AppInfo),
- Opts = maybe_reset_hooks(Dir, rebar_state:opts(State), State),
- {C, rebar_state:opts(State, Opts)}.
-
-%% Here we check if the app is at the root of the project.
-%% If it is, then drop the hooks from the config so they aren't run twice
-maybe_reset_hooks(Dir, Opts, State) ->
+ CurrentProfiles = rebar_state:current_profiles(State),
case ec_file:real_dir_path(rebar_dir:root_dir(State)) of
Dir ->
- reset_hooks(Opts);
+ Opts = reset_hooks(rebar_state:opts(State), CurrentProfiles),
+ State1 = rebar_state:opts(State, Opts),
+
+ %% set plugins to empty since this is an app at the top level
+ %% and top level plugins are installed in run_aux
+ AppInfo1 = rebar_app_info:set(rebar_app_info:set(AppInfo, {plugins,default}, []), plugins, []),
+
+ {AppInfo1, State1};
_ ->
- Opts
+ %% if not in the top root directory then we need to merge in the
+ %% default state opts to this subapp's opts
+ Default = reset_hooks(rebar_state:default(State), CurrentProfiles),
+ AppInfo1 = rebar_app_info:update_opts(AppInfo, Default),
+ {AppInfo1, State}
end.
-reset_hooks(Opts) ->
- lists:foldl(fun(Key, OptsAcc) ->
- rebar_opts:set(OptsAcc, Key, [])
- end, Opts, [post_hooks, pre_hooks, provider_hooks, artifacts]).
--spec all_app_dirs(list(file:name())) -> list(file:name()).
+%% @doc make the hooks empty for a given set of options
+-spec reset_hooks(Opts, Profiles) ->
+ Opts when
+ Opts :: rebar_dict(),
+ Profiles :: [atom()].
+reset_hooks(Opts, CurrentProfiles) ->
+ AllHooks = [post_hooks, pre_hooks, provider_hooks, artifacts],
+ Opts1 = lists:foldl(fun(Key, OptsAcc) ->
+ rebar_opts:set(OptsAcc, Key, [])
+ end, Opts, AllHooks),
+ Profiles = rebar_opts:get(Opts1, profiles, []),
+ Profiles1 = lists:map(fun({P, ProfileOpts}) ->
+ case lists:member(P, CurrentProfiles) of
+ true ->
+ {P, [X || X={Key, _} <- ProfileOpts,
+ not lists:member(Key, AllHooks)]};
+ false ->
+ {P, ProfileOpts}
+ end
+ end, Profiles),
+ rebar_opts:set(Opts1, profiles, Profiles1).
+
+%% @private find the directories for all apps, while detecting their source dirs
+%% Returns the app dir with the respective src_dirs for them, in that order,
+%% for every app found.
+-spec all_app_dirs([file:name()]) -> [{file:name(), [file:name()]}].
all_app_dirs(LibDirs) ->
lists:flatmap(fun(LibDir) ->
- app_dirs(LibDir)
+ {_, SrcDirs} = find_config_src(LibDir, ["src"]),
+ app_dirs(LibDir, SrcDirs)
end, LibDirs).
-app_dirs(LibDir) ->
- Path1 = filename:join([LibDir,
- "src",
- "*.app.src"]),
-
- Path2 = filename:join([LibDir,
- "src",
- "*.app.src.script"]),
-
- Path3 = filename:join([LibDir,
- "ebin",
- "*.app"]),
+%% @private find the directories for all apps based on their source dirs
+%% Returns the app dir with the respective src_dirs for them, in that order,
+%% for every app found.
+-spec all_app_dirs([file:name()], [file:name()]) -> [{file:name(), [file:name()]}].
+all_app_dirs(LibDirs, SrcDirs) ->
+ lists:flatmap(fun(LibDir) -> app_dirs(LibDir, SrcDirs) end, LibDirs).
+
+%% @private find the directories based on the library directories.
+%% Returns the app dir with the respective src_dirs for them, in that order,
+%% for every app found.
+%%
+%% The function returns the src directories since they might have been
+%% detected in a top-level loop and we want to skip further detection
+%% starting now.
+-spec app_dirs([file:name()], [file:name()]) -> [{file:name(), [file:name()]}].
+app_dirs(LibDir, SrcDirs) ->
+ Paths = lists:append([
+ [filename:join([LibDir, SrcDir, "*.app.src"]),
+ filename:join([LibDir, SrcDir, "*.app.src.script"])]
+ || SrcDir <- SrcDirs
+ ]),
+ EbinPath = filename:join([LibDir, "ebin", "*.app"]),
lists:usort(lists:foldl(fun(Path, Acc) ->
- Files = filelib:wildcard(ec_cnv:to_list(Path)),
- [app_dir(File) || File <- Files] ++ Acc
- end, [], [Path1, Path2, Path3])).
+ Files = filelib:wildcard(rebar_utils:to_list(Path)),
+ [{app_dir(File), SrcDirs}
+ || File <- Files] ++ Acc
+ end, [], [EbinPath | Paths])).
+%% @doc find all apps that haven't been built in a list of directories
+-spec find_unbuilt_apps([file:filename_all()]) -> [rebar_app_info:t()].
find_unbuilt_apps(LibDirs) ->
find_apps(LibDirs, invalid).
+%% @doc for each directory passed, find all apps that are valid.
+%% Returns all the related app info records.
-spec find_apps([file:filename_all()]) -> [rebar_app_info:t()].
find_apps(LibDirs) ->
find_apps(LibDirs, valid).
+%% @doc for each directory passed, find all apps according
+%% to the validity rule passed in. Returns all the related
+%% app info records.
-spec find_apps([file:filename_all()], valid | invalid | all) -> [rebar_app_info:t()].
find_apps(LibDirs, Validate) ->
- rebar_utils:filtermap(fun(AppDir) ->
- find_app(AppDir, Validate)
- end, all_app_dirs(LibDirs)).
-
+ rebar_utils:filtermap(
+ fun({AppDir, AppSrcDirs}) ->
+ find_app(rebar_app_info:new(), AppDir, AppSrcDirs, Validate)
+ end,
+ all_app_dirs(LibDirs)
+ ).
+
+%% @doc for each directory passed, with the configured source directories,
+%% find all apps according to the validity rule passed in.
+%% Returns all the related app info records.
+-spec find_apps([file:filename_all()], [file:filename_all()], valid | invalid | all, rebar_state:t()) -> [rebar_app_info:t()].
+find_apps(LibDirs, SrcDirs, Validate, State) ->
+ rebar_utils:filtermap(
+ fun({AppDir, AppSrcDirs}) ->
+ find_app(rebar_app_info:new(), AppDir, AppSrcDirs, Validate, State)
+ end,
+ all_app_dirs(LibDirs, SrcDirs)
+ ).
+
+%% @doc check that a given app in a directory is there, and whether it's
+%% valid or not based on the second argument. Returns the related
+%% app info record.
-spec find_app(file:filename_all(), valid | invalid | all) -> {true, rebar_app_info:t()} | false.
find_app(AppDir, Validate) ->
- find_app(rebar_app_info:new(), AppDir, Validate).
-
+ {Config, SrcDirs} = find_config_src(AppDir, ["src"]),
+ AppInfo = rebar_app_info:update_opts(rebar_app_info:dir(rebar_app_info:new(), AppDir),
+ dict:new(), Config),
+ find_app_(AppInfo, AppDir, SrcDirs, Validate).
+
+%% @doc check that a given app in a directory is there, and whether it's
+%% valid or not based on the second argument. Returns the related
+%% app info record.
+-spec find_app(rebar_app_info:t(), file:filename_all(), valid | invalid | all) ->
+ {true, rebar_app_info:t()} | false.
find_app(AppInfo, AppDir, Validate) ->
+ %% if no src dir is passed, figure it out from the app info, with a default
+ %% of src/
+ AppOpts = rebar_app_info:opts(AppInfo),
+ SrcDirs = rebar_dir:src_dirs(AppOpts, ["src"]),
+ find_app_(AppInfo, AppDir, SrcDirs, Validate).
+
+%% @doc check that a given app in a directory is there, and whether it's
+%% valid or not based on the second argument. The third argument includes
+%% the directories where source files can be located. Returns the related
+%% app info record.
+-spec find_app(rebar_app_info:t(), file:filename_all(),
+ [file:filename_all()], valid | invalid | all, rebar_state:t()) ->
+ {true, rebar_app_info:t()} | false.
+find_app(AppInfo, AppDir, SrcDirs, Validate, State) ->
+ AppInfo1 = case ec_file:real_dir_path(rebar_dir:root_dir(State)) of
+ AppDir ->
+ Opts = rebar_state:opts(State),
+ rebar_app_info:default(rebar_app_info:opts(AppInfo, Opts), Opts);
+ _ ->
+ Config = rebar_config:consult(AppDir),
+ rebar_app_info:update_opts(AppInfo, rebar_app_info:opts(AppInfo), Config)
+ end,
+ find_app_(AppInfo1, AppDir, SrcDirs, Validate).
+
+find_app(AppInfo, AppDir, SrcDirs, Validate) ->
+ Config = rebar_config:consult(AppDir),
+ AppInfo1 = rebar_app_info:update_opts(AppInfo, rebar_app_info:opts(AppInfo), Config),
+ find_app_(AppInfo1, AppDir, SrcDirs, Validate).
+
+-spec find_app_(rebar_app_info:t(), file:filename_all(),
+ [file:filename_all()], valid | invalid | all) ->
+ {true, rebar_app_info:t()} | false.
+find_app_(AppInfo, AppDir, SrcDirs, Validate) ->
AppFile = filelib:wildcard(filename:join([AppDir, "ebin", "*.app"])),
- AppSrcFile = filelib:wildcard(filename:join([AppDir, "src", "*.app.src"])),
- AppSrcScriptFile = filelib:wildcard(filename:join([AppDir, "src", "*.app.src.script"])),
+ AppSrcFile = lists:append(
+ [filelib:wildcard(filename:join([AppDir, SrcDir, "*.app.src"]))
+ || SrcDir <- SrcDirs]
+ ),
+ AppSrcScriptFile = lists:append(
+ [filelib:wildcard(filename:join([AppDir, SrcDir, "*.app.src.script"]))
+ || SrcDir <- SrcDirs]
+ ),
try_handle_app_file(AppInfo, AppFile, AppDir, AppSrcFile, AppSrcScriptFile, Validate).
+%% @doc find the directory that an app file is located in
+-spec app_dir(file:filename()) -> file:filename().
app_dir(AppFile) ->
filename:join(rebar_utils:droplast(filename:split(filename:dirname(AppFile)))).
+%% @doc populates an app info record based on an app directory and its
+%% app file.
-spec create_app_info(rebar_app_info:t(), file:name(), file:name()) -> rebar_app_info:t().
create_app_info(AppInfo, AppDir, AppFile) ->
[{application, AppName, AppDetails}] = rebar_config:consult_app_file(AppFile),
@@ -215,8 +362,15 @@ create_app_info(AppInfo, AppDir, AppFile) ->
end,
rebar_app_info:dir(rebar_app_info:valid(AppInfo2, Valid), AppDir).
-%% Read in and parse the .app file if it is availabe. Do the same for
+%% @doc Read in and parse the .app file if it is available. Do the same for
%% the .app.src file if it exists.
+-spec try_handle_app_file(AppInfo, AppFile, AppDir, AppSrcFile, AppSrcScriptFile, valid | invalid | all) ->
+ {true, AppInfo} | false when
+ AppInfo :: rebar_app_info:t(),
+ AppFile :: file:filename(),
+ AppDir :: file:filename(),
+ AppSrcFile :: file:filename(),
+ AppSrcScriptFile :: file:filename().
try_handle_app_file(AppInfo, [], AppDir, [], AppSrcScriptFile, Validate) ->
try_handle_app_src_file(AppInfo, [], AppDir, AppSrcScriptFile, Validate);
try_handle_app_file(AppInfo, [], AppDir, AppSrcFile, _, Validate) ->
@@ -254,32 +408,64 @@ try_handle_app_file(AppInfo0, [File], AppDir, AppSrcFile, _, Validate) ->
end
catch
throw:{error, {Module, Reason}} ->
- ?DEBUG("Falling back to app.src file because .app failed: ~s", [Module:format_error(Reason)]),
+ ?DEBUG("Falling back to app.src file because .app failed: ~ts", [Module:format_error(Reason)]),
try_handle_app_src_file(AppInfo0, File, AppDir, AppSrcFile, Validate)
end;
try_handle_app_file(_AppInfo, Other, _AppDir, _AppSrcFile, _, _Validate) ->
throw({error, {multiple_app_files, Other}}).
-%% Read in the .app.src file if we aren't looking for a valid (already built) app
-try_handle_app_src_file(_AppInfo, _, _AppDir, [], _Validate) ->
- false;
+%% @doc Read in the .app.src file if we aren't looking for a valid (already
+%% built) app.
+-spec try_handle_app_src_file(AppInfo, AppFile, AppDir, AppSrcFile, valid | invalid | all) ->
+ {true, AppInfo} | false when
+ AppInfo :: rebar_app_info:t(),
+ AppFile :: file:filename(),
+ AppDir :: file:filename(),
+ AppSrcFile :: file:filename().
+try_handle_app_src_file(AppInfo, _, _AppDir, [], _Validate) ->
+ %% if no .app or .app.src file is found, check for a mix config file;
+ %% it is assumed a plugin will build the application, including
+ %% its .app file, after this step
+ case filelib:is_file(filename:join(rebar_app_info:dir(AppInfo), "mix.exs")) of
+ true ->
+ {true, rebar_app_info:project_type(AppInfo, mix)};
+ false ->
+ false
+ end;
try_handle_app_src_file(_AppInfo, _, _AppDir, _AppSrcFile, valid) ->
false;
try_handle_app_src_file(AppInfo, _, AppDir, [File], Validate) when Validate =:= invalid
- ; Validate =:= all ->
- AppInfo1 = create_app_info(AppInfo, AppDir, File),
+ ; Validate =:= all ->
+ AppInfo1 = rebar_app_info:app_file(AppInfo, undefined),
+ AppInfo2 = create_app_info(AppInfo1, AppDir, File),
case filename:extension(File) of
".script" ->
- {true, rebar_app_info:app_file_src_script(AppInfo1, File)};
+ {true, rebar_app_info:app_file_src_script(AppInfo2, File)};
_ ->
- {true, rebar_app_info:app_file_src(AppInfo1, File)}
+ {true, rebar_app_info:app_file_src(AppInfo2, File)}
end;
try_handle_app_src_file(_AppInfo, _, _AppDir, Other, _Validate) ->
throw({error, {multiple_app_files, Other}}).
+%% @doc checks whether the given app is not blacklisted in the config.
+-spec enable(rebar_state:t(), rebar_app_info:t()) -> boolean().
enable(State, AppInfo) ->
not lists:member(to_atom(rebar_app_info:name(AppInfo)),
rebar_state:get(State, excluded_apps, [])).
+%% @private convert a binary to an atom.
+-spec to_atom(binary()) -> atom().
to_atom(Bin) ->
list_to_atom(binary_to_list(Bin)).
+
+%% @private when looking for unknown apps, it's possible they have a
+%% rebar.config file specifying non-standard src_dirs. Check for a
+%% possible config file and extract src_dirs from it.
+find_config_src(AppDir, Default) ->
+ case rebar_config:consult(AppDir) of
+ [] ->
+ {[], Default};
+ Terms ->
+ %% TODO: handle profiles I guess, but we don't have that info
+ {Terms, proplists:get_value(src_dirs, Terms, Default)}
+ end.
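As a rough illustration of the src_dirs lookup above (the app path and config values are invented, and find_config_src/2 itself is a private helper, so this only sketches the intended behaviour):

    %% Illustrative sketch only; the directory and its options are hypothetical.
    Config = rebar_config:consult("apps/myapp"),             %% e.g. [{src_dirs, ["src", "gen"]}]
    SrcDirs = proplists:get_value(src_dirs, Config, ["src"]).
    %% => ["src", "gen"] when configured, ["src"] when the key (or file) is absent.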
diff --git a/src/rebar_app_info.erl b/src/rebar_app_info.erl
index cf3b82e..d051a15 100644
--- a/src/rebar_app_info.erl
+++ b/src/rebar_app_info.erl
@@ -7,6 +7,8 @@
new/4,
new/5,
update_opts/3,
+ update_opts/2,
+ update_opts_deps/2,
discover/1,
name/1,
name/2,
@@ -22,7 +24,6 @@
parent/2,
original_vsn/1,
original_vsn/2,
- ebin_dir/1,
priv_dir/1,
applications/1,
applications/2,
@@ -36,6 +37,8 @@
dir/2,
out_dir/1,
out_dir/2,
+ ebin_dir/1,
+ ebin_dir/2,
default/1,
default/2,
opts/1,
@@ -43,16 +46,18 @@
get/2,
get/3,
set/3,
- resource_type/1,
- resource_type/2,
source/1,
source/2,
+ project_type/1,
+ project_type/2,
is_lock/1,
is_lock/2,
is_checkout/1,
is_checkout/2,
valid/1,
valid/2,
+ is_available/1,
+ is_available/2,
verify_otp_vsn/1,
has_all_artifacts/1,
@@ -66,13 +71,16 @@
-include("rebar.hrl").
-include_lib("providers/include/providers.hrl").
--export_type([t/0]).
+-export_type([t/0,
+ project_type/0]).
+
+-type project_type() :: rebar3 | mix | undefined.
--record(app_info_t, {name :: binary(),
+-record(app_info_t, {name :: binary() | undefined,
app_file_src :: file:filename_all() | undefined,
app_file_src_script:: file:filename_all() | undefined,
app_file :: file:filename_all() | undefined,
- original_vsn :: binary() | string() | undefined,
+ original_vsn :: binary() | undefined,
parent=root :: binary() | root,
app_details=[] :: list(),
applications=[] :: list(),
@@ -83,11 +91,13 @@
dep_level=0 :: integer(),
dir :: file:name(),
out_dir :: file:name(),
- resource_type :: pkg | src,
+ ebin_dir :: file:name(),
source :: string() | tuple() | checkout | undefined,
is_lock=false :: boolean(),
is_checkout=false :: boolean(),
- valid :: boolean()}).
+ valid :: boolean() | undefined,
+ project_type :: project_type(),
+ is_available=false :: boolean()}).
%%============================================================================
%% types
@@ -103,54 +113,64 @@
new() ->
#app_info_t{}.
+%% @doc Build a new app info value with only the app name set.
-spec new(atom() | binary() | string()) ->
{ok, t()}.
new(AppName) ->
- {ok, #app_info_t{name=ec_cnv:to_binary(AppName)}}.
+ {ok, #app_info_t{name=rebar_utils:to_binary(AppName)}}.
+%% @doc Build a new app info value with only the name and version set.
-spec new(atom() | binary() | string(), binary() | string()) ->
{ok, t()}.
new(AppName, Vsn) ->
- {ok, #app_info_t{name=ec_cnv:to_binary(AppName),
+ {ok, #app_info_t{name=rebar_utils:to_binary(AppName),
original_vsn=Vsn}}.
%% @doc build a complete version of the app info with all fields set.
-spec new(atom() | binary() | string(), binary() | string(), file:name()) ->
{ok, t()}.
new(AppName, Vsn, Dir) ->
- {ok, #app_info_t{name=ec_cnv:to_binary(AppName),
+ {ok, #app_info_t{name=rebar_utils:to_binary(AppName),
original_vsn=Vsn,
- dir=ec_cnv:to_list(Dir),
- out_dir=ec_cnv:to_list(Dir)}}.
+ dir=rebar_utils:to_list(Dir),
+ out_dir=rebar_utils:to_list(Dir),
+ ebin_dir=filename:join(rebar_utils:to_list(Dir), "ebin")}}.
%% @doc build a complete version of the app info with all fields set.
-spec new(atom() | binary() | string(), binary() | string(), file:name(), list()) ->
{ok, t()}.
new(AppName, Vsn, Dir, Deps) ->
- {ok, #app_info_t{name=ec_cnv:to_binary(AppName),
+ {ok, #app_info_t{name=rebar_utils:to_binary(AppName),
original_vsn=Vsn,
- dir=ec_cnv:to_list(Dir),
- out_dir=ec_cnv:to_list(Dir),
+ dir=rebar_utils:to_list(Dir),
+ out_dir=rebar_utils:to_list(Dir),
+ ebin_dir=filename:join(rebar_utils:to_list(Dir), "ebin"),
deps=Deps}}.
%% @doc build a complete version of the app info with all fields set.
-spec new(atom() | binary(), atom() | binary() | string(), binary() | string(), file:name(), list()) ->
{ok, t()}.
new(Parent, AppName, Vsn, Dir, Deps) ->
- {ok, #app_info_t{name=ec_cnv:to_binary(AppName),
+ {ok, #app_info_t{name=rebar_utils:to_binary(AppName),
parent=Parent,
original_vsn=Vsn,
- dir=ec_cnv:to_list(Dir),
- out_dir=ec_cnv:to_list(Dir),
+ dir=rebar_utils:to_list(Dir),
+ out_dir=rebar_utils:to_list(Dir),
+ ebin_dir=filename:join(rebar_utils:to_list(Dir), "ebin"),
deps=Deps}}.
+%% @doc update the opts based on the contents of a config
+%% file for the app
+-spec update_opts(t(), rebar_dict(), [any()]) -> t().
update_opts(AppInfo, Opts, Config) ->
- LockDeps = case resource_type(AppInfo) of
- pkg ->
- Deps = deps(AppInfo),
- [{{locks, default}, Deps}, {{deps, default}, Deps}];
+ LockDeps = case source(AppInfo) of
+ Tuple when is_tuple(Tuple) andalso element(1, Tuple) =:= pkg ->
+ %% Deps are set separate for packages
+ %% instead of making it seem we have no deps
+ %% don't set anything here.
+ [];
_ ->
- deps_from_config(dir(AppInfo), Config)
+ deps_from_config(dir(AppInfo), proplists:get_value(deps, Config, []))
end,
Plugins = proplists:get_value(plugins, Config, []),
@@ -160,13 +180,32 @@ update_opts(AppInfo, Opts, Config) ->
NewOpts = rebar_opts:merge_opts(LocalOpts, Opts),
- AppInfo#app_info_t{opts=NewOpts
- ,default=NewOpts}.
+ AppInfo#app_info_t{opts=NewOpts,
+ default=NewOpts}.
-deps_from_config(Dir, Config) ->
+%% @doc update current app info opts by merging in a new dict of opts
+-spec update_opts(t(), rebar_dict()) -> t().
+update_opts(AppInfo=#app_info_t{opts=LocalOpts}, Opts) ->
+ NewOpts = rebar_opts:merge_opts(LocalOpts, Opts),
+ AppInfo#app_info_t{opts=NewOpts,
+ default=NewOpts}.
+
+%% @doc update the opts based on new deps, usually from an app's hex registry metadata
+-spec update_opts_deps(t(), [any()]) -> t().
+update_opts_deps(AppInfo=#app_info_t{opts=Opts}, Deps) ->
+ LocalOpts = dict:from_list([{{locks, default}, Deps}, {{deps, default}, Deps}]),
+ NewOpts = rebar_opts:merge_opts(LocalOpts, Opts),
+ AppInfo#app_info_t{opts=NewOpts,
+ default=NewOpts,
+ deps=Deps}.
+
+
+%% @private extract the deps for an app in `Dir' based on its config file data
+-spec deps_from_config(file:filename(), [any()]) -> [{tuple(), any()}, ...].
+deps_from_config(Dir, ConfigDeps) ->
case rebar_config:consult_lock_file(filename:join(Dir, ?LOCK_FILE)) of
[] ->
- [{{deps, default}, proplists:get_value(deps, Config, [])}];
+ [{{deps, default}, ConfigDeps}];
D ->
%% We want the top level deps only from the lock file.
%% This ensures deterministic overrides for configs.
@@ -184,30 +223,48 @@ discover(Dir) ->
not_found
end.
+%% @doc get the name of the app.
-spec name(t()) -> binary().
name(#app_info_t{name=Name}) ->
Name.
+%% @doc set the name of the app.
-spec name(t(), atom() | binary() | string()) -> t().
name(AppInfo=#app_info_t{}, AppName) ->
- AppInfo#app_info_t{name=ec_cnv:to_binary(AppName)}.
+ AppInfo#app_info_t{name=rebar_utils:to_binary(AppName)}.
+%% @doc get the dictionary of options for the app.
+-spec opts(t()) -> rebar_dict().
opts(#app_info_t{opts=Opts}) ->
Opts.
+%% @doc set the dictionary of options for the app.
+-spec opts(t(), rebar_dict()) -> t().
opts(AppInfo, Opts) ->
AppInfo#app_info_t{opts=Opts}.
+%% @doc get the dictionary of options under the default profile.
+%% Represents a root set prior to applying other profiles.
+-spec default(t()) -> rebar_dict().
default(#app_info_t{default=Default}) ->
Default.
+%% @doc set the dictionary of options under the default profile.
+%% Useful when re-applying profile.
+-spec default(t(), rebar_dict()) -> t().
default(AppInfo, Default) ->
AppInfo#app_info_t{default=Default}.
+%% @doc look up a value in the dictionary of options; fails if
+%% the key for it does not exist.
+-spec get(t(), term()) -> term().
get(AppInfo, Key) ->
{ok, Value} = dict:find(Key, AppInfo#app_info_t.opts),
Value.
+%% @doc look up a value in the dictionary of options; returns
+%% a `Default' value otherwise.
+-spec get(t(), term(), term()) -> term().
get(AppInfo, Key, Default) ->
case dict:find(Key, AppInfo#app_info_t.opts) of
{ok, Value} ->
@@ -216,31 +273,35 @@ get(AppInfo, Key, Default) ->
Default
end.
+%% @doc sets a given value in the dictionary of options for the app.
-spec set(t(), any(), any()) -> t().
set(AppInfo=#app_info_t{opts=Opts}, Key, Value) ->
AppInfo#app_info_t{opts = dict:store(Key, Value, Opts)}.
+%% @doc finds the .app.src file for an app, if any.
-spec app_file_src(t()) -> file:filename_all() | undefined.
-app_file_src(#app_info_t{app_file_src=undefined, dir=Dir, name=Name}) ->
- AppFileSrc = filename:join([ec_cnv:to_list(Dir), "src", ec_cnv:to_list(Name)++".app.src"]),
- case filelib:is_file(AppFileSrc) of
- true ->
- AppFileSrc;
- false ->
- undefined
+app_file_src(#app_info_t{app_file_src=undefined, dir=Dir, name=Name, opts=Opts}) ->
+ CandidatePaths = [filename:join([rebar_utils:to_list(Dir), Src, rebar_utils:to_list(Name)++".app.src"])
+ || Src <- rebar_opts:get(Opts, src_dirs, ["src"])],
+ case lists:dropwhile(fun(Path) -> not filelib:is_file(Path) end, CandidatePaths) of
+ [] -> undefined;
+ [AppFileSrc|_] -> AppFileSrc
end;
app_file_src(#app_info_t{app_file_src=AppFileSrc}) ->
- ec_cnv:to_list(AppFileSrc).
+ rebar_utils:to_list(AppFileSrc).
+%% @doc sets the .app.src file for an app. An app without such a file
+%% can explicitly be set with `undefined'.
-spec app_file_src(t(), file:filename_all() | undefined) -> t().
app_file_src(AppInfo=#app_info_t{}, undefined) ->
AppInfo#app_info_t{app_file_src=undefined};
app_file_src(AppInfo=#app_info_t{}, AppFileSrc) ->
- AppInfo#app_info_t{app_file_src=ec_cnv:to_list(AppFileSrc)}.
+ AppInfo#app_info_t{app_file_src=rebar_utils:to_list(AppFileSrc)}.
+%% @doc finds the .app.src.script file for an app, if any.
-spec app_file_src_script(t()) -> file:filename_all() | undefined.
app_file_src_script(#app_info_t{app_file_src_script=undefined, dir=Dir, name=Name}) ->
- AppFileSrcScript = filename:join([ec_cnv:to_list(Dir), "src", ec_cnv:to_list(Name)++".app.src.script"]),
+ AppFileSrcScript = filename:join([rebar_utils:to_list(Dir), "src", rebar_utils:to_list(Name)++".app.src.script"]),
case filelib:is_file(AppFileSrcScript) of
true ->
AppFileSrcScript;
@@ -248,17 +309,20 @@ app_file_src_script(#app_info_t{app_file_src_script=undefined, dir=Dir, name=Nam
undefined
end;
app_file_src_script(#app_info_t{app_file_src_script=AppFileSrcScript}) ->
- ec_cnv:to_list(AppFileSrcScript).
+ rebar_utils:to_list(AppFileSrcScript).
+%% @doc sets the .app.src.script file for an app. An app without such a file
+%% can explicitly be set with `undefined'.
-spec app_file_src_script(t(), file:filename_all()) -> t().
app_file_src_script(AppInfo=#app_info_t{}, undefined) ->
AppInfo#app_info_t{app_file_src_script=undefined};
app_file_src_script(AppInfo=#app_info_t{}, AppFileSrcScript) ->
- AppInfo#app_info_t{app_file_src_script=ec_cnv:to_list(AppFileSrcScript)}.
+ AppInfo#app_info_t{app_file_src_script=rebar_utils:to_list(AppFileSrcScript)}.
+%% @doc finds the .app file for an app, if any.
-spec app_file(t()) -> file:filename_all() | undefined.
app_file(#app_info_t{app_file=undefined, out_dir=Dir, name=Name}) ->
- AppFile = filename:join([ec_cnv:to_list(Dir), "ebin", ec_cnv:to_list(Name)++".app"]),
+ AppFile = filename:join([rebar_utils:to_list(Dir), "ebin", rebar_utils:to_list(Name)++".app"]),
case filelib:is_file(AppFile) of
true ->
AppFile;
@@ -268,136 +332,209 @@ app_file(#app_info_t{app_file=undefined, out_dir=Dir, name=Name}) ->
app_file(#app_info_t{app_file=AppFile}) ->
AppFile.
--spec app_file(t(), file:filename_all()) -> t().
+%% @doc sets the .app file for an app.
+-spec app_file(t(), file:filename_all() | undefined) -> t().
app_file(AppInfo=#app_info_t{}, AppFile) ->
AppInfo#app_info_t{app_file=AppFile}.
+%% @doc returns the information stored in the app's app file,
+%% or if none, from the .app.src file.
-spec app_details(t()) -> list().
app_details(AppInfo=#app_info_t{app_details=[]}) ->
case app_file(AppInfo) of
undefined ->
- rebar_file_utils:try_consult(app_file_src(AppInfo));
+ try rebar_config:consult_app_file(app_file_src(AppInfo)) of
+ [] -> [];
+ [{application, _Name, AppDetails}] -> AppDetails
+ catch
+ _:_ ->
+ []
+ end;
AppFile ->
- try
- rebar_file_utils:try_consult(AppFile)
+ try rebar_file_utils:try_consult(AppFile) of
+ [] -> [];
+ [{application, _Name, AppDetails}] -> AppDetails
catch
throw:{error, {Module, Reason}} ->
- ?DEBUG("Warning, falling back to .app.src because of: ~s",
+ ?DEBUG("Warning, falling back to .app.src because of: ~ts",
[Module:format_error(Reason)]),
- rebar_file_utils:try_consult(app_file_src(AppInfo))
+ case rebar_config:consult_app_file(app_file_src(AppInfo)) of
+ [] -> [];
+ [{application, _Name, AppDetails}] -> AppDetails
+ end
end
end;
app_details(#app_info_t{app_details=AppDetails}) ->
AppDetails.
+%% @doc stores the information that would be returned from the
+%% app file, when reading from `app_details/1'.
-spec app_details(t(), list()) -> t().
app_details(AppInfo=#app_info_t{}, AppDetails) ->
AppInfo#app_info_t{app_details=AppDetails}.
+%% @doc returns the app's parent in the dep tree.
+-spec parent(t()) -> root | binary().
parent(#app_info_t{parent=Parent}) ->
Parent.
+%% @doc sets the app's parent.
-spec parent(t(), binary() | root) -> t().
parent(AppInfo=#app_info_t{}, Parent) ->
AppInfo#app_info_t{parent=Parent}.
--spec original_vsn(t()) -> string().
+%% @doc returns the original version of the app (unevaluated if
+%% asking for a semver)
+-spec original_vsn(t()) -> binary().
original_vsn(#app_info_t{original_vsn=Vsn}) ->
Vsn.
--spec original_vsn(t(), string()) -> t().
+%% @doc stores the original version of the app (unevaluated if
+%% asking for a semver)
+-spec original_vsn(t(), binary() | string()) -> t().
original_vsn(AppInfo=#app_info_t{}, Vsn) ->
AppInfo#app_info_t{original_vsn=Vsn}.
+%% @doc returns the list of applications the app depends on.
-spec applications(t()) -> list().
applications(#app_info_t{applications=Applications}) ->
Applications.
+%% @doc sets the list of applications the app depends on.
+%% Should be obtained from the app file.
-spec applications(t(), list()) -> t().
applications(AppInfo=#app_info_t{}, Applications) ->
AppInfo#app_info_t{applications=Applications}.
+%% @doc returns the list of active profiles
-spec profiles(t()) -> list().
profiles(#app_info_t{profiles=Profiles}) ->
Profiles.
+%% @doc sets the list of active profiles
-spec profiles(t(), list()) -> t().
profiles(AppInfo=#app_info_t{}, Profiles) ->
AppInfo#app_info_t{profiles=Profiles}.
+%% @doc returns the list of dependencies
-spec deps(t()) -> list().
deps(#app_info_t{deps=Deps}) ->
Deps.
+%% @doc sets the list of dependencies.
-spec deps(t(), list()) -> t().
deps(AppInfo=#app_info_t{}, Deps) ->
AppInfo#app_info_t{deps=Deps}.
-dep_level(AppInfo=#app_info_t{}, Level) ->
- AppInfo#app_info_t{dep_level=Level}.
-
+%% @doc returns the level the app has in the lock files or in the
+%% dep tree.
+-spec dep_level(t()) -> non_neg_integer().
dep_level(#app_info_t{dep_level=Level}) ->
Level.
+%% @doc sets the level the app has in the lock files or in the
+%% dep tree.
+-spec dep_level(t(), non_neg_integer()) -> t().
+dep_level(AppInfo=#app_info_t{}, Level) ->
+ AppInfo#app_info_t{dep_level=Level}.
+
+%% @doc returns the directory that contains the app.
-spec dir(t()) -> file:name().
dir(#app_info_t{dir=Dir}) ->
Dir.
+%% @doc sets the directory that contains the app.
-spec dir(t(), file:name()) -> t().
dir(AppInfo=#app_info_t{out_dir=undefined}, Dir) ->
- AppInfo#app_info_t{dir=ec_cnv:to_list(Dir),
- out_dir=ec_cnv:to_list(Dir)};
+ AppInfo#app_info_t{dir=rebar_utils:to_list(Dir),
+ out_dir=rebar_utils:to_list(Dir)};
dir(AppInfo=#app_info_t{}, Dir) ->
- AppInfo#app_info_t{dir=ec_cnv:to_list(Dir)}.
+ AppInfo#app_info_t{dir=rebar_utils:to_list(Dir)}.
+%% @doc returns the directory where build artifacts for the app
+%% should go
-spec out_dir(t()) -> file:name().
out_dir(#app_info_t{out_dir=OutDir}) ->
OutDir.
+%% @doc sets the directory where build artifacts for the app
+%% should go
-spec out_dir(t(), file:name()) -> t().
out_dir(AppInfo=#app_info_t{}, OutDir) ->
- AppInfo#app_info_t{out_dir=ec_cnv:to_list(OutDir)}.
+ AppInfo#app_info_t{out_dir=rebar_utils:to_list(OutDir),
+ ebin_dir=filename:join(rebar_utils:to_list(OutDir), "ebin")}.
+%% @doc gets the directory where ebin files for the app should go
-spec ebin_dir(t()) -> file:name().
-ebin_dir(#app_info_t{out_dir=OutDir}) ->
- ec_cnv:to_list(filename:join(OutDir, "ebin")).
-
+ebin_dir(#app_info_t{ebin_dir=undefined,
+ out_dir=OutDir}) ->
+ filename:join(rebar_utils:to_list(OutDir), "ebin");
+ebin_dir(#app_info_t{ebin_dir=EbinDir}) ->
+ EbinDir.
+
+%% @doc sets the directory where beam files should go
+-spec ebin_dir(t(), file:name()) -> t().
+ebin_dir(AppInfo, EbinDir) ->
+ AppInfo#app_info_t{ebin_dir=EbinDir}.
+
+%% @doc gets the directory where private files for the app should go
-spec priv_dir(t()) -> file:name().
priv_dir(#app_info_t{out_dir=OutDir}) ->
- ec_cnv:to_list(filename:join(OutDir, "priv")).
+ rebar_utils:to_list(filename:join(OutDir, "priv")).
--spec resource_type(t(), pkg | src) -> t().
-resource_type(AppInfo=#app_info_t{}, Type) ->
- AppInfo#app_info_t{resource_type=Type}.
-
--spec resource_type(t()) -> pkg | src.
-resource_type(#app_info_t{resource_type=ResourceType}) ->
- ResourceType.
+%% @doc finds the source specification for the app
+-spec source(t()) -> string() | tuple().
+source(#app_info_t{source=Source}) ->
+ Source.
+%% @doc sets the source specification for the app
-spec source(t(), string() | tuple() | checkout) -> t().
source(AppInfo=#app_info_t{}, Source) ->
AppInfo#app_info_t{source=Source}.
--spec source(t()) -> string() | tuple().
-source(#app_info_t{source=Source}) ->
- Source.
+%% @doc returns the lock status for the app
+-spec is_lock(t()) -> boolean().
+is_lock(#app_info_t{is_lock=IsLock}) ->
+ IsLock.
+%% @doc sets the lock status for the app
-spec is_lock(t(), boolean()) -> t().
is_lock(AppInfo=#app_info_t{}, IsLock) ->
AppInfo#app_info_t{is_lock=IsLock}.
--spec is_lock(t()) -> boolean().
-is_lock(#app_info_t{is_lock=IsLock}) ->
- IsLock.
+%% @doc returns whether the app is a checkout app or not
+-spec is_checkout(t()) -> boolean().
+is_checkout(#app_info_t{is_checkout=IsCheckout}) ->
+ IsCheckout.
+%% @doc sets whether the app is a checkout app or not
-spec is_checkout(t(), boolean()) -> t().
is_checkout(AppInfo=#app_info_t{}, IsCheckout) ->
AppInfo#app_info_t{is_checkout=IsCheckout}.
--spec is_checkout(t()) -> boolean().
-is_checkout(#app_info_t{is_checkout=IsCheckout}) ->
- IsCheckout.
+%% @doc returns whether the app source exists in the deps dir
+-spec is_available(t()) -> boolean().
+is_available(#app_info_t{is_available=IsAvailable}) ->
+ IsAvailable.
+
+%% @doc sets whether the app's source is available;
+%% only set if the app's source is found in the expected dep directory
+-spec is_available(t(), boolean()) -> t().
+is_available(AppInfo=#app_info_t{}, IsAvailable) ->
+ AppInfo#app_info_t{is_available=IsAvailable}.
+
+%% @doc returns the project type of the app (e.g. rebar3 or mix)
+-spec project_type(t()) -> atom().
+project_type(#app_info_t{project_type=ProjectType}) ->
+ ProjectType.
+
+%% @doc sets the project type of the app
+-spec project_type(t(), atom()) -> t().
+project_type(AppInfo=#app_info_t{}, ProjectType) ->
+ AppInfo#app_info_t{project_type=ProjectType}.
+
+%% @doc returns whether the app is valid (built) or not
-spec valid(t()) -> boolean().
valid(AppInfo=#app_info_t{valid=undefined}) ->
case rebar_app_utils:validate_application_info(AppInfo) =:= true
@@ -410,14 +547,22 @@ valid(AppInfo=#app_info_t{valid=undefined}) ->
valid(#app_info_t{valid=Valid}) ->
Valid.
+%% @doc sets whether the app is valid (built) or not. If left unset,
+%% rebar3 will do the detection of the status itself.
-spec valid(t(), boolean()) -> t().
valid(AppInfo=#app_info_t{}, Valid) ->
AppInfo#app_info_t{valid=Valid}.
+%% @doc checks whether the app can be built with the current
+%% Erlang/OTP version. If the check fails, the function raises
+%% an exception and displays an error.
+-spec verify_otp_vsn(t()) -> ok | no_return().
verify_otp_vsn(AppInfo) ->
rebar_utils:check_min_otp_version(rebar_app_info:get(AppInfo, minimum_otp_vsn, undefined)),
rebar_utils:check_blacklisted_otp_versions(rebar_app_info:get(AppInfo, blacklisted_otp_vsns, [])).
+%% @doc checks whether all the build artifacts for an app to be considered
+%% valid are present.
-spec has_all_artifacts(#app_info_t{}) -> true | {false, file:filename()}.
has_all_artifacts(AppInfo) ->
Artifacts = rebar_app_info:get(AppInfo, artifacts, []),
@@ -427,13 +572,17 @@ has_all_artifacts(AppInfo) ->
,{out_dir, OutDir}],
all(OutDir, Context, Artifacts).
+%% @private checks that all files/artifacts in the directory are found.
+%% Template evaluation must happen and a bbmustache context needs to
+%% be provided.
+-spec all(file:filename(), term(), [string()]) -> true | {false, string()}.
all(_, _, []) ->
true;
all(Dir, Context, [File|Artifacts]) ->
FilePath = filename:join(Dir, rebar_templater:render(File, Context)),
case filelib:is_regular(FilePath) of
false ->
- ?DEBUG("Missing artifact ~s", [FilePath]),
+ ?DEBUG("Missing artifact ~ts", [FilePath]),
{false, File};
true ->
all(Dir, Context, Artifacts)
@@ -441,15 +590,23 @@ all(Dir, Context, [File|Artifacts]) ->
%%%%%
+%% @doc given a set of override rules, modify the app info accordingly
+-spec apply_overrides(list(), t()) -> t().
apply_overrides(Overrides, AppInfo) ->
Name = binary_to_atom(rebar_app_info:name(AppInfo), utf8),
Opts = rebar_opts:apply_overrides(opts(AppInfo), Name, Overrides),
AppInfo#app_info_t{default=Opts, opts=Opts}.
+%% @doc adds a new profile with its own config to the app data
+-spec add_to_profile(t(), atom(), [{_,_}]) -> t().
add_to_profile(AppInfo, Profile, KVs) when is_atom(Profile), is_list(KVs) ->
Opts = rebar_opts:add_to_profile(opts(AppInfo), Profile, KVs),
AppInfo#app_info_t{opts=Opts}.
+%% @doc applies and merges the profile configuration in the specified order
+%% of profiles (or for a single profile) and returns an app info record
+%% with the resulting configuration
+-spec apply_profiles(t(), atom() | [atom(),...]) -> t().
apply_profiles(AppInfo, Profile) when not is_list(Profile) ->
apply_profiles(AppInfo, [Profile]);
apply_profiles(AppInfo, [default]) ->
@@ -481,9 +638,13 @@ apply_profiles(AppInfo=#app_info_t{default = Defaults, profiles=CurrentProfiles}
end, Defaults, AppliedProfiles),
AppInfo#app_info_t{profiles = AppliedProfiles, opts=NewOpts}.
+%% @private drops duplicated profile definitions
+-spec deduplicate(list()) -> list().
deduplicate(Profiles) ->
do_deduplicate(lists:reverse(Profiles), []).
+%% @private drops duplicated profile definitions
+-spec do_deduplicate(list(), list()) -> list().
do_deduplicate([], Acc) ->
Acc;
do_deduplicate([Head | Rest], Acc) ->
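A small usage sketch of the accessors added above (the app name and paths are illustrative, not taken from a real project):

    %% Sketch: new/3 seeds dir, out_dir and ebin_dir; out_dir/2 keeps ebin_dir in sync.
    {ok, App0} = rebar_app_info:new(myapp, <<"1.0.0">>, "/tmp/myapp"),
    "/tmp/myapp/ebin" = rebar_app_info:ebin_dir(App0),
    App1 = rebar_app_info:out_dir(App0, "/tmp/_build/default/lib/myapp"),
    "/tmp/_build/default/lib/myapp/ebin" = rebar_app_info:ebin_dir(App1).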
diff --git a/src/rebar_app_utils.erl b/src/rebar_app_utils.erl
index d256cac..5fe5ba6 100644
--- a/src/rebar_app_utils.erl
+++ b/src/rebar_app_utils.erl
@@ -34,6 +34,7 @@
validate_application_info/2,
parse_deps/5,
parse_deps/6,
+ expand_deps_sources/2,
dep_to_app/7,
format_error/1]).
@@ -44,10 +45,14 @@
%% Public API
%% ===================================================================
+%% @doc finds the proper app info record for a given app name in a list of
+%% such records.
-spec find(binary(), [rebar_app_info:t()]) -> {ok, rebar_app_info:t()} | error.
find(Name, Apps) ->
ec_lists:find(fun(App) -> rebar_app_info:name(App) =:= Name end, Apps).
+%% @doc finds the proper app info record for a given app name at a given version
+%% in a list of such records.
-spec find(binary(), binary(), [rebar_app_info:t()]) -> {ok, rebar_app_info:t()} | error.
find(Name, Vsn, Apps) ->
ec_lists:find(fun(App) ->
@@ -55,11 +60,18 @@ find(Name, Vsn, Apps) ->
andalso rebar_app_info:original_vsn(App) =:= Vsn
end, Apps).
+%% @doc checks if a given file is a .app.src file
is_app_src(Filename) ->
%% If removing the extension .app.src yields a shorter name,
%% this is an .app.src file.
Filename =/= filename:rootname(Filename, ".app.src").
+%% @doc translates the name of the .app.src[.script] file to where
+%% its .app counterpart should be stored.
+-spec app_src_to_app(OutDir, SrcFilename) -> OutFilename when
+ OutDir :: file:filename(),
+ SrcFilename :: file:filename(),
+ OutFilename :: file:filename().
app_src_to_app(OutDir, Filename) ->
AppFile =
case lists:suffix(".app.src", Filename) of
@@ -72,10 +84,16 @@ app_src_to_app(OutDir, Filename) ->
filelib:ensure_dir(AppFile),
AppFile.
+%% @doc checks whether the .app file has all the required data to be valid,
+%% and cross-references it with compiled modules on disk
-spec validate_application_info(rebar_app_info:t()) -> boolean().
validate_application_info(AppInfo) ->
validate_application_info(AppInfo, rebar_app_info:app_details(AppInfo)).
+%% @doc checks whether the .app file has all the required data to be valid
+%% and cross-references it with compiled modules on disk.
+%% The app info is passed explicitly as a second argument.
+-spec validate_application_info(rebar_app_info:t(), list()) -> boolean().
validate_application_info(AppInfo, AppDetail) ->
EbinDir = rebar_app_info:ebin_dir(AppInfo),
case rebar_app_info:app_file(AppInfo) of
@@ -90,13 +108,37 @@ validate_application_info(AppInfo, AppDetail) ->
end
end.
--spec parse_deps(binary(), list(), rebar_state:t(), list(), integer()) -> [rebar_app_info:t()].
+%% @doc parses all dependencies from the root of the project
+-spec parse_deps(Dir, Deps, State, Locks, Level) -> [rebar_app_info:t()] when
+ Dir :: file:filename(),
+ Deps :: [tuple() | atom() | binary()], % TODO: meta to source() | lock()
+ State :: rebar_state:t(),
+ Locks :: [tuple()], % TODO: meta to [lock()]
+ Level :: non_neg_integer().
parse_deps(DepsDir, Deps, State, Locks, Level) ->
parse_deps(root, DepsDir, Deps, State, Locks, Level).
+%% @doc runs `parse_dep/6' for a set of dependencies.
+-spec parse_deps(Parent, Dir, Deps, State, Locks, Level) -> [rebar_app_info:t()] when
+ Parent :: root | binary(),
+ Dir :: file:filename(),
+ Deps :: [tuple() | atom() | binary()], % TODO: meta to source() | lock()
+ State :: rebar_state:t(),
+ Locks :: [tuple()], % TODO: meta to [lock()]
+ Level :: non_neg_integer().
parse_deps(Parent, DepsDir, Deps, State, Locks, Level) ->
[parse_dep(Dep, Parent, DepsDir, State, Locks, Level) || Dep <- Deps].
+%% @doc for a given dep, return its app info record. The function
+%% also has to choose whether to define the dep from its immediate spec
+%% (if it is a newer thing) or from the locks specified in the lockfile.
+-spec parse_dep(Dep, Parent, Dir, State, Locks, Level) -> rebar_app_info:t() when
+ Dep :: tuple() | atom() | binary(), % TODO: meta to source() | lock()
+ Parent :: root | binary(),
+ Dir :: file:filename(),
+ State :: rebar_state:t(),
+ Locks :: [tuple()], % TODO: meta to [lock()]
+ Level :: non_neg_integer().
parse_dep(Dep, Parent, DepsDir, State, Locks, Level) ->
Name = case Dep of
Dep when is_tuple(Dep) ->
@@ -104,7 +146,7 @@ parse_dep(Dep, Parent, DepsDir, State, Locks, Level) ->
Dep ->
Dep
end,
- case lists:keyfind(ec_cnv:to_binary(Name), 1, Locks) of
+ case lists:keyfind(rebar_utils:to_binary(Name), 1, Locks) of
false ->
parse_dep(Parent, Dep, DepsDir, false, State);
LockedDep ->
@@ -117,19 +159,29 @@ parse_dep(Dep, Parent, DepsDir, State, Locks, Level) ->
end
end.
+%% @doc converts a dependency definition and a location for it on disk
+%% into an app info tuple representing it.
+-spec parse_dep(Parent, Dep, Dir, IsLock, State) -> rebar_app_info:t() when
+ Parent :: root | binary(),
+ Dep :: tuple() | atom() | binary(), % TODO: meta to source() | lock()
+ Dir :: file:filename(),
+ IsLock :: boolean(),
+ State :: rebar_state:t().
parse_dep(Parent, {Name, Vsn, {pkg, PkgName}}, DepsDir, IsLock, State) ->
- {PkgName1, PkgVsn} = {ec_cnv:to_binary(PkgName), ec_cnv:to_binary(Vsn)},
+ {PkgName1, PkgVsn} = {rebar_utils:to_binary(PkgName),
+ rebar_utils:to_binary(Vsn)},
dep_to_app(Parent, DepsDir, Name, PkgVsn, {pkg, PkgName1, PkgVsn, undefined}, IsLock, State);
parse_dep(Parent, {Name, {pkg, PkgName}}, DepsDir, IsLock, State) ->
%% Package dependency with different package name from app name
- dep_to_app(Parent, DepsDir, Name, undefined, {pkg, ec_cnv:to_binary(PkgName), undefined, undefined}, IsLock, State);
+ dep_to_app(Parent, DepsDir, Name, undefined, {pkg, rebar_utils:to_binary(PkgName), undefined, undefined}, IsLock, State);
parse_dep(Parent, {Name, Vsn}, DepsDir, IsLock, State) when is_list(Vsn); is_binary(Vsn) ->
%% Versioned Package dependency
- {PkgName, PkgVsn} = {ec_cnv:to_binary(Name), ec_cnv:to_binary(Vsn)},
+ {PkgName, PkgVsn} = {rebar_utils:to_binary(Name),
+ rebar_utils:to_binary(Vsn)},
dep_to_app(Parent, DepsDir, PkgName, PkgVsn, {pkg, PkgName, PkgVsn, undefined}, IsLock, State);
parse_dep(Parent, Name, DepsDir, IsLock, State) when is_atom(Name); is_binary(Name) ->
%% Unversioned package dependency
- dep_to_app(Parent, DepsDir, ec_cnv:to_binary(Name), undefined, {pkg, ec_cnv:to_binary(Name), undefined, undefined}, IsLock, State);
+ dep_to_app(Parent, DepsDir, rebar_utils:to_binary(Name), undefined, {pkg, rebar_utils:to_binary(Name), undefined, undefined}, IsLock, State);
parse_dep(Parent, {Name, Source}, DepsDir, IsLock, State) when is_tuple(Source) ->
dep_to_app(Parent, DepsDir, Name, [], Source, IsLock, State);
parse_dep(Parent, {Name, _Vsn, Source}, DepsDir, IsLock, State) when is_tuple(Source) ->
@@ -152,62 +204,81 @@ parse_dep(Parent, {Name, Source, Level}, DepsDir, IsLock, State) when is_tuple(S
parse_dep(_, Dep, _, _, _) ->
throw(?PRV_ERROR({parse_dep, Dep})).
+%% @doc convert a dependency that has just been fetched into
+%% an app info record related to it
+-spec dep_to_app(Parent, Dir, Name, Vsn, Source, IsLock, State) -> rebar_app_info:t() when
+ Parent :: root | binary(),
+ Dir :: file:filename(),
+ Name :: binary(),
+ Vsn :: iodata() | undefined,
+ Source :: tuple(),
+ IsLock :: boolean(),
+ State :: rebar_state:t().
dep_to_app(Parent, DepsDir, Name, Vsn, Source, IsLock, State) ->
- CheckoutsDir = ec_cnv:to_list(rebar_dir:checkouts_dir(State, Name)),
+ CheckoutsDir = rebar_utils:to_list(rebar_dir:checkouts_dir(State, Name)),
AppInfo = case rebar_app_info:discover(CheckoutsDir) of
- {ok, App} ->
- rebar_app_info:source(rebar_app_info:is_checkout(App, true), checkout);
- not_found ->
- Dir = ec_cnv:to_list(filename:join(DepsDir, Name)),
- {ok, AppInfo0} =
- case rebar_app_info:discover(Dir) of
- {ok, App} ->
- {ok, rebar_app_info:parent(App, Parent)};
- not_found ->
- rebar_app_info:new(Parent, Name, Vsn, Dir, [])
- end,
- update_source(AppInfo0, Source, State)
- end,
- C = rebar_config:consult(rebar_app_info:dir(AppInfo)),
- AppInfo1 = rebar_app_info:update_opts(AppInfo, rebar_app_info:opts(AppInfo), C),
- Overrides = rebar_state:get(State, overrides, []),
- AppInfo2 = rebar_app_info:set(AppInfo1, overrides, rebar_app_info:get(AppInfo, overrides, [])++Overrides),
- AppInfo3 = rebar_app_info:apply_overrides(rebar_app_info:get(AppInfo2, overrides, []), AppInfo2),
- AppInfo4 = rebar_app_info:apply_profiles(AppInfo3, [default, prod]),
- AppInfo5 = rebar_app_info:profiles(AppInfo4, [default]),
+ {ok, App} ->
+ rebar_app_info:source(rebar_app_info:is_checkout(App, true), checkout);
+ not_found ->
+ Dir = rebar_utils:to_list(filename:join(DepsDir, Name)),
+ {ok, AppInfo0} =
+ case rebar_app_info:discover(Dir) of
+ {ok, App} ->
+ App1 = rebar_app_info:name(App, Name),
+ {ok, rebar_app_info:is_available(rebar_app_info:parent(App1, Parent),
+ true)};
+ not_found ->
+ rebar_app_info:new(Parent, Name, Vsn, Dir, [])
+ end,
+ rebar_app_info:source(AppInfo0, Source)
+ end,
+ Overrides = rebar_app_info:get(AppInfo, overrides, []) ++ rebar_state:get(State, overrides, []),
+ AppInfo2 = rebar_app_info:set(AppInfo, overrides, Overrides),
+ AppInfo5 = rebar_app_info:profiles(AppInfo2, [default]),
rebar_app_info:is_lock(AppInfo5, IsLock).
+%% @doc Takes a given application app_info record along with the project.
+%% If the app is a package, resolve and expand the package definition.
+-spec expand_deps_sources(rebar_app_info:t(), rebar_state:t()) ->
+ rebar_app_info:t().
+expand_deps_sources(Dep, State) ->
+ update_source(Dep, rebar_app_info:source(Dep), State).
+
+%% @doc sets the source for a given dependency or app along with metadata
+%% around version if required.
+-spec update_source(rebar_app_info:t(), Source, rebar_state:t()) ->
+ rebar_app_info:t() when
+ Source :: rebar_resource_v2:source().
update_source(AppInfo, {pkg, PkgName, PkgVsn, Hash}, State) ->
- {PkgName1, PkgVsn1} = case PkgVsn of
- undefined ->
- get_package(PkgName, "0", State);
- <<"~>", Vsn/binary>> ->
- [Vsn1] = binary:split(Vsn, [<<" ">>], [trim_all, global]),
- get_package(PkgName, Vsn1, State);
- _ ->
- {PkgName, PkgVsn}
- end,
- %% store the expected hash for the dependency
- Hash1 = case Hash of
- undefined -> % unknown, define the hash since we know the dep
- rebar_packages:registry_checksum({pkg, PkgName1, PkgVsn1, Hash}, State);
- _ -> % keep as is
- Hash
- end,
- AppInfo1 = rebar_app_info:source(AppInfo, {pkg, PkgName1, PkgVsn1, Hash1}),
- Deps = rebar_packages:deps(PkgName1
- ,PkgVsn1
- ,State),
- AppInfo2 = rebar_app_info:resource_type(rebar_app_info:deps(AppInfo1, Deps), pkg),
- rebar_app_info:original_vsn(AppInfo2, PkgVsn1);
+ case rebar_packages:resolve_version(PkgName, PkgVsn, Hash,
+ ?PACKAGE_TABLE, State) of
+ {ok, Package, RepoConfig} ->
+ #package{key={_, PkgVsn1, _},
+ checksum=Hash1,
+ dependencies=Deps,
+ retired=Retired} = Package,
+ maybe_warn_retired(PkgName, PkgVsn1, Hash, Retired),
+ PkgVsn2 = list_to_binary(lists:flatten(ec_semver:format(PkgVsn1))),
+ AppInfo1 = rebar_app_info:source(AppInfo, {pkg, PkgName, PkgVsn2, Hash1, RepoConfig}),
+ rebar_app_info:update_opts_deps(AppInfo1, Deps);
+ not_found ->
+ throw(?PRV_ERROR({missing_package, PkgName, PkgVsn}));
+ {error, {invalid_vsn, InvalidVsn}} ->
+ throw(?PRV_ERROR({invalid_vsn, PkgName, InvalidVsn}))
+ end;
update_source(AppInfo, Source, _State) ->
rebar_app_info:source(AppInfo, Source).
-
-format_error({missing_package, Package}) ->
- io_lib:format("Package not found in registry: ~s", [Package]);
+%% @doc convert a given exception's payload into an io description.
+-spec format_error(any()) -> iolist().
+format_error({missing_package, Name, undefined}) ->
+ io_lib:format("Package not found in any repo: ~ts", [rebar_utils:to_binary(Name)]);
+format_error({missing_package, Name, Constraint}) ->
+ io_lib:format("Package not found in any repo: ~ts ~ts", [Name, Constraint]);
format_error({parse_dep, Dep}) ->
io_lib:format("Failed parsing dep ~p", [Dep]);
+format_error({invalid_vsn, Dep, InvalidVsn}) ->
+ io_lib:format("Dep ~ts has invalid version ~ts", [Dep, InvalidVsn]);
format_error(Error) ->
io_lib:format("~p", [Error]).
@@ -215,18 +286,38 @@ format_error(Error) ->
%% Internal functions
%% ===================================================================
-get_package(Dep, Vsn, State) ->
- case rebar_packages:find_highest_matching(Dep, Vsn, ?PACKAGE_TABLE, State) of
- {ok, HighestDepVsn} ->
- {Dep, HighestDepVsn};
- none ->
- throw(?PRV_ERROR({missing_package, ec_cnv:to_binary(Dep)}))
- end.
+maybe_warn_retired(_, _, _, false) ->
+ ok;
+maybe_warn_retired(_, _, Hash, _) when is_binary(Hash) ->
+ %% don't warn if this is a lock
+ ok;
+maybe_warn_retired(Name, Vsn, _, R=#{reason := Reason}) ->
+ Message = maps:get(message, R, ""),
+ ?WARN("Warning: package ~s-~s is retired: (~s) ~s",
+ [Name, ec_semver:format(Vsn), retire_reason(Reason), Message]);
+maybe_warn_retired(_, _, _, _) ->
+ ok.
+
+%% TODO: move to hex_core
+retire_reason('RETIRED_OTHER') ->
+ "other";
+retire_reason('RETIRED_INVALID') ->
+ "invalid";
+retire_reason('RETIRED_SECURITY') ->
+ "security";
+retire_reason('RETIRED_DEPRECATED') ->
+ "deprecated";
+retire_reason('RETIRED_RENAMED') ->
+ "renamed";
+retire_reason(_Other) ->
+ "other".
+%% @private checks that all the beam files have been properly
+%% created.
-spec has_all_beams(file:filename_all(), [module()]) ->
true | ?PRV_ERROR({missing_module, module()}).
has_all_beams(EbinDir, [Module | ModuleList]) ->
- BeamFile = filename:join([EbinDir, ec_cnv:to_list(Module) ++ ".beam"]),
+ BeamFile = filename:join([EbinDir, rebar_utils:to_list(Module) ++ ".beam"]),
case filelib:is_file(BeamFile) of
true ->
has_all_beams(EbinDir, ModuleList);
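For reference, the dependency spec shapes accepted by the parse_dep/5 clauses above correspond roughly to a rebar.config entry like the following (the package names, URL and tag are purely illustrative):

    {deps, [
        cowlib,                                               %% unversioned package
        {jsx, "2.9.0"},                                        %% versioned package
        {pc, {pkg, pc_compat}},                                %% package under a different name
        {meck, {git, "https://github.com/eproxus/meck.git",    %% source dependency
                {tag, "0.8.12"}}}
    ]}.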
diff --git a/src/rebar_base_compiler.erl b/src/rebar_base_compiler.erl
index 6b8c7ca..ad81c86 100644
--- a/src/rebar_base_compiler.erl
+++ b/src/rebar_base_compiler.erl
@@ -33,28 +33,73 @@
run/8,
ok_tuple/2,
error_tuple/4,
+ report/1,
+ maybe_report/1,
format_error_source/2]).
--define(DEFAULT_COMPILER_SOURCE_FORMAT, relative).
+-type desc() :: term().
+-type loc() :: {line(), col()} | line().
+-type line() :: integer().
+-type col() :: integer().
+-type err_or_warn() :: {module(), desc()} | {loc(), module(), desc()}.
+
+-type compile_fn_ret() :: ok | {ok, [string()]} | skipped | term().
+-type compile_fn() :: fun((file:filename(), [{_,_}] | rebar_dict()) -> compile_fn_ret()).
+-type compile_fn3() :: fun((file:filename(), file:filename(), [{_,_}] | rebar_dict())
+ -> compile_fn_ret()).
+-type error_tuple() :: {error, [string()], [string()]}.
+-export_type([compile_fn/0, compile_fn_ret/0, error_tuple/0]).
%% ===================================================================
%% Public API
%% ===================================================================
+%% @doc Runs a compile job, applying `compile_fn()' to all files,
+%% starting with `First' files, and then `RestFiles'.
+-spec run(rebar_dict() | [{_,_}] , [First], [Next], compile_fn()) ->
+ compile_fn_ret() when
+ First :: file:filename(),
+ Next :: file:filename().
run(Config, FirstFiles, RestFiles, CompileFn) ->
%% Compile the first files in sequence
compile_each(FirstFiles++RestFiles, Config, CompileFn).
+%% @doc Runs a compile job, applying `compile_fn3()' to all files,
+%% starting with `First' files, and then the other content of `SourceDir'.
+%% Files looked for are those ending in `SourceExt'. Results of the
+%% compilation are put in `TargetDir' with the base file names
+%% postfixed with `TargetExt'.
+-spec run(rebar_dict() | [{_,_}] , [First], SourceDir, SourceExt,
+ TargetDir, TargetExt, compile_fn3()) -> compile_fn_ret() when
+ First :: file:filename(),
+ SourceDir :: file:filename(),
+ TargetDir :: file:filename(),
+ SourceExt :: string(),
+ TargetExt :: string().
run(Config, FirstFiles, SourceDir, SourceExt, TargetDir, TargetExt,
Compile3Fn) ->
run(Config, FirstFiles, SourceDir, SourceExt, TargetDir, TargetExt,
Compile3Fn, [check_last_mod]).
+%% @doc Runs a compile job, applying `compile_fn3()' to all files,
+%% starting with `First' files, and then the other content of `SourceDir'.
+%% Files looked for are those ending in `SourceExt'. Results of the
+%% compilation are put in `TargetDir' with the base file names
+%% postfixed with `TargetExt'.
+%% Additional compile options can be passed in the last argument as
+%% a proplist.
+-spec run(rebar_dict() | [{_,_}] , [First], SourceDir, SourceExt,
+ TargetDir, TargetExt, compile_fn3(), [term()]) -> compile_fn_ret() when
+ First :: file:filename(),
+ SourceDir :: file:filename(),
+ TargetDir :: file:filename(),
+ SourceExt :: string(),
+ TargetExt :: string().
run(Config, FirstFiles, SourceDir, SourceExt, TargetDir, TargetExt,
Compile3Fn, Opts) ->
%% Convert simple extension to proper regex
- SourceExtRe = "^[^._].*\\" ++ SourceExt ++ [$$],
+ SourceExtRe = "^(?!\\._).*\\" ++ SourceExt ++ [$$],
Recursive = proplists:get_value(recursive, Opts, true),
%% Find all possible source files
@@ -68,44 +113,43 @@ run(Config, FirstFiles, SourceDir, SourceExt, TargetDir, TargetExt,
run(Config, FirstFiles, RestFiles,
fun(S, C) ->
- Target = target_file(S, SourceDir, SourceExt,
+ Target = target_file(S, SourceExt,
TargetDir, TargetExt),
simple_compile_wrapper(S, Target, Compile3Fn, C, CheckLastMod)
end).
+%% @doc Format successful compiler results that carry warnings so they work
+%% with module internals. Assumes that warnings are not treated as errors.
+-spec ok_tuple(file:filename(), [string()]) -> {ok, [string()]}.
ok_tuple(Source, Ws) ->
{ok, format_warnings(Source, Ws)}.
+%% @doc format error and warning strings for a given source file
+%% according to user preferences.
+-spec error_tuple(file:filename(), [Err], [Warn], rebar_dict() | [{_,_}]) ->
+ error_tuple() when
+ Err :: string(),
+ Warn :: string().
error_tuple(Source, Es, Ws, Opts) ->
{error, format_errors(Source, Es),
format_warnings(Source, Ws, Opts)}.
+%% @doc from a given path, and based on the user-provided options,
+%% format the file path according to the preferences.
+-spec format_error_source(file:filename(), rebar_dict() | [{_,_}]) ->
+ file:filename().
format_error_source(Path, Opts) ->
- Type = case rebar_opts:get(Opts, compiler_source_format,
- ?DEFAULT_COMPILER_SOURCE_FORMAT) of
- V when V == absolute; V == relative; V == build ->
- V;
- Other ->
- ?WARN("Invalid argument ~p for compiler_source_format - "
- "assuming ~s~n", [Other, ?DEFAULT_COMPILER_SOURCE_FORMAT]),
- ?DEFAULT_COMPILER_SOURCE_FORMAT
- end,
- case Type of
- absolute -> resolve_linked_source(Path);
- build -> Path;
- relative ->
- Cwd = rebar_dir:get_cwd(),
- rebar_dir:make_relative_path(resolve_linked_source(Path), Cwd)
- end.
-
-resolve_linked_source(Src) ->
- {Dir, Base} = rebar_file_utils:split_dirname(Src),
- filename:join(rebar_file_utils:resolve_link(Dir), Base).
+ rebar_dir:format_source_file_name(Path, Opts).
%% ===================================================================
%% Internal functions
%% ===================================================================
+%% @private if a check for last modifications is required, do the verification
+%% and possibly skip the compile job.
+-spec simple_compile_wrapper(Source, Target, compile_fn3(), [{_,_}] | rebar_dict(), boolean()) -> compile_fn_ret() when
+ Source :: file:filename(),
+ Target :: file:filename().
simple_compile_wrapper(Source, Target, Compile3Fn, Config, false) ->
Compile3Fn(Source, Target, Config);
simple_compile_wrapper(Source, Target, Compile3Fn, Config, true) ->
@@ -116,51 +160,76 @@ simple_compile_wrapper(Source, Target, Compile3Fn, Config, true) ->
skipped
end.
-target_file(SourceFile, SourceDir, SourceExt, TargetDir, TargetExt) ->
- BaseFile = remove_common_path(SourceFile, SourceDir),
- filename:join([TargetDir, filename:basename(BaseFile, SourceExt) ++ TargetExt]).
-
-remove_common_path(Fname, Path) ->
- remove_common_path1(filename:split(Fname), filename:split(Path)).
-
-remove_common_path1([Part | RestFilename], [Part | RestPath]) ->
- remove_common_path1(RestFilename, RestPath);
-remove_common_path1(FilenameParts, _) ->
- filename:join(FilenameParts).
+%% @private take a source file along with its extension, plus a target directory
+%% and extension, and build the file path and name of the compile artifact.
+-spec target_file(SourceFile, SourceExt, TargetDir, TargetExt) -> File when
+ SourceFile :: file:filename(),
+ TargetDir :: file:filename(),
+ SourceExt :: string(),
+ TargetExt :: string(),
+ File :: file:filename().
+target_file(SourceFile, SourceExt, TargetDir, TargetExt) ->
+ %% BaseFile = remove_common_path(SourceFile, SourceDir),
+ filename:join([TargetDir, filename:basename(SourceFile, SourceExt) ++ TargetExt]).
+%% @private runs the compile function `CompileFn' on every file
+%% passed internally, along with the related project configuration.
+%% If any errors are encountered, they're reported to stdout.
+-spec compile_each([file:filename()], Config, CompileFn) -> Ret | no_return() when
+ Config :: [{_,_}] | rebar_dict(),
+ CompileFn :: compile_fn(),
+ Ret :: compile_fn_ret().
compile_each([], _Config, _CompileFn) ->
ok;
compile_each([Source | Rest], Config, CompileFn) ->
case CompileFn(Source, Config) of
ok ->
- ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), filename:basename(Source)]);
+ ?DEBUG("~tsCompiled ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
{ok, Warnings} ->
report(Warnings),
- ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), filename:basename(Source)]);
+ ?DEBUG("~tsCompiled ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
skipped ->
- ?DEBUG("~sSkipped ~s", [rebar_utils:indent(1), filename:basename(Source)]);
+ ?DEBUG("~tsSkipped ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
Error ->
NewSource = format_error_source(Source, Config),
- ?ERROR("Compiling ~s failed", [NewSource]),
+ ?ERROR("Compiling ~ts failed", [NewSource]),
maybe_report(Error),
?DEBUG("Compilation failed: ~p", [Error]),
?FAIL
end,
compile_each(Rest, Config, CompileFn).
+%% @private Formats and returns errors ready to be output.
+-spec format_errors(string(), [err_or_warn()]) -> [string()].
format_errors(Source, Errors) ->
format_errors(Source, "", Errors).
+%% @private Formats and returns warning strings ready to be output.
+-spec format_warnings(string(), [err_or_warn()]) -> [string()].
format_warnings(Source, Warnings) ->
format_warnings(Source, Warnings, []).
+%% @private Formats and returns warnings; chooses the distinct format they
+%% may have based on whether `warnings_as_errors' option is on.
+-spec format_warnings(string(), [err_or_warn()], rebar_dict() | [{_,_}]) -> [string()].
format_warnings(Source, Warnings, Opts) ->
- Prefix = case lists:member(warnings_as_errors, Opts) of
+ %% `Opts' can be passed in either as a list or a dictionary depending
+ %% on whether the first call to rebar_erlc_compiler was done with
+ %% the type `rebar_dict()' or `rebar_state:t()'.
+ LookupFn = if is_list(Opts) -> fun lists:member/2
+ ; true -> fun dict:is_key/2
+ end,
+ Prefix = case LookupFn(warnings_as_errors, Opts) of
true -> "";
false -> "Warning: "
end,
format_errors(Source, Prefix, Warnings).
+%% @private output compiler errors if they're judged to be reportable.
+-spec maybe_report(Reportable | term()) -> ok when
+ Reportable :: {{error, error_tuple()}, Source} | error_tuple() | ErrProps,
+ ErrProps :: [{error, string()} | Source, ...],
+ Source :: {source, string()}.
maybe_report({{error, {error, _Es, _Ws}=ErrorsAndWarnings}, {source, _}}) ->
maybe_report(ErrorsAndWarnings);
maybe_report([{error, E}, {source, S}]) ->
@@ -171,21 +240,39 @@ maybe_report({error, Es, Ws}) ->
maybe_report(_) ->
ok.
+%% @private Outputs a bunch of strings, including a newline
+-spec report([string()]) -> ok.
report(Messages) ->
- lists:foreach(fun(Msg) -> io:format("~s~n", [Msg]) end, Messages).
+ lists:foreach(fun(Msg) -> io:format("~ts~n", [Msg]) end, Messages).
+%% @private format compiler errors into proper outputtable strings
+-spec format_errors(_, Extra, [err_or_warn()]) -> [string()] when
+ Extra :: string().
format_errors(_MainSource, Extra, Errors) ->
- [begin
- [format_error(Source, Extra, Desc) || Desc <- Descs]
- end
+ [[format_error(Source, Extra, Desc) || Desc <- Descs]
|| {Source, Descs} <- Errors].
+%% @private format compiler errors into proper outputtable strings
+-spec format_error(file:filename(), Extra, err_or_warn()) -> string() when
+ Extra :: string().
+format_error(Source, Extra, {Line, Mod=epp, Desc={include,lib,File}}) ->
+ %% Special case for include file errors, overriding the default formatting
+ BaseDesc = Mod:format_error(Desc),
+ Friendly = case filename:split(File) of
+ [Lib, "include", _] ->
+ io_lib:format("; Make sure ~s is in your app "
+ "file's 'applications' list", [Lib]);
+ _ ->
+ ""
+ end,
+ FriendlyDesc = BaseDesc ++ Friendly,
+ ?FMT("~ts:~w: ~ts~ts~n", [Source, Line, Extra, FriendlyDesc]);
format_error(Source, Extra, {{Line, Column}, Mod, Desc}) ->
ErrorDesc = Mod:format_error(Desc),
- ?FMT("~s:~w:~w: ~s~s~n", [Source, Line, Column, Extra, ErrorDesc]);
+ ?FMT("~ts:~w:~w: ~ts~ts~n", [Source, Line, Column, Extra, ErrorDesc]);
format_error(Source, Extra, {Line, Mod, Desc}) ->
ErrorDesc = Mod:format_error(Desc),
- ?FMT("~s:~w: ~s~s~n", [Source, Line, Extra, ErrorDesc]);
+ ?FMT("~ts:~w: ~ts~ts~n", [Source, Line, Extra, ErrorDesc]);
format_error(Source, Extra, {Mod, Desc}) ->
ErrorDesc = Mod:format_error(Desc),
- ?FMT("~s: ~s~s~n", [Source, Extra, ErrorDesc]).
+ ?FMT("~ts: ~ts~ts~n", [Source, Extra, ErrorDesc]).
diff --git a/src/rebar_compiler.erl b/src/rebar_compiler.erl
new file mode 100644
index 0000000..7da265c
--- /dev/null
+++ b/src/rebar_compiler.erl
@@ -0,0 +1,315 @@
+-module(rebar_compiler).
+
+-export([compile_all/2,
+ clean/2,
+
+ needs_compile/3,
+ ok_tuple/2,
+ error_tuple/4,
+ maybe_report/1,
+ format_error_source/2,
+ report/1]).
+
+-include("rebar.hrl").
+
+-type extension() :: string().
+-type out_mappings() :: [{extension(), file:filename()}].
+
+-callback context(rebar_app_info:t()) -> #{src_dirs => [file:dirname()],
+ include_dirs => [file:dirname()],
+ src_ext => extension(),
+ out_mappings => out_mappings()}.
+-callback needed_files(digraph:graph(), [file:filename()], out_mappings(),
+ rebar_app_info:t()) ->
+ {{[file:filename()], term()}, {[file:filename()], term()}}.
+-callback dependencies(file:filename(), file:dirname(), [file:dirname()]) -> [file:filename()].
+-callback compile(file:filename(), out_mappings(), rebar_dict(), list()) ->
+ ok | {ok, [string()]} | {ok, [string()], [string()]}.
+-callback clean([file:filename()], rebar_app_info:t()) -> _.
+
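+%% As a purely hypothetical sketch, a module implementing this behaviour
+%% (the module name, extension and mapping below are invented) would start as:
+%%
+%%   -module(rebar_compiler_foo).
+%%   -behaviour(rebar_compiler).
+%%   -export([context/1, needed_files/4, dependencies/3, compile/4, clean/2]).
+%%
+%%   context(AppInfo) ->
+%%       #{src_dirs => ["src"],
+%%         include_dirs => [],
+%%         src_ext => ".foo",
+%%         out_mappings => [{".beam", rebar_app_info:ebin_dir(AppInfo)}]}.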
+-define(DAG_VSN, 2).
+-define(DAG_FILE, "source.dag").
+-type dag_v() :: {digraph:vertex(), term()} | 'false'.
+-type dag_e() :: {digraph:vertex(), digraph:vertex()}.
+-type dag() :: {list(dag_v()), list(dag_e()), list(string())}.
+-record(dag, {vsn = ?DAG_VSN :: pos_integer(),
+ info = {[], [], []} :: dag()}).
+
+-define(RE_PREFIX, "^(?!\\._)").
+
+compile_all(Compilers, AppInfo) ->
+ EbinDir = rebar_utils:to_list(rebar_app_info:ebin_dir(AppInfo)),
+ %% Make sure that outdir is on the path
+ ok = rebar_file_utils:ensure_dir(EbinDir),
+ true = code:add_patha(filename:absname(EbinDir)),
+
+ %% necessary for erlang:function_exported/3 to work as expected
+ %% called here for clarity as it's required by both opts_changed/2
+ %% and erl_compiler_opts_set/0 in needed_files
+ _ = code:ensure_loaded(compile),
+
+ lists:foreach(fun(CompilerMod) ->
+ run(CompilerMod, AppInfo),
+ run_on_extra_src_dirs(CompilerMod, AppInfo, fun run/2)
+ end, Compilers),
+ ok.
+
+run(CompilerMod, AppInfo) ->
+ #{src_dirs := SrcDirs,
+ include_dirs := InclDirs,
+ src_ext := SrcExt,
+ out_mappings := Mappings} = CompilerMod:context(AppInfo),
+
+ BaseDir = rebar_utils:to_list(rebar_app_info:dir(AppInfo)),
+ EbinDir = rebar_utils:to_list(rebar_app_info:ebin_dir(AppInfo)),
+
+ BaseOpts = rebar_app_info:opts(AppInfo),
+ AbsInclDirs = [filename:join(BaseDir, InclDir) || InclDir <- InclDirs],
+ FoundFiles = find_source_files(BaseDir, SrcExt, SrcDirs, BaseOpts),
+
+ OutDir = rebar_app_info:out_dir(AppInfo),
+ AbsSrcDirs = [filename:join(BaseDir, SrcDir) || SrcDir <- SrcDirs],
+ G = init_dag(CompilerMod, AbsInclDirs, AbsSrcDirs, FoundFiles, OutDir, EbinDir),
+ {{FirstFiles, FirstFileOpts}, {RestFiles, Opts}} = CompilerMod:needed_files(G, FoundFiles,
+ Mappings, AppInfo),
+ true = digraph:delete(G),
+
+ compile_each(FirstFiles, FirstFileOpts, BaseOpts, Mappings, CompilerMod),
+ compile_each(RestFiles, Opts, BaseOpts, Mappings, CompilerMod).
+
+compile_each([], _Opts, _Config, _Outs, _CompilerMod) ->
+ ok;
+compile_each([Source | Rest], Opts, Config, Outs, CompilerMod) ->
+ case CompilerMod:compile(Source, Outs, Config, Opts) of
+ ok ->
+ ?DEBUG("~tsCompiled ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
+ {ok, Warnings} ->
+ report(Warnings),
+ ?DEBUG("~tsCompiled ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
+ skipped ->
+ ?DEBUG("~tsSkipped ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
+ Error ->
+ NewSource = format_error_source(Source, Config),
+ ?ERROR("Compiling ~ts failed", [NewSource]),
+ maybe_report(Error),
+ ?DEBUG("Compilation failed: ~p", [Error]),
+ ?FAIL
+ end,
+ compile_each(Rest, Opts, Config, Outs, CompilerMod).
+
+%% @doc remove compiled artifacts from an AppDir.
+-spec clean([module()], rebar_app_info:t()) -> 'ok'.
+clean(Compilers, AppInfo) ->
+ lists:foreach(fun(CompilerMod) ->
+ clean_(CompilerMod, AppInfo),
+ run_on_extra_src_dirs(CompilerMod, AppInfo, fun clean_/2)
+ end, Compilers).
+
+clean_(CompilerMod, AppInfo) ->
+ #{src_dirs := SrcDirs,
+ src_ext := SrcExt} = CompilerMod:context(AppInfo),
+ BaseDir = rebar_app_info:dir(AppInfo),
+ Opts = rebar_app_info:opts(AppInfo),
+ EbinDir = rebar_app_info:ebin_dir(AppInfo),
+
+ FoundFiles = find_source_files(BaseDir, SrcExt, SrcDirs, Opts),
+ CompilerMod:clean(FoundFiles, AppInfo),
+ rebar_file_utils:rm_rf(dag_file(CompilerMod, EbinDir)).
+
+-spec needs_compile(file:filename_all(), extension(), [{extension(), file:dirname()}]) -> boolean().
+needs_compile(Source, OutExt, Mappings) ->
+ Ext = filename:extension(Source),
+ BaseName = filename:basename(Source, Ext),
+ {_, OutDir} = lists:keyfind(OutExt, 1, Mappings),
+ Target = filename:join(OutDir, BaseName++OutExt),
+ filelib:last_modified(Source) > filelib:last_modified(Target).
+
+run_on_extra_src_dirs(CompilerMod, AppInfo, Fun) ->
+ ExtraDirs = rebar_dir:extra_src_dirs(rebar_app_info:opts(AppInfo), []),
+ run_on_extra_src_dirs(ExtraDirs, CompilerMod, AppInfo, Fun).
+
+run_on_extra_src_dirs([], _CompilerMod, _AppInfo, _Fun) ->
+ ok;
+run_on_extra_src_dirs([Dir | Rest], CompilerMod, AppInfo, Fun) ->
+ case filelib:is_dir(filename:join(rebar_app_info:dir(AppInfo), Dir)) of
+ true ->
+ EbinDir = filename:join(rebar_app_info:out_dir(AppInfo), Dir),
+ AppInfo1 = rebar_app_info:ebin_dir(AppInfo, EbinDir),
+ AppInfo2 = rebar_app_info:set(AppInfo1, src_dirs, [Dir]),
+ AppInfo3 = rebar_app_info:set(AppInfo2, extra_src_dirs, ["src"]),
+ Fun(CompilerMod, AppInfo3);
+ _ ->
+ ok
+ end,
+ run_on_extra_src_dirs(Rest, CompilerMod, AppInfo, Fun).
+
+%% These functions are here with the ultimate goal of getting rid of
+%% rebar_base_compiler. That can't be done yet because existing plugins still rely on it.
+
+ok_tuple(Source, Ws) ->
+ rebar_base_compiler:ok_tuple(Source, Ws).
+
+error_tuple(Source, Es, Ws, Opts) ->
+ rebar_base_compiler:error_tuple(Source, Es, Ws, Opts).
+
+maybe_report(Reportable) ->
+ rebar_base_compiler:maybe_report(Reportable).
+
+format_error_source(Path, Opts) ->
+ rebar_base_compiler:format_error_source(Path, Opts).
+
+report(Messages) ->
+ rebar_base_compiler:report(Messages).
+
+%% private functions
+
+find_source_files(BaseDir, SrcExt, SrcDirs, Opts) ->
+ SourceExtRe = "^(?!\\._).*\\" ++ SrcExt ++ [$$],
+ lists:flatmap(fun(SrcDir) ->
+ Recursive = rebar_dir:recursive(Opts, SrcDir),
+ rebar_utils:find_files_in_dirs([filename:join(BaseDir, SrcDir)], SourceExtRe, Recursive)
+ end, SrcDirs).
+
+dag_file(CompilerMod, Dir) ->
+ filename:join([rebar_dir:local_cache_dir(Dir), CompilerMod, ?DAG_FILE]).
+
+%% private graph functions
+
+%% Get the dependency graph of the given Erl files and their dependencies (header files,
+%% parse transforms, behaviours etc.) located in their directories or in the given
+%% InclDirs. Note that the last modification times stored in vertices already respect
+%% the dependencies induced by the graph G.
+init_dag(Compiler, InclDirs, SrcDirs, Erls, Dir, EbinDir) ->
+ G = digraph:new([acyclic]),
+ try restore_dag(Compiler, G, InclDirs, Dir)
+ catch
+ _:_ ->
+ ?WARN("Failed to restore ~ts file. Discarding it.~n", [dag_file(Compiler, Dir)]),
+ file:delete(dag_file(Compiler, Dir))
+ end,
+ Dirs = lists:usort(InclDirs ++ SrcDirs),
+ %% A source file may have been renamed or deleted. Remove it from the graph
+ %% and remove any beam file for that source if it exists.
+ Modified = maybe_rm_beams_and_edges(G, EbinDir, Erls),
+ Modified1 = lists:foldl(update_dag_fun(G, Compiler, Dirs), Modified, Erls),
+ if Modified1 -> store_dag(Compiler, G, InclDirs, Dir); not Modified1 -> ok end,
+ G.
+
+maybe_rm_beams_and_edges(G, Dir, Files) ->
+ Vertices = digraph:vertices(G),
+ case lists:filter(fun(File) ->
+ case filename:extension(File) =:= ".erl" of
+ true ->
+ maybe_rm_beam_and_edge(G, Dir, File);
+ false ->
+ false
+ end
+ end, lists:sort(Vertices) -- lists:sort(Files)) of
+ [] ->
+ false;
+ _ ->
+ true
+ end.
+
+maybe_rm_beam_and_edge(G, OutDir, Source) ->
+ %% This is NOT a double check; it is the only check that the source file is actually gone
+ case filelib:is_regular(Source) of
+ true ->
+ %% Actually exists, don't delete
+ false;
+ false ->
+ Target = target_base(OutDir, Source) ++ ".beam",
+ ?DEBUG("Source ~ts is gone, deleting previous beam file if it exists ~ts", [Source, Target]),
+ file:delete(Target),
+ digraph:del_vertex(G, Source),
+ true
+ end.
+
+
+target_base(OutDir, Source) ->
+ filename:join(OutDir, filename:basename(Source, ".erl")).
+
+restore_dag(Compiler, G, InclDirs, Dir) ->
+ case file:read_file(dag_file(Compiler, Dir)) of
+ {ok, Data} ->
+ % Since externally passed InclDirs can influence the dependency graph (see
+ % modify_dag), we have to check here that they didn't change.
+ #dag{vsn=?DAG_VSN, info={Vs, Es, InclDirs}} =
+ binary_to_term(Data),
+ lists:foreach(
+ fun({V, LastUpdated}) ->
+ digraph:add_vertex(G, V, LastUpdated)
+ end, Vs),
+ lists:foreach(
+ fun({_, V1, V2, _}) ->
+ digraph:add_edge(G, V1, V2)
+ end, Es);
+ {error, _} ->
+ ok
+ end.
+
+store_dag(Compiler, G, InclDirs, Dir) ->
+ Vs = lists:map(fun(V) -> digraph:vertex(G, V) end, digraph:vertices(G)),
+ Es = lists:map(fun(E) -> digraph:edge(G, E) end, digraph:edges(G)),
+ File = dag_file(Compiler, Dir),
+ ok = filelib:ensure_dir(File),
+ Data = term_to_binary(#dag{info={Vs, Es, InclDirs}}, [{compressed, 2}]),
+ file:write_file(File, Data).
+
+update_dag(G, Compiler, Dirs, Source) ->
+ case digraph:vertex(G, Source) of
+ {_, LastUpdated} ->
+ case filelib:last_modified(Source) of
+ 0 ->
+ %% The file doesn't exist anymore,
+ %% erase it from the graph.
+ %% All the edges will be erased automatically.
+ digraph:del_vertex(G, Source),
+ modified;
+ LastModified when LastUpdated < LastModified ->
+ modify_dag(G, Compiler, Source, LastModified, filename:dirname(Source), Dirs);
+ _ ->
+ Modified = lists:foldl(
+ update_dag_fun(G, Compiler, Dirs),
+ false, digraph:out_neighbours(G, Source)),
+ MaxModified = update_max_modified_deps(G, Source),
+ case Modified orelse MaxModified > LastUpdated of
+ true -> modified;
+ false -> unmodified
+ end
+ end;
+ false ->
+ modify_dag(G, Compiler, Source, filelib:last_modified(Source), filename:dirname(Source), Dirs)
+ end.
+
+modify_dag(G, Compiler, Source, LastModified, SourceDir, Dirs) ->
+ AbsIncls = Compiler:dependencies(Source, SourceDir, Dirs),
+ digraph:add_vertex(G, Source, LastModified),
+ digraph:del_edges(G, digraph:out_edges(G, Source)),
+ lists:foreach(
+ fun(Incl) ->
+ update_dag(G, Compiler, Dirs, Incl),
+ digraph:add_edge(G, Source, Incl)
+ end, AbsIncls),
+ modified.
+
+update_dag_fun(G, Compiler, Dirs) ->
+ fun(Erl, Modified) ->
+ case update_dag(G, Compiler, Dirs, Erl) of
+ modified -> true;
+ unmodified -> Modified
+ end
+ end.
+
+update_max_modified_deps(G, Source) ->
+ MaxModified =
+ lists:foldl(fun(File, Acc) ->
+ case digraph:vertex(G, File) of
+ {_, MaxModified} when MaxModified > Acc ->
+ MaxModified;
+ _ ->
+ Acc
+ end
+ end, 0, [Source | digraph:out_neighbours(G, Source)]),
+ digraph:add_vertex(G, Source, MaxModified),
+ MaxModified.
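
The callbacks declared at the top of rebar_compiler.erl are the whole contract for a compiler module: context/1 says where sources live and where outputs go, needed_files/4 picks what to rebuild (with the dependency DAG available), dependencies/3 lists per-file dependencies, and compile/4 and clean/2 do the actual work. A minimal sketch of a custom implementation, modeled on the xrl/yrl modules further down in this patch, might look as follows; the module name, the ".foo" extension and the foo_opts key are hypothetical:

    -module(rebar_compiler_foo).
    -behaviour(rebar_compiler).
    -export([context/1, needed_files/4, dependencies/3, compile/4, clean/2]).

    context(AppInfo) ->
        Dir = rebar_app_info:dir(AppInfo),
        #{src_dirs => ["src"],
          include_dirs => [],
          src_ext => ".foo",                             %% hypothetical extension
          out_mappings => [{".erl", filename:join(Dir, "src")}]}.

    needed_files(_Graph, FoundFiles, Mappings, AppInfo) ->
        %% rebuild only sources newer than their generated .erl targets
        Rest = [F || F <- FoundFiles,
                     rebar_compiler:needs_compile(F, ".erl", Mappings)],
        Opts = rebar_opts:get(rebar_app_info:opts(AppInfo), foo_opts, []),
        {{[], Opts}, {Rest, Opts}}.

    dependencies(_Source, _SourceDir, _IncludeDirs) ->
        [].

    compile(_Source, [{".erl", _OutDir}], _Config, _Opts) ->
        %% translate the source into an .erl file under the output dir here
        ok.

    clean(Files, _AppInfo) ->
        rebar_file_utils:delete_each(
          [rebar_utils:to_list(re:replace(F, "\\.foo$", ".erl", [unicode]))
           || F <- Files]).
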
diff --git a/src/rebar_compiler_erl.erl b/src/rebar_compiler_erl.erl
new file mode 100644
index 0000000..0a560cd
--- /dev/null
+++ b/src/rebar_compiler_erl.erl
@@ -0,0 +1,359 @@
+-module(rebar_compiler_erl).
+
+-behaviour(rebar_compiler).
+
+-export([context/1,
+ needed_files/4,
+ dependencies/3,
+ compile/4,
+ clean/2]).
+
+-include("rebar.hrl").
+
+context(AppInfo) ->
+ EbinDir = rebar_app_info:ebin_dir(AppInfo),
+ Mappings = [{".beam", EbinDir}],
+
+ OutDir = rebar_app_info:dir(AppInfo),
+ SrcDirs = rebar_dir:src_dirs(rebar_app_info:opts(AppInfo), ["src"]),
+ ExistingSrcDirs = lists:filter(fun(D) ->
+ ec_file:is_dir(filename:join(OutDir, D))
+ end, SrcDirs),
+
+ RebarOpts = rebar_app_info:opts(AppInfo),
+ ErlOpts = rebar_opts:erl_opts(RebarOpts),
+ ErlOptIncludes = proplists:get_all_values(i, ErlOpts),
+ InclDirs = lists:map(fun(Incl) -> filename:absname(Incl) end, ErlOptIncludes),
+
+ #{src_dirs => ExistingSrcDirs,
+ include_dirs => [filename:join([OutDir, "include"]) | InclDirs],
+ src_ext => ".erl",
+ out_mappings => Mappings}.
+
+
+needed_files(Graph, FoundFiles, _, AppInfo) ->
+ OutDir = rebar_app_info:out_dir(AppInfo),
+ Dir = rebar_app_info:dir(AppInfo),
+ EbinDir = rebar_app_info:ebin_dir(AppInfo),
+ RebarOpts = rebar_app_info:opts(AppInfo),
+ ErlOpts = rebar_opts:erl_opts(RebarOpts),
+ ?DEBUG("erlopts ~p", [ErlOpts]),
+ ?DEBUG("files to compile ~p", [FoundFiles]),
+
+ %% Make sure that the ebin dir is on the path
+ ok = rebar_file_utils:ensure_dir(EbinDir),
+ true = code:add_patha(filename:absname(EbinDir)),
+
+ {ParseTransforms, Rest} = split_source_files(FoundFiles, ErlOpts),
+ NeededErlFiles = case needed_files(Graph, ErlOpts, RebarOpts, OutDir, EbinDir, ParseTransforms) of
+ [] ->
+ needed_files(Graph, ErlOpts, RebarOpts, OutDir, EbinDir, Rest);
+ _ ->
+ %% at least one parse transform in the opts needs updating, so recompile all
+ FoundFiles
+ end,
+ {ErlFirstFiles, ErlOptsFirst} = erl_first_files(RebarOpts, ErlOpts, Dir, NeededErlFiles),
+ SubGraph = digraph_utils:subgraph(Graph, NeededErlFiles),
+ DepErlsOrdered = digraph_utils:topsort(SubGraph),
+ OtherErls = lists:reverse(DepErlsOrdered),
+
+ PrivIncludes = [{i, filename:join(OutDir, Src)}
+ || Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])],
+ AdditionalOpts = PrivIncludes ++ [{i, filename:join(OutDir, "include")}, {i, OutDir}, return],
+
+ true = digraph:delete(SubGraph),
+
+ {{ErlFirstFiles, ErlOptsFirst ++ AdditionalOpts},
+ {[Erl || Erl <- OtherErls,
+ not lists:member(Erl, ErlFirstFiles)], ErlOpts ++ AdditionalOpts}}.
+
+dependencies(Source, SourceDir, Dirs) ->
+ {ok, Fd} = file:open(Source, [read]),
+ Incls = parse_attrs(Fd, [], SourceDir),
+ AbsIncls = expand_file_names(Incls, Dirs),
+ ok = file:close(Fd),
+ AbsIncls.
+
+compile(Source, [{_, OutDir}], Config, ErlOpts) ->
+ case compile:file(Source, [{outdir, OutDir} | ErlOpts]) of
+ {ok, _Mod} ->
+ ok;
+ {ok, _Mod, []} ->
+ ok;
+ {ok, _Mod, Ws} ->
+ FormattedWs = format_error_sources(Ws, Config),
+ rebar_compiler:ok_tuple(Source, FormattedWs);
+ {error, Es, Ws} ->
+ error_tuple(Source, Es, Ws, Config, ErlOpts);
+ error ->
+ error
+ end.
+
+clean(Files, AppInfo) ->
+ EbinDir = rebar_app_info:ebin_dir(AppInfo),
+ [begin
+ Source = filename:basename(File, ".erl"),
+ Target = target_base(EbinDir, Source) ++ ".beam",
+ file:delete(Target)
+ end || File <- Files].
+
+%%
+
+error_tuple(Module, Es, Ws, AllOpts, Opts) ->
+ FormattedEs = format_error_sources(Es, AllOpts),
+ FormattedWs = format_error_sources(Ws, AllOpts),
+ rebar_compiler:error_tuple(Module, FormattedEs, FormattedWs, Opts).
+
+format_error_sources(Es, Opts) ->
+ [{rebar_compiler:format_error_source(Src, Opts), Desc}
+ || {Src, Desc} <- Es].
+
+%% Get the files that need to be compiled first, i.e. those specified in erl_first_files
+%% and parse_transform options. Also produce specific erl_opts for these first
+%% files, so that yet-to-be-compiled parse transforms are excluded from them.
+erl_first_files(Opts, ErlOpts, Dir, NeededErlFiles) ->
+ ErlFirstFilesConf = rebar_opts:get(Opts, erl_first_files, []),
+ valid_erl_first_conf(ErlFirstFilesConf),
+ NeededSrcDirs = lists:usort(lists:map(fun filename:dirname/1, NeededErlFiles)),
+ %% NOTE: order of files here is important!
+ ErlFirstFiles =
+ [filename:join(Dir, File) || File <- ErlFirstFilesConf,
+ lists:member(filename:join(Dir, File), NeededErlFiles)],
+ {ParseTransforms, ParseTransformsErls} =
+ lists:unzip(lists:flatmap(
+ fun(PT) ->
+ PTerls = [filename:join(D, module_to_erl(PT)) || D <- NeededSrcDirs],
+ [{PT, PTerl} || PTerl <- PTerls, lists:member(PTerl, NeededErlFiles)]
+ end, proplists:get_all_values(parse_transform, ErlOpts))),
+ ErlOptsFirst = lists:filter(fun({parse_transform, PT}) ->
+ not lists:member(PT, ParseTransforms);
+ (_) ->
+ true
+ end, ErlOpts),
+ {ErlFirstFiles ++ ParseTransformsErls, ErlOptsFirst}.
+
+split_source_files(SourceFiles, ErlOpts) ->
+ ParseTransforms = proplists:get_all_values(parse_transform, ErlOpts),
+ lists:partition(fun(Source) ->
+ lists:member(filename_to_atom(Source), ParseTransforms)
+ end, SourceFiles).
+
+filename_to_atom(F) -> list_to_atom(filename:rootname(filename:basename(F))).
+
+%% Get subset of SourceFiles which need to be recompiled, respecting
+%% dependencies induced by given graph G.
+needed_files(Graph, ErlOpts, RebarOpts, Dir, OutDir, SourceFiles) ->
+ lists:filter(fun(Source) ->
+ TargetBase = target_base(OutDir, Source),
+ Target = TargetBase ++ ".beam",
+ PrivIncludes = [{i, filename:join(Dir, Src)}
+ || Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])],
+ AllOpts = [{outdir, filename:dirname(Target)}
+ ,{i, filename:join(Dir, "include")}
+ ,{i, Dir}] ++ PrivIncludes ++ ErlOpts,
+ digraph:vertex(Graph, Source) > {Source, filelib:last_modified(Target)}
+ orelse opts_changed(AllOpts, TargetBase)
+ orelse erl_compiler_opts_set()
+ end, SourceFiles).
+
+target_base(OutDir, Source) ->
+ filename:join(OutDir, filename:basename(Source, ".erl")).
+
+opts_changed(NewOpts, Target) ->
+ TotalOpts = case erlang:function_exported(compile, env_compiler_options, 0) of
+ true -> NewOpts ++ compile:env_compiler_options();
+ false -> NewOpts
+ end,
+ case compile_info(Target) of
+ {ok, Opts} -> lists:any(fun effects_code_generation/1, lists:usort(TotalOpts) -- lists:usort(Opts));
+ _ -> true
+ end.
+
+effects_code_generation(Option) ->
+ case Option of
+ beam -> false;
+ report_warnings -> false;
+ report_errors -> false;
+ return_errors -> false;
+ return_warnings -> false;
+ report -> false;
+ warnings_as_errors -> false;
+ binary -> false;
+ verbose -> false;
+ {cwd,_} -> false;
+ {outdir, _} -> false;
+ _ -> true
+ end.
+
+compile_info(Target) ->
+ case beam_lib:chunks(Target, [compile_info]) of
+ {ok, {_mod, Chunks}} ->
+ CompileInfo = proplists:get_value(compile_info, Chunks, []),
+ {ok, proplists:get_value(options, CompileInfo, [])};
+ {error, beam_lib, Reason} ->
+ ?WARN("Couldn't read compile info from ~p for reason: ~p", [Target, Reason]),
+ {error, Reason}
+ end.
+
+erl_compiler_opts_set() ->
+ EnvSet = case os:getenv("ERL_COMPILER_OPTIONS") of
+ false -> false;
+ _ -> true
+ end,
+ %% return false if changed env opts would have been caught in opts_changed/2
+ EnvSet andalso not erlang:function_exported(compile, env_compiler_options, 0).
+
+valid_erl_first_conf(FileList) ->
+ Strs = filter_file_list(FileList),
+ case rebar_utils:is_list_of_strings(Strs) of
+ true -> true;
+ false -> ?ABORT("An invalid file list (~p) was provided as part of your erl_first_files directive",
+ [FileList])
+ end.
+
+filter_file_list(FileList) ->
+ Atoms = lists:filter( fun(X) -> is_atom(X) end, FileList),
+ case Atoms of
+ [] ->
+ FileList;
+ _ ->
+ atoms_in_erl_first_files_warning(Atoms),
+ lists:filter( fun(X) -> not(is_atom(X)) end, FileList)
+ end.
+
+atoms_in_erl_first_files_warning(Atoms) ->
+ W = "You have provided atoms as file entries in erl_first_files; "
+ "erl_first_files only expects lists of filenames as strings. "
+ "The following modules (~p) may not work as expected and it is advised "
+ "that you change these entries to string format "
+ "(e.g., \"src/module.erl\") ",
+ ?WARN(W, [Atoms]).
+
+module_to_erl(Mod) ->
+ atom_to_list(Mod) ++ ".erl".
+
+parse_attrs(Fd, Includes, Dir) ->
+ case io:parse_erl_form(Fd, "") of
+ {ok, Form, _Line} ->
+ case erl_syntax:type(Form) of
+ attribute ->
+ NewIncludes = process_attr(Form, Includes, Dir),
+ parse_attrs(Fd, NewIncludes, Dir);
+ _ ->
+ parse_attrs(Fd, Includes, Dir)
+ end;
+ {eof, _} ->
+ Includes;
+ _Err ->
+ parse_attrs(Fd, Includes, Dir)
+ end.
+
+process_attr(Form, Includes, Dir) ->
+ AttrName = erl_syntax:atom_value(erl_syntax:attribute_name(Form)),
+ process_attr(AttrName, Form, Includes, Dir).
+
+process_attr(import, Form, Includes, _Dir) ->
+ case erl_syntax_lib:analyze_import_attribute(Form) of
+ {Mod, _Funs} ->
+ [module_to_erl(Mod)|Includes];
+ Mod ->
+ [module_to_erl(Mod)|Includes]
+ end;
+process_attr(file, Form, Includes, _Dir) ->
+ {File, _} = erl_syntax_lib:analyze_file_attribute(Form),
+ [File|Includes];
+process_attr(include, Form, Includes, _Dir) ->
+ [FileNode] = erl_syntax:attribute_arguments(Form),
+ File = erl_syntax:string_value(FileNode),
+ [File|Includes];
+process_attr(include_lib, Form, Includes, Dir) ->
+ [FileNode] = erl_syntax:attribute_arguments(Form),
+ RawFile = erl_syntax:string_value(FileNode),
+ maybe_expand_include_lib_path(RawFile, Dir) ++ Includes;
+process_attr(behavior, Form, Includes, _Dir) ->
+ process_attr(behaviour, Form, Includes, _Dir);
+process_attr(behaviour, Form, Includes, _Dir) ->
+ [FileNode] = erl_syntax:attribute_arguments(Form),
+ File = module_to_erl(erl_syntax:atom_value(FileNode)),
+ [File|Includes];
+process_attr(compile, Form, Includes, _Dir) ->
+ [Arg] = erl_syntax:attribute_arguments(Form),
+ case erl_syntax:concrete(Arg) of
+ {parse_transform, Mod} ->
+ [module_to_erl(Mod)|Includes];
+ {core_transform, Mod} ->
+ [module_to_erl(Mod)|Includes];
+ L when is_list(L) ->
+ lists:foldl(
+ fun({parse_transform, Mod}, Acc) ->
+ [module_to_erl(Mod)|Acc];
+ ({core_transform, Mod}, Acc) ->
+ [module_to_erl(Mod)|Acc];
+ (_, Acc) ->
+ Acc
+ end, Includes, L);
+ _ ->
+ Includes
+ end;
+process_attr(_, _Form, Includes, _Dir) ->
+ Includes.
+
+%% NOTE: If, for example, one of the entries in Files refers to
+%% gen_server.erl, that entry will be dropped. It is dropped because
+%% such an entry usually refers to the beam file, and we don't pass a
+%% list of OTP src dirs for finding gen_server.erl's full path. Also,
+%% if gen_server.erl was modified, it's not rebar's task to compile a
+%% new version of the beam file. Therefore, it's reasonable to drop
+%% such entries. Also see process_attr(behaviour, Form, Includes).
+-spec expand_file_names([file:filename()],
+ [file:filename()]) -> [file:filename()].
+expand_file_names(Files, Dirs) ->
+ %% We check if Files exist by itself or within the directories
+ %% listed in Dirs.
+ %% Return the list of files matched.
+ lists:flatmap(
+ fun(Incl) ->
+ case filelib:is_regular(Incl) of
+ true ->
+ [Incl];
+ false ->
+ rebar_utils:find_files_in_dirs(Dirs, Incl, true)
+ end
+ end, Files).
+
+%% Given a path like "stdlib/include/erl_compile.hrl", return
+%% "OTP_INSTALL_DIR/lib/erlang/lib/stdlib-x.y.z/include/erl_compile.hrl".
+%% Usually a simple [Lib, SubDir, File1] = filename:split(File) should
+%% work, but to not crash when an unusual include_lib path is used,
+%% utilize more elaborate logic.
+maybe_expand_include_lib_path(File, Dir) ->
+ File1 = filename:basename(File),
+ case filename:split(filename:dirname(File)) of
+ [_] ->
+ warn_and_find_path(File, Dir);
+ [Lib | SubDir] ->
+ case code:lib_dir(list_to_atom(Lib), list_to_atom(filename:join(SubDir))) of
+ {error, bad_name} ->
+ warn_and_find_path(File, Dir);
+ AppDir ->
+ [filename:join(AppDir, File1)]
+ end
+ end.
+
+%% The use of -include_lib by the user was probably incorrect, but let's try to make it work.
+%% We search in the outdir and outdir/../include to see if the header exists.
+warn_and_find_path(File, Dir) ->
+ SrcHeader = filename:join(Dir, File),
+ case filelib:is_regular(SrcHeader) of
+ true ->
+ [SrcHeader];
+ false ->
+ IncludeDir = filename:join(rebar_utils:droplast(filename:split(Dir))++["include"]),
+ IncludeHeader = filename:join(IncludeDir, File),
+ case filelib:is_regular(IncludeHeader) of
+ true ->
+ [filename:join(IncludeDir, File)];
+ false ->
+ []
+ end
+ end.
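
As the warning above spells out, erl_first_files entries must be strings, and any parse_transform named in erl_opts is automatically pulled to the front of the compilation order. A hedged rebar.config sketch combining the two (my_transform and its path are made-up names):

    %% rebar.config (sketch; my_transform is a hypothetical module)
    {erl_opts, [{parse_transform, my_transform}]}.
    {erl_first_files, ["src/my_transform.erl"]}.
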
diff --git a/src/rebar_compiler_mib.erl b/src/rebar_compiler_mib.erl
new file mode 100644
index 0000000..499976d
--- /dev/null
+++ b/src/rebar_compiler_mib.erl
@@ -0,0 +1,101 @@
+-module(rebar_compiler_mib).
+
+-behaviour(rebar_compiler).
+
+-export([context/1,
+ needed_files/4,
+ dependencies/3,
+ compile/4,
+ clean/2]).
+
+-include("rebar.hrl").
+-include_lib("stdlib/include/erl_compile.hrl").
+
+context(AppInfo) ->
+ Dir = rebar_app_info:dir(AppInfo),
+ Mappings = [{".bin", filename:join([Dir, "priv", "mibs"])},
+ {".hrl", filename:join(Dir, "include")}],
+
+ #{src_dirs => ["mibs"],
+ include_dirs => [],
+ src_ext => ".mib",
+ out_mappings => Mappings}.
+
+needed_files(_, FoundFiles, _, AppInfo) ->
+ RebarOpts = rebar_app_info:opts(AppInfo),
+ MibFirstConf = rebar_opts:get(RebarOpts, mib_first_files, []),
+ valid_mib_first_conf(MibFirstConf),
+ Dir = rebar_app_info:dir(AppInfo),
+ MibFirstFiles = [filename:join(Dir, File) || File <- MibFirstConf],
+
+ %% Remove first files from found files
+ RestFiles = [Source || Source <- FoundFiles, not lists:member(Source, MibFirstFiles)],
+
+ Opts = rebar_opts:get(rebar_app_info:opts(AppInfo), mib_opts, []),
+ {{MibFirstFiles, Opts}, {RestFiles, Opts}}.
+
+valid_mib_first_conf(FileList) ->
+ Strs = filter_file_list(FileList),
+ case rebar_utils:is_list_of_strings(Strs) of
+ true -> true;
+ false -> ?ABORT("An invalid file list (~p) was provided as part of your mib_first_files directive",
+ [FileList])
+ end.
+
+filter_file_list(FileList) ->
+ Atoms = lists:filter( fun(X) -> is_atom(X) end, FileList),
+ case Atoms of
+ [] ->
+ FileList;
+ _ ->
+ atoms_in_mib_first_files_warning(Atoms),
+ lists:filter( fun(X) -> not(is_atom(X)) end, FileList)
+ end.
+
+atoms_in_mib_first_files_warning(Atoms) ->
+ W = "You have provided atoms as file entries in mib_first_files; "
+ "mib_first_files only expects lists of filenames as strings. "
+ "The following MIBs (~p) may not work as expected and it is advised "
+ "that you change these entries to string format "
+ "(e.g., \"mibs/SOME-MIB.mib\") ",
+ ?WARN(W, [Atoms]).
+
+
+dependencies(_, _, _) ->
+ [].
+
+compile(Source, OutDirs, _, Opts) ->
+ {_, BinOut} = lists:keyfind(".bin", 1, OutDirs),
+ {_, HrlOut} = lists:keyfind(".hrl", 1, OutDirs),
+
+ ok = rebar_file_utils:ensure_dir(BinOut),
+ ok = rebar_file_utils:ensure_dir(HrlOut),
+ Mib = filename:join(BinOut, filename:basename(Source, ".mib")),
+ HrlFilename = Mib ++ ".hrl",
+
+ AllOpts = [{outdir, BinOut}, {i, [BinOut]}] ++ Opts,
+
+ case snmpc:compile(Source, AllOpts) of
+ {ok, _} ->
+ MibToHrlOpts =
+ case proplists:get_value(verbosity, AllOpts, undefined) of
+ undefined ->
+ #options{specific = [],
+ cwd = rebar_dir:get_cwd()};
+ Verbosity ->
+ #options{specific = [{verbosity, Verbosity}],
+ cwd = rebar_dir:get_cwd()}
+ end,
+ ok = snmpc:mib_to_hrl(Mib, Mib, MibToHrlOpts),
+ rebar_file_utils:mv(HrlFilename, HrlOut),
+ ok;
+ {error, compilation_failed} ->
+ ?FAIL
+ end.
+
+clean(MibFiles, AppInfo) ->
+ AppDir = rebar_app_info:dir(AppInfo),
+ MIBs = [filename:rootname(filename:basename(MIB)) || MIB <- MibFiles],
+ rebar_file_utils:delete_each(
+ [filename:join([AppDir, "include", MIB++".hrl"]) || MIB <- MIBs]),
+ ok = rebar_file_utils:rm_rf(filename:join([AppDir, "priv/mibs/*.bin"])).
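
This compiler reads its sources from mibs/, writes the compiled .bin files to priv/mibs and moves the generated .hrl files into include/, honouring mib_first_files and mib_opts along the way. A hedged rebar.config sketch (the MIB name is invented):

    %% rebar.config (sketch; MY-BASE-MIB is a hypothetical MIB)
    {mib_first_files, ["mibs/MY-BASE-MIB.mib"]}.
    {mib_opts, [{verbosity, trace}]}.
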
diff --git a/src/rebar_compiler_xrl.erl b/src/rebar_compiler_xrl.erl
new file mode 100644
index 0000000..35447ed
--- /dev/null
+++ b/src/rebar_compiler_xrl.erl
@@ -0,0 +1,64 @@
+-module(rebar_compiler_xrl).
+
+-behaviour(rebar_compiler).
+
+-export([context/1,
+ needed_files/4,
+ dependencies/3,
+ compile/4,
+ clean/2]).
+
+-export([update_opts/2]).
+
+context(AppInfo) ->
+ Dir = rebar_app_info:dir(AppInfo),
+ Mappings = [{".erl", filename:join([Dir, "src"])}],
+ #{src_dirs => ["src"],
+ include_dirs => [],
+ src_ext => ".xrl",
+ out_mappings => Mappings}.
+
+needed_files(_, FoundFiles, Mappings, AppInfo) ->
+ FirstFiles = [],
+
+ %% Remove first files from found files
+ RestFiles = [Source || Source <- FoundFiles,
+ not lists:member(Source, FirstFiles),
+ rebar_compiler:needs_compile(Source, ".erl", Mappings)],
+
+ Opts = rebar_opts:get(rebar_app_info:opts(AppInfo), xrl_opts, []),
+ Opts1 = update_opts(Opts, AppInfo),
+
+ {{FirstFiles, Opts1}, {RestFiles, Opts1}}.
+
+dependencies(_, _, _) ->
+ [].
+
+compile(Source, [{_, _}], _, Opts) ->
+ case leex:file(Source, [{return, true} | Opts]) of
+ {ok, _} ->
+ ok;
+ {ok, _Mod, Ws} ->
+ rebar_compiler:ok_tuple(Source, Ws);
+ {error, Es, Ws} ->
+ rebar_compiler:error_tuple(Source, Es, Ws, Opts)
+ end.
+
+clean(XrlFiles, _AppInfo) ->
+ rebar_file_utils:delete_each(
+ [rebar_utils:to_list(re:replace(F, "\\.xrl$", ".erl", [unicode]))
+ || F <- XrlFiles]).
+
+%% make includefile options absolute paths
+update_opts(Opts, AppInfo) ->
+ OutDir = rebar_app_info:out_dir(AppInfo),
+ lists:map(fun({includefile, I}) ->
+ case filename:pathtype(I) =:= relative of
+ true ->
+ {includefile, filename:join(OutDir, I)};
+ false ->
+ {includefile, I}
+ end;
+ (O) ->
+ O
+ end, Opts).
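
update_opts/2 above rewrites a relative includefile into an absolute path under the app's output directory, so a custom leex prologue can be referenced relative to the project root. A hedged sketch (the prologue file name is invented):

    %% rebar.config (sketch; the prologue file name is hypothetical)
    {xrl_opts, [{includefile, "src/my_leex_prologue.hrl"}]}.
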
diff --git a/src/rebar_compiler_yrl.erl b/src/rebar_compiler_yrl.erl
new file mode 100644
index 0000000..8e52d0e
--- /dev/null
+++ b/src/rebar_compiler_yrl.erl
@@ -0,0 +1,51 @@
+-module(rebar_compiler_yrl).
+
+-behaviour(rebar_compiler).
+
+-export([context/1,
+ needed_files/4,
+ dependencies/3,
+ compile/4,
+ clean/2]).
+
+context(AppInfo) ->
+ Dir = rebar_app_info:dir(AppInfo),
+ Mappings = [{".erl", filename:join([Dir, "src"])}],
+ #{src_dirs => ["src"],
+ include_dirs => [],
+ src_ext => ".yrl",
+ out_mappings => Mappings}.
+
+needed_files(_, FoundFiles, Mappings, AppInfo) ->
+ FirstFiles = [],
+
+ %% Remove first files from found files
+ RestFiles = [Source || Source <- FoundFiles,
+ not lists:member(Source, FirstFiles),
+ rebar_compiler:needs_compile(Source, ".erl", Mappings)],
+
+ Opts = rebar_opts:get(rebar_app_info:opts(AppInfo), yrl_opts, []),
+ Opts1 = rebar_compiler_xrl:update_opts(Opts, AppInfo),
+
+ {{FirstFiles, Opts1}, {RestFiles, Opts1}}.
+
+dependencies(_, _, _) ->
+ [].
+
+compile(Source, [{_, OutDir}], _, Opts) ->
+ BaseName = filename:basename(Source, ".yrl"),
+ Target = filename:join([OutDir, BaseName]),
+ AllOpts = [{parserfile, Target}, {return, true} | Opts],
+ case yecc:file(Source, AllOpts) of
+ {ok, _} ->
+ ok;
+ {ok, _Mod, Ws} ->
+ rebar_compiler:ok_tuple(Source, Ws);
+ {error, Es, Ws} ->
+ rebar_compiler:error_tuple(Source, Es, Ws, AllOpts)
+ end.
+
+clean(YrlFiles, _AppInfo) ->
+ rebar_file_utils:delete_each(
+ [rebar_utils:to_list(re:replace(F, "\\.yrl$", ".erl", [unicode]))
+ || F <- YrlFiles]).
diff --git a/src/rebar_config.erl b/src/rebar_config.erl
index b50c030..2651ca1 100644
--- a/src/rebar_config.erl
+++ b/src/rebar_config.erl
@@ -26,7 +26,8 @@
%% -------------------------------------------------------------------
-module(rebar_config).
--export([consult/1
+-export([consult_root/0
+ ,consult/1
,consult_app_file/1
,consult_file/1
,consult_lock_file/1
@@ -39,17 +40,31 @@
-include("rebar.hrl").
-include_lib("providers/include/providers.hrl").
+-define(DEFAULT_CONFIG_FILE, "rebar.config").
+
%% ===================================================================
%% Public API
%% ===================================================================
+%% @doc reads the default config file at the top of a full project
+-spec consult_root() -> [any()].
+consult_root() ->
+ consult_file(config_file()).
+
+%% @doc reads the default config file in a given directory.
-spec consult(file:name()) -> [any()].
consult(Dir) ->
consult_file(filename:join(Dir, ?DEFAULT_CONFIG_FILE)).
+%% @doc reads a given app file, including the `.script' variations,
+%% if any can be found.
+-spec consult_app_file(file:filename()) -> [any()].
consult_app_file(File) ->
consult_file_(File).
+%% @doc reads the lock file for the project, and re-formats its
+%% content to match the internals for rebar3.
+-spec consult_lock_file(file:filename()) -> [any()]. % TODO: refine lock()
consult_lock_file(File) ->
Terms = consult_file_(File),
case Terms of
@@ -59,7 +74,7 @@ consult_lock_file(File) ->
read_attrs(beta, Locks, []);
[{Vsn, Locks}|Attrs] when is_list(Locks) -> % versioned lock file
%% Because this is the first version of rebar3 to introduce a lock
- %% file, all versionned lock files with a different versions have
+ %% file, all versioned lock files with a different version have
%% to be newer.
case Vsn of
?CONFIG_VERSION ->
@@ -73,6 +88,11 @@ consult_lock_file(File) ->
read_attrs(Vsn, Locks, Attrs)
end.
+%% @private outputs a warning, at most once, when the lockfile format is newer
+%% than the supported one.
+%% The warning can also be disabled by setting the `warn_config_vsn'
+%% OTP env variable to `false'.
+-spec warn_vsn_once() -> ok.
warn_vsn_once() ->
Warn = application:get_env(rebar, warn_config_vsn) =/= {ok, false},
application:set_env(rebar, warn_config_vsn, false),
@@ -86,6 +106,11 @@ warn_vsn_once() ->
end.
+%% @doc Converts the internal format for locks into the multi-version
+%% compatible one used within rebar3 lock files.
+%% @end
+%% TODO: refine type for lock()
+-spec write_lock_file(file:filename(), [any()]) -> ok | {error, term()}.
write_lock_file(LockFile, Locks) ->
{NewLocks, Attrs} = write_attrs(Locks),
%% Write locks in the beta format, at least until it's been long
@@ -95,34 +120,46 @@ write_lock_file(LockFile, Locks) ->
file:write_file(LockFile, io_lib:format("~p.~n", [NewLocks]));
_ ->
file:write_file(LockFile,
- io_lib:format("{~p,~n~p}.~n[~n~s~n].~n",
+ io_lib:format("{~p,~n~p}.~n[~n~ts~n].~n",
[?CONFIG_VERSION, NewLocks,
format_attrs(Attrs)]))
end.
-%% Attributes have a special formatting to ensure there's only one per
-%% line in terms of pkg_hash, so we disturb source diffing as little
-%% as possible.
+%% @private Because attributes for packages are fairly large, they need special
+%% formatting to ensure there's only one entry per lock file line and that
+%% diffs are generally stable.
+-spec format_attrs([term()]) -> iodata().
format_attrs([]) -> [];
format_attrs([{pkg_hash, Vals}|T]) ->
[io_lib:format("{pkg_hash,[~n",[]), format_hashes(Vals), "]}",
- maybe_comma(T) | format_attrs(T)];
-format_attrs([H|T]) ->
- [io_lib:format("~p~s", [H, maybe_comma(T)]) | format_attrs(T)].
+ maybe_comma(T) | format_attrs(T)].
+%% @private format hashes in order to disturb source diffing as little
+%% as possible
+-spec format_hashes([term()]) -> iodata().
format_hashes([]) -> [];
format_hashes([{Pkg,Hash}|T]) ->
[" {", io_lib:format("~p",[Pkg]), ", ", io_lib:format("~p", [Hash]), "}",
maybe_comma(T) | format_hashes(T)].
+%% @private add a comma if we're not done with the full list of terms
+%% to convert.
+-spec maybe_comma([term()]) -> iodata().
maybe_comma([]) -> "";
maybe_comma([_|_]) -> io_lib:format(",~n", []).
+%% @private extract attributes from the lock file and integrate them
+%% into the full-blown internal lock format
+%% @end
+%% TODO: refine typings for lock()
+-spec read_attrs(_, [any()], [any()]) -> [any()].
read_attrs(_Vsn, Locks, Attrs) ->
%% Beta copy does not know how to expand attributes, but
%% is ready to support it.
expand_locks(Locks, extract_pkg_hashes(Attrs)).
+%% @private extract the package hashes from lockfile attributes, if any.
+-spec extract_pkg_hashes(list()) -> [binary()].
extract_pkg_hashes(Attrs) ->
Props = case Attrs of
[First|_] -> First;
@@ -130,6 +167,11 @@ extract_pkg_hashes(Attrs) ->
end,
proplists:get_value(pkg_hash, Props, []).
+%% @private extract attributes from the lock file and integrate them
+%% into the full-blown internal lock format
+%% @end
+%% TODO: refine typings for lock()
+-spec expand_locks(list(), list()) -> list().
expand_locks([], _Hashes) ->
[];
expand_locks([{Name, {pkg,PkgName,Vsn}, Lvl} | Locks], Hashes) ->
@@ -138,6 +180,9 @@ expand_locks([{Name, {pkg,PkgName,Vsn}, Lvl} | Locks], Hashes) ->
expand_locks([Lock|Locks], Hashes) ->
[Lock | expand_locks(Locks, Hashes)].
+%% @private split up extra attributes for locks out of the internal lock
+%% structure for backwards compatibility reasons
+-spec write_attrs(list()) -> {list(), list()}.
write_attrs(Locks) ->
%% No attribute known that needs to be taken out of the structure,
%% just return terms as is.
@@ -147,6 +192,9 @@ write_attrs(Locks) ->
_ -> {NewLocks, [{pkg_hash, lists:sort(Hashes)}]}
end.
+%% @private split up extra attributes for locks out of the internal lock
+%% structure for backwards compatibility reasons
+-spec split_locks(list(), list(), [{_,binary()}]) -> {list(), list()}.
split_locks([], Locks, Hashes) ->
{lists:reverse(Locks), Hashes};
split_locks([{Name, {pkg,PkgName,Vsn,undefined}, Lvl} | Locks], LAcc, HAcc) ->
@@ -156,11 +204,17 @@ split_locks([{Name, {pkg,PkgName,Vsn,Hash}, Lvl} | Locks], LAcc, HAcc) ->
split_locks([Lock|Locks], LAcc, HAcc) ->
split_locks(Locks, [Lock|LAcc], HAcc).
+%% @doc reads a given config file, including the `.script' variations,
+%% if any can be found, and asserts that the config format is in
+%% a key-value format.
+-spec consult_file(file:filename()) -> [{_,_}].
consult_file(File) ->
Terms = consult_file_(File),
true = verify_config_format(Terms),
Terms.
+%% @private reads a given file; if the file has a `.script'-postfixed
+%% counterpart, it is evaluated along with the original file.
-spec consult_file_(file:name()) -> [any()].
consult_file_(File) when is_binary(File) ->
consult_file_(binary_to_list(File));
@@ -180,6 +234,9 @@ consult_file_(File) ->
end
end.
+%% @private checks that a list is in a key-value format.
+%% Raises an exception in any other case.
+-spec verify_config_format([{_,_}]) -> true.
verify_config_format([]) ->
true;
verify_config_format([{_Key, _Value} | T]) ->
@@ -187,11 +244,14 @@ verify_config_format([{_Key, _Value} | T]) ->
verify_config_format([Term | _]) ->
throw(?PRV_ERROR({bad_config_format, Term})).
-%% no lockfile
+%% @doc takes an existing configuration and the content of a lockfile
+%% and merges the locks into the config.
+-spec merge_locks([{_,_}], list()) -> [{_,_}].
merge_locks(Config, []) ->
+%% no lockfile
Config;
-%% lockfile with entries
merge_locks(Config, Locks) ->
+ %% lockfile with entries
ConfigDeps = proplists:get_value(deps, Config, []),
%% We want the top level deps only from the lock file.
%% This ensures deterministic overrides for configs.
@@ -201,6 +261,8 @@ merge_locks(Config, Locks) ->
NewDeps = find_newly_added(ConfigDeps, Locks),
[{{locks, default}, Locks}, {{deps, default}, NewDeps++Deps} | Config].
+%% @doc convert a given exception's payload into an io description.
+-spec format_error(any()) -> iolist().
format_error({bad_config_format, Term}) ->
io_lib:format("Unable to parse config. Term is not in {Key, Value} format:~n~p", [Term]);
format_error({bad_dep_name, Dep}) ->
@@ -210,6 +272,8 @@ format_error({bad_dep_name, Dep}) ->
%% Internal functions
%% ===================================================================
+%% @private consults a config file, then executes its related script file
+%% with the data returned from the consult.
-spec consult_and_eval(File::file:name_all(), Script::file:name_all()) ->
{ok, Terms::[term()]} |
{error, Reason::term()}.
@@ -226,21 +290,31 @@ consult_and_eval(File, Script) ->
{ok, Term} ->
{ok, [Term]};
Error ->
+ ?ERROR("Error evaluating configuration script at ~p:~n~p~n",
+ [Script, Error]),
Error
end.
+%% @private drops the .script extension from a filename.
+-spec remove_script_ext(file:filename()) -> file:filename().
remove_script_ext(F) ->
filename:rootname(F, ".script").
+%% @private sets up bindings for evaluations from a KV list.
+-spec bs([{_,_}]) -> erl_eval:binding_struct().
bs(Vars) ->
lists:foldl(fun({K,V}, Bs) ->
erl_eval:add_binding(K, V, Bs)
end, erl_eval:new_bindings(), Vars).
-%% Find deps that have been added to the config after the lock was created
+%% @private Find deps that have been added to the config after the lock was created
+-spec find_newly_added(list(), list()) -> list().
find_newly_added(ConfigDeps, LockedDeps) ->
[D || {true, D} <- [check_newly_added(Dep, LockedDeps) || Dep <- ConfigDeps]].
+%% @private checks if a given dependency is not within the lock file.
+%% TODO: refine types for dependencies
+-spec check_newly_added(term(), list()) -> false | {true, term()}.
check_newly_added({_, _}=Dep, LockedDeps) ->
check_newly_added_(Dep, LockedDeps);
check_newly_added({_, _, {pkg, _}}=Dep, LockedDeps) ->
@@ -250,6 +324,10 @@ check_newly_added({Name, _, Source}, LockedDeps) ->
check_newly_added(Dep, LockedDeps) ->
check_newly_added_(Dep, LockedDeps).
+%% @private checks if a given dependency is not within the lock file.
+%% TODO: refine types for dependencies
+%% @end
+-spec check_newly_added_(term(), list()) -> false | {true, term()}.
%% get [raw] deps out of the way
check_newly_added_({Name, Source, Opts}, LockedDeps) when is_tuple(Source),
is_list(Opts) ->
@@ -283,7 +361,7 @@ check_newly_added_({Name, Source}, LockedDeps) ->
false
end;
check_newly_added_(Dep, LockedDeps) when is_atom(Dep) ->
- Name = ec_cnv:to_binary(Dep),
+ Name = rebar_utils:to_binary(Dep),
case lists:keyfind(Name, 1, LockedDeps) of
false ->
{true, Name};
@@ -292,11 +370,22 @@ check_newly_added_(Dep, LockedDeps) when is_atom(Dep) ->
0 ->
{true, Name};
_ ->
- ?WARN("Newly added dep ~s is locked at a lower level. "
- "If you really want to unlock it, use 'rebar3 upgrade ~s'",
+ ?WARN("Newly added dep ~ts is locked at a lower level. "
+ "If you really want to unlock it, use 'rebar3 upgrade ~ts'",
[Name, Name]),
false
end
end;
check_newly_added_(Dep, _) ->
throw(?PRV_ERROR({bad_dep_name, Dep})).
+
+%% @private returns the name/path of the default config file, or its
+%% override from the OS ENV var `REBAR_CONFIG'.
+-spec config_file() -> file:filename().
+config_file() ->
+ case os:getenv("REBAR_CONFIG") of
+ false ->
+ ?DEFAULT_CONFIG_FILE;
+ ConfigFile ->
+ ConfigFile
+ end.
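
For what the on-disk result looks like: write_lock_file/2 writes plain beta-format locks when there are no extra attributes, and otherwise a versioned tuple followed by an attribute list with one pkg_hash entry per line, so that lock-file diffs stay small. A hedged sketch of the versioned rebar.lock layout (version string, package name and hash are purely illustrative):

    {"1.1.0",
     [{<<"my_dep">>,{pkg,<<"my_dep">>,<<"1.0.0">>},0}]}.
    [
     {pkg_hash,[
      {<<"my_dep">>, <<"ABCD1234...">>}]}
    ].
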
diff --git a/src/rebar_core.erl b/src/rebar_core.erl
index da8c3e6..6a1cdbf 100644
--- a/src/rebar_core.erl
+++ b/src/rebar_core.erl
@@ -24,6 +24,8 @@
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
%% -------------------------------------------------------------------
+%% @doc Module providing core functionality for command dispatch, namespacing,
+%% and chaining in rebar3.
-module(rebar_core).
-export([init_command/2, process_namespace/2, process_command/2, do/2, format_error/1]).
@@ -31,6 +33,12 @@
-include("rebar.hrl").
-include_lib("providers/include/providers.hrl").
+%% @doc initial command setup; based on the first fragment of the
+%% command, dispatch to special environments. The keywords for
+%% `do' and `as' are implicitly reserved here, barring them from
+%% being used as other commands or namespaces.
+-spec init_command(rebar_state:t(), atom()) ->
+ {ok, rebar_state:t()} | {error, term()}.
init_command(State, do) ->
process_command(rebar_state:namespace(State, default), do);
init_command(State, as) ->
@@ -43,6 +51,14 @@ init_command(State, Command) ->
{error, Reason}
end.
+%% @doc parse the commands starting at the namespace level;
+%% a namespace is found when the first keyword does not belong to an
+%% existing provider but does match a registered namespace.
+%% The command to run is returned last; for namespaces, some
+%% magic is done, implicitly calling `do' as an indirect dispatcher.
+-spec process_namespace(rebar_state:t(), atom()) ->
+ {error, term()} | {ok, rebar_state:t(), atom()}.
process_namespace(_State, as) ->
{error, "Namespace 'as' is forbidden"};
process_namespace(State, Command) ->
@@ -61,7 +77,15 @@ process_namespace(State, Command) ->
{ok, rebar_state:namespace(State, default), Command}
end.
--spec process_command(rebar_state:t(), atom()) -> {ok, rebar_state:t()} | {error, string()} | {error, {module(), any()}}.
+%% @doc Dispatches a given command based on the current state.
+%% This requires mapping a command name to a specific provider.
+%% `as' and `do' are still treated as special providers here.
+%% Basic profile application may also be run.
+%%
+%% The function also takes care of expanding a provider to its
+%% dependencies in the proper order.
+-spec process_command(rebar_state:t(), atom()) ->
+ {ok, rebar_state:t()} | {error, string()} | {error, {module(), any()}}.
process_command(State, Command) ->
%% ? rebar_prv_install_deps:setup_env(State),
Providers = rebar_state:providers(State),
@@ -95,19 +119,24 @@ process_command(State, Command) ->
State2 = rebar_state:command_parsed_args(State1, Args),
do(TargetProviders, State2);
{error, {invalid_option, Option}} ->
- {error, io_lib:format("Invalid option ~s on task ~p", [Option, Command])};
+ {error, io_lib:format("Invalid option ~ts on task ~p", [Option, Command])};
{error, {invalid_option_arg, {Option, Arg}}} ->
- {error, io_lib:format("Invalid argument ~s to option ~s", [Arg, Option])};
+ {error, io_lib:format("Invalid argument ~ts to option ~ts", [Arg, Option])};
{error, {missing_option_arg, Option}} ->
- {error, io_lib:format("Missing argument to option ~s", [Option])}
+ {error, io_lib:format("Missing argument to option ~ts", [Option])}
end
end
end.
--spec do([{atom(), atom()}], rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()} | {error, {module(), any()}}.
+%% @doc execute the selected providers. If a chain of providers
+%% has been returned, run them one after the other, while piping
+%% the state from the first into the next one.
+-spec do([{atom(), atom()}], rebar_state:t()) ->
+ {ok, rebar_state:t()} | {error, string()} | {error, {module(), any()}}.
do([], State) ->
{ok, State};
do([ProviderName | Rest], State) ->
+ ?DEBUG("Provider: ~p", [ProviderName]),
%% Special providers like 'as', 'do' or some hooks may be passed
%% as a tuple {Namespace, Name}, otherwise not. Handle them
%% on a per-need basis.
@@ -128,8 +157,7 @@ do([ProviderName | Rest], State) ->
{error, Error} ->
{error, Error}
catch
- error:undef ->
- Stack = erlang:get_stacktrace(),
+ ?WITH_STACKTRACE(error,undef,Stack)
case Stack of
[{ProviderName, do, [_], _}|_] ->
%% This should really only happen if a plugin provider doesn't export do/1
@@ -142,7 +170,9 @@ do([ProviderName | Rest], State) ->
{error, ProviderName}
end.
+%% @doc convert a given exception's payload into an io description.
+-spec format_error(any()) -> iolist().
format_error({bad_provider_namespace, {Namespace, Name}}) ->
- io_lib:format("Undefined command ~s in namespace ~s", [Name, Namespace]);
+ io_lib:format("Undefined command ~ts in namespace ~ts", [Name, Namespace]);
format_error({bad_provider_namespace, Name}) ->
- io_lib:format("Undefined command ~s", [Name]).
+ io_lib:format("Undefined command ~ts", [Name]).
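
The namespace resolution documented above is what lets plugins park commands under their own prefix: a provider registers itself with a namespace option and process_namespace/2 then routes `rebar3 <namespace> <command>' through the implicit do provider. A hedged sketch of such a registration (the module, namespace, command name and descriptive strings are made up):

    %% sketch only: mystuff/list are hypothetical namespace and command names
    -module(rebar_prv_mystuff_list).
    -export([init/1, do/1, format_error/1]).

    init(State) ->
        Provider = providers:create([{name, list},
                                     {namespace, mystuff},
                                     {module, ?MODULE},
                                     {bare, true},
                                     {deps, [app_discovery]},
                                     {example, "rebar3 mystuff list"},
                                     {short_desc, "List things."},
                                     {desc, "List things."},
                                     {opts, []}]),
        {ok, rebar_state:add_provider(State, Provider)}.

    do(State) ->
        {ok, State}.

    format_error(Reason) ->
        io_lib:format("~p", [Reason]).
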
diff --git a/src/rebar_dialyzer_format.erl b/src/rebar_dialyzer_format.erl
index b30c4dc..cb0e958 100644
--- a/src/rebar_dialyzer_format.erl
+++ b/src/rebar_dialyzer_format.erl
@@ -16,21 +16,22 @@
-include("rebar.hrl").
--export([format_warnings/1]).
+-export([format_warnings/2]).
%% Formats a list of warnings in a nice per file way. Note that we reverse
%% the list at the end to 'undo' the reversal by foldl
-format_warnings(Warnings) ->
- {_, Res} = lists:foldl(fun format_warning_/2, {undefined, []}, Warnings),
+format_warnings(Opts, Warnings) ->
+ Fold = fun(Warning, Acc) -> format_warning_(Opts, Warning, Acc) end,
+ {_, Res} = lists:foldl(Fold, {undefined, []}, Warnings),
lists:reverse(Res).
%% If the last seen file and the file of this warning are the same
%% we skip the file header
-format_warning_(Warning = {_Tag, {File, Line}, Msg}, {File, Acc}) ->
+format_warning_(_Opts, Warning = {_Tag, {File, Line}, Msg}, {File, Acc}) ->
try
String = message_to_string(Msg),
- {File, [lists:flatten(fmt("~!c~4w~!!: ~s", [Line, String])) | Acc]}
+ {File, [lists:flatten(fmt("~!c~4w~!!: ~ts", [Line, String])) | Acc]}
catch
Error:Reason ->
?DEBUG("Failed to pretty format warning: ~p:~p",
@@ -39,22 +40,23 @@ format_warning_(Warning = {_Tag, {File, Line}, Msg}, {File, Acc}) ->
end;
%% With a new file detected we also write a file header.
-format_warning_(Warning = {_Tag, {File, Line}, Msg}, {_LastFile, Acc}) ->
+format_warning_(Opts, Warning = {_Tag, {SrcFile, Line}, Msg}, {_LastFile, Acc}) ->
try
+ File = rebar_dir:format_source_file_name(SrcFile, Opts),
Base = filename:basename(File),
Dir = filename:dirname(File),
Root = filename:rootname(Base),
Ext = filename:extension(Base),
- Path = re:replace(Dir, "^.*/_build/", "_build/", [{return, list}]),
- Base1 = fmt("~!_c~s~!!~!__~s", [Root, Ext]),
- F = fmt("~!__~s", [filename:join(Path, Base1)]),
+ Path = re:replace(Dir, "^.*/_build/", "_build/", [{return, list}, unicode]),
+ Base1 = fmt("~!_c~ts~!!~!__~ts", [Root, Ext]),
+ F = fmt("~!__~ts", [filename:join(Path, Base1)]),
String = message_to_string(Msg),
- {File, [lists:flatten(fmt("~n~s~n~!c~4w~!!: ~s", [F, Line, String])) | Acc]}
+ {SrcFile, [lists:flatten(fmt("~n~ts~n~!c~4w~!!: ~ts", [F, Line, String])) | Acc]}
catch
- Error:Reason ->
+ ?WITH_STACKTRACE(Error, Reason, Stacktrace)
?DEBUG("Failed to pretty format warning: ~p:~p~n~p",
- [Error, Reason, erlang:get_stacktrace()]),
- {File, [dialyzer:format_warning(Warning, fullpath) | Acc]}
+ [Error, Reason, Stacktrace]),
+ {SrcFile, [dialyzer:format_warning(Warning, fullpath) | Acc]}
end.
fmt(Fmt) ->
@@ -70,53 +72,53 @@ fmt(Fmt, Args) ->
%%----- Warnings for general discrepancies ----------------
message_to_string({apply, [Args, ArgNs, FailReason,
SigArgs, SigRet, Contract]}) ->
- fmt("~!^Fun application with arguments ~!!~s ",
+ fmt("~!^Fun application with arguments ~!!~ts ",
[bad_arg(ArgNs, Args)]) ++
call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet, Contract);
message_to_string({app_call, [M, F, Args, Culprit, ExpectedType, FoundType]}) ->
- fmt("~!^The call~!! ~s:~s~s ~!^requires that"
- "~!! ~s ~!^is of type ~!g~s~!^ not ~!r~s",
+ fmt("~!^The call~!! ~ts:~ts~ts ~!^requires that"
+ "~!! ~ts ~!^is of type ~!g~ts~!^ not ~!r~ts",
[M, F, Args, Culprit, ExpectedType, FoundType]);
message_to_string({bin_construction, [Culprit, Size, Seg, Type]}) ->
- fmt("~!^Binary construction will fail since the ~!b~s~!^ field~!!"
- " ~s~!^ in segment~!! ~s~!^ has type~!! ~s",
+ fmt("~!^Binary construction will fail since the ~!b~ts~!^ field~!!"
+ " ~ts~!^ in segment~!! ~ts~!^ has type~!! ~ts",
[Culprit, Size, Seg, Type]);
message_to_string({call, [M, F, Args, ArgNs, FailReason,
SigArgs, SigRet, Contract]}) ->
- fmt("~!^The call~!! ~w:~w~s ", [M, F, bad_arg(ArgNs, Args)]) ++
+ fmt("~!^The call~!! ~w:~w~ts ", [M, F, bad_arg(ArgNs, Args)]) ++
call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet, Contract);
message_to_string({call_to_missing, [M, F, A]}) ->
fmt("~!^Call to missing or unexported function ~!!~w:~w/~w",
[M, F, A]);
message_to_string({exact_eq, [Type1, Op, Type2]}) ->
- fmt("~!^The test ~!!~s ~s ~s~!^ can never evaluate to 'true'",
+ fmt("~!^The test ~!!~ts ~ts ~ts~!^ can never evaluate to 'true'",
[Type1, Op, Type2]);
message_to_string({fun_app_args, [Args, Type]}) ->
- fmt("~!^Fun application with arguments ~!!~s~!^ will fail"
- " since the function has type ~!!~s", [Args, Type]);
+ fmt("~!^Fun application with arguments ~!!~ts~!^ will fail"
+ " since the function has type ~!!~ts", [Args, Type]);
message_to_string({fun_app_no_fun, [Op, Type, Arity]}) ->
- fmt("~!^Fun application will fail since ~!!~s ~!^::~!! ~s"
+ fmt("~!^Fun application will fail since ~!!~ts ~!^::~!! ~ts"
" is not a function of arity ~!!~w", [Op, Type, Arity]);
message_to_string({guard_fail, []}) ->
"~!^Clause guard cannot succeed.~!!";
message_to_string({guard_fail, [Arg1, Infix, Arg2]}) ->
- fmt("~!^Guard test ~!!~s ~s ~s~!^ can never succeed",
+ fmt("~!^Guard test ~!!~ts ~ts ~ts~!^ can never succeed",
[Arg1, Infix, Arg2]);
message_to_string({neg_guard_fail, [Arg1, Infix, Arg2]}) ->
- fmt("~!^Guard test not(~!!~s ~s ~s~!^) can never succeed",
+ fmt("~!^Guard test not(~!!~ts ~ts ~ts~!^) can never succeed",
[Arg1, Infix, Arg2]);
message_to_string({guard_fail, [Guard, Args]}) ->
- fmt("~!^Guard test ~!!~w~s~!^ can never succeed",
+ fmt("~!^Guard test ~!!~w~ts~!^ can never succeed",
[Guard, Args]);
message_to_string({neg_guard_fail, [Guard, Args]}) ->
- fmt("~!^Guard test not(~!!~w~s~!^) can never succeed",
+ fmt("~!^Guard test not(~!!~w~ts~!^) can never succeed",
[Guard, Args]);
message_to_string({guard_fail_pat, [Pat, Type]}) ->
- fmt("~!^Clause guard cannot succeed. The ~!!~s~!^ was matched"
- " against the type ~!!~s", [Pat, Type]);
+ fmt("~!^Clause guard cannot succeed. The ~!!~ts~!^ was matched"
+ " against the type ~!!~ts", [Pat, Type]);
message_to_string({improper_list_constr, [TlType]}) ->
fmt("~!^Cons will produce an improper list"
- " since its ~!b2~!!nd~!^ argument is~!! ~s", [TlType]);
+ " since its ~!b2~!!nd~!^ argument is~!! ~ts", [TlType]);
message_to_string({no_return, [Type|Name]}) ->
NameString =
case Name of
@@ -124,59 +126,59 @@ message_to_string({no_return, [Type|Name]}) ->
[F, A] -> fmt("~!^Function ~!r~w/~w ", [F, A])
end,
case Type of
- no_match -> fmt("~s~!^has no clauses that will ever match",[NameString]);
- only_explicit -> fmt("~s~!^only terminates with explicit exception", [NameString]);
- only_normal -> fmt("~s~!^has no local return", [NameString]);
- both -> fmt("~s~!^has no local return", [NameString])
+ no_match -> fmt("~ts~!^has no clauses that will ever match",[NameString]);
+ only_explicit -> fmt("~ts~!^only terminates with explicit exception", [NameString]);
+ only_normal -> fmt("~ts~!^has no local return", [NameString]);
+ both -> fmt("~ts~!^has no local return", [NameString])
end;
message_to_string({record_constr, [RecConstr, FieldDiffs]}) ->
- fmt("~!^Record construction ~!!~s~!^ violates the"
- " declared type of field ~!!~s", [RecConstr, FieldDiffs]);
+ fmt("~!^Record construction ~!!~ts~!^ violates the"
+ " declared type of field ~!!~ts", [RecConstr, FieldDiffs]);
message_to_string({record_constr, [Name, Field, Type]}) ->
fmt("~!^Record construction violates the declared type for ~!!#~w{}~!^"
- " since ~!!~s~!^ cannot be of type ~!!~s",
+ " since ~!!~ts~!^ cannot be of type ~!!~ts",
[Name, Field, Type]);
message_to_string({record_matching, [String, Name]}) ->
- fmt("~!^The ~!!~s~!^ violates the"
+ fmt("~!^The ~!!~ts~!^ violates the"
" declared type for ~!!#~w{}", [String, Name]);
message_to_string({record_match, [Pat, Type]}) ->
- fmt("~!^Matching of ~!!~s~!^ tagged with a record name violates the"
- " declared type of ~!!~s", [Pat, Type]);
+ fmt("~!^Matching of ~!!~ts~!^ tagged with a record name violates the"
+ " declared type of ~!!~ts", [Pat, Type]);
message_to_string({pattern_match, [Pat, Type]}) ->
- fmt("~!^The ~s~!^ can never match the type ~!g~s",
+ fmt("~!^The ~ts~!^ can never match the type ~!g~ts",
[bad_pat(Pat), Type]);
message_to_string({pattern_match_cov, [Pat, Type]}) ->
- fmt("~!^The ~s~!^ can never match since previous"
- " clauses completely covered the type ~!g~s",
+ fmt("~!^The ~ts~!^ can never match since previous"
+ " clauses completely covered the type ~!g~ts",
[bad_pat(Pat), Type]);
message_to_string({unmatched_return, [Type]}) ->
- fmt("~!^Expression produces a value of type ~!!~s~!^,"
+ fmt("~!^Expression produces a value of type ~!!~ts~!^,"
" but this value is unmatched", [Type]);
message_to_string({unused_fun, [F, A]}) ->
fmt("~!^Function ~!r~w/~w~!!~!^ will never be called", [F, A]);
%%----- Warnings for specs and contracts -------------------
message_to_string({contract_diff, [M, F, _A, Contract, Sig]}) ->
- fmt("~!^Type specification ~!!~w:~w~s~!^"
- " is not equal to the success typing: ~!!~w:~w~s",
+ fmt("~!^Type specification ~!!~w:~w~ts~!^"
+ " is not equal to the success typing: ~!!~w:~w~ts",
[M, F, Contract, M, F, Sig]);
message_to_string({contract_subtype, [M, F, _A, Contract, Sig]}) ->
- fmt("~!^Type specification ~!!~w:~w~s~!^"
- " is a subtype of the success typing: ~!!~w:~w~s",
+ fmt("~!^Type specification ~!!~w:~w~ts~!^"
+ " is a subtype of the success typing: ~!!~w:~w~ts",
[M, F, Contract, M, F, Sig]);
message_to_string({contract_supertype, [M, F, _A, Contract, Sig]}) ->
- fmt("~!^Type specification ~!!~w:~w~s~!^"
- " is a supertype of the success typing: ~!!~w:~w~s",
+ fmt("~!^Type specification ~!!~w:~w~ts~!^"
+ " is a supertype of the success typing: ~!!~w:~w~ts",
[M, F, Contract, M, F, Sig]);
message_to_string({contract_range, [Contract, M, F, ArgStrings, Line, CRet]}) ->
- fmt("~!^The contract ~!!~w:~w~s~!^ cannot be right because the"
- " inferred return for ~!!~w~s~!^ on line ~!!~w~!^ is ~!!~s",
+ fmt("~!^The contract ~!!~w:~w~ts~!^ cannot be right because the"
+ " inferred return for ~!!~w~ts~!^ on line ~!!~w~!^ is ~!!~ts",
[M, F, Contract, F, ArgStrings, Line, CRet]);
message_to_string({invalid_contract, [M, F, A, Sig]}) ->
fmt("~!^Invalid type specification for function~!! ~w:~w/~w."
- "~!^ The success typing is~!! ~s", [M, F, A, Sig]);
+ "~!^ The success typing is~!! ~ts", [M, F, A, Sig]);
message_to_string({extra_range, [M, F, A, ExtraRanges, SigRange]}) ->
fmt("~!^The specification for ~!!~w:~w/~w~!^ states that the function"
- " might also return ~!!~s~!^ but the inferred return is ~!!~s",
+ " might also return ~!!~ts~!^ but the inferred return is ~!!~ts",
[M, F, A, ExtraRanges, SigRange]);
message_to_string({overlapping_contract, [M, F, A]}) ->
fmt("~!^Overloaded contract for ~!!~w:~w/~w~!^ has overlapping"
@@ -187,62 +189,62 @@ message_to_string({spec_missing_fun, [M, F, A]}) ->
[M, F, A]);
%%----- Warnings for opaque type violations -------------------
message_to_string({call_with_opaque, [M, F, Args, ArgNs, ExpArgs]}) ->
- fmt("~!^The call ~!!~w:~w~s~!^ contains ~!!~s~!^ when ~!!~s",
+ fmt("~!^The call ~!!~w:~w~ts~!^ contains ~!!~ts~!^ when ~!!~ts",
[M, F, bad_arg(ArgNs, Args), form_positions(ArgNs), form_expected(ExpArgs)]);
message_to_string({call_without_opaque, [M, F, Args, [{N,_,_}|_] = ExpectedTriples]}) ->
- fmt("~!^The call ~!!~w:~w~s ~!^does not have~!! ~s",
+ fmt("~!^The call ~!!~w:~w~ts ~!^does not have~!! ~ts",
[M, F, bad_arg(N, Args), form_expected_without_opaque(ExpectedTriples)]);
message_to_string({opaque_eq, [Type, _Op, OpaqueType]}) ->
- fmt("~!^Attempt to test for equality between a term of type ~!!~s~!^"
- " and a term of opaque type ~!!~s", [Type, OpaqueType]);
+ fmt("~!^Attempt to test for equality between a term of type ~!!~ts~!^"
+ " and a term of opaque type ~!!~ts", [Type, OpaqueType]);
message_to_string({opaque_guard, [Arg1, Infix, Arg2, ArgNs]}) ->
- fmt("~!^Guard test ~!!~s ~s ~s~!^ contains ~!!~s",
+ fmt("~!^Guard test ~!!~ts ~ts ~ts~!^ contains ~!!~ts",
[Arg1, Infix, Arg2, form_positions(ArgNs)]);
message_to_string({opaque_guard, [Guard, Args]}) ->
- fmt("~!^Guard test ~!!~w~s~!^ breaks the opaqueness of its"
+ fmt("~!^Guard test ~!!~w~ts~!^ breaks the opaqueness of its"
" argument", [Guard, Args]);
message_to_string({opaque_match, [Pat, OpaqueType, OpaqueTerm]}) ->
Term = if OpaqueType =:= OpaqueTerm -> "the term";
true -> OpaqueTerm
end,
- fmt("~!^The attempt to match a term of type ~!!~s~!^ against the"
- "~!! ~s~!^ breaks the opaqueness of ~!!~s",
+ fmt("~!^The attempt to match a term of type ~!!~ts~!^ against the"
+ "~!! ~ts~!^ breaks the opaqueness of ~!!~ts",
[OpaqueType, Pat, Term]);
message_to_string({opaque_neq, [Type, _Op, OpaqueType]}) ->
- fmt("~!^Attempt to test for inequality between a term of type ~!!~s"
- "~!^ and a term of opaque type ~!!~s", [Type, OpaqueType]);
+ fmt("~!^Attempt to test for inequality between a term of type ~!!~ts"
+ "~!^ and a term of opaque type ~!!~ts", [Type, OpaqueType]);
message_to_string({opaque_type_test, [Fun, Args, Arg, ArgType]}) ->
- fmt("~!^The type test ~!!~s~s~!^ breaks the opaqueness of the term "
- "~!!~s~s", [Fun, Args, Arg, ArgType]);
+ fmt("~!^The type test ~!!~ts~ts~!^ breaks the opaqueness of the term "
+ "~!!~ts~ts", [Fun, Args, Arg, ArgType]);
message_to_string({opaque_size, [SizeType, Size]}) ->
- fmt("~!^The size ~!!~s~!^ breaks the opaqueness of ~!!~s",
+ fmt("~!^The size ~!!~ts~!^ breaks the opaqueness of ~!!~ts",
[SizeType, Size]);
message_to_string({opaque_call, [M, F, Args, Culprit, OpaqueType]}) ->
- fmt("~!^The call ~!!~s:~s~s~!^ breaks the opaqueness of the term~!!"
- " ~s :: ~s", [M, F, Args, Culprit, OpaqueType]);
+ fmt("~!^The call ~!!~ts:~ts~ts~!^ breaks the opaqueness of the term~!!"
+ " ~ts :: ~ts", [M, F, Args, Culprit, OpaqueType]);
%%----- Warnings for concurrency errors --------------------
message_to_string({race_condition, [M, F, Args, Reason]}) ->
- fmt("~!^The call ~!!~w:~w~s ~s", [M, F, Args, Reason]);
+ fmt("~!^The call ~!!~w:~w~ts ~ts", [M, F, Args, Reason]);
%%----- Warnings for behaviour errors --------------------
message_to_string({callback_type_mismatch, [B, F, A, ST, CT]}) ->
- fmt("~!^The inferred return type of~!! ~w/~w (~s) ~!^"
- "has nothing in common with~!! ~s, ~!^which is the expected"
+ fmt("~!^The inferred return type of~!! ~w/~w (~ts) ~!^"
+ "has nothing in common with~!! ~ts, ~!^which is the expected"
" return type for the callback of~!! ~w ~!^behaviour",
[F, A, ST, CT, B]);
message_to_string({callback_arg_type_mismatch, [B, F, A, N, ST, CT]}) ->
- fmt("~!^The inferred type for the~!! ~s ~!^argument of~!!"
- " ~w/~w (~s) ~!^is not a supertype of~!! ~s~!^, which is"
+ fmt("~!^The inferred type for the~!! ~ts ~!^argument of~!!"
+ " ~w/~w (~ts) ~!^is not a supertype of~!! ~ts~!^, which is"
"expected type for this argument in the callback of the~!! ~w "
"~!^behaviour",
[ordinal(N), F, A, ST, CT, B]);
message_to_string({callback_spec_type_mismatch, [B, F, A, ST, CT]}) ->
- fmt("~!^The return type ~!!~s~!^ in the specification of ~!!"
- "~w/~w~!^ is not a subtype of ~!!~s~!^, which is the expected"
+ fmt("~!^The return type ~!!~ts~!^ in the specification of ~!!"
+ "~w/~w~!^ is not a subtype of ~!!~ts~!^, which is the expected"
" return type for the callback of ~!!~w~!^ behaviour",
[ST, F, A, CT, B]);
message_to_string({callback_spec_arg_type_mismatch, [B, F, A, N, ST, CT]}) ->
- fmt("~!^The specified type for the ~!!~s~!^ argument of ~!!"
- "~w/~w (~s)~!^ is not a supertype of ~!!~s~!^, which is"
+ fmt("~!^The specified type for the ~!!~ts~!^ argument of ~!!"
+ "~w/~w (~ts)~!^ is not a supertype of ~!!~ts~!^, which is"
" expected type for this argument in the callback of the ~!!~w"
"~!^ behaviour", [ordinal(N), F, A, ST, CT, B]);
message_to_string({callback_missing, [B, F, A]}) ->
@@ -272,26 +274,26 @@ call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet,
true ->
%% We do not know which argument(s) caused the failure
fmt("~!^will never return since the success typing arguments"
- " are ~!!~s", [SigArgs]);
+ " are ~!!~ts", [SigArgs]);
false ->
fmt("~!^will never return since it differs in the~!!"
- " ~s ~!^argument from the success typing"
- " arguments:~!! ~s",
+ " ~ts ~!^argument from the success typing"
+ " arguments:~!! ~ts",
[PositionString, good_arg(ArgNs, SigArgs)])
end;
only_contract ->
case (ArgNs =:= []) orelse IsOverloaded of
true ->
%% We do not know which arguments caused the failure
- fmt("~!^breaks the contract~!! ~s", [good_arg(ArgNs, Contract)]);
+ fmt("~!^breaks the contract~!! ~ts", [good_arg(ArgNs, Contract)]);
false ->
- fmt("~!^breaks the contract~!! ~s ~!^in the~!!"
- " ~s ~!^argument",
+ fmt("~!^breaks the contract~!! ~ts ~!^in the~!!"
+ " ~ts ~!^argument",
[good_arg(ArgNs, Contract), PositionString])
end;
both ->
fmt("~!^will never return since the success typing is "
- "~!!~s ~!^->~!! ~s ~!^and the contract is ~!!~s",
+ "~!!~ts ~!^->~!! ~ts ~!^and the contract is ~!!~ts",
[good_arg(ArgNs, SigArgs), SigRet,
good_arg(ArgNs, Contract)])
end.
@@ -299,8 +301,8 @@ call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet,
form_positions(ArgNs) ->
ArgS = form_position_string(ArgNs),
case ArgNs of
- [_] -> fmt("~!^an opaque term as ~!!~s~!^ argument", [ArgS]);
- [_,_|_] -> fmt("~!^opaque terms as ~!!~s~!^ arguments", [ArgS])
+ [_] -> fmt("~!^an opaque term as ~!!~ts~!^ argument", [ArgS]);
+ [_,_|_] -> fmt("~!^opaque terms as ~!!~ts~!^ arguments", [ArgS])
end.
%% We know which positions N are to blame;
@@ -308,9 +310,9 @@ form_positions(ArgNs) ->
form_expected_without_opaque([{N, T, TStr}]) ->
FStr = case erl_types:t_is_opaque(T) of
true ->
- "~!^an opaque term of type~!g ~s ~!^as ";
+ "~!^an opaque term of type~!g ~ts ~!^as ";
false ->
- "~!^a term of type ~!g~s ~!^(with opaque subterms) as "
+ "~!^a term of type ~!g~ts ~!^(with opaque subterms) as "
end ++ form_position_string([N]) ++ "~!^ argument",
fmt(FStr, [TStr]);
@@ -323,9 +325,9 @@ form_expected(ExpectedArgs) ->
[T] ->
TS = erl_types:t_to_string(T),
case erl_types:t_is_opaque(T) of
- true -> fmt("~!^an opaque term of type ~!!~s~!^ is"
+ true -> fmt("~!^an opaque term of type ~!!~ts~!^ is"
" expected", [TS]);
- false -> fmt("~!^a structured term of type ~!!~s~!^ is"
+ false -> fmt("~!^a structured term of type ~!!~ts~!^ is"
" expected", [TS])
end;
[_,_|_] -> fmt("~!^terms of different types are expected in these"
@@ -338,7 +340,7 @@ form_position_string(ArgNs) ->
[N1] -> ordinal(N1);
[_,_|_] ->
[Last|Prevs] = lists:reverse(ArgNs),
- ", " ++ Head = lists:flatten([fmt(", ~s",[ordinal(N)]) ||
+ ", " ++ Head = lists:flatten([fmt(", ~ts",[ordinal(N)]) ||
N <- lists:reverse(Prevs)]),
Head ++ " and " ++ ordinal(Last)
end.
@@ -350,11 +352,11 @@ ordinal(N) when is_integer(N) -> fmt("~!B~w~!!th", [N]).
%% Format a pattern and highlight the erroneous part in red.
bad_pat("pattern " ++ P) ->
- fmt("pattern ~!r~s",[P]);
+ fmt("pattern ~!r~ts",[P]);
bad_pat("variable " ++ P) ->
- fmt("variable ~!r~s",[P]);
+ fmt("variable ~!r~ts",[P]);
bad_pat(P) ->
- fmt("~!r~s",[P]).
+ fmt("~!r~ts",[P]).
bad_arg(N, Args) ->
@@ -368,7 +370,7 @@ good_arg(N, Args) ->
colour_arg(N, C, Args) when is_integer(N) ->
colour_arg([N], C, Args);
colour_arg(Ns, C, Args) ->
- {Args1, Rest} =seperate_args(Args),
+ {Args1, Rest} =separate_args(Args),
Args2 = highlight(Ns, 1, C, Args1),
join_args(Args2) ++ Rest.
@@ -376,53 +378,53 @@ highlight([], _N, _C, Rest) ->
Rest;
highlight([N | Nr], N, g, [Arg | Rest]) ->
- [fmt("~!g~s", [Arg]) | highlight(Nr, N+1, g, Rest)];
+ [fmt("~!g~ts", [Arg]) | highlight(Nr, N+1, g, Rest)];
highlight([N | Nr], N, r, [Arg | Rest]) ->
- [fmt("~!r~s", [Arg]) | highlight(Nr, N+1, r, Rest)];
+ [fmt("~!r~ts", [Arg]) | highlight(Nr, N+1, r, Rest)];
highlight(Ns, N, C, [Arg | Rest]) ->
[Arg | highlight(Ns, N + 1, C, Rest)].
%% Arguments to functions and constraints are passed as
%% strings, not as data; this function pulls them apart
-%% to allow interacting with them seperately and not
+%% to allow interacting with them separately and not
%% as one big chunk of data.
-seperate_args([$( | S]) ->
- seperate_args([], S, "", []).
+separate_args([$( | S]) ->
+ separate_args([], S, "", []).
%% We strip this space since dialyzer is inconsistent in adding or not adding
%% it ....
-seperate_args([], [$,, $\s | R], Arg, Args) ->
- seperate_args([], R, [], [lists:reverse(Arg) | Args]);
+separate_args([], [$,, $\s | R], Arg, Args) ->
+ separate_args([], R, [], [lists:reverse(Arg) | Args]);
-seperate_args([], [$, | R], Arg, Args) ->
- seperate_args([], R, [], [lists:reverse(Arg) | Args]);
+separate_args([], [$, | R], Arg, Args) ->
+ separate_args([], R, [], [lists:reverse(Arg) | Args]);
-seperate_args([], [$) | Rest], Arg, Args) ->
+separate_args([], [$) | Rest], Arg, Args) ->
{lists:reverse([lists:reverse(Arg) | Args]), Rest};
-seperate_args([C | D], [C | R], Arg, Args) ->
- seperate_args(D, R, [C | Arg], Args);
+separate_args([C | D], [C | R], Arg, Args) ->
+ separate_args(D, R, [C | Arg], Args);
%% Brackets
-seperate_args(D, [${ | R], Arg, Args) ->
- seperate_args([$}|D], R, [${ | Arg], Args);
+separate_args(D, [${ | R], Arg, Args) ->
+ separate_args([$}|D], R, [${ | Arg], Args);
-seperate_args(D, [$( | R], Arg, Args) ->
- seperate_args([$)|D], R, [$( | Arg], Args);
+separate_args(D, [$( | R], Arg, Args) ->
+ separate_args([$)|D], R, [$( | Arg], Args);
-seperate_args(D, [$[ | R], Arg, Args) ->
- seperate_args([$]|D], R, [$[ | Arg], Args);
+separate_args(D, [$[ | R], Arg, Args) ->
+ separate_args([$]|D], R, [$[ | Arg], Args);
-seperate_args(D, [$< | R], Arg, Args) ->
- seperate_args([$>|D], R, [$< | Arg], Args);
+separate_args(D, [$< | R], Arg, Args) ->
+ separate_args([$>|D], R, [$< | Arg], Args);
%% 'strings'
-seperate_args(D, [$' | R], Arg, Args) ->
- seperate_args([$'|D], R, [$' | Arg], Args);
-seperate_args(D, [$" | R], Arg, Args) ->
- seperate_args([$"|D], R, [$" | Arg], Args);
+separate_args(D, [$' | R], Arg, Args) ->
+ separate_args([$'|D], R, [$' | Arg], Args);
+separate_args(D, [$" | R], Arg, Args) ->
+ separate_args([$"|D], R, [$" | Arg], Args);
-seperate_args(D, [C | R], Arg, Args) ->
- seperate_args(D, R, [C | Arg], Args).
+separate_args(D, [C | R], Arg, Args) ->
+ separate_args(D, R, [C | Arg], Args).
join_args(Args) ->
- [$(, string:join(Args, ", "), $)].
+ [$(, rebar_string:join(Args, ", "), $)].
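A short, standalone sketch of the idea behind the separate_args/4 and join_args/1 pair above: splitting a rendered argument list on top-level commas while tracking bracket depth. This is illustrative only; the real code tracks a stack of expected closing characters so quoted strings are handled too, and the module and function names below are invented.

-module(arg_split_sketch).
-export([split/1]).

%% split("(a, {b, c}, [d, e])") -> ["a", "{b, c}", "[d, e]"]
split([$( | Rest]) ->
    split(Rest, 0, "", []).

%% Depth 0 and a closing paren: the argument list is complete.
split([$) | _], 0, Acc, Args) ->
    lists:reverse([lists:reverse(Acc) | Args]);
%% Depth 0 and a comma: finish the current argument, start the next.
split([$,, $\s | Rest], 0, Acc, Args) ->
    split(Rest, 0, "", [lists:reverse(Acc) | Args]);
split([$, | Rest], 0, Acc, Args) ->
    split(Rest, 0, "", [lists:reverse(Acc) | Args]);
%% Openers and closers adjust the depth; everything else accumulates.
split([C | Rest], Depth, Acc, Args) when C =:= ${; C =:= $(; C =:= $[ ->
    split(Rest, Depth + 1, [C | Acc], Args);
split([C | Rest], Depth, Acc, Args) when C =:= $}; C =:= $); C =:= $] ->
    split(Rest, Depth - 1, [C | Acc], Args);
split([C | Rest], Depth, Acc, Args) ->
    split(Rest, Depth, [C | Acc], Args).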
diff --git a/src/rebar_digraph.erl b/src/rebar_digraph.erl
index 363253a..776d7b8 100644
--- a/src/rebar_digraph.erl
+++ b/src/rebar_digraph.erl
@@ -1,3 +1,5 @@
+%%% @doc build a digraph of applications in order to figure out dependency
+%%% and compile order.
-module(rebar_digraph).
-export([compile_order/1
@@ -7,7 +9,9 @@
-include("rebar.hrl").
-%% Sort apps with topological sort to get proper build order
+%% @doc Sort apps with topological sort to get proper build order
+-spec compile_order([rebar_app_info:t()]) ->
+ {ok, [rebar_app_info:t()]} | {error, no_sort | {cycles, [[binary(),...]]}}.
compile_order(Apps) ->
Graph = digraph:new(),
lists:foreach(fun(App) ->
@@ -33,6 +37,11 @@ compile_order(Apps) ->
true = digraph:delete(Graph),
Order.
+%% @private Add a package and its dependencies to an existing digraph
+-spec add(digraph:graph(), {PkgName, [Dep]}) -> ok when
+ PkgName :: binary(),
+ Dep :: {Name, term()} | Name,
+ Name :: atom() | iodata().
add(Graph, {PkgName, Deps}) ->
case digraph:vertex(Graph, PkgName) of
false ->
@@ -44,9 +53,9 @@ add(Graph, {PkgName, Deps}) ->
lists:foreach(fun(DepName) ->
Name1 = case DepName of
{Name, _Vsn} ->
- ec_cnv:to_binary(Name);
+ rebar_utils:to_binary(Name);
Name ->
- ec_cnv:to_binary(Name)
+ rebar_utils:to_binary(Name)
end,
V3 = case digraph:vertex(Graph, Name1) of
false ->
@@ -57,6 +66,8 @@ add(Graph, {PkgName, Deps}) ->
digraph:add_edge(Graph, V, V3)
end, Deps).
+%% @doc based on a list of vertices and edges, build a digraph.
+-spec restore_graph({[digraph:vertex()], [digraph:edge()]}) -> digraph:graph().
restore_graph({Vs, Es}) ->
Graph = digraph:new(),
lists:foreach(fun({V, LastUpdated}) ->
@@ -67,6 +78,8 @@ restore_graph({Vs, Es}) ->
end, Es),
Graph.
+%% @doc convert a given exception's payload into an io description.
+-spec format_error(any()) -> iolist().
format_error(no_solution) ->
io_lib:format("No solution for packages found.", []).
@@ -74,22 +87,27 @@ format_error(no_solution) ->
%% Internal Functions
%%====================================================================
+%% @doc alias for `digraph_utils:subgraph/2'.
subgraph(Graph, Vertices) ->
digraph_utils:subgraph(Graph, Vertices).
+%% @private from a list of app names, fetch the proper app info records
+%% for them.
-spec names_to_apps([atom()], [rebar_app_info:t()]) -> [rebar_app_info:t()].
names_to_apps(Names, Apps) ->
[element(2, App) || App <- [find_app_by_name(Name, Apps) || Name <- Names], App =/= error].
+%% @private fetch the proper app info record for a given app name.
-spec find_app_by_name(atom(), [rebar_app_info:t()]) -> {ok, rebar_app_info:t()} | error.
find_app_by_name(Name, Apps) ->
ec_lists:find(fun(App) ->
rebar_app_info:name(App) =:= Name
end, Apps).
-%% The union of all entries in the applications list for an app and
+%% @private The union of all entries in the applications list for an app and
%% the deps listed in its rebar.config is all deps that may be needed
%% for building the app.
+-spec all_apps_deps(rebar_app_info:t()) -> [binary()].
all_apps_deps(App) ->
Applications = lists:usort([atom_to_binary(X, utf8) || X <- rebar_app_info:applications(App)]),
Deps = lists:usort(lists:map(fun({Name, _}) -> Name; (Name) -> Name end, rebar_app_info:deps(App))),
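Underneath the edoc annotations added above, compile_order/1 is plain digraph usage. A rough standalone illustration of the same pattern follows; the app names and dependency lists are made up.

-module(build_order_sketch).
-export([order/0]).

%% Edge direction is App -> Dep ("App depends on Dep"), so a topological
%% sort yields dependents first; reversing it gives a deps-first build order.
order() ->
    G = digraph:new(),
    Deps = [{myapp, [cowboy, jsx]}, {cowboy, [cowlib]}, {cowlib, []}, {jsx, []}],
    [digraph:add_vertex(G, App) || {App, _} <- Deps],
    [digraph:add_edge(G, App, Dep) || {App, Ds} <- Deps, Dep <- Ds],
    Result = case digraph_utils:topsort(G) of
                 false -> {error, cycle};
                 Sorted -> {ok, lists:reverse(Sorted)}
             end,
    true = digraph:delete(G),
    Result.
%% order() -> {ok, [cowlib, ...]} with myapp last; the order of
%% independent apps (jsx vs. cowlib) may vary.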
diff --git a/src/rebar_dir.erl b/src/rebar_dir.erl
index 1ec58d4..17bc48e 100644
--- a/src/rebar_dir.erl
+++ b/src/rebar_dir.erl
@@ -1,3 +1,4 @@
+%%% @doc utility functions for directory and path handling of all kind.
-module(rebar_dir).
-export([base_dir/1,
@@ -22,19 +23,25 @@
processing_base_dir/2,
make_relative_path/2,
src_dirs/1, src_dirs/2,
+ src_dir_opts/2, recursive/2,
extra_src_dirs/1, extra_src_dirs/2,
all_src_dirs/1, all_src_dirs/3,
- retarget_path/2]).
+ retarget_path/2,
+ format_source_file_name/2]).
-include("rebar.hrl").
+%% @doc returns the directory root for build artifacts
+%% for the current profile, such as `_build/default/'.
-spec base_dir(rebar_state:t()) -> file:filename_all().
base_dir(State) ->
profile_dir(rebar_state:opts(State), rebar_state:current_profiles(State)).
+%% @doc returns the directory root for build artifacts for a given set
+%% of profiles.
-spec profile_dir(rebar_dict(), [atom()]) -> file:filename_all().
profile_dir(Opts, Profiles) ->
- {BaseDir, ProfilesStrings} = case [ec_cnv:to_list(P) || P <- Profiles] of
+ {BaseDir, ProfilesStrings} = case [rebar_utils:to_list(P) || P <- Profiles] of
["global" | _] -> {?MODULE:global_cache_dir(Opts), [""]};
["bootstrap", "default"] -> {rebar_opts:get(Opts, base_dir, ?DEFAULT_BASE_DIR), ["default"]};
["default"] -> {rebar_opts:get(Opts, base_dir, ?DEFAULT_BASE_DIR), ["default"]};
@@ -42,28 +49,39 @@ profile_dir(Opts, Profiles) ->
%% of profiles to match order passed to `as`
["default"|Rest] -> {rebar_opts:get(Opts, base_dir, ?DEFAULT_BASE_DIR), Rest}
end,
- ProfilesDir = string:join(ProfilesStrings, "+"),
+ ProfilesDir = rebar_string:join(ProfilesStrings, "+"),
filename:join(BaseDir, ProfilesDir).
+%% @doc returns the directory where dependencies should be placed
+%% given the current profile.
-spec deps_dir(rebar_state:t()) -> file:filename_all().
deps_dir(State) ->
filename:join(base_dir(State), rebar_state:get(State, deps_dir, ?DEFAULT_DEPS_DIR)).
+%% @doc returns the directory where a dependency should be placed
+%% given the current profile, based on its app name. Expects to be passed
+%% the result of `deps_dir/1' as a first argument.
-spec deps_dir(file:filename_all(), file:filename_all()) -> file:filename_all().
deps_dir(DepsDir, App) ->
filename:join(DepsDir, App).
+%% @doc returns the absolute path for the project root (by default,
+%% the current working directory for the currently running escript).
root_dir(State) ->
filename:absname(rebar_state:get(State, root_dir, ?DEFAULT_ROOT_DIR)).
+%% @doc returns the expected location of the `_checkouts' directory.
-spec checkouts_dir(rebar_state:t()) -> file:filename_all().
checkouts_dir(State) ->
filename:join(root_dir(State), rebar_state:get(State, checkouts_dir, ?DEFAULT_CHECKOUTS_DIR)).
+%% @doc returns the expected location of a given app in the checkouts
+%% directory for the project.
-spec checkouts_dir(rebar_state:t(), file:filename_all()) -> file:filename_all().
checkouts_dir(State, App) ->
filename:join(checkouts_dir(State), App).
+%% @doc Returns the directory where plugins are located.
-spec plugins_dir(rebar_state:t()) -> file:filename_all().
plugins_dir(State) ->
case lists:member(global, rebar_state:current_profiles(State)) of
@@ -73,33 +91,50 @@ plugins_dir(State) ->
filename:join(base_dir(State), rebar_state:get(State, plugins_dir, ?DEFAULT_PLUGINS_DIR))
end.
+%% @doc returns the list of relative path where the project applications can
+%% be located.
-spec lib_dirs(rebar_state:t()) -> file:filename_all().
lib_dirs(State) ->
rebar_state:get(State, project_app_dirs, ?DEFAULT_PROJECT_APP_DIRS).
+%% @doc returns the user's home directory.
+-spec home_dir() -> file:filename_all().
home_dir() ->
{ok, [[Home]]} = init:get_argument(home),
Home.
+%% @doc returns the directory where the global configuration files for rebar3
+%% may be stored.
+-spec global_config_dir(rebar_state:t()) -> file:filename_all().
global_config_dir(State) ->
Home = home_dir(),
rebar_state:get(State, global_rebar_dir, filename:join([Home, ".config", "rebar3"])).
+%% @doc returns the path of the global rebar.config file
+-spec global_config(rebar_state:t()) -> file:filename_all().
global_config(State) ->
filename:join(global_config_dir(State), "rebar.config").
+%% @doc returns the default path of the global rebar.config file
+-spec global_config() -> file:filename_all().
global_config() ->
Home = home_dir(),
filename:join([Home, ".config", "rebar3", "rebar.config"]).
+%% @doc returns the location for the global cache directory
-spec global_cache_dir(rebar_dict()) -> file:filename_all().
global_cache_dir(Opts) ->
Home = home_dir(),
rebar_opts:get(Opts, global_rebar_dir, filename:join([Home, ".cache", "rebar3"])).
+%% @doc appends the cache directory to the path passed to this function.
+-spec local_cache_dir(file:filename_all()) -> file:filename_all().
local_cache_dir(Dir) ->
filename:join(Dir, ".rebar3").
+%% @doc returns the current working directory, with some specific
+%% conversions and handling done to be cross-platform compatible.
+-spec get_cwd() -> file:filename_all().
get_cwd() ->
{ok, Dir} = file:get_cwd(),
%% On windows cwd may return capital letter for drive,
@@ -108,20 +143,33 @@ get_cwd() ->
%% cwd as soon as it possible.
filename:join([Dir]).
+%% @doc returns the file location for the global template
+%% configuration variables file.
+-spec template_globals(rebar_state:t()) -> file:filename_all().
template_globals(State) ->
filename:join([global_config_dir(State), "templates", "globals"]).
+%% @doc returns the location for the global template directory
+-spec template_dir(rebar_state:t()) -> file:filename_all().
template_dir(State) ->
filename:join([global_config_dir(State), "templates"]).
+%% @doc checks if the current working directory is the base directory
+%% for the project.
+-spec processing_base_dir(rebar_state:t()) -> boolean().
processing_base_dir(State) ->
Cwd = get_cwd(),
processing_base_dir(State, Cwd).
+%% @doc checks if the passed in directory is the base directory for
+%% the project.
+-spec processing_base_dir(rebar_state:t(), file:filename()) -> boolean().
processing_base_dir(State, Dir) ->
AbsDir = filename:absname(Dir),
AbsDir =:= rebar_state:get(State, base_dir).
+%% @doc make a path absolute
+-spec make_absolute_path(file:filename()) -> file:filename().
make_absolute_path(Path) ->
case filename:pathtype(Path) of
absolute ->
@@ -135,73 +183,151 @@ make_absolute_path(Path) ->
filename:join([Dir, Path])
end.
+%% @doc normalizing a path removes all of the `..' and the
+%% `.' segments it may contain.
+-spec make_normalized_path(file:filename()) -> file:filename().
make_normalized_path(Path) ->
AbsPath = make_absolute_path(Path),
Components = filename:split(AbsPath),
make_normalized_path(Components, []).
+%% @private drops path fragments for normalization
+-spec make_normalized_path([string()], [string()]) -> file:filename().
make_normalized_path([], NormalizedPath) ->
filename:join(lists:reverse(NormalizedPath));
make_normalized_path([H|T], NormalizedPath) ->
case H of
+ "." when NormalizedPath == [], T == [] -> make_normalized_path(T, ["."]);
"." -> make_normalized_path(T, NormalizedPath);
- ".." -> make_normalized_path(T, tl(NormalizedPath));
+ ".." when NormalizedPath == [] -> make_normalized_path(T, [".."]);
+ ".." when hd(NormalizedPath) =/= ".." -> make_normalized_path(T, tl(NormalizedPath));
_ -> make_normalized_path(T, [H|NormalizedPath])
end.
+%% @doc take a source and a target path, and relativize the target path
+%% onto the source.
+%%
+%% Example:
+%% ```
+%% 1> rebar_dir:make_relative_path("a/b/c/d/file", "a/b/file").
+%% "c/d/file"
+%% 2> rebar_dir:make_relative_path("a/b/file", "a/b/c/d/file").
+%% "../../file"
+%% '''
+-spec make_relative_path(file:filename(), file:filename()) -> file:filename().
make_relative_path(Source, Target) ->
AbsSource = make_normalized_path(Source),
AbsTarget = make_normalized_path(Target),
do_make_relative_path(filename:split(AbsSource), filename:split(AbsTarget)).
+%% @private based on fragments of paths, replace the number of common
+%% segments by `../' bits, and add the rest of the source alone after it
+-spec do_make_relative_path([string()], [string()]) -> file:filename().
do_make_relative_path([H|T1], [H|T2]) ->
do_make_relative_path(T1, T2);
do_make_relative_path(Source, Target) ->
Base = lists:duplicate(max(length(Target) - 1, 0), ".."),
filename:join(Base ++ Source).
+%%% @doc
+%%% `src_dirs' and `extra_src_dirs' can be configured with options
+%%% like this:
+%%% ```
+%%% {src_dirs,[{"foo",[{recursive,false}]}]}
+%%% {extra_src_dirs,[{"bar",[recursive]}]} (equivalent to {recursive,true})
+%%% '''
+%%% `src_dirs/1,2' and `extra_src_dirs/1,2' return only the list of
+%%% directories for the `src_dirs' and `extra_src_dirs' options
+%%% respectively, while `src_dir_opts/2' returns the options list for
+%%% the given directory, no matter if it is configured as `src_dirs' or
+%%% `extra_src_dirs'.
-spec src_dirs(rebar_dict()) -> list(file:filename_all()).
src_dirs(Opts) -> src_dirs(Opts, []).
+%% @doc same as `src_dirs/1', but allows passing in a list of default options.
-spec src_dirs(rebar_dict(), list(file:filename_all())) -> list(file:filename_all()).
src_dirs(Opts, Default) ->
- ErlOpts = rebar_opts:erl_opts(Opts),
- Vs = proplists:get_all_values(src_dirs, ErlOpts),
- case lists:append([rebar_opts:get(Opts, src_dirs, []) | Vs]) of
- [] -> Default;
- Dirs -> lists:usort(Dirs)
- end.
+ src_dirs(src_dirs, Opts, Default).
+%% @doc same as `src_dirs/1', but for the `extra_src_dirs' options
-spec extra_src_dirs(rebar_dict()) -> list(file:filename_all()).
extra_src_dirs(Opts) -> extra_src_dirs(Opts, []).
+%% @doc same as `src_dirs/2', but for the `extra_src_dirs' options
-spec extra_src_dirs(rebar_dict(), list(file:filename_all())) -> list(file:filename_all()).
extra_src_dirs(Opts, Default) ->
+ src_dirs(extra_src_dirs, Opts, Default).
+
+%% @private agnostic version of src_dirs and extra_src_dirs.
+src_dirs(Type, Opts, Default) ->
+ lists:usort([
+ case D0 of
+ {D,_} -> normalize_relative_path(D);
+ _ -> normalize_relative_path(D0)
+ end || D0 <- raw_src_dirs(Type,Opts,Default)]).
+
+%% @private extracts the un-formatted src_dirs or extra_src_dirs
+%% options as configured.
+raw_src_dirs(Type, Opts, Default) ->
ErlOpts = rebar_opts:erl_opts(Opts),
- Vs = proplists:get_all_values(extra_src_dirs, ErlOpts),
- case lists:append([rebar_opts:get(Opts, extra_src_dirs, []) | Vs]) of
+ Vs = proplists:get_all_values(Type, ErlOpts),
+ case lists:append([rebar_opts:get(Opts, Type, []) | Vs]) of
[] -> Default;
- Dirs -> lists:usort(Dirs)
+ Dirs -> Dirs
end.
+%% @private normalizes relative paths so that ./a/b/c/ => a/b/c
+normalize_relative_path(Path) ->
+ make_normalized_path(filename:split(Path), []).
+
+%% @doc returns all the source directories (`src_dirs' and
+%% `extra_src_dirs').
-spec all_src_dirs(rebar_dict()) -> list(file:filename_all()).
all_src_dirs(Opts) -> all_src_dirs(Opts, [], []).
+%% @doc returns all the source directories (`src_dirs' and
+%% `extra_src_dirs') while being able to configure defaults for both.
-spec all_src_dirs(rebar_dict(), list(file:filename_all()), list(file:filename_all())) ->
list(file:filename_all()).
all_src_dirs(Opts, SrcDefault, ExtraDefault) ->
lists:usort(src_dirs(Opts, SrcDefault) ++ extra_src_dirs(Opts, ExtraDefault)).
-%% given a path if that path is an ancestor of an app dir return the path relative to that
-%% apps outdir. if the path is not an ancestor to any app dirs but is an ancestor of the
-%% project root return the path relative to the project base_dir. if it is not an ancestor
+%%% @doc
+%%% Return the list of options for the given src directory
+%%% If the same option is given multiple times for a directory in the
+%%% config, the priority order is: first occurrence of `src_dirs'
+%%% followed by first occurrence of `extra_src_dirs'.
+-spec src_dir_opts(rebar_dict(), file:filename_all()) -> [{atom(),term()}].
+src_dir_opts(Opts, Dir) ->
+ RawSrcDirs = raw_src_dirs(src_dirs, Opts, []),
+ RawExtraSrcDirs = raw_src_dirs(extra_src_dirs, Opts, []),
+ AllOpts = [Opt || {D, Opt} <- RawSrcDirs++RawExtraSrcDirs, D==Dir],
+ lists:ukeysort(1, proplists:unfold(lists:append(AllOpts))).
+
+%%% @doc
+%%% Return the value of the 'recursive' option for the given directory.
+%%% If not given, the value of 'recursive' in the 'erlc_compiler'
+%%% options is used, and finally the default is 'true'.
+-spec recursive(rebar_dict(), file:filename_all()) -> boolean().
+recursive(Opts, Dir) ->
+ DirOpts = src_dir_opts(Opts, Dir),
+ Default = proplists:get_value(recursive,
+ rebar_opts:get(Opts, erlc_compiler, []),
+ true),
+ R = proplists:get_value(recursive, DirOpts, Default),
+ R.
+
+%% @doc given a path, if that path is an ancestor of an app dir, return the path relative to that
+%% app's outdir. If the path is not an ancestor to any app dirs but is an ancestor of the
+%% project root, return the path relative to the project base_dir. If it is not an ancestor
%% of either return it unmodified
-spec retarget_path(rebar_state:t(), string()) -> string().
-
retarget_path(State, Path) ->
ProjectApps = rebar_state:project_apps(State),
retarget_path(State, Path, ProjectApps).
+%% @private worker for retarget_path/2
+%% @end
%% not relative to any apps in project, check to see it's relative to
%% project root
retarget_path(State, Path, []) ->
@@ -217,3 +343,39 @@ retarget_path(State, Path, [App|Rest]) ->
{ok, NewPath} -> filename:join([rebar_app_info:out_dir(App), NewPath]);
{error, badparent} -> retarget_path(State, Path, Rest)
end.
+
+format_source_file_name(Path, Opts) ->
+ Type = case rebar_opts:get(Opts, compiler_source_format,
+ ?DEFAULT_COMPILER_SOURCE_FORMAT) of
+ V when V == absolute; V == relative; V == build ->
+ V;
+ Other ->
+ warn_source_format_once(Other)
+ end,
+ case Type of
+ absolute -> resolve_linked_source(Path);
+ build -> Path;
+ relative ->
+ Cwd = rebar_dir:get_cwd(),
+ rebar_dir:make_relative_path(resolve_linked_source(Path), Cwd)
+ end.
+
+%% @private displays a warning for the compiler source format option
+%% only once
+-spec warn_source_format_once(term()) -> ok.
+warn_source_format_once(Format) ->
+ Warn = application:get_env(rebar, warn_source_format) =/= {ok, false},
+ application:set_env(rebar, warn_source_format, false),
+ case Warn of
+ false ->
+ ok;
+ true ->
+ ?WARN("Invalid argument ~p for compiler_source_format - "
+ "assuming ~ts~n", [Format, ?DEFAULT_COMPILER_SOURCE_FORMAT])
+ end.
+
+%% @private takes a filename and canonicalizes its path if it is a link.
+-spec resolve_linked_source(file:filename()) -> file:filename().
+resolve_linked_source(Src) ->
+ {Dir, Base} = rebar_file_utils:split_dirname(Src),
+ filename:join(rebar_file_utils:resolve_link(Dir), Base).
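Putting the new src_dir_opts/2 and recursive/2 together with the config shapes documented above, a hypothetical rebar.config fragment (directory names invented) would resolve roughly as follows.

%% rebar.config fragment (illustrative):
{src_dirs, [{"src", [{recursive, false}]}]}.
{extra_src_dirs, [{"test/generators", [recursive]}]}.
{erlc_compiler, [{recursive, true}]}.

%% With those options loaded into Opts, approximately:
%%   rebar_dir:src_dirs(Opts, ["src"])      -> ["src"]
%%   rebar_dir:src_dir_opts(Opts, "src")    -> [{recursive, false}]
%%   rebar_dir:recursive(Opts, "src")       -> false  (the per-dir option wins)
%%   rebar_dir:recursive(Opts, "elsewhere") -> true   (falls back to erlc_compiler, then the default)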
diff --git a/src/rebar_dist_utils.erl b/src/rebar_dist_utils.erl
index f462826..5de858e 100644
--- a/src/rebar_dist_utils.erl
+++ b/src/rebar_dist_utils.erl
@@ -7,6 +7,9 @@
%%%%%%%%%%%%%%%%%%
%%% PUBLIC API %%%
%%%%%%%%%%%%%%%%%%
+
+%% @doc allows picking whether to use a short or long name, and
+%% starts the distributed mode for it.
-spec either(Name::atom(), SName::atom(), Opts::[{setcookie,term()}]) -> atom().
either(undefined, undefined, _) ->
'nonode@nohost';
@@ -19,13 +22,19 @@ either(undefined, SName, Opts) ->
either(_, _, _) ->
?ABORT("Cannot have both short and long node names defined", []).
+%% @doc starts a node with a short name.
+-spec short(SName::atom(), Opts::[{setcookie,term()}]) -> term().
short(Name, Opts) ->
start(Name, shortnames, Opts).
+%% @doc starts a node with a long name.
+-spec long(Name::atom(), Opts::[{setcookie,term()}]) -> term().
long(Name, Opts) ->
start(Name, longnames, Opts).
--spec find_options(rebar_state:state()) -> {Long, Short, Opts} when
+%% @doc utility function to extract all distribution options
+%% from a rebar3 state tuple.
+-spec find_options(rebar_state:t()) -> {Long, Short, Opts} when
Long :: atom(),
Short :: atom(),
Opts :: [{setcookie,term()}].
@@ -42,14 +51,27 @@ find_options(State) ->
%%% PRIVATE %%%
%%%%%%%%%%%%%%%
start(Name, Type, Opts) ->
- check_epmd(net_kernel:start([Name, Type])),
+ case dist_up(net_kernel:start([Name, Type])) of
+ false ->
+ start_epmd(),
+ dist_up(net_kernel:start([Name, Type])) orelse warn_dist();
+ true ->
+ ok
+ end,
setup_cookie(Opts).
-check_epmd({error,{{shutdown, {_,net_kernel,{'EXIT',nodistribution}}},_}}) ->
- ?ERROR("Erlang Distribution failed, falling back to nonode@nohost. "
- "Verify that epmd is running and try again.",[]);
-check_epmd(_) ->
- ok.
+dist_up({error,{{shutdown,{_,net_kernel,{'EXIT',nodistribution}}},_}}) -> false;
+dist_up(_) -> true.
+
+start_epmd() ->
+ %% Indirectly boot EPMD through calling Erlang so that we don't risk
+ %% attaching it to the current proc
+ ?CONSOLE("Attempting to start epmd...", []),
+ os:cmd("erl -sname a -eval 'halt(0).'").
+
+warn_dist() ->
+ ?ERROR("Erlang Distribution failed, falling back to nonode@nohost.", []).
+
setup_cookie(Opts) ->
case {node(), proplists:get_value(setcookie, Opts, nocookie)} of
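Stripped of the rebar macros, the start-up flow above is a retry-once pattern: try to bring distribution up, boot epmd out of band if that fails, try again, and warn if it still fails. A minimal sketch, with the node name as a placeholder and any error treated as distribution being down (the real code matches only the nodistribution shutdown error):

ensure_dist(Name) ->
    case net_kernel:start([Name, shortnames]) of
        {ok, _} -> ok;
        {error, _} ->
            %% Booting a throwaway node starts epmd as a side effect.
            os:cmd("erl -sname a -eval 'halt(0).'"),
            case net_kernel:start([Name, shortnames]) of
                {ok, _} -> ok;
                {error, _} ->
                    io:format("Distribution failed; continuing as nonode@nohost~n")
            end
    end.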
diff --git a/src/rebar_env.erl b/src/rebar_env.erl
new file mode 100644
index 0000000..e9adafb
--- /dev/null
+++ b/src/rebar_env.erl
@@ -0,0 +1,86 @@
+-module(rebar_env).
+
+-export([create_env/1,
+ create_env/2]).
+
+-include("rebar.hrl").
+
+%% @doc The following environment variables are exported when running
+%% a hook (absolute paths):
+%%
+%% REBAR_DEPS_DIR = rebar_dir:deps_dir/1
+%% REBAR_BUILD_DIR = rebar_dir:base_dir/1
+%% REBAR_ROOT_DIR = rebar_dir:root_dir/1
+%% REBAR_CHECKOUTS_DIR = rebar_dir:checkouts_dir/1
+%% REBAR_PLUGINS_DIR = rebar_dir:plugins_dir/1
+%% REBAR_GLOBAL_CONFIG_DIR = rebar_dir:global_config_dir/1
+%% REBAR_GLOBAL_CACHE_DIR = rebar_dir:global_cache_dir/1
+%% REBAR_TEMPLATE_DIR = rebar_dir:template_dir/1
+%% REBAR_APP_DIRS = rebar_dir:lib_dirs/1
+%% REBAR_SRC_DIRS = rebar_dir:src_dirs/1
+%%
+%% autoconf compatible variables
+%% (see: http://www.gnu.org/software/autoconf/manual/autoconf.html#Erlang-Libraries):
+%% ERLANG_ERTS_VER = erlang:system_info(version)
+%% ERLANG_ROOT_DIR = code:root_dir/0
+%% ERLANG_LIB_DIR_erl_interface = code:lib_dir(erl_interface)
+%% ERLANG_LIB_VER_erl_interface = version part of path returned by code:lib_dir(erl_interface)
+%% ERL = ERLANG_ROOT_DIR/bin/erl
+%% ERLC = ERLANG_ROOT_DIR/bin/erlc
+%%
+
+-spec create_env(rebar_state:t()) -> proplists:proplist().
+create_env(State) ->
+ Opts = rebar_state:opts(State),
+ create_env(State, Opts).
+
+-spec create_env(rebar_state:t(), rebar_dict()) -> proplists:proplist().
+create_env(State, Opts) ->
+ BaseDir = rebar_dir:base_dir(State),
+ EnvVars = [
+ {"REBAR_DEPS_DIR", filename:absname(rebar_dir:deps_dir(State))},
+ {"REBAR_BUILD_DIR", filename:absname(rebar_dir:base_dir(State))},
+ {"REBAR_ROOT_DIR", filename:absname(rebar_dir:root_dir(State))},
+ {"REBAR_CHECKOUTS_DIR", filename:absname(rebar_dir:checkouts_dir(State))},
+ {"REBAR_PLUGINS_DIR", filename:absname(rebar_dir:plugins_dir(State))},
+ {"REBAR_GLOBAL_CONFIG_DIR", filename:absname(rebar_dir:global_config_dir(State))},
+ {"REBAR_GLOBAL_CACHE_DIR", filename:absname(rebar_dir:global_cache_dir(Opts))},
+ {"REBAR_TEMPLATE_DIR", filename:absname(rebar_dir:template_dir(State))},
+ {"REBAR_APP_DIRS", join_dirs(BaseDir, rebar_dir:lib_dirs(State))},
+ {"REBAR_SRC_DIRS", join_dirs(BaseDir, rebar_dir:all_src_dirs(Opts))},
+ {"ERLANG_ERTS_VER", erlang:system_info(version)},
+ {"ERLANG_ROOT_DIR", code:root_dir()},
+ {"ERL", filename:join([code:root_dir(), "bin", "erl"])},
+ {"ERLC", filename:join([code:root_dir(), "bin", "erlc"])},
+ {"ERLANG_ARCH" , rebar_api:wordsize()},
+ {"ERLANG_TARGET", rebar_api:get_arch()}
+ ],
+ EInterfaceVars = create_erl_interface_env(),
+ lists:append([EnvVars, EInterfaceVars]).
+
+-spec create_erl_interface_env() -> list().
+create_erl_interface_env() ->
+ case code:lib_dir(erl_interface) of
+ {error, bad_name} ->
+ ?WARN("erl_interface is missing. ERLANG_LIB_DIR_erl_interface and "
+ "ERLANG_LIB_VER_erl_interface will not be added to the environment.", []),
+ [];
+ Dir ->
+ [
+ {"ERLANG_LIB_DIR_erl_interface", Dir},
+ {"ERLANG_LIB_VER_erl_interface", re_version(Dir)}
+ ]
+ end.
+
+%% ====================================================================
+%% Internal functions
+%% ====================================================================
+
+join_dirs(BaseDir, Dirs) ->
+ rebar_string:join([filename:join(BaseDir, Dir) || Dir <- Dirs], ":").
+
+re_version(Path) ->
+ case re:run(Path, "^.*-(?<VER>[^/-]*)$", [{capture,[1],list}, unicode]) of
+ nomatch -> "";
+ {match, [Ver]} -> Ver
+ end.
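The variables built by create_env/2 are exported before hooks run, so shell hooks can rely on them directly. A hypothetical rebar.config hook (the commands themselves are made up) consuming two of them:

{pre_hooks, [
    {compile, "mkdir -p \"$REBAR_BUILD_DIR/generated\""},
    {compile, "echo \"deps are in $REBAR_DEPS_DIR\""}
]}.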
diff --git a/src/rebar_erlc_compiler.erl b/src/rebar_erlc_compiler.erl
index 167f2bb..e52791c 100644
--- a/src/rebar_erlc_compiler.erl
+++ b/src/rebar_erlc_compiler.erl
@@ -47,12 +47,8 @@
-type compile_opts() :: [compile_opt()].
-type compile_opt() :: {recursive, boolean()}.
--record(compile_opts, {
- recursive = true
-}).
-
-define(DEFAULT_OUTDIR, "ebin").
--define(RE_PREFIX, "^[^._]").
+-define(RE_PREFIX, "^(?!\\._)").
%% ===================================================================
%% Public API
@@ -88,34 +84,38 @@
%% 'old_inets'}]}.
%%
-%% @equiv compile(AppInfo, []).
-
+%% @equiv compile(AppInfo, [])
-spec compile(rebar_app_info:t()) -> ok.
compile(AppInfo) when element(1, AppInfo) == app_info_t ->
compile(AppInfo, []).
%% @doc compile an individual application.
-
-spec compile(rebar_app_info:t(), compile_opts()) -> ok.
compile(AppInfo, CompileOpts) when element(1, AppInfo) == app_info_t ->
- Dir = ec_cnv:to_list(rebar_app_info:out_dir(AppInfo)),
+ warn_deprecated(),
+ Dir = rebar_utils:to_list(rebar_app_info:out_dir(AppInfo)),
RebarOpts = rebar_app_info:opts(AppInfo),
+ SrcOpts = [check_last_mod,
+ {recursive, dir_recursive(RebarOpts, "src", CompileOpts)}],
+ MibsOpts = [check_last_mod,
+ {recursive, dir_recursive(RebarOpts, "mibs", CompileOpts)}],
+
rebar_base_compiler:run(RebarOpts,
check_files([filename:join(Dir, File)
|| File <- rebar_opts:get(RebarOpts, xrl_first_files, [])]),
filename:join(Dir, "src"), ".xrl", filename:join(Dir, "src"), ".erl",
- fun compile_xrl/3),
+ fun compile_xrl/3, SrcOpts),
rebar_base_compiler:run(RebarOpts,
check_files([filename:join(Dir, File)
|| File <- rebar_opts:get(RebarOpts, yrl_first_files, [])]),
filename:join(Dir, "src"), ".yrl", filename:join(Dir, "src"), ".erl",
- fun compile_yrl/3),
+ fun compile_yrl/3, SrcOpts),
rebar_base_compiler:run(RebarOpts,
check_files([filename:join(Dir, File)
|| File <- rebar_opts:get(RebarOpts, mib_first_files, [])]),
filename:join(Dir, "mibs"), ".mib", filename:join([Dir, "priv", "mibs"]), ".bin",
- compile_mib(AppInfo)),
+ compile_mib(AppInfo), MibsOpts),
SrcDirs = lists:map(fun(SrcDir) -> filename:join(Dir, SrcDir) end,
rebar_dir:src_dirs(RebarOpts, ["src"])),
@@ -149,6 +149,7 @@ compile(RebarOpts, BaseDir, OutDir) ->
compile(State, BaseDir, OutDir, CompileOpts) when element(1, State) == state_t ->
compile(rebar_state:opts(State), BaseDir, OutDir, CompileOpts);
compile(RebarOpts, BaseDir, OutDir, CompileOpts) ->
+ warn_deprecated(),
SrcDirs = lists:map(fun(SrcDir) -> filename:join(BaseDir, SrcDir) end,
rebar_dir:src_dirs(RebarOpts, ["src"])),
compile_dirs(RebarOpts, BaseDir, SrcDirs, OutDir, CompileOpts),
@@ -162,16 +163,14 @@ compile(RebarOpts, BaseDir, OutDir, CompileOpts) ->
end,
lists:foreach(F, lists:map(fun(SrcDir) -> filename:join(BaseDir, SrcDir) end, ExtraDirs)).
-%% @equiv compile_dirs(Context, BaseDir, [Dir], Dir, [{recursive, false}]).
-
+%% @equiv compile_dirs(Context, BaseDir, [Dir], Dir, [{recursive, false}])
-spec compile_dir(rebar_dict() | rebar_state:t(), file:name(), file:name()) -> ok.
compile_dir(State, BaseDir, Dir) when element(1, State) == state_t ->
compile_dir(rebar_state:opts(State), BaseDir, Dir, [{recursive, false}]);
compile_dir(RebarOpts, BaseDir, Dir) ->
compile_dir(RebarOpts, BaseDir, Dir, [{recursive, false}]).
-%% @equiv compile_dirs(Context, BaseDir, [Dir], Dir, Opts).
-
+%% @equiv compile_dirs(Context, BaseDir, [Dir], Dir, Opts)
-spec compile_dir(rebar_dict() | rebar_state:t(), file:name(), file:name(), compile_opts()) -> ok.
compile_dir(State, BaseDir, Dir, Opts) when element(1, State) == state_t ->
compile_dirs(rebar_state:opts(State), BaseDir, [Dir], Dir, Opts);
@@ -179,7 +178,6 @@ compile_dir(RebarOpts, BaseDir, Dir, Opts) ->
compile_dirs(RebarOpts, BaseDir, [Dir], Dir, Opts).
%% @doc compile a list of directories with the given opts.
-
-spec compile_dirs(rebar_dict() | rebar_state:t(),
file:filename(),
[file:filename()],
@@ -187,13 +185,10 @@ compile_dir(RebarOpts, BaseDir, Dir, Opts) ->
compile_opts()) -> ok.
compile_dirs(State, BaseDir, Dirs, OutDir, CompileOpts) when element(1, State) == state_t ->
compile_dirs(rebar_state:opts(State), BaseDir, Dirs, OutDir, CompileOpts);
-compile_dirs(RebarOpts, BaseDir, SrcDirs, OutDir, Opts) ->
- CompileOpts = parse_opts(Opts),
-
+compile_dirs(RebarOpts, BaseDir, SrcDirs, OutDir, CompileOpts) ->
ErlOpts = rebar_opts:erl_opts(RebarOpts),
?DEBUG("erlopts ~p", [ErlOpts]),
- Recursive = CompileOpts#compile_opts.recursive,
- AllErlFiles = gather_src(SrcDirs, Recursive),
+ AllErlFiles = gather_src(RebarOpts, BaseDir, SrcDirs, CompileOpts),
?DEBUG("files to compile ~p", [AllErlFiles]),
%% Make sure that outdir is on the path
@@ -202,7 +197,13 @@ compile_dirs(RebarOpts, BaseDir, SrcDirs, OutDir, Opts) ->
G = init_erlcinfo(include_abs_dirs(ErlOpts, BaseDir), AllErlFiles, BaseDir, OutDir),
- NeededErlFiles = needed_files(G, ErlOpts, BaseDir, OutDir, AllErlFiles),
+ {ParseTransforms, Rest} = split_source_files(AllErlFiles, ErlOpts),
+ NeededErlFiles = case needed_files(G, ErlOpts, RebarOpts, BaseDir, OutDir, ParseTransforms) of
+ [] -> needed_files(G, ErlOpts, RebarOpts, BaseDir, OutDir, Rest);
+ %% at least one parse transform in the opts needs updating, so recompile all
+ _ -> AllErlFiles
+ end,
+
{ErlFirstFiles, ErlOptsFirst} = erl_first_files(RebarOpts, ErlOpts, BaseDir, NeededErlFiles),
{DepErls, OtherErls} = lists:partition(
fun(Source) -> digraph:in_degree(G, Source) > 0 end,
@@ -218,7 +219,7 @@ compile_dirs(RebarOpts, BaseDir, SrcDirs, OutDir, Opts) ->
true -> ErlOptsFirst;
false -> ErlOpts
end,
- internal_erl_compile(C, BaseDir, S, OutDir, ErlOpts1)
+ internal_erl_compile(C, BaseDir, S, OutDir, ErlOpts1, RebarOpts)
end)
after
true = digraph:delete(SubGraph),
@@ -227,7 +228,6 @@ compile_dirs(RebarOpts, BaseDir, SrcDirs, OutDir, Opts) ->
ok.
%% @doc remove compiled artifacts from an AppDir.
-
-spec clean(rebar_app_info:t()) -> 'ok'.
clean(AppInfo) ->
AppDir = rebar_app_info:out_dir(AppInfo),
@@ -240,8 +240,8 @@ clean(AppInfo) ->
YrlFiles = rebar_utils:find_files(filename:join([AppDir, "src"]), ?RE_PREFIX".*\\.[x|y]rl\$"),
rebar_file_utils:delete_each(
- [ binary_to_list(iolist_to_binary(re:replace(F, "\\.[x|y]rl$", ".erl")))
- || F <- YrlFiles ]),
+ [rebar_utils:to_list(re:replace(F, "\\.[x|y]rl$", ".erl", [unicode]))
+ || F <- YrlFiles]),
BinDirs = ["ebin"|rebar_dir:extra_src_dirs(rebar_app_info:opts(AppInfo))],
ok = clean_dirs(AppDir, BinDirs),
@@ -266,18 +266,29 @@ clean_dirs(AppDir, [Dir|Rest]) ->
%% Internal functions
%% ===================================================================
-gather_src(Dirs, Recursive) ->
- gather_src(Dirs, [], Recursive).
+gather_src(Opts, BaseDir, Dirs, CompileOpts) ->
+ gather_src(Opts, filename:split(BaseDir), Dirs, [], CompileOpts).
+
+gather_src(_Opts, _BaseDirParts, [], Srcs, _CompileOpts) -> Srcs;
+gather_src(Opts, BaseDirParts, [Dir|Rest], Srcs, CompileOpts) ->
+ DirParts = filename:split(Dir),
+ RelDir = case lists:prefix(BaseDirParts,DirParts) of
+ true ->
+ case lists:nthtail(length(BaseDirParts),DirParts) of
+ [] -> ".";
+ RestParts -> filename:join(RestParts)
+ end;
+ false -> Dir
+ end,
+ DirRecursive = dir_recursive(Opts, RelDir, CompileOpts),
+ gather_src(Opts, BaseDirParts, Rest, Srcs ++ rebar_utils:find_files(Dir, ?RE_PREFIX".*\\.erl\$", DirRecursive), CompileOpts).
-gather_src([], Srcs, _Recursive) -> Srcs;
-gather_src([Dir|Rest], Srcs, Recursive) ->
- gather_src(Rest, Srcs ++ rebar_utils:find_files(Dir, ?RE_PREFIX".*\\.erl\$", Recursive), Recursive).
-
%% Get files which need to be compiled first, i.e. those specified in erl_first_files
%% and parse_transform options. Also produce specific erl_opts for these first
%% files, so that yet to be compiled parse transformations are excluded from it.
erl_first_files(Opts, ErlOpts, Dir, NeededErlFiles) ->
ErlFirstFilesConf = rebar_opts:get(Opts, erl_first_files, []),
+ valid_erl_first_conf(ErlFirstFilesConf),
NeededSrcDirs = lists:usort(lists:map(fun filename:dirname/1, NeededErlFiles)),
%% NOTE: order of files here is important!
ErlFirstFiles =
@@ -296,15 +307,29 @@ erl_first_files(Opts, ErlOpts, Dir, NeededErlFiles) ->
end, ErlOpts),
{ErlFirstFiles ++ ParseTransformsErls, ErlOptsFirst}.
+split_source_files(SourceFiles, ErlOpts) ->
+ ParseTransforms = proplists:get_all_values(parse_transform, ErlOpts),
+ lists:partition(fun(Source) ->
+ lists:member(filename_to_atom(Source), ParseTransforms)
+ end, SourceFiles).
+
+filename_to_atom(F) -> list_to_atom(filename:rootname(filename:basename(F))).
+
%% Get subset of SourceFiles which need to be recompiled, respecting
%% dependencies induced by given graph G.
-needed_files(G, ErlOpts, Dir, OutDir, SourceFiles) ->
+needed_files(G, ErlOpts, RebarOpts, Dir, OutDir, SourceFiles) ->
lists:filter(fun(Source) ->
TargetBase = target_base(OutDir, Source),
Target = TargetBase ++ ".beam",
+ PrivIncludes = [{i, filename:join(Dir, Src)}
+ || Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])],
AllOpts = [{outdir, filename:dirname(Target)}
,{i, filename:join(Dir, "include")}
- ,{i, Dir}] ++ ErlOpts,
+ ,{i, Dir}] ++ PrivIncludes ++ ErlOpts,
+ %% necessary for erlang:function_exported/3 to work as expected
+ %% called here for clarity as it's required by both opts_changed/2
+ %% and erl_compiler_opts_set/0
+ _ = code:ensure_loaded(compile),
digraph:vertex(G, Source) > {Source, filelib:last_modified(Target)}
orelse opts_changed(AllOpts, TargetBase)
orelse erl_compiler_opts_set()
@@ -318,18 +343,38 @@ maybe_rm_beam_and_edge(G, OutDir, Source) ->
false;
false ->
Target = target_base(OutDir, Source) ++ ".beam",
- ?DEBUG("Source ~s is gone, deleting previous beam file if it exists ~s", [Source, Target]),
+ ?DEBUG("Source ~ts is gone, deleting previous beam file if it exists ~ts", [Source, Target]),
file:delete(Target),
digraph:del_vertex(G, Source),
true
end.
opts_changed(NewOpts, Target) ->
+ TotalOpts = case erlang:function_exported(compile, env_compiler_options, 0) of
+ true -> NewOpts ++ compile:env_compiler_options();
+ false -> NewOpts
+ end,
case compile_info(Target) of
- {ok, Opts} -> lists:sort(Opts) =/= lists:sort(NewOpts);
+ {ok, Opts} -> lists:any(fun effects_code_generation/1, lists:usort(TotalOpts) -- lists:usort(Opts));
_ -> true
end.
+effects_code_generation(Option) ->
+ case Option of
+ beam -> false;
+ report_warnings -> false;
+ report_errors -> false;
+ return_errors-> false;
+ return_warnings-> false;
+ report -> false;
+ warnings_as_errors -> false;
+ binary -> false;
+ verbose -> false;
+ {cwd,_} -> false;
+ {outdir, _} -> false;
+ _ -> true
+ end.
+
compile_info(Target) ->
case beam_lib:chunks(Target, [compile_info]) of
{ok, {_mod, Chunks}} ->
@@ -341,10 +386,12 @@ compile_info(Target) ->
end.
erl_compiler_opts_set() ->
- case os:getenv("ERL_COMPILER_OPTIONS") of
+ EnvSet = case os:getenv("ERL_COMPILER_OPTIONS") of
false -> false;
_ -> true
- end.
+ end,
+ %% return false if changed env opts would have been caught in opts_changed/2
+ EnvSet andalso not erlang:function_exported(compile, env_compiler_options, 0).
erlcinfo_file(Dir) ->
filename:join(rebar_dir:local_cache_dir(Dir), ?ERLCINFO_FILE).
@@ -358,7 +405,7 @@ init_erlcinfo(InclDirs, Erls, Dir, OutDir) ->
try restore_erlcinfo(G, InclDirs, Dir)
catch
_:_ ->
- ?WARN("Failed to restore ~s file. Discarding it.~n", [erlcinfo_file(Dir)]),
+ ?WARN("Failed to restore ~ts file. Discarding it.~n", [erlcinfo_file(Dir)]),
file:delete(erlcinfo_file(Dir))
end,
Dirs = source_and_include_dirs(InclDirs, Erls),
@@ -504,12 +551,15 @@ expand_file_names(Files, Dirs) ->
end, Files).
-spec internal_erl_compile(rebar_dict(), file:filename(), file:filename(),
- file:filename(), list()) -> ok | {ok, any()} | {error, any(), any()}.
-internal_erl_compile(Opts, Dir, Module, OutDir, ErlOpts) ->
+ file:filename(), list(), rebar_dict()) ->
+ ok | {ok, any()} | {error, any(), any()}.
+internal_erl_compile(Opts, Dir, Module, OutDir, ErlOpts, RebarOpts) ->
Target = target_base(OutDir, Module) ++ ".beam",
ok = filelib:ensure_dir(Target),
- AllOpts = [{outdir, filename:dirname(Target)}] ++ ErlOpts ++
- [{i, filename:join(Dir, "include")}, {i, Dir}, return],
+ PrivIncludes = [{i, filename:join(Dir, Src)}
+ || Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])],
+ AllOpts = [{outdir, filename:dirname(Target)}] ++ ErlOpts ++ PrivIncludes ++
+ [{i, filename:join(Dir, "include")}, {i, Dir}, return],
case compile:file(Module, AllOpts) of
{ok, _Mod} ->
ok;
@@ -554,9 +604,11 @@ compile_mib(AppInfo) ->
MibToHrlOpts =
case proplists:get_value(verbosity, AllOpts, undefined) of
undefined ->
- #options{specific = []};
+ #options{specific = [],
+ cwd = rebar_dir:get_cwd()};
Verbosity ->
- #options{specific = [{verbosity, Verbosity}]}
+ #options{specific = [{verbosity, Verbosity}],
+ cwd = rebar_dir:get_cwd()}
end,
ok = snmpc:mib_to_hrl(Mib, Mib, MibToHrlOpts),
rebar_file_utils:mv(HrlFilename, AppInclude),
@@ -654,6 +706,8 @@ process_attr(include_lib, Form, Includes, Dir) ->
[FileNode] = erl_syntax:attribute_arguments(Form),
RawFile = erl_syntax:string_value(FileNode),
maybe_expand_include_lib_path(RawFile, Dir) ++ Includes;
+process_attr(behavior, Form, Includes, _Dir) ->
+ process_attr(behaviour, Form, Includes, _Dir);
process_attr(behaviour, Form, Includes, _Dir) ->
[FileNode] = erl_syntax:attribute_arguments(Form),
File = module_to_erl(erl_syntax:atom_value(FileNode)),
@@ -738,11 +792,50 @@ outdir(RebarOpts) ->
proplists:get_value(outdir, ErlOpts, ?DEFAULT_OUTDIR).
include_abs_dirs(ErlOpts, BaseDir) ->
- InclDirs = ["include"|proplists:get_all_values(i, ErlOpts)],
- lists:map(fun(Incl) -> filename:join([BaseDir, Incl]) end, InclDirs).
+ ErlOptIncludes = proplists:get_all_values(i, ErlOpts),
+ InclDirs = lists:map(fun(Incl) -> filename:absname(Incl) end, ErlOptIncludes),
+ [filename:join([BaseDir, "include"])|InclDirs].
+
+dir_recursive(Opts, Dir, CompileOpts) when is_list(CompileOpts) ->
+ case proplists:get_value(recursive,CompileOpts) of
+ undefined -> rebar_dir:recursive(Opts, Dir);
+ Recursive -> Recursive
+ end.
-parse_opts(Opts) -> parse_opts(Opts, #compile_opts{}).
+valid_erl_first_conf(FileList) ->
+ Strs = filter_file_list(FileList),
+ case rebar_utils:is_list_of_strings(Strs) of
+ true -> true;
+ false -> ?ABORT("An invalid file list (~p) was provided as part of your erl_first_files directive",
+ [FileList])
+ end.
-parse_opts([], CompileOpts) -> CompileOpts;
-parse_opts([{recursive, Recursive}|Rest], CompileOpts) when Recursive == true; Recursive == false ->
- parse_opts(Rest, CompileOpts#compile_opts{recursive = Recursive}).
+filter_file_list(FileList) ->
+ Atoms = lists:filter( fun(X) -> is_atom(X) end, FileList),
+ case Atoms of
+ [] ->
+ FileList;
+ _ ->
+ atoms_in_erl_first_files_warning(Atoms),
+ lists:filter( fun(X) -> not(is_atom(X)) end, FileList)
+ end.
+
+atoms_in_erl_first_files_warning(Atoms) ->
+ W = "You have provided atoms as file entries in erl_first_files; "
+ "erl_first_files only expects lists of filenames as strings. "
+ "The following modules (~p) may not work as expected and it is advised "
+        "that you change these entries to string format "
+ "(e.g., \"src/module.erl\") ",
+ ?WARN(W, [Atoms]).
+
+warn_deprecated() ->
+ case get({deprecate_warn, ?MODULE}) of
+ undefined ->
+ ?WARN("Calling deprecated ~p compiler module. This module has been "
+ "replaced by rebar_compiler and rebar_compiler_erl, but will "
+ "remain available.", [?MODULE]),
+ put({deprecate_warn, ?MODULE}, true),
+ ok;
+ _ ->
+ ok
+ end.
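The recompilation check in opts_changed/2 above only reacts to option differences that can change the generated .beam. Reduced to its core, the comparison amounts to the following simplified sketch; it is not the rebar function itself.

%% Recompile only when an option that affects code generation is present in
%% the current options but not in those recorded in the existing .beam file.
recompile_needed(CurrentOpts, BeamOpts) ->
    lists:any(fun affects_codegen/1,
              lists:usort(CurrentOpts) -- lists:usort(BeamOpts)).

affects_codegen(report_warnings) -> false;
affects_codegen(report_errors)   -> false;
affects_codegen(verbose)         -> false;
affects_codegen({outdir, _})     -> false;
affects_codegen({cwd, _})        -> false;
affects_codegen(_)               -> true.   %% e.g. {d, Macro}, {i, Dir}, parse transforms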
diff --git a/src/rebar_fetch.erl b/src/rebar_fetch.erl
index 47bfe1d..9c76e0e 100644
--- a/src/rebar_fetch.erl
+++ b/src/rebar_fetch.erl
@@ -7,104 +7,74 @@
%% -------------------------------------------------------------------
-module(rebar_fetch).
--export([lock_source/3,
- download_source/3,
- needs_update/3]).
+-export([lock_source/2,
+ download_source/2,
+ needs_update/2]).
-export([format_error/1]).
-include("rebar.hrl").
-include_lib("providers/include/providers.hrl").
--spec lock_source(file:filename_all(), rebar_resource:resource(), rebar_state:t()) ->
- rebar_resource:resource() | {error, string()}.
-lock_source(AppDir, Source, State) ->
- Resources = rebar_state:resources(State),
- Module = get_resource_type(Source, Resources),
- Module:lock(AppDir, Source).
+-spec lock_source(rebar_app_info:t(), rebar_state:t())
+ -> rebar_resource_v2:source() | {error, string()}.
+lock_source(AppInfo, State) ->
+ rebar_resource_v2:lock(AppInfo, State).
--spec download_source(file:filename_all(), rebar_resource:resource(), rebar_state:t()) ->
- true | {error, any()}.
-download_source(AppDir, Source, State) ->
- try download_source_(AppDir, Source, State) of
- true ->
- true;
- Error ->
- throw(?PRV_ERROR(Error))
+-spec download_source(rebar_app_info:t(), rebar_state:t())
+ -> rebar_app_info:t() | {error, any()}.
+download_source(AppInfo, State) ->
+ AppDir = rebar_app_info:dir(AppInfo),
+ try download_source_(AppInfo, State) of
+ ok ->
+ %% freshly downloaded, update the app info opts to reflect the new config
+ Config = rebar_config:consult(AppDir),
+ AppInfo1 = rebar_app_info:update_opts(AppInfo, rebar_app_info:opts(AppInfo), Config),
+ case rebar_app_discover:find_app(AppInfo1, AppDir, all) of
+ {true, AppInfo2} ->
+ rebar_app_info:is_available(AppInfo2, true);
+ false ->
+ throw(?PRV_ERROR({dep_app_not_found, rebar_app_info:name(AppInfo1)}))
+ end;
+ {error, Reason} ->
+ throw(?PRV_ERROR(Reason))
catch
- C:T ->
- ?DEBUG("rebar_fetch exception ~p ~p ~p", [C, T, erlang:get_stacktrace()]),
- throw(?PRV_ERROR({fetch_fail, Source}))
+ throw:{no_resource, Type, Location} ->
+ throw(?PRV_ERROR({no_resource, Location, Type}));
+ ?WITH_STACKTRACE(C,T,S)
+ ?DEBUG("rebar_fetch exception ~p ~p ~p", [C, T, S]),
+ throw(?PRV_ERROR({fetch_fail, rebar_app_info:source(AppInfo)}))
end.
-download_source_(AppDir, Source, State) ->
- Resources = rebar_state:resources(State),
- Module = get_resource_type(Source, Resources),
+download_source_(AppInfo, State) ->
+ AppDir = rebar_app_info:dir(AppInfo),
TmpDir = ec_file:insecure_mkdtemp(),
- AppDir1 = ec_cnv:to_list(AppDir),
- case Module:download(TmpDir, Source, State) of
- {ok, _} ->
+ AppDir1 = rebar_utils:to_list(AppDir),
+ case rebar_resource_v2:download(TmpDir, AppInfo, State) of
+ ok ->
ec_file:mkdir_p(AppDir1),
code:del_path(filename:absname(filename:join(AppDir1, "ebin"))),
- ec_file:remove(filename:absname(AppDir1), [recursive]),
+ ok = rebar_file_utils:rm_rf(filename:absname(AppDir1)),
?DEBUG("Moving checkout ~p to ~p", [TmpDir, filename:absname(AppDir1)]),
- ok = rebar_file_utils:mv(TmpDir, filename:absname(AppDir1)),
- true;
+ rebar_file_utils:mv(TmpDir, filename:absname(AppDir1));
Error ->
Error
end.
--spec needs_update(file:filename_all(), rebar_resource:resource(), rebar_state:t()) -> boolean() | {error, string()}.
-needs_update(AppDir, Source, State) ->
- Resources = rebar_state:resources(State),
- Module = get_resource_type(Source, Resources),
+-spec needs_update(rebar_app_info:t(), rebar_state:t())
+ -> boolean() | {error, string()}.
+needs_update(AppInfo, State) ->
try
- Module:needs_update(AppDir, Source)
+ rebar_resource_v2:needs_update(AppInfo, State)
catch
_:_ ->
true
end.
-format_error({bad_download, CachePath}) ->
- io_lib:format("Download of package does not match md5sum from server: ~s", [CachePath]);
-format_error({unexpected_hash, CachePath, Expected, Found}) ->
- io_lib:format("The checksum for package at ~s (~s) does not match the "
- "checksum previously locked (~s). Either unlock or "
- "upgrade the package, or make sure you fetched it from "
- "the same index from which it was initially fetched.",
- [CachePath, Found, Expected]);
-format_error({failed_extract, CachePath}) ->
- io_lib:format("Failed to extract package: ~s", [CachePath]);
-format_error({bad_etag, Source}) ->
- io_lib:format("MD5 Checksum comparison failed for: ~s", [Source]);
format_error({fetch_fail, Name, Vsn}) ->
- io_lib:format("Failed to fetch and copy dep: ~s-~s", [Name, Vsn]);
+ io_lib:format("Failed to fetch and copy dep: ~ts-~ts", [Name, Vsn]);
format_error({fetch_fail, Source}) ->
io_lib:format("Failed to fetch and copy dep: ~p", [Source]);
-format_error({bad_checksum, File}) ->
- io_lib:format("Checksum mismatch against tarball in ~s", [File]);
-format_error({bad_registry_checksum, File}) ->
- io_lib:format("Checksum mismatch against registry in ~s", [File]).
-
-get_resource_type({Type, Location}, Resources) ->
- find_resource_module(Type, Location, Resources);
-get_resource_type({Type, Location, _}, Resources) ->
- find_resource_module(Type, Location, Resources);
-get_resource_type({Type, _, _, Location}, Resources) ->
- find_resource_module(Type, Location, Resources);
-get_resource_type(_, _) ->
- rebar_pkg_resource.
-
-find_resource_module(Type, Location, Resources) ->
- case lists:keyfind(Type, 1, Resources) of
- false ->
- case code:which(Type) of
- non_existing ->
- {error, io_lib:format("Cannot handle dependency ~s.~n"
- " No module for resource type ~p", [Location, Type])};
- _ ->
- Type
- end;
- {Type, Module} ->
- Module
- end.
+format_error({dep_app_not_found, AppName}) ->
+ io_lib:format("Dependency failure: source for ~ts does not contain a "
+ "recognizable project and can not be built", [AppName]).
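With the resource details now travelling inside the app_info record, a caller-side sketch of the reworked rebar_fetch API could read as follows; error-tuple handling is omitted and this is illustrative rather than code from the patch.

maybe_fetch(AppInfo, State) ->
    case rebar_fetch:needs_update(AppInfo, State) of
        true  -> rebar_fetch:download_source(AppInfo, State); %% returns an updated app_info
        false -> AppInfo
    end.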
diff --git a/src/rebar_file_utils.erl b/src/rebar_file_utils.erl
index 104c047..a51a557 100644
--- a/src/rebar_file_utils.erl
+++ b/src/rebar_file_utils.erl
@@ -35,6 +35,7 @@
mv/2,
delete_each/1,
write_file_if_contents_differ/2,
+ write_file_if_contents_differ/3,
system_tmpdir/0,
system_tmpdir/1,
reset_dir/1,
@@ -42,7 +43,8 @@
path_from_ancestor/2,
canonical_path/1,
resolve_link/1,
- split_dirname/1]).
+ split_dirname/1,
+ ensure_dir/1]).
-include("rebar.hrl").
@@ -72,14 +74,20 @@ consult_config(State, Filename) ->
[T] -> T;
[] -> []
end,
- SubConfigs = [consult_config(State, Entry ++ ".config") ||
- Entry <- Config, is_list(Entry)
- ],
-
- [Config | lists:merge(SubConfigs)].
+ JoinedConfig = lists:flatmap(
+ fun (SubConfig) when is_list(SubConfig) ->
+ case lists:suffix(".config", SubConfig) of
+ %% since consult_config returns a list in a list we take the head here
+ false -> hd(consult_config(State, SubConfig ++ ".config"));
+ true -> hd(consult_config(State, SubConfig))
+ end;
+ (Entry) -> [Entry]
+ end, Config),
+ %% Backwards compatibility
+ [JoinedConfig].
format_error({bad_term_file, AppFile, Reason}) ->
- io_lib:format("Error reading file ~s: ~s", [AppFile, file:format_error(Reason)]).
+ io_lib:format("Error reading file ~ts: ~ts", [AppFile, file:format_error(Reason)]).
symlink_or_copy(Source, Target) ->
Link = case os:type() of
@@ -100,7 +108,7 @@ symlink_or_copy(Source, Target) ->
T = unicode:characters_to_list(Target),
case filelib:is_dir(S) of
true ->
- win32_symlink(S, T);
+ win32_symlink_or_copy(S, T);
false ->
cp_r([S], T)
end;
@@ -114,20 +122,48 @@ symlink_or_copy(Source, Target) ->
end
end.
-win32_symlink(Source, Target) ->
+%% @private Compatibility function for windows
+win32_symlink_or_copy(Source, Target) ->
Res = rebar_utils:sh(
- ?FMT("cmd /c mklink /j \"~s\" \"~s\"",
+ ?FMT("cmd /c mklink /j \"~ts\" \"~ts\"",
[rebar_utils:escape_double_quotes(filename:nativename(Target)),
rebar_utils:escape_double_quotes(filename:nativename(Source))]),
[{use_stdout, false}, return_on_error]),
- case win32_ok(Res) of
+ case win32_mklink_ok(Res, Target) of
true -> ok;
- false ->
- {error, lists:flatten(
- io_lib:format("Failed to symlink ~s to ~s~n",
- [Source, Target]))}
+ false -> cp_r_win32(Source, drop_last_dir_from_path(Target))
end.
+%% @private specifically pattern match against the output
+%% of the windows 'mklink' shell call; different values from
+%% what win32_ok/1 handles
+win32_mklink_ok({ok, _}, _) ->
+ true;
+win32_mklink_ok({error,{1,"Local NTFS volumes are required to complete the operation.\n"}}, _) ->
+ false;
+win32_mklink_ok({error,{1,"Cannot create a file when that file already exists.\n"}}, Target) ->
+ % File or dir is already in place; find if it is already a symlink (true) or
+ % if it is a directory (copy-required; false)
+ is_symlink(Target);
+win32_mklink_ok(_, _) ->
+ false.
+
+%% @private
+is_symlink(Filename) ->
+ {ok, Info} = file:read_link_info(Filename),
+ Info#file_info.type == symlink.
+
+%% @private
+%% drops the last 'node' of the filename, presumably the last dir such as 'src'
+%% this is because cp_r_win32/2 automatically adds the dir name, to appease
+%% robocopy and be more uniform with POSIX
+drop_last_dir_from_path([]) ->
+ [];
+drop_last_dir_from_path(Path) ->
+ case lists:droplast(filename:split(Path)) of
+ [] -> [];
+ Dirs -> filename:join(Dirs)
+ end.
%% @doc Remove files and directories.
%% Target is a single filename, directoryname or wildcard expression.
@@ -136,7 +172,7 @@ rm_rf(Target) ->
case os:type() of
{unix, _} ->
EscTarget = rebar_utils:escape_chars(Target),
- {ok, []} = rebar_utils:sh(?FMT("rm -rf ~s", [EscTarget]),
+ {ok, []} = rebar_utils:sh(?FMT("rm -rf ~ts", [EscTarget]),
[{use_stdout, false}, abort_on_error]),
ok;
{win32, _} ->
@@ -155,8 +191,12 @@ cp_r(Sources, Dest) ->
case os:type() of
{unix, _} ->
EscSources = [rebar_utils:escape_chars(Src) || Src <- Sources],
- SourceStr = string:join(EscSources, " "),
- {ok, []} = rebar_utils:sh(?FMT("cp -Rp ~s \"~s\"",
+ SourceStr = rebar_string:join(EscSources, " "),
+ % ensure destination exists before copying files into it
+ {ok, []} = rebar_utils:sh(?FMT("mkdir -p ~ts",
+ [rebar_utils:escape_chars(Dest)]),
+ [{use_stdout, false}, abort_on_error]),
+ {ok, []} = rebar_utils:sh(?FMT("cp -Rp ~ts \"~ts\"",
[SourceStr, rebar_utils:escape_double_quotes(Dest)]),
[{use_stdout, false}, abort_on_error]),
ok;
@@ -171,36 +211,122 @@ mv(Source, Dest) ->
{unix, _} ->
EscSource = rebar_utils:escape_chars(Source),
EscDest = rebar_utils:escape_chars(Dest),
- {ok, []} = rebar_utils:sh(?FMT("mv ~s ~s", [EscSource, EscDest]),
- [{use_stdout, false}, abort_on_error]),
- ok;
+ case rebar_utils:sh(?FMT("mv ~ts ~ts", [EscSource, EscDest]),
+ [{use_stdout, false}, abort_on_error]) of
+ {ok, []} ->
+ ok;
+ {ok, Warning} ->
+ ?WARN("mv: ~p", [Warning]),
+ ok
+ end;
{win32, _} ->
- Cmd = case filelib:is_dir(Source) of
- true ->
- ?FMT("robocopy /move /e \"~s\" \"~s\" 1> nul",
- [rebar_utils:escape_double_quotes(filename:nativename(Source)),
- rebar_utils:escape_double_quotes(filename:nativename(Dest))]);
- false ->
- ?FMT("robocopy /move /e \"~s\" \"~s\" \"~s\" 1> nul",
- [rebar_utils:escape_double_quotes(filename:nativename(filename:dirname(Source))),
- rebar_utils:escape_double_quotes(filename:nativename(Dest)),
- rebar_utils:escape_double_quotes(filename:basename(Source))])
- end,
- Res = rebar_utils:sh(Cmd,
- [{use_stdout, false}, return_on_error]),
- case win32_ok(Res) of
- true -> ok;
+ case filelib:is_dir(Source) of
+ true ->
+ SrcDir = filename:nativename(Source),
+ DestDir = case filelib:is_dir(Dest) of
+ true ->
+ %% to simulate unix/posix mv, we have to replicate
+ %% the same directory movement by moving the whole
+ %% top-level directory, not just the insides
+ SrcName = filename:basename(Source),
+ filename:nativename(filename:join(Dest, SrcName));
+ false ->
+ filename:nativename(Dest)
+ end,
+ robocopy_dir(SrcDir, DestDir);
false ->
- {error, lists:flatten(
- io_lib:format("Failed to move ~s to ~s~n",
- [Source, Dest]))}
+ SrcDir = filename:nativename(filename:dirname(Source)),
+ SrcName = filename:basename(Source),
+ DestDir = filename:nativename(filename:dirname(Dest)),
+ DestName = filename:basename(Dest),
+ IsDestDir = filelib:is_dir(Dest),
+ if IsDestDir ->
+ %% if basename and target name are different because
+ %% we move to a directory, then just move there.
+ %% Similarly, if they are the same but we're going to
+ %% a directory, let's just do that directly.
+ FullDestDir = filename:nativename(Dest),
+ robocopy_file(SrcDir, FullDestDir, SrcName)
+ ; SrcName =:= DestName ->
+ %% if basename and target name are the same and both are files,
+ %% we do a regular move with robocopy without rename.
+ robocopy_file(SrcDir, DestDir, DestName)
+           ; SrcName =/= DestName ->
+ robocopy_mv_and_rename(Source, Dest, SrcDir, SrcName, DestDir, DestName)
+ end
+
end
end.
+robocopy_mv_and_rename(Source, Dest, SrcDir, SrcName, DestDir, DestName) ->
+ %% If we're moving a file and the origin and
+ %% destination names are different:
+ %% - mktmp
+ %% - robocopy source_dir tmp_dir srcname
+ %% - rename srcname destname (to avoid clobbering)
+ %% - robocopy tmp_dir dest_dir destname
+ %% - remove tmp_dir
+ case ec_file:insecure_mkdtemp() of
+ {error, _Reason} ->
+ {error, lists:flatten(
+ io_lib:format("Failed to move ~ts to ~ts (tmpdir failed)~n",
+ [Source, Dest]))};
+ TmpPath ->
+ case robocopy_file(SrcDir, TmpPath, SrcName) of
+ {error, Reason} ->
+ {error, Reason};
+ ok ->
+ TmpSrc = filename:join(TmpPath, SrcName),
+ TmpDst = filename:join(TmpPath, DestName),
+ case file:rename(TmpSrc, TmpDst) of
+ {error, _} ->
+ {error, lists:flatten(
+ io_lib:format("Failed to move ~ts to ~ts (via rename)~n",
+ [Source, Dest]))};
+ ok ->
+ case robocopy_file(TmpPath, DestDir, DestName) of
+ Err = {error, _} -> Err;
+ OK -> rm_rf(TmpPath), OK
+ end
+ end
+ end
+ end.
+
+robocopy_file(SrcPath, DestPath, FileName) ->
+ Cmd = ?FMT("robocopy /move /e \"~ts\" \"~ts\" \"~ts\"",
+ [rebar_utils:escape_double_quotes(SrcPath),
+ rebar_utils:escape_double_quotes(DestPath),
+ rebar_utils:escape_double_quotes(FileName)]),
+ Res = rebar_utils:sh(Cmd, [{use_stdout, false}, return_on_error]),
+ case win32_ok(Res) of
+ false ->
+ {error, lists:flatten(
+ io_lib:format("Failed to move ~ts to ~ts~n",
+ [filename:join(SrcPath, FileName),
+ filename:join(DestPath, FileName)]))};
+ true ->
+ ok
+ end.
+
+robocopy_dir(Source, Dest) ->
+ Cmd = ?FMT("robocopy /move /e \"~ts\" \"~ts\"",
+ [rebar_utils:escape_double_quotes(Source),
+ rebar_utils:escape_double_quotes(Dest)]),
+ Res = rebar_utils:sh(Cmd,
+ [{use_stdout, false}, return_on_error]),
+ case win32_ok(Res) of
+ true -> ok;
+ false ->
+ {error, lists:flatten(
+ io_lib:format("Failed to move ~ts to ~ts~n",
+ [Source, Dest]))}
+ end.
+
win32_ok({ok, _}) -> true;
win32_ok({error, {Rc, _}}) when Rc<9; Rc=:=16 -> true;
win32_ok(_) -> false.
+
delete_each([]) ->
ok;
delete_each([File | Rest]) ->
@@ -210,12 +336,23 @@ delete_each([File | Rest]) ->
{error, enoent} ->
delete_each(Rest);
{error, Reason} ->
- ?ERROR("Failed to delete file ~s: ~p\n", [File, Reason]),
+ ?ERROR("Failed to delete file ~ts: ~p\n", [File, Reason]),
?FAIL
end.
+%% @doc backwards compat layer to pre-utf8 support
write_file_if_contents_differ(Filename, Bytes) ->
- ToWrite = iolist_to_binary(Bytes),
+ write_file_if_contents_differ(Filename, Bytes, raw).
+
+%% @doc let the user pick the encoding required; there are no good
+%% heuristics for data encoding
+write_file_if_contents_differ(Filename, Bytes, raw) ->
+ write_file_if_contents_differ_(Filename, iolist_to_binary(Bytes));
+write_file_if_contents_differ(Filename, Bytes, utf8) ->
+ write_file_if_contents_differ_(Filename, unicode:characters_to_binary(Bytes, utf8)).
+
+%% @private compare raw strings and check contents
+write_file_if_contents_differ_(Filename, ToWrite) ->
case file:read_file(Filename) of
{ok, ToWrite} ->
ok;
@@ -227,10 +364,10 @@ write_file_if_contents_differ(Filename, Bytes) ->
%% returns an os appropriate tmpdir given a path
-spec system_tmpdir() -> file:filename().
+system_tmpdir() -> system_tmpdir([]).
+
-spec system_tmpdir(PathComponents) -> file:filename() when
PathComponents :: [file:name()].
-
-system_tmpdir() -> system_tmpdir([]).
system_tmpdir(PathComponents) ->
Tmp = case erlang:system_info(system_architecture) of
"win32" ->
@@ -250,7 +387,7 @@ reset_dir(Path) ->
%% delete the directory if it exists
_ = ec_file:remove(Path, [recursive]),
%% recreate the directory
- filelib:ensure_dir(filename:join([Path, "dummy.beam"])).
+ ensure_dir(Path).
%% Linux touch but using erlang functions to work in both *nix os and
@@ -290,9 +427,8 @@ canonical_path([_|Acc], [".."|Rest]) -> canonical_path(Acc, Rest);
canonical_path([], [".."|Rest]) -> canonical_path([], Rest);
canonical_path(Acc, [Component|Rest]) -> canonical_path([Component|Acc], Rest).
-%% returns canonical target of path if path is a link, otherwise returns path
+%% @doc returns canonical target of path if path is a link, otherwise returns path
-spec resolve_link(string()) -> string().
-
resolve_link(Path) ->
case file:read_link(Path) of
{ok, Target} ->
@@ -300,25 +436,28 @@ resolve_link(Path) ->
{error, _} -> Path
end.
-%% splits a path into dirname and basename
+%% @doc splits a path into dirname and basename
-spec split_dirname(string()) -> {string(), string()}.
-
split_dirname(Path) ->
{filename:dirname(Path), filename:basename(Path)}.
+-spec ensure_dir(filelib:dirname_all()) -> ok | {error, file:posix()}.
+ensure_dir(Path) ->
+ filelib:ensure_dir(filename:join(Path, "fake_file")).
+
%% ===================================================================
%% Internal functions
%% ===================================================================
delete_each_dir_win32([]) -> ok;
delete_each_dir_win32([Dir | Rest]) ->
- {ok, []} = rebar_utils:sh(?FMT("rd /q /s \"~s\"",
+ {ok, []} = rebar_utils:sh(?FMT("rd /q /s \"~ts\"",
[rebar_utils:escape_double_quotes(filename:nativename(Dir))]),
[{use_stdout, false}, return_on_error]),
delete_each_dir_win32(Rest).
xcopy_win32(Source,Dest)->
- %% "xcopy \"~s\" \"~s\" /q /y /e 2> nul", Changed to robocopy to
+ %% "xcopy \"~ts\" \"~ts\" /q /y /e 2> nul", Changed to robocopy to
%% handle long names. May have issues with older windows.
Cmd = case filelib:is_dir(Source) of
true ->
@@ -328,11 +467,11 @@ xcopy_win32(Source,Dest)->
%% must manually add the last fragment of a directory to the `Dest`
%% in order to properly replicate POSIX platforms
NewDest = filename:join([Dest, filename:basename(Source)]),
- ?FMT("robocopy \"~s\" \"~s\" /e /is 1> nul",
+ ?FMT("robocopy \"~ts\" \"~ts\" /e 1> nul",
[rebar_utils:escape_double_quotes(filename:nativename(Source)),
rebar_utils:escape_double_quotes(filename:nativename(NewDest))]);
false ->
- ?FMT("robocopy \"~s\" \"~s\" \"~s\" /e /is 1> nul",
+ ?FMT("robocopy \"~ts\" \"~ts\" \"~ts\" /e 1> nul",
[rebar_utils:escape_double_quotes(filename:nativename(filename:dirname(Source))),
rebar_utils:escape_double_quotes(filename:nativename(Dest)),
rebar_utils:escape_double_quotes(filename:basename(Source))])
@@ -343,7 +482,7 @@ xcopy_win32(Source,Dest)->
true -> ok;
false ->
{error, lists:flatten(
- io_lib:format("Failed to copy ~s to ~s~n",
+ io_lib:format("Failed to copy ~ts to ~ts~n",
[Source, Dest]))}
end.
@@ -371,7 +510,7 @@ cp_r_win32({true, SourceDir}, {false, DestDir}) ->
false ->
%% Specifying a target directory that doesn't currently exist.
%% So let's attempt to create this directory
- case filelib:ensure_dir(filename:join(DestDir, "dummy")) of
+ case ensure_dir(DestDir) of
ok ->
ok = xcopy_win32(SourceDir, DestDir);
{error, Reason} ->
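
The new write_file_if_contents_differ/3 lets callers pick the encoding explicitly, while the 2-arity form stays as a raw, bytewise compatibility shim. A minimal usage sketch, not part of the patch; file names and variables are illustrative:

    %% choose utf8 when the iodata may hold non-Latin-1 characters,
    %% raw when it is an opaque byte payload
    write_examples(AppSpec, Blob) ->
        ok = rebar_file_utils:write_file_if_contents_differ("ebin/demo.app", AppSpec, utf8),
        ok = rebar_file_utils:write_file_if_contents_differ("priv/blob.bin", Blob, raw).
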
diff --git a/src/rebar_git_resource.erl b/src/rebar_git_resource.erl
index acb9ec0..0ca6627 100644
--- a/src/rebar_git_resource.erl
+++ b/src/rebar_git_resource.erl
@@ -2,22 +2,32 @@
%% ex: ts=4 sw=4 et
-module(rebar_git_resource).
--behaviour(rebar_resource).
+-behaviour(rebar_resource_v2).
--export([lock/2
- ,download/3
- ,needs_update/2
- ,make_vsn/1]).
+-export([init/2,
+ lock/2,
+ download/4,
+ needs_update/2,
+ make_vsn/2]).
-include("rebar.hrl").
%% Regex used for parsing scp style remote url
-define(SCP_PATTERN, "\\A(?<username>[^@]+)@(?<host>[^:]+):(?<path>.+)\\z").
-lock(AppDir, {git, Url, _}) ->
- lock(AppDir, {git, Url});
-lock(AppDir, {git, Url}) ->
- AbortMsg = lists:flatten(io_lib:format("Locking of git dependency failed in ~s", [AppDir])),
+-spec init(atom(), rebar_state:t()) -> {ok, rebar_resource_v2:resource()}.
+init(Type, _State) ->
+ Resource = rebar_resource_v2:new(Type, ?MODULE, #{}),
+ {ok, Resource}.
+
+lock(AppInfo, _) ->
+ check_type_support(),
+ lock_(rebar_app_info:dir(AppInfo), rebar_app_info:source(AppInfo)).
+
+lock_(AppDir, {git, Url, _}) ->
+ lock_(AppDir, {git, Url});
+lock_(AppDir, {git, Url}) ->
+ AbortMsg = lists:flatten(io_lib:format("Locking of git dependency failed in ~ts", [AppDir])),
Dir = rebar_utils:escape_double_quotes(AppDir),
{ok, VsnString} =
case os:type() of
@@ -28,55 +38,58 @@ lock(AppDir, {git, Url}) ->
rebar_utils:sh("git --git-dir=\"" ++ Dir ++ "/.git\" rev-parse --verify HEAD",
[{use_stdout, false}, {debug_abort_on_error, AbortMsg}])
end,
- Ref = string:strip(VsnString, both, $\n),
+ Ref = rebar_string:trim(VsnString, both, "\n"),
{git, Url, {ref, Ref}}.
%% Return true if either the git url or tag/branch/ref is not the same as the currently
%% checked out git repo for the dep
-needs_update(Dir, {git, Url, {tag, Tag}}) ->
+needs_update(AppInfo, _) ->
+ check_type_support(),
+ needs_update_(rebar_app_info:dir(AppInfo), rebar_app_info:source(AppInfo)).
+
+needs_update_(Dir, {git, Url, {tag, Tag}}) ->
{ok, Current} = rebar_utils:sh(?FMT("git describe --tags --exact-match", []),
[{cd, Dir}]),
- Current1 = string:strip(string:strip(Current, both, $\n), both, $\r),
-
- ?DEBUG("Comparing git tag ~s with ~s", [Tag, Current1]),
+ Current1 = rebar_string:trim(rebar_string:trim(Current, both, "\n"),
+ both, "\r"),
+ ?DEBUG("Comparing git tag ~ts with ~ts", [Tag, Current1]),
not ((Current1 =:= Tag) andalso compare_url(Dir, Url));
-needs_update(Dir, {git, Url, {branch, Branch}}) ->
+needs_update_(Dir, {git, Url, {branch, Branch}}) ->
%% Fetch remote so we can check if the branch has changed
SafeBranch = rebar_utils:escape_chars(Branch),
- {ok, _} = rebar_utils:sh(?FMT("git fetch origin ~s", [SafeBranch]),
+ {ok, _} = rebar_utils:sh(?FMT("git fetch origin ~ts", [SafeBranch]),
[{cd, Dir}]),
%% Check for new commits to origin/Branch
- {ok, Current} = rebar_utils:sh(?FMT("git log HEAD..origin/~s --oneline", [SafeBranch]),
+ {ok, Current} = rebar_utils:sh(?FMT("git log HEAD..origin/~ts --oneline", [SafeBranch]),
[{cd, Dir}]),
- ?DEBUG("Checking git branch ~s for updates", [Branch]),
+ ?DEBUG("Checking git branch ~ts for updates", [Branch]),
not ((Current =:= []) andalso compare_url(Dir, Url));
-needs_update(Dir, {git, Url, "master"}) ->
- needs_update(Dir, {git, Url, {branch, "master"}});
-needs_update(Dir, {git, _, Ref}) ->
- {ok, Current} = rebar_utils:sh(?FMT("git rev-parse -q HEAD", []),
+needs_update_(Dir, {git, Url, "master"}) ->
+ needs_update_(Dir, {git, Url, {branch, "master"}});
+needs_update_(Dir, {git, _, Ref}) ->
+ {ok, Current} = rebar_utils:sh(?FMT("git rev-parse --short=7 -q HEAD", []),
[{cd, Dir}]),
- Current1 = string:strip(string:strip(Current, both, $\n), both, $\r),
-
+ Current1 = rebar_string:trim(rebar_string:trim(Current, both, "\n"),
+ both, "\r"),
Ref2 = case Ref of
{ref, Ref1} ->
Length = length(Current1),
- if
- Length >= 7 ->
- lists:sublist(Ref1, Length);
- true ->
- Ref1
+ case Length >= 7 of
+ true -> lists:sublist(Ref1, Length);
+ false -> Ref1
end;
- Ref1 ->
- Ref1
+ _ ->
+ Ref
end,
- ?DEBUG("Comparing git ref ~s with ~s", [Ref1, Current1]),
+ ?DEBUG("Comparing git ref ~ts with ~ts", [Ref2, Current1]),
(Current1 =/= Ref2).
compare_url(Dir, Url) ->
{ok, CurrentUrl} = rebar_utils:sh(?FMT("git config --get remote.origin.url", []),
[{cd, Dir}]),
- CurrentUrl1 = string:strip(string:strip(CurrentUrl, both, $\n), both, $\r),
+ CurrentUrl1 = rebar_string:trim(rebar_string:trim(CurrentUrl, both, "\n"),
+ both, "\r"),
{ok, ParsedUrl} = parse_git_url(Url),
{ok, ParsedCurrentUrl} = parse_git_url(CurrentUrl1),
?DEBUG("Comparing git url ~p with ~p", [ParsedUrl, ParsedCurrentUrl]),
@@ -84,7 +97,7 @@ compare_url(Dir, Url) ->
parse_git_url(Url) ->
%% Checks for standard scp style git remote
- case re:run(Url, ?SCP_PATTERN, [{capture, [host, path], list}]) of
+ case re:run(Url, ?SCP_PATTERN, [{capture, [host, path], list}, unicode]) of
{match, [Host, Path]} ->
{ok, {Host, filename:rootname(Path, ".git")}};
nomatch ->
@@ -99,44 +112,124 @@ parse_git_url(not_scp, Url) ->
{error, Reason}
end.
-download(Dir, {git, Url}, State) ->
+download(TmpDir, AppInfo, State, _) ->
+ check_type_support(),
+ case download_(TmpDir, rebar_app_info:source(AppInfo), State) of
+ {ok, _} ->
+ ok;
+ {error, Reason} ->
+ {error, Reason};
+ Error ->
+ {error, Error}
+ end.
+
+download_(Dir, {git, Url}, State) ->
?WARN("WARNING: It is recommended to use {branch, Name}, {tag, Tag} or {ref, Ref}, otherwise updating the dep may not work as expected.", []),
- download(Dir, {git, Url, {branch, "master"}}, State);
-download(Dir, {git, Url, ""}, State) ->
+ download_(Dir, {git, Url, {branch, "master"}}, State);
+download_(Dir, {git, Url, ""}, State) ->
?WARN("WARNING: It is recommended to use {branch, Name}, {tag, Tag} or {ref, Ref}, otherwise updating the dep may not work as expected.", []),
- download(Dir, {git, Url, {branch, "master"}}, State);
-download(Dir, {git, Url, {branch, Branch}}, _State) ->
+ download_(Dir, {git, Url, {branch, "master"}}, State);
+download_(Dir, {git, Url, {branch, Branch}}, _State) ->
ok = filelib:ensure_dir(Dir),
- rebar_utils:sh(?FMT("git clone ~s ~s -b ~s --single-branch",
- [rebar_utils:escape_chars(Url),
- rebar_utils:escape_chars(filename:basename(Dir)),
- rebar_utils:escape_chars(Branch)]),
- [{cd, filename:dirname(Dir)}]);
-download(Dir, {git, Url, {tag, Tag}}, _State) ->
+ maybe_warn_local_url(Url),
+ git_clone(branch, git_vsn(), Url, Dir, Branch);
+download_(Dir, {git, Url, {tag, Tag}}, _State) ->
ok = filelib:ensure_dir(Dir),
- rebar_utils:sh(?FMT("git clone ~s ~s -b ~s --single-branch",
- [rebar_utils:escape_chars(Url),
- rebar_utils:escape_chars(filename:basename(Dir)),
- rebar_utils:escape_chars(Tag)]),
- [{cd, filename:dirname(Dir)}]);
-download(Dir, {git, Url, {ref, Ref}}, _State) ->
+ maybe_warn_local_url(Url),
+ git_clone(tag, git_vsn(), Url, Dir, Tag);
+download_(Dir, {git, Url, {ref, Ref}}, _State) ->
ok = filelib:ensure_dir(Dir),
- rebar_utils:sh(?FMT("git clone -n ~s ~s",
- [rebar_utils:escape_chars(Url),
- rebar_utils:escape_chars(filename:basename(Dir))]),
- [{cd, filename:dirname(Dir)}]),
- rebar_utils:sh(?FMT("git checkout -q ~s", [Ref]), [{cd, Dir}]);
-download(Dir, {git, Url, Rev}, _State) ->
+ maybe_warn_local_url(Url),
+ git_clone(ref, git_vsn(), Url, Dir, Ref);
+download_(Dir, {git, Url, Rev}, _State) ->
?WARN("WARNING: It is recommended to use {branch, Name}, {tag, Tag} or {ref, Ref}, otherwise updating the dep may not work as expected.", []),
ok = filelib:ensure_dir(Dir),
- rebar_utils:sh(?FMT("git clone -n ~s ~s",
- [rebar_utils:escape_chars(Url),
+ maybe_warn_local_url(Url),
+ git_clone(rev, git_vsn(), Url, Dir, Rev).
+
+maybe_warn_local_url(Url) ->
+ WarnStr = "Local git resources (~ts) are unsupported and may have odd behaviour. "
+ "Use remote git resources, or a plugin for local dependencies.",
+ case parse_git_url(Url) of
+ {error, no_scheme} -> ?WARN(WarnStr, [Url]);
+ {error, {no_default_port, _, _}} -> ?WARN(WarnStr, [Url]);
+ {error, {malformed_url, _, _}} -> ?WARN(WarnStr, [Url]);
+ _ -> ok
+ end.
+
+%% Use different git clone commands depending on git --version
+git_clone(branch,Vsn,Url,Dir,Branch) when Vsn >= {1,7,10}; Vsn =:= undefined ->
+ rebar_utils:sh(?FMT("git clone ~ts ~ts ~ts -b ~ts --single-branch",
+ [git_clone_options(),
+ rebar_utils:escape_chars(Url),
+ rebar_utils:escape_chars(filename:basename(Dir)),
+ rebar_utils:escape_chars(Branch)]),
+ [{cd, filename:dirname(Dir)}]);
+git_clone(branch,_Vsn,Url,Dir,Branch) ->
+ rebar_utils:sh(?FMT("git clone ~ts ~ts ~ts -b ~ts",
+ [git_clone_options(),
+ rebar_utils:escape_chars(Url),
+ rebar_utils:escape_chars(filename:basename(Dir)),
+ rebar_utils:escape_chars(Branch)]),
+ [{cd, filename:dirname(Dir)}]);
+git_clone(tag,Vsn,Url,Dir,Tag) when Vsn >= {1,7,10}; Vsn =:= undefined ->
+ rebar_utils:sh(?FMT("git clone ~ts ~ts ~ts -b ~ts --single-branch",
+ [git_clone_options(),
+ rebar_utils:escape_chars(Url),
+ rebar_utils:escape_chars(filename:basename(Dir)),
+ rebar_utils:escape_chars(Tag)]),
+ [{cd, filename:dirname(Dir)}]);
+git_clone(tag,_Vsn,Url,Dir,Tag) ->
+ rebar_utils:sh(?FMT("git clone ~ts ~ts ~ts -b ~ts",
+ [git_clone_options(),
+ rebar_utils:escape_chars(Url),
+ rebar_utils:escape_chars(filename:basename(Dir)),
+ rebar_utils:escape_chars(Tag)]),
+ [{cd, filename:dirname(Dir)}]);
+git_clone(ref,_Vsn,Url,Dir,Ref) ->
+ rebar_utils:sh(?FMT("git clone ~ts -n ~ts ~ts",
+ [git_clone_options(),
+ rebar_utils:escape_chars(Url),
+ rebar_utils:escape_chars(filename:basename(Dir))]),
+ [{cd, filename:dirname(Dir)}]),
+ rebar_utils:sh(?FMT("git checkout -q ~ts", [Ref]), [{cd, Dir}]);
+git_clone(rev,_Vsn,Url,Dir,Rev) ->
+ rebar_utils:sh(?FMT("git clone ~ts -n ~ts ~ts",
+ [git_clone_options(),
+ rebar_utils:escape_chars(Url),
rebar_utils:escape_chars(filename:basename(Dir))]),
[{cd, filename:dirname(Dir)}]),
- rebar_utils:sh(?FMT("git checkout -q ~s", [rebar_utils:escape_chars(Rev)]),
+ rebar_utils:sh(?FMT("git checkout -q ~ts", [rebar_utils:escape_chars(Rev)]),
[{cd, Dir}]).
-make_vsn(Dir) ->
+git_vsn() ->
+ case application:get_env(rebar, git_vsn) of
+ {ok, Vsn} -> Vsn;
+ undefined ->
+ Vsn = git_vsn_fetch(),
+ application:set_env(rebar, git_vsn, Vsn),
+ Vsn
+ end.
+
+git_vsn_fetch() ->
+ case rebar_utils:sh("git --version",[]) of
+ {ok, VsnStr} ->
+            case re:run(VsnStr, "git version\\h+(\\d+)\\.(\\d+)\\.(\\d+).*", [{capture,[1,2,3],list}, unicode]) of
+ {match,[Maj,Min,Patch]} ->
+ {list_to_integer(Maj),
+ list_to_integer(Min),
+ list_to_integer(Patch)};
+ nomatch ->
+ undefined
+ end;
+ {error, _} ->
+ undefined
+ end.
+
+make_vsn(AppInfo, _) ->
+ make_vsn_(rebar_app_info:dir(AppInfo)).
+
+make_vsn_(Dir) ->
case collect_default_refcount(Dir) of
Vsn={plain, _} ->
Vsn;
@@ -154,10 +247,10 @@ collect_default_refcount(Dir) ->
return_on_error,
{cd, Dir}]) of
{error, _} ->
- ?WARN("Getting log of git dependency failed in ~s. Falling back to version 0.0.0", [rebar_dir:get_cwd()]),
+ ?WARN("Getting log of git dependency failed in ~ts. Falling back to version 0.0.0", [rebar_dir:get_cwd()]),
{plain, "0.0.0"};
{ok, String} ->
- RawRef = string:strip(String, both, $\n),
+ RawRef = rebar_string:trim(String, both, "\n"),
{Tag, TagVsn} = parse_tags(Dir),
{ok, RawCount} =
@@ -178,21 +271,20 @@ collect_default_refcount(Dir) ->
build_vsn_string(Vsn, RawRef, Count) ->
%% Cleanup the tag and the Ref information. Basically leading 'v's and
%% whitespace needs to go away.
- RefTag = [".ref", re:replace(RawRef, "\\s", "", [global])],
+ RefTag = [".ref", re:replace(RawRef, "\\s", "", [global, unicode])],
%% Create the valid [semver](http://semver.org) version from the tag
case Count of
0 ->
- erlang:binary_to_list(erlang:iolist_to_binary(Vsn));
+ rebar_utils:to_list(Vsn);
_ ->
- erlang:binary_to_list(erlang:iolist_to_binary([Vsn, "+build.",
- integer_to_list(Count), RefTag]))
+ rebar_utils:to_list([Vsn, "+build.", integer_to_list(Count), RefTag])
end.
get_patch_count(Dir, RawRef) ->
AbortMsg = "Getting rev-list of git dep failed in " ++ Dir,
- Ref = re:replace(RawRef, "\\s", "", [global]),
- Cmd = io_lib:format("git rev-list ~s..HEAD",
+ Ref = re:replace(RawRef, "\\s", "", [global, unicode]),
+ Cmd = io_lib:format("git rev-list ~ts..HEAD",
[rebar_utils:escape_chars(Ref)]),
{ok, PatchLines} = rebar_utils:sh(Cmd,
[{use_stdout, false},
@@ -203,12 +295,12 @@ get_patch_count(Dir, RawRef) ->
parse_tags(Dir) ->
%% Don't abort on error, we want the bad return to be turned into 0.0.0
- case rebar_utils:sh("git log --oneline --no-walk --tags --decorate",
+ case rebar_utils:sh("git -c color.ui=false log --oneline --no-walk --tags --decorate",
[{use_stdout, false}, return_on_error, {cd, Dir}]) of
{error, _} ->
{undefined, "0.0.0"};
{ok, Line} ->
- case re:run(Line, "(\\(|\\s)(HEAD[^,]*,\\s)tag:\\s(v?([^,\\)]+))", [{capture, [3, 4], list}]) of
+ case re:run(Line, "(\\(|\\s)(HEAD[^,]*,\\s)tag:\\s(v?([^,\\)]+))", [{capture, [3, 4], list}, unicode]) of
{match,[Tag, Vsn]} ->
{Tag, Vsn};
nomatch ->
@@ -216,8 +308,37 @@ parse_tags(Dir) ->
[{use_stdout, false}, return_on_error, {cd, Dir}]) of
{error, _} ->
{undefined, "0.0.0"};
+ %% strip the v prefix if it exists like is done in the above match
+ {ok, [$v | LatestVsn]} ->
+ {undefined, rebar_string:trim(LatestVsn, both, "\n")};
{ok, LatestVsn} ->
- {undefined, string:strip(LatestVsn, both, $\n)}
+                            {undefined, rebar_string:trim(LatestVsn, both, "\n")}
end
end
end.
+
+git_clone_options() ->
+ Option = case os:getenv("REBAR_GIT_CLONE_OPTIONS") of
+                 false -> ""; %% env var not set
+                 Opt ->       %% env var set, possibly to an empty string
+                     Opt
+ end,
+
+    ?DEBUG("Git clone Option = ~p", [Option]),
+ Option.
+
+check_type_support() ->
+ case get({is_supported, ?MODULE}) of
+ true ->
+ ok;
+ _ ->
+ case rebar_utils:sh("git --version", [{return_on_error, true},
+ {use_stdout, false}]) of
+ {error, _} ->
+ ?ABORT("git not installed", []);
+ _ ->
+ put({is_supported, ?MODULE}, true),
+ ok
+ end
+ end.
+
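
The download_/3 clauses above warn whenever a git source omits a {branch, _}, {tag, _} or {ref, _} qualifier, and git_clone_options/0 splices the REBAR_GIT_CLONE_OPTIONS environment variable into every clone command. A hypothetical rebar.config fragment using the recommended forms (names and URLs are invented):

    {deps, [
        {lib_a, {git, "https://example.com/me/lib_a.git", {tag, "1.4.0"}}},
        {lib_b, {git, "https://example.com/me/lib_b.git", {branch, "master"}}},
        {lib_c, {git, "https://example.com/me/lib_c.git", {ref, "7f83a0c"}}}
    ]}.
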
diff --git a/src/rebar_hex_repos.erl b/src/rebar_hex_repos.erl
new file mode 100644
index 0000000..ebee191
--- /dev/null
+++ b/src/rebar_hex_repos.erl
@@ -0,0 +1,142 @@
+-module(rebar_hex_repos).
+
+-export([from_state/2,
+ get_repo_config/2,
+ auth_config/1,
+ update_auth_config/2,
+ format_error/1]).
+
+-ifdef(TEST).
+%% exported for test purposes
+-export([repos/1, merge_repos/1]).
+-endif.
+
+-include("rebar.hrl").
+-include_lib("providers/include/providers.hrl").
+
+-export_type([repo/0]).
+
+-type repo() :: #{name => unicode:unicode_binary(),
+ api_url => binary(),
+ api_key => binary(),
+ repo_url => binary(),
+ repo_public_key => binary(),
+ repo_verify => binary()}.
+
+from_state(BaseConfig, State) ->
+ HexConfig = rebar_state:get(State, hex, []),
+ Repos = repos(HexConfig),
+ %% auth is stored in a separate config file since the plugin generates and modifies it
+ Auth = ?MODULE:auth_config(State),
+ %% add base config entries that are specific to use by rebar3 and not overridable
+ Repos1 = merge_with_base_and_auth(Repos, BaseConfig, Auth),
+    %% merge organizations' parent repo options into each organization repo
+ update_organizations(Repos1).
+
+-spec get_repo_config(unicode:unicode_binary(), rebar_state:t() | [repo()])
+ -> {ok, repo()} | error.
+get_repo_config(RepoName, Repos) when is_list(Repos) ->
+ case ec_lists:find(fun(#{name := N}) -> N =:= RepoName end, Repos) of
+ error ->
+ throw(?PRV_ERROR({repo_not_found, RepoName}));
+ {ok, RepoConfig} ->
+ {ok, RepoConfig}
+ end;
+get_repo_config(RepoName, State) ->
+ Resources = rebar_state:resources(State),
+ #{repos := Repos} = rebar_resource_v2:find_resource_state(pkg, Resources),
+ get_repo_config(RepoName, Repos).
+
+merge_with_base_and_auth(Repos, BaseConfig, Auth) ->
+ [maps:merge(maps:get(maps:get(name, Repo), Auth, #{}),
+ maps:merge(Repo, BaseConfig)) || Repo <- Repos].
+
+%% A user's list of repos is merged by name while keeping the order
+%% intact. The order is based on the first use of a repo by name in the
+%% list. The default repo is appended to the user's list.
+repos(HexConfig) ->
+ HexDefaultConfig = default_repo(),
+ case [R || R <- HexConfig, element(1, R) =:= repos] of
+ [] ->
+ [HexDefaultConfig];
+ %% we only care if the first element is a replace entry
+ [{repos, replace, Repos} | _]->
+ merge_repos(Repos);
+ Repos ->
+ RepoList = repo_list(Repos),
+ merge_repos(RepoList ++ [HexDefaultConfig])
+ end.
+
+-spec merge_repos([repo()]) -> [repo()].
+merge_repos(Repos) ->
+ lists:foldl(fun(R=#{name := Name}, ReposAcc) ->
+ %% private organizations include the parent repo before a :
+ case rebar_string:split(Name, <<":">>) of
+ [Repo, Org] ->
+ update_repo_list(R#{name => Name,
+ organization => Org,
+ parent => Repo}, ReposAcc);
+ _ ->
+ update_repo_list(R, ReposAcc)
+ end
+ end, [], Repos).
+
+update_organizations(Repos) ->
+ lists:map(fun(Repo=#{organization := Organization,
+ parent := ParentName}) ->
+ {ok, Parent} = get_repo_config(ParentName, Repos),
+ ParentRepoUrl = rebar_utils:to_list(maps:get(repo_url, Parent)),
+ {ok, RepoUrl} =
+ rebar_utils:url_append_path(ParentRepoUrl,
+ filename:join("repos", rebar_utils:to_list(Organization))),
+ %% still let the organization config override this constructed repo url
+ maps:merge(Parent#{repo_url => rebar_utils:to_binary(RepoUrl)}, Repo);
+ (Repo) ->
+ Repo
+ end, Repos).
+
+update_repo_list(R=#{name := N}, [H=#{name := HN} | Rest]) when N =:= HN ->
+ [maps:merge(R, H) | Rest];
+update_repo_list(R, [H | Rest]) ->
+ [H | update_repo_list(R, Rest)];
+update_repo_list(R, []) ->
+ [R].
+
+default_repo() ->
+ HexDefaultConfig = hex_core:default_config(),
+ HexDefaultConfig#{name => ?PUBLIC_HEX_REPO}.
+
+repo_list([]) ->
+ [];
+repo_list([{repos, Repos} | T]) ->
+ Repos ++ repo_list(T);
+repo_list([{repos, replace, Repos} | T]) ->
+ Repos ++ repo_list(T).
+
+format_error({repo_not_found, RepoName}) ->
+ io_lib:format("The repo ~ts was not found in the configuration.", [RepoName]).
+
+%% auth functions
+
+%% authentication is in a separate config file because the hex plugin updates it
+
+-spec auth_config_file(rebar_state:t()) -> file:filename_all().
+auth_config_file(State) ->
+ filename:join(rebar_dir:global_config_dir(State), ?HEX_AUTH_FILE).
+
+-spec auth_config(rebar_state:t()) -> map().
+auth_config(State) ->
+ case file:consult(auth_config_file(State)) of
+ {ok, [Config]} ->
+ Config;
+ _ ->
+ #{}
+ end.
+
+-spec update_auth_config(map(), rebar_state:t()) -> ok.
+update_auth_config(Updates, State) ->
+ Config = auth_config(State),
+ AuthConfigFile = auth_config_file(State),
+ ok = filelib:ensure_dir(AuthConfigFile),
+ NewConfig = iolist_to_binary([io_lib:print(maps:merge(Config, Updates)) | ".\n"]),
+ ok = file:write_file(AuthConfigFile, NewConfig).
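
repos/1 reads {repos, ...} and {repos, replace, ...} entries under the hex key, merges duplicate names while keeping their order, and appends the default hexpm repo unless replace is used; a name of the form <<"parent:org">> marks a private organization that inherits the parent's options. A sketch of such a configuration, with keys following the repo() map type above and all names and URLs invented:

    {hex, [
        {repos, [
            %% consulted before the default hexpm repo
            #{name => <<"my_mirror">>,
              repo_url => <<"https://hex.example.com">>},
            %% private organization: inherits and then overrides hexpm's options
            #{name => <<"hexpm:my_org">>}
        ]}
    ]}.
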
diff --git a/src/rebar_hg_resource.erl b/src/rebar_hg_resource.erl
index 7d03eda..8139d04 100644
--- a/src/rebar_hg_resource.erl
+++ b/src/rebar_hg_resource.erl
@@ -2,39 +2,52 @@
%% ex: ts=4 sw=4 et
-module(rebar_hg_resource).
--behaviour(rebar_resource).
+-behaviour(rebar_resource_v2).
--export([lock/2
- ,download/3
- ,needs_update/2
- ,make_vsn/1]).
+-export([init/2,
+ lock/2,
+ download/4,
+ needs_update/2,
+ make_vsn/2]).
-include("rebar.hrl").
-lock(AppDir, {hg, Url, _}) ->
- lock(AppDir, {hg, Url});
-lock(AppDir, {hg, Url}) ->
+-spec init(atom(), rebar_state:t()) -> {ok, rebar_resource_v2:resource()}.
+init(Type, _State) ->
+ Resource = rebar_resource_v2:new(Type, ?MODULE, #{}),
+ {ok, Resource}.
+
+lock(AppInfo, _) ->
+ check_type_support(),
+ lock_(rebar_app_info:dir(AppInfo), rebar_app_info:source(AppInfo)).
+
+lock_(AppDir, {hg, Url, _}) ->
+ lock_(AppDir, {hg, Url});
+lock_(AppDir, {hg, Url}) ->
Ref = get_ref(AppDir),
{hg, Url, {ref, Ref}}.
%% Return `true' if either the hg url or tag/branch/ref is not the same as
%% the currently checked out repo for the dep
-needs_update(Dir, {hg, Url, {tag, Tag}}) ->
+needs_update(AppInfo, _) ->
+ needs_update_(rebar_app_info:dir(AppInfo), rebar_app_info:source(AppInfo)).
+
+needs_update_(Dir, {hg, Url, {tag, Tag}}) ->
Ref = get_ref(Dir),
{ClosestTag, Distance} = get_tag_distance(Dir, Ref),
- ?DEBUG("Comparing hg tag ~s with ref ~s (closest tag is ~s at distance ~s)",
+ ?DEBUG("Comparing hg tag ~ts with ref ~ts (closest tag is ~ts at distance ~ts)",
[Tag, Ref, ClosestTag, Distance]),
not ((Distance =:= "0") andalso (Tag =:= ClosestTag)
andalso compare_url(Dir, Url));
-needs_update(Dir, {hg, Url, {branch, Branch}}) ->
+needs_update_(Dir, {hg, Url, {branch, Branch}}) ->
Ref = get_ref(Dir),
BRef = get_branch_ref(Dir, Branch),
not ((Ref =:= BRef) andalso compare_url(Dir, Url));
-needs_update(Dir, {hg, Url, "default"}) ->
+needs_update_(Dir, {hg, Url, "default"}) ->
Ref = get_ref(Dir),
BRef = get_branch_ref(Dir, "default"),
not ((Ref =:= BRef) andalso compare_url(Dir, Url));
-needs_update(Dir, {hg, Url, Ref}) ->
+needs_update_(Dir, {hg, Url, Ref}) ->
LocalRef = get_ref(Dir),
TargetRef = case Ref of
{ref, Ref1} ->
@@ -45,54 +58,73 @@ needs_update(Dir, {hg, Url, Ref}) ->
Ref1 ->
Ref1
end,
- ?DEBUG("Comparing hg ref ~s with ~s", [Ref1, LocalRef]),
+ ?DEBUG("Comparing hg ref ~ts with ~ts", [Ref1, LocalRef]),
not ((LocalRef =:= TargetRef) andalso compare_url(Dir, Url)).
-download(Dir, {hg, Url}, State) ->
+download(TmpDir, AppInfo, State, _) ->
+ check_type_support(),
+ case download_(TmpDir, rebar_app_info:source(AppInfo), State) of
+ {ok, _} ->
+ ok;
+ {error, Reason} ->
+ {error, Reason};
+ Error ->
+ {error, Error}
+ end.
+
+download_(Dir, {hg, Url}, State) ->
?WARN("WARNING: It is recommended to use {branch, Name}, {tag, Tag} or {ref, Ref}, otherwise updating the dep may not work as expected.", []),
- download(Dir, {hg, Url, {branch, "default"}}, State);
-download(Dir, {hg, Url, ""}, State) ->
+ download_(Dir, {hg, Url, {branch, "default"}}, State);
+download_(Dir, {hg, Url, ""}, State) ->
?WARN("WARNING: It is recommended to use {branch, Name}, {tag, Tag} or {ref, Ref}, otherwise updating the dep may not work as expected.", []),
- download(Dir, {hg, Url, {branch, "default"}}, State);
-download(Dir, {hg, Url, {branch, Branch}}, _State) ->
+ download_(Dir, {hg, Url, {branch, "default"}}, State);
+download_(Dir, {hg, Url, {branch, Branch}}, _State) ->
ok = filelib:ensure_dir(Dir),
- rebar_utils:sh(?FMT("hg clone -q -b ~s ~s ~s",
+ maybe_warn_local_url(Url),
+ rebar_utils:sh(?FMT("hg clone -q -b ~ts ~ts ~ts",
[rebar_utils:escape_chars(Branch),
rebar_utils:escape_chars(Url),
rebar_utils:escape_chars(filename:basename(Dir))]),
[{cd, filename:dirname(Dir)}]);
-download(Dir, {hg, Url, {tag, Tag}}, _State) ->
+download_(Dir, {hg, Url, {tag, Tag}}, _State) ->
ok = filelib:ensure_dir(Dir),
- rebar_utils:sh(?FMT("hg clone -q -u ~s ~s ~s",
+ maybe_warn_local_url(Url),
+ rebar_utils:sh(?FMT("hg clone -q -u ~ts ~ts ~ts",
[rebar_utils:escape_chars(Tag),
rebar_utils:escape_chars(Url),
rebar_utils:escape_chars(filename:basename(Dir))]),
[{cd, filename:dirname(Dir)}]);
-download(Dir, {hg, Url, {ref, Ref}}, _State) ->
+download_(Dir, {hg, Url, {ref, Ref}}, _State) ->
ok = filelib:ensure_dir(Dir),
- rebar_utils:sh(?FMT("hg clone -q -r ~s ~s ~s",
+ maybe_warn_local_url(Url),
+ rebar_utils:sh(?FMT("hg clone -q -r ~ts ~ts ~ts",
[rebar_utils:escape_chars(Ref),
rebar_utils:escape_chars(Url),
rebar_utils:escape_chars(filename:basename(Dir))]),
[{cd, filename:dirname(Dir)}]);
-download(Dir, {hg, Url, Rev}, _State) ->
+download_(Dir, {hg, Url, Rev}, _State) ->
ok = filelib:ensure_dir(Dir),
- rebar_utils:sh(?FMT("hg clone -q -r ~s ~s ~s",
+ maybe_warn_local_url(Url),
+ rebar_utils:sh(?FMT("hg clone -q -r ~ts ~ts ~ts",
[rebar_utils:escape_chars(Rev),
rebar_utils:escape_chars(Url),
rebar_utils:escape_chars(filename:basename(Dir))]),
[{cd, filename:dirname(Dir)}]).
-make_vsn(Dir) ->
+make_vsn(AppInfo, _) ->
+ check_type_support(),
+ make_vsn_(rebar_app_info:dir(AppInfo)).
+
+make_vsn_(Dir) ->
BaseHg = "hg -R \"" ++ rebar_utils:escape_double_quotes(Dir) ++ "\" ",
Ref = get_ref(Dir),
Cmd = BaseHg ++ "log --template \"{latesttag}+build.{latesttagdistance}.rev.{node|short}\""
" --rev " ++ Ref,
- AbortMsg = io_lib:format("Version resolution of hg dependency failed in ~s", [Dir]),
+ AbortMsg = io_lib:format("Version resolution of hg dependency failed in ~ts", [Dir]),
{ok, VsnString} =
rebar_utils:sh(Cmd,
[{use_stdout, false}, {debug_abort_on_error, AbortMsg}]),
- RawVsn = string:strip(VsnString, both, $\n),
+ RawVsn = rebar_string:trim(VsnString, both, "\n"),
Vsn = case RawVsn of
"null+" ++ Rest -> "0.0.0+" ++ Rest;
@@ -103,43 +135,70 @@ make_vsn(Dir) ->
%%% Internal functions
compare_url(Dir, Url) ->
- CurrentUrl = string:strip(os:cmd("hg -R \"" ++ rebar_utils:escape_double_quotes(Dir) ++"\" paths default"), both, $\n),
- CurrentUrl1 = string:strip(CurrentUrl, both, $\r),
+ CurrentUrl = rebar_string:trim(os:cmd("hg -R \"" ++ rebar_utils:escape_double_quotes(Dir) ++"\" paths default"), both, "\n"),
+ CurrentUrl1 = rebar_string:trim(CurrentUrl, both, "\r"),
parse_hg_url(CurrentUrl1) =:= parse_hg_url(Url).
get_ref(Dir) ->
- AbortMsg = io_lib:format("Get ref of hg dependency failed in ~s", [Dir]),
+ AbortMsg = io_lib:format("Get ref of hg dependency failed in ~ts", [Dir]),
{ok, RefString} =
rebar_utils:sh("hg -R \"" ++ rebar_utils:escape_double_quotes(Dir) ++ "\" --debug id -i",
[{use_stdout, false}, {debug_abort_on_error, AbortMsg}]),
- string:strip(RefString, both, $\n).
+ rebar_string:trim(RefString, both, "\n").
get_tag_distance(Dir, Ref) ->
- AbortMsg = io_lib:format("Get tag distance of hg dependency failed in ~s", [Dir]),
+ AbortMsg = io_lib:format("Get tag distance of hg dependency failed in ~ts", [Dir]),
{ok, LogString} =
rebar_utils:sh("hg -R \"" ++ rebar_utils:escape_double_quotes(Dir) ++ "\" "
"log --template \"{latesttag}-{latesttagdistance}\n\" "
"--rev " ++ rebar_utils:escape_chars(Ref),
[{use_stdout, false}, {debug_abort_on_error, AbortMsg}]),
- Log = string:strip(LogString,
- both, $\n),
- [Tag, Distance] = re:split(Log, "-([0-9]+)$", [{parts,0}, {return, list}]),
+ Log = rebar_string:trim(LogString,
+ both, "\n"),
+ [Tag, Distance] = re:split(Log, "-([0-9]+)$",
+ [{parts,0}, {return,list}, unicode]),
{Tag, Distance}.
get_branch_ref(Dir, Branch) ->
- AbortMsg = io_lib:format("Get branch ref of hg dependency failed in ~s", [Dir]),
+ AbortMsg = io_lib:format("Get branch ref of hg dependency failed in ~ts", [Dir]),
{ok, BranchRefString} =
rebar_utils:sh("hg -R \"" ++ rebar_utils:escape_double_quotes(Dir) ++
"\" log --template \"{node}\n\" --rev " ++ rebar_utils:escape_chars(Branch),
[{use_stdout, false}, {debug_abort_on_error, AbortMsg}]),
- string:strip(BranchRefString, both, $\n).
+ rebar_string:trim(BranchRefString, both, "\n").
+
+
+maybe_warn_local_url(Url) ->
+ try
+ _ = parse_hg_url(Url),
+ ok
+ catch
+ _:_ ->
+            ?WARN("URL format (~ts) unsupported.", [Url])
+ end.
parse_hg_url("ssh://" ++ HostPath) ->
- [Host | Path] = string:tokens(HostPath, "/"),
+ [Host | Path] = rebar_string:lexemes(HostPath, "/"),
{Host, filename:rootname(filename:join(Path), ".hg")};
parse_hg_url("http://" ++ HostPath) ->
- [Host | Path] = string:tokens(HostPath, "/"),
+ [Host | Path] = rebar_string:lexemes(HostPath, "/"),
{Host, filename:rootname(filename:join(Path), ".hg")};
parse_hg_url("https://" ++ HostPath) ->
- [Host | Path] = string:tokens(HostPath, "/"),
+ [Host | Path] = rebar_string:lexemes(HostPath, "/"),
{Host, filename:rootname(filename:join(Path), ".hg")}.
+
+check_type_support() ->
+ case get({is_supported, ?MODULE}) of
+ true ->
+ ok;
+        _ ->
+ case rebar_utils:sh("hg --version", [{return_on_error, true},
+ {use_stdout, false}]) of
+ {error, _} ->
+ ?ABORT("hg not installed", []);
+ _ ->
+ put({is_supported, ?MODULE}, true),
+ ok
+ end
+ end.
+
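
As with the git resource, the hg download_/3 clauses warn unless the source pins a branch, tag or ref. A hypothetical dependency entry (name and URL invented):

    {deps, [
        {my_hg_dep, {hg, "https://hg.example.com/me/my_hg_dep", {tag, "1.2.0"}}}
    ]}.
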
diff --git a/src/rebar_hooks.erl b/src/rebar_hooks.erl
index d6a0e2b..358458e 100644
--- a/src/rebar_hooks.erl
+++ b/src/rebar_hooks.erl
@@ -42,8 +42,7 @@ run_provider_hooks_(Dir, Type, Command, Providers, TypeHooks, State) ->
[] ->
State;
HookProviders ->
- PluginDepsPaths = lists:usort(rebar_state:code_paths(State, all_plugin_deps)),
- code:add_pathsa(PluginDepsPaths),
+ rebar_paths:set_paths([plugins], State),
Providers1 = rebar_state:providers(State),
State1 = rebar_state:providers(rebar_state:dir(State, Dir), Providers++Providers1),
case rebar_core:do(HookProviders, State1) of
@@ -51,39 +50,16 @@ run_provider_hooks_(Dir, Type, Command, Providers, TypeHooks, State) ->
?DEBUG(format_error({bad_provider, Type, Command, ProviderName}), []),
throw(?PRV_ERROR({bad_provider, Type, Command, ProviderName}));
{ok, State2} ->
- rebar_utils:remove_from_code_path(PluginDepsPaths),
+ rebar_paths:set_paths([deps], State2),
State2
end
end.
format_error({bad_provider, Type, Command, {Name, Namespace}}) ->
- io_lib:format("Unable to run ~s hooks for '~p', command '~p' in namespace '~p' not found.", [Type, Command, Namespace, Name]);
+ io_lib:format("Unable to run ~ts hooks for '~p', command '~p' in namespace '~p' not found.", [Type, Command, Namespace, Name]);
format_error({bad_provider, Type, Command, Name}) ->
- io_lib:format("Unable to run ~s hooks for '~p', command '~p' not found.", [Type, Command, Name]).
+ io_lib:format("Unable to run ~ts hooks for '~p', command '~p' not found.", [Type, Command, Name]).
-%% @doc The following environment variables are exported when running
-%% a hook (absolute paths):
-%%
-%% REBAR_DEPS_DIR = rebar_dir:deps_dir/1
-%% REBAR_BUILD_DIR = rebar_dir:base_dir/1
-%% REBAR_ROOT_DIR = rebar_dir:root_dir/1
-%% REBAR_CHECKOUTS_DIR = rebar_dir:checkouts_dir/1
-%% REBAR_PLUGINS_DIR = rebar_dir:plugins_dir/1
-%% REBAR_GLOBAL_CONFIG_DIR = rebar_dir:global_config_dir/1
-%% REBAR_GLOBAL_CACHE_DIR = rebar_dir:global_cache_dir/1
-%% REBAR_TEMPLATE_DIR = rebar_dir:template_dir/1
-%% REBAR_APP_DIRS = rebar_dir:lib_dirs/1
-%% REBAR_SRC_DIRS = rebar_dir:src_dirs/1
-%%
-%% autoconf compatible variables
-%% (see: http://www.gnu.org/software/autoconf/manual/autoconf.html#Erlang-Libraries):
-%% ERLANG_ERTS_VER = erlang:system_info(version)
-%% ERLANG_ROOT_DIR = code:root_dir/0
-%% ERLANG_LIB_DIR_erl_interface = code:lib_dir(erl_interface)
-%% ERLANG_LIB_VER_erl_interface = version part of path returned by code:lib_dir(erl_interface)
-%% ERL = ERLANG_ROOT_DIR/bin/erl
-%% ERLC = ERLANG_ROOT_DIR/bin/erl
-%%
run_hooks(Dir, pre, Command, Opts, State) ->
run_hooks(Dir, pre_hooks, Command, Opts, State);
run_hooks(Dir, post, Command, Opts, State) ->
@@ -94,7 +70,7 @@ run_hooks(Dir, Type, Command, Opts, State) ->
?DEBUG("run_hooks(~p, ~p, ~p) -> no hooks defined\n", [Dir, Type, Command]),
ok;
Hooks ->
- Env = create_env(State, Opts),
+ Env = rebar_env:create_env(State, Opts),
lists:foreach(fun({_, C, _}=Hook) when C =:= Command ->
apply_hook(Dir, Env, Hook);
({C, _}=Hook) when C =:= Command ->
@@ -114,36 +90,3 @@ apply_hook(Dir, Env, {Arch, Command, Hook}) ->
apply_hook(Dir, Env, {Command, Hook}) ->
Msg = lists:flatten(io_lib:format("Hook for ~p failed!~n", [Command])),
rebar_utils:sh(Hook, [use_stdout, {cd, Dir}, {env, Env}, {abort_on_error, Msg}]).
-
-create_env(State, Opts) ->
- BaseDir = rebar_dir:base_dir(State),
- [
- {"REBAR_DEPS_DIR", filename:absname(rebar_dir:deps_dir(State))},
- {"REBAR_BUILD_DIR", filename:absname(rebar_dir:base_dir(State))},
- {"REBAR_ROOT_DIR", filename:absname(rebar_dir:root_dir(State))},
- {"REBAR_CHECKOUTS_DIR", filename:absname(rebar_dir:checkouts_dir(State))},
- {"REBAR_PLUGINS_DIR", filename:absname(rebar_dir:plugins_dir(State))},
- {"REBAR_GLOBAL_CONFIG_DIR", filename:absname(rebar_dir:global_config_dir(State))},
- {"REBAR_GLOBAL_CACHE_DIR", filename:absname(rebar_dir:global_cache_dir(Opts))},
- {"REBAR_TEMPLATE_DIR", filename:absname(rebar_dir:template_dir(State))},
- {"REBAR_APP_DIRS", join_dirs(BaseDir, rebar_dir:lib_dirs(State))},
- {"REBAR_SRC_DIRS", join_dirs(BaseDir, rebar_dir:all_src_dirs(Opts))},
- {"ERLANG_ERTS_VER", erlang:system_info(version)},
- {"ERLANG_ROOT_DIR", code:root_dir()},
- {"ERLANG_LIB_DIR_erl_interface", code:lib_dir(erl_interface)},
- {"ERLANG_LIB_VER_erl_interface", re_version(code:lib_dir(erl_interface))},
- {"ERL", filename:join([code:root_dir(), "bin", "erl"])},
- {"ERLC", filename:join([code:root_dir(), "bin", "erlc"])},
- {"ERLANG_ARCH" , rebar_api:wordsize()},
- {"ERLANG_TARGET", rebar_api:get_arch()}
-
- ].
-
-join_dirs(BaseDir, Dirs) ->
- string:join([ filename:join(BaseDir, Dir) || Dir <- Dirs ], ":").
-
-re_version(Path) ->
- case re:run(Path, "^.*-(?<VER>[^/-]*)$", [{capture, [1], list}]) of
- nomatch -> "";
- {match, [Ver]} -> Ver
- end.
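
run_hooks/5 looks up the pre_hooks/post_hooks entries for the current command and shells each one out with the environment now built by rebar_env:create_env/2; apply_hook/3 accepts both {Command, Hook} and {Arch, Command, Hook} shapes. A rebar.config sketch matching those shapes (regex and commands are placeholders):

    {pre_hooks, [
        {compile, "make -C c_src"},
        %% first element is a regex matched against the system architecture
        {"(linux|darwin)", compile, "./c_src/configure.sh"}
    ]}.
    {post_hooks, [
        {clean, "make -C c_src clean"}
    ]}.
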
diff --git a/src/rebar_log.erl b/src/rebar_log.erl
index b1a70c2..7fc2312 100644
--- a/src/rebar_log.erl
+++ b/src/rebar_log.erl
@@ -57,6 +57,8 @@ intensity() ->
high;
"low" ->
low;
+ "none" ->
+ none;
_ ->
?DFLT_INTENSITY
end,
@@ -91,11 +93,18 @@ get_level() ->
end.
log(Level = error, Str, Args) ->
- {ok, LogState} = application:get_env(rebar, log),
- ec_cmd_log:Level(LogState, lists:flatten(cf:format("~!^~s~n", [Str])), Args);
+ case application:get_env(rebar, log) of
+ {ok, LogState} ->
+ NewStr = lists:flatten(cf:format("~!^~ts~n", [Str])),
+            ec_cmd_log:Level(LogState, NewStr, Args);
+ undefined -> % fallback
+ io:format(standard_error, Str++"~n", Args)
+ end;
log(Level, Str, Args) ->
- {ok, LogState} = application:get_env(rebar, log),
- ec_cmd_log:Level(LogState, Str++"~n", Args).
+ case application:get_env(rebar, log) of
+ {ok, LogState} -> ec_cmd_log:Level(LogState, Str++"~n", Args);
+ undefined -> io:format(Str++"~n", Args)
+ end.
crashdump(Str, Args) ->
crashdump("rebar3.crashdump", Str, Args).
diff --git a/src/rebar_opts.erl b/src/rebar_opts.erl
index b02a504..8195a77 100644
--- a/src/rebar_opts.erl
+++ b/src/rebar_opts.erl
@@ -35,46 +35,80 @@ erl_opts(Opts) ->
Defines = [{d, list_to_atom(D)} ||
D <- ?MODULE:get(Opts, defines, [])],
AllOpts = Defines ++ RawErlOpts,
- case proplists:is_defined(no_debug_info, AllOpts) of
- true ->
- [O || O <- AllOpts, O =/= no_debug_info];
- false ->
- [debug_info|AllOpts]
- end.
+ lists:reverse(filter_debug_info(lists:reverse(AllOpts))).
+
+filter_debug_info([]) ->
+ %% Default == ON
+ [debug_info];
+filter_debug_info([debug_info|_] = L) ->
+ %% drop no_debug_info and {debug_info_key, _} since those would
+ %% conflict with a plain debug_info
+ [debug_info |
+ lists:filter(fun(K) ->
+ K =/= no_debug_info andalso K =/= debug_info andalso
+ not (is_tuple(K) andalso element(1,K) =:= debug_info_key)
+ end, L)];
+filter_debug_info([{debug_info, _} = H | T]) ->
+    %% custom debug_info value; keep it, drop no_debug_info from the rest,
+    %% and keep recursing since a later plain or crypto debug_info entry
+    %% still has to be handled.
+ [H | filter_debug_info(lists:filter(fun(K) -> K =/= no_debug_info end, T))];
+filter_debug_info([{debug_info_key, _}=H | T]) ->
+ %% Drop no_debug_info and regular debug_info
+ [H | lists:filter(fun(K) ->
+ K =/= no_debug_info andalso K =/= debug_info andalso
+ not (is_tuple(K) andalso element(1,K) =:= debug_info_key)
+ end, T)];
+filter_debug_info([no_debug_info|T]) ->
+ %% Drop all debug info
+ lists:filter(fun(debug_info) -> false
+ ; ({debug_info, _}) -> false
+ ; ({debug_info_key, _}) -> false
+ ; (no_debug_info) -> false
+ ; (_Other) -> true
+ end, T);
+filter_debug_info([H|T]) ->
+ [H|filter_debug_info(T)].
apply_overrides(Opts, Name, Overrides) ->
%% Inefficient. We want the order we get here though.
Opts1 = lists:foldl(fun({override, O}, OptsAcc) ->
- lists:foldl(fun({deps, Value}, OptsAcc1) ->
- set(OptsAcc1, {deps,default}, Value);
- ({Key, Value}, OptsAcc1) ->
- set(OptsAcc1, Key, Value)
- end, OptsAcc, O);
+ override_opt(O, OptsAcc);
(_, OptsAcc) ->
OptsAcc
- end, Opts, Overrides),
-
- Opts2 = lists:foldl(fun({override, N, O}, OptsAcc) when N =:= Name ->
- lists:foldl(fun({deps, Value}, OptsAcc1) ->
- set(OptsAcc1, {deps,default}, Value);
- ({Key, Value}, OptsAcc1) ->
- set(OptsAcc1, Key, Value)
- end, OptsAcc, O);
+ end, Opts, Overrides),
+
+ Opts2 = lists:foldl(fun({add, O}, OptsAcc) ->
+ add_opt(O, OptsAcc);
+ (_, OptsAcc) ->
+ OptsAcc
+ end, Opts1, Overrides),
+
+ Opts3 = lists:foldl(fun({del, O}, OptsAcc) ->
+ del_opt(O, OptsAcc);
(_, OptsAcc) ->
OptsAcc
- end, Opts1, Overrides),
-
- lists:foldl(fun({add, N, O}, OptsAcc) when N =:= Name ->
- lists:foldl(fun({deps, Value}, OptsAcc1) ->
- OldValue = ?MODULE:get(OptsAcc1, {deps,default}, []),
- set(OptsAcc1, {deps,default}, Value++OldValue);
- ({Key, Value}, OptsAcc1) ->
- OldValue = ?MODULE:get(OptsAcc1, Key, []),
- set(OptsAcc1, Key, Value++OldValue)
- end, OptsAcc, O);
- (_, OptsAcc) ->
- OptsAcc
- end, Opts2, Overrides).
+ end, Opts2, Overrides),
+
+ Opts4 = lists:foldl(fun({override, N, O}, OptsAcc) when N =:= Name ->
+ override_opt(O, OptsAcc);
+ (_, OptsAcc) ->
+ OptsAcc
+ end, Opts3, Overrides),
+
+ Opts5 = lists:foldl(fun({add, N, O}, OptsAcc) when N =:= Name ->
+ add_opt(O, OptsAcc);
+ (_, OptsAcc) ->
+ OptsAcc
+ end, Opts4, Overrides),
+
+ Opts6 = lists:foldl(fun({del, N, O}, OptsAcc) when N =:= Name ->
+ del_opt(O, OptsAcc);
+ (_, OptsAcc) ->
+ OptsAcc
+ end, Opts5, Overrides),
+
+ Opts6.
add_to_profile(Opts, Profile, KVs) when is_atom(Profile), is_list(KVs) ->
Profiles = ?MODULE:get(Opts, profiles, []),
@@ -101,42 +135,107 @@ merge_opts(Profile, NewOpts, OldOpts) ->
end.
merge_opts(NewOpts, OldOpts) ->
- dict:merge(fun(deps, _NewValue, OldValue) ->
- OldValue;
- ({deps, _}, NewValue, _OldValue) ->
- NewValue;
- (plugins, NewValue, _OldValue) ->
- NewValue;
- ({plugins, _}, NewValue, _OldValue) ->
- NewValue;
- (profiles, NewValue, OldValue) ->
- dict:to_list(merge_opts(dict:from_list(NewValue), dict:from_list(OldValue)));
- (mib_first_files, Value, Value) ->
- Value;
- (mib_first_files, NewValue, OldValue) ->
- OldValue ++ NewValue;
- (relx, NewValue, OldValue) ->
- rebar_utils:tup_umerge(OldValue, NewValue);
- (_Key, NewValue, OldValue) when is_list(NewValue) ->
- case io_lib:printable_list(NewValue) of
- true when NewValue =:= [] ->
- case io_lib:printable_list(OldValue) of
- true ->
- NewValue;
- false ->
- OldValue
- end;
- true ->
- NewValue;
- false ->
- rebar_utils:tup_umerge(NewValue, OldValue)
- end;
- (_Key, NewValue, _OldValue) ->
- NewValue
- end, NewOpts, OldOpts).
+ dict:merge(fun merge_opt/3, NewOpts, OldOpts).
%% Internal functions
+add_opt(Opts1, Opts2) ->
+ lists:foldl(fun({deps, Value}, OptsAcc) ->
+ OldValue = ?MODULE:get(OptsAcc, {deps,default}, []),
+ set(OptsAcc, {deps,default}, Value++OldValue);
+ ({Key, Value}, OptsAcc) ->
+ OldValue = ?MODULE:get(OptsAcc, Key, []),
+ set(OptsAcc, Key, Value++OldValue)
+ end, Opts2, Opts1).
+
+del_opt(Opts1, Opts2) ->
+ lists:foldl(fun({deps, Value}, OptsAcc) ->
+ OldValue = ?MODULE:get(OptsAcc, {deps,default}, []),
+ set(OptsAcc, {deps,default}, OldValue--Value);
+ ({Key, Value}, OptsAcc) ->
+ OldValue = ?MODULE:get(OptsAcc, Key, []),
+ set(OptsAcc, Key, OldValue--Value)
+ end, Opts2, Opts1).
+
+override_opt(Opts1, Opts2) ->
+ lists:foldl(fun({deps, Value}, OptsAcc) ->
+ set(OptsAcc, {deps,default}, Value);
+ ({Key, Value}, OptsAcc) ->
+ set(OptsAcc, Key, Value)
+ end, Opts2, Opts1).
+
+%%
+%% Function for dict:merge/3 (in merge_opts/2) to merge options by priority.
+%%
+merge_opt(deps, _NewValue, OldValue) ->
+ OldValue;
+merge_opt({deps, _}, NewValue, _OldValue) ->
+ NewValue;
+merge_opt(plugins, NewValue, _OldValue) ->
+ NewValue;
+merge_opt({plugins, _}, NewValue, _OldValue) ->
+ NewValue;
+merge_opt(profiles, NewValue, OldValue) ->
+ %% Merge up sparse pairs of {Profile, Opts} into a joined up
+ %% {Profile, OptsNew, OptsOld} list.
+ ToMerge = normalise_profile_pairs(lists:sort(NewValue),
+ lists:sort(OldValue)),
+ [{K,dict:to_list(merge_opts(dict:from_list(New), dict:from_list(Old)))}
+ || {K,New,Old} <- ToMerge];
+merge_opt(erl_first_files, Value, Value) ->
+ Value;
+merge_opt(erl_first_files, NewValue, OldValue) ->
+ OldValue ++ NewValue;
+merge_opt(mib_first_files, Value, Value) ->
+ Value;
+merge_opt(mib_first_files, NewValue, OldValue) ->
+ OldValue ++ NewValue;
+merge_opt(relx, NewValue, OldValue) ->
+ Partition = fun(C) -> is_tuple(C) andalso element(1, C) =:= overlay end,
+ {NewOverlays, NewOther} = lists:partition(Partition, NewValue),
+ {OldOverlays, OldOther} = lists:partition(Partition, OldValue),
+ rebar_utils:tup_umerge(NewOverlays, OldOverlays)
+ ++ rebar_utils:tup_umerge(OldOther, NewOther);
+merge_opt(Key, NewValue, OldValue)
+ when Key == erl_opts; Key == eunit_compile_opts; Key == ct_compile_opts ->
+ merge_erl_opts(lists:reverse(OldValue), NewValue);
+merge_opt(_Key, NewValue, OldValue) when is_list(NewValue) ->
+ case io_lib:printable_list(NewValue) of
+ true when NewValue =:= [] ->
+ case io_lib:printable_list(OldValue) of
+ true ->
+ NewValue;
+ false ->
+ OldValue
+ end;
+ true ->
+ NewValue;
+ false ->
+ rebar_utils:tup_umerge(NewValue, OldValue)
+ end;
+merge_opt(_Key, NewValue, _OldValue) ->
+ NewValue.
+
+%%
+%% Merge Erlang compiler options such that:
+%% a) the result contains no duplicates, and
+%% b) the options are ordered by increasing precedence, as expected by
+%%    the compiler.
+%% The first parameter is the lower precedence options, in reverse order, to
+%% be merged with the higher-precedence options in the second parameter.
+%%
+merge_erl_opts([Opt | Opts], []) ->
+ merge_erl_opts(Opts, [Opt]);
+merge_erl_opts([Opt | Opts], Merged) ->
+ case lists:member(Opt, Merged) of
+ true ->
+ merge_erl_opts(Opts, Merged);
+ _ ->
+ merge_erl_opts(Opts, [Opt | Merged])
+ end;
+merge_erl_opts([], Merged) ->
+ Merged.
+
%%
%% Filter a list of erl_opts platform_define options such that only
%% those which match the provided architecture regex are returned.
@@ -159,3 +258,26 @@ filter_defines([{platform_define, ArchRegex, Key, Value} | Rest], Acc) ->
end;
filter_defines([Opt | Rest], Acc) ->
filter_defines(Rest, [Opt | Acc]).
+
+%% @private takes two lists of profile tuples and merges them
+%% into one list of 3-tuples containing the values of either
+%% profiles.
+%% Any missing profile in one of the keys is replaced by an
+%% empty one.
+-spec normalise_profile_pairs([Profile], [Profile]) -> [Pair] when
+ Profile :: {Name, Opts},
+ Pair :: {Name, Opts, Opts},
+ Name :: atom(),
+ Opts :: [term()].
+normalise_profile_pairs([], []) ->
+ [];
+normalise_profile_pairs([{P,V}|Ps], []) ->
+ [{P,V,[]} | normalise_profile_pairs(Ps, [])];
+normalise_profile_pairs([], [{P,V}|Ps]) ->
+ [{P,[],V} | normalise_profile_pairs([], Ps)];
+normalise_profile_pairs([{P,VA}|PAs], [{P,VB}|PBs]) ->
+ [{P,VA,VB} | normalise_profile_pairs(PAs, PBs)];
+normalise_profile_pairs([{PA,VA}|PAs], [{PB,VB}|PBs]) when PA < PB ->
+ [{PA,VA,[]} | normalise_profile_pairs(PAs, [{PB, VB}|PBs])];
+normalise_profile_pairs([{PA,VA}|PAs], [{PB,VB}|PBs]) when PA > PB ->
+ [{PB,[],VB} | normalise_profile_pairs([{PA,VA}|PAs], PBs)].
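
filter_debug_info/1 scans erl_opts in reverse (and reverses the result back), so the right-most debug_info-related option wins and conflicting ones are dropped, while apply_overrides/3 now also honours un-named {override, _}, {add, _} and {del, _} tuples. A few illustrative cases, assuming the code behaves as written in the hunks above; the overrides fragment is a hypothetical rebar.config excerpt:

    %% erl_opts in                           -> effective erl_opts out
    %% [warnings_as_errors]                  -> [debug_info, warnings_as_errors]
    %% [debug_info, no_debug_info]           -> []
    %% [no_debug_info, {debug_info_key, K}]  -> [{debug_info_key, K}]

    {overrides, [
        {override, some_dep, [{erl_opts, [no_debug_info]}]},
        {add, some_dep, [{erl_opts, [{d, 'SOME_FLAG'}]}]},
        {del, [{erl_opts, [warnings_as_errors]}]}
    ]}.
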
diff --git a/src/rebar_otp_app.erl b/src/rebar_otp_app.erl
index ddaa44b..952271b 100644
--- a/src/rebar_otp_app.erl
+++ b/src/rebar_otp_app.erl
@@ -58,11 +58,12 @@ compile(State, App) ->
validate_app(State, App1).
format_error({missing_app_file, Filename}) ->
- io_lib:format("App file is missing: ~s", [Filename]);
-format_error({file_read, File, Reason}) ->
- io_lib:format("Failed to read required file ~s for processing: ~s", [File, file:format_error(Reason)]);
+ io_lib:format("App file is missing: ~ts", [Filename]);
+format_error({file_read, AppName, File, Reason}) ->
+ io_lib:format("Failed to read required ~ts file for processing the application '~ts': ~ts",
+ [File, AppName, file:format_error(Reason)]);
format_error({invalid_name, File, AppName}) ->
- io_lib:format("Invalid ~s: name of application (~p) must match filename.", [File, AppName]).
+ io_lib:format("Invalid ~ts: name of application (~p) must match filename.", [File, AppName]).
%% ===================================================================
%% Internal functions
@@ -79,7 +80,7 @@ validate_app(State, App) ->
Error
end;
{error, Reason} ->
- ?PRV_ERROR({file_read, AppFile, Reason})
+ ?PRV_ERROR({file_read, rebar_app_info:name(App), ".app", Reason})
end.
validate_app_modules(State, App, AppData) ->
@@ -110,25 +111,28 @@ preprocess(State, AppInfo, AppSrcFile) ->
A1 = apply_app_vars(AppVars, AppData),
%% AppSrcFile may contain instructions for generating a vsn number
- Vsn = app_vsn(AppData, AppSrcFile, State),
+ Vsn = app_vsn(AppInfo, AppData, AppSrcFile, State),
A2 = lists:keystore(vsn, 1, A1, {vsn, Vsn}),
%% systools:make_relup/4 fails with {missing_param, registered}
%% without a 'registered' value.
A3 = ensure_registered(A2),
+ %% some tools complain if a description is not present.
+ A4 = ensure_description(A3),
+
%% Build the final spec as a string
- Spec = io_lib:format("~p.\n", [{application, AppName, A3}]),
+ Spec = io_lib:format("~p.\n", [{application, AppName, A4}]),
%% Setup file .app filename and write new contents
EbinDir = rebar_app_info:ebin_dir(AppInfo),
- filelib:ensure_dir(filename:join(EbinDir, "dummy.beam")),
+ rebar_file_utils:ensure_dir(EbinDir),
AppFile = rebar_app_utils:app_src_to_app(OutDir, AppSrcFile),
- ok = rebar_file_utils:write_file_if_contents_differ(AppFile, Spec),
+ ok = rebar_file_utils:write_file_if_contents_differ(AppFile, Spec, utf8),
AppFile;
{error, Reason} ->
- throw(?PRV_ERROR({file_read, AppSrcFile, Reason}))
+ throw(?PRV_ERROR({file_read, rebar_app_info:name(AppInfo), ".app.src", Reason}))
end.
load_app_vars(State) ->
@@ -195,6 +199,15 @@ ensure_registered(AppData) ->
AppData
end.
+ensure_description(AppData) ->
+ case lists:keyfind(description, 1, AppData) of
+ false ->
+ %% Required for releases to work.
+ [{description, ""} | AppData];
+ {description, _} ->
+ AppData
+ end.
+
%% In the case of *.app.src we want to give the user the ability to
%% dynamically script the application resource file (think dynamic version
%% string, etc.), in a way similar to what can be done with the rebar
@@ -214,15 +227,13 @@ consult_app_file(Filename) ->
end
end.
-app_vsn(AppData, AppFile, State) ->
- AppDir = filename:dirname(filename:dirname(AppFile)),
- Resources = rebar_state:resources(State),
- rebar_utils:vcs_vsn(get_value(vsn, AppData, AppFile), AppDir, Resources).
+app_vsn(AppInfo, AppData, AppFile, State) ->
+ rebar_utils:vcs_vsn(AppInfo, get_value(vsn, AppData, AppFile), State).
get_value(Key, AppInfo, AppFile) ->
case proplists:get_value(Key, AppInfo) of
undefined ->
- ?ABORT("Failed to get app value '~p' from '~s'~n", [Key, AppFile]);
+ ?ABORT("Failed to get app value '~p' from '~ts'~n", [Key, AppFile]);
Value ->
Value
end.
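
preprocess/3 rewrites the .app.src into the final .app file, filling in missing registered and description entries and resolving the version through rebar_utils:vcs_vsn/3, so the source file can defer it to the VCS. A hypothetical .app.src relying on that (all values are placeholders):

    {application, demo, [
        {description, "demo application"},
        {vsn, git},   %% resolved at build time via rebar_utils:vcs_vsn/3
        {applications, [kernel, stdlib]}
    ]}.
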
diff --git a/src/rebar_packages.erl b/src/rebar_packages.erl
index 8b4611b..757eb86 100644
--- a/src/rebar_packages.erl
+++ b/src/rebar_packages.erl
@@ -1,17 +1,18 @@
-module(rebar_packages).
--export([packages/1
- ,close_packages/0
- ,load_and_verify_version/1
- ,deps/3
+-export([get/2
+ ,get_all_names/1
,registry_dir/1
- ,package_dir/1
- ,registry_checksum/2
- ,find_highest_matching/6
- ,find_highest_matching/4
- ,find_all/3
+ ,package_dir/2
+ ,find_highest_matching/5
,verify_table/1
- ,format_error/1]).
+ ,format_error/1
+ ,update_package/3
+ ,resolve_version/5]).
+
+-ifdef(TEST).
+-export([new_package_table/0, find_highest_matching_/5, cmp_/4, cmpl_/4, valid_vsn/1]).
+-endif.
-export_type([package/0]).
@@ -22,114 +23,131 @@
-type vsn() :: binary().
-type package() :: pkg_name() | {pkg_name(), vsn()}.
--spec packages(rebar_state:t()) -> ets:tid().
-packages(State) ->
- catch ets:delete(?PACKAGE_TABLE),
- case load_and_verify_version(State) of
- true ->
- ok;
- false ->
- ?DEBUG("Error loading package index.", []),
- handle_bad_index(State)
+format_error({missing_package, Name, Vsn}) ->
+ io_lib:format("Package not found in any repo: ~ts ~ts",
+ [rebar_utils:to_binary(Name), rebar_utils:to_binary(Vsn)]);
+format_error({missing_package, Pkg}) ->
+ io_lib:format("Package not found in any repo: ~p", [Pkg]).
+
+-spec get(rebar_hex_repos:repo(), binary()) -> {ok, map()} | {error, term()}.
+get(Config, Name) ->
+ try hex_api_package:get(Config, Name) of
+ {ok, {200, _Headers, PkgInfo}} ->
+ {ok, PkgInfo};
+ {ok, {404, _, _}} ->
+ {error, not_found};
+ Error ->
+ ?DEBUG("Hex api request failed: ~p", [Error]),
+ {error, unknown}
+ catch
+ error:{badmatch, {error, {failed_connect, _}}} ->
+ {error, failed_to_connect};
+ _:Exception ->
+ ?DEBUG("hex_api_package:get failed: ~p", [Exception]),
+ {error, unknown}
end.
-handle_bad_index(State) ->
- ?ERROR("Bad packages index. Trying to fix by updating the registry.", []),
- {ok, State1} = rebar_prv_update:do(State),
- case load_and_verify_version(State1) of
- true ->
- ok;
- false ->
- %% Still unable to load after an update, create an empty registry
- ets:new(?PACKAGE_TABLE, [named_table, public])
+
+-spec get_all_names(rebar_state:t()) -> [binary()].
+get_all_names(State) ->
+ verify_table(State),
+ lists:usort(ets:select(?PACKAGE_TABLE, [{#package{key={'$1', '_', '_'},
+ _='_'},
+ [], ['$1']}])).
+
+-spec get_package_versions(unicode:unicode_binary(), ec_semver:semver(),
+ unicode:unicode_binary(),
+ ets:tid(), rebar_state:t()) -> [vsn()].
+get_package_versions(Dep, {_, AlphaInfo}, Repo, Table, State) ->
+ ?MODULE:verify_table(State),
+ AllowPreRelease = rebar_state:get(State, deps_allow_prerelease, false)
+ orelse AlphaInfo =/= {[],[]},
+ ets:select(Table, [{#package{key={Dep, {'$1', '$2'}, Repo},
+ _='_'},
+ [{'==', '$2', {{[],[]}}} || not AllowPreRelease], [{{'$1', '$2'}}]}]).
+
+-spec get_package(unicode:unicode_binary(), unicode:unicode_binary(),
+ binary() | undefined | '_',
+ [unicode:unicode_binary()] | ['_'], ets:tab(), rebar_state:t())
+ -> {ok, #package{}} | not_found.
+get_package(Dep, Vsn, undefined, Repos, Table, State) ->
+ get_package(Dep, Vsn, '_', Repos, Table, State);
+get_package(Dep, Vsn, Hash, Repos, Table, State) ->
+ ?MODULE:verify_table(State),
+ case ets:select(Table, [{#package{key={Dep, ec_semver:parse(Vsn), Repo},
+ checksum=Hash,
+ _='_'}, [], ['$_']} || Repo <- Repos]) of
+ %% have to allow multiple matches in the list for cases where Repo is `_`
+ [Package | _] ->
+ {ok, Package};
+ _ ->
+ not_found
end.
-close_packages() ->
- catch ets:delete(?PACKAGE_TABLE).
+new_package_table() ->
+ ?PACKAGE_TABLE = ets:new(?PACKAGE_TABLE, [named_table, public, ordered_set, {keypos, 2}]),
+ ets:insert(?PACKAGE_TABLE, {?PACKAGE_INDEX_VERSION, package_index_version}).
load_and_verify_version(State) ->
{ok, RegistryDir} = registry_dir(State),
case ets:file2tab(filename:join(RegistryDir, ?INDEX_FILE)) of
{ok, _} ->
- case ets:lookup_element(?PACKAGE_TABLE, package_index_version, 2) of
+ case ets:lookup_element(?PACKAGE_TABLE, package_index_version, 1) of
?PACKAGE_INDEX_VERSION ->
true;
- _ ->
+ V ->
+ %% no reason to confuse the user since we just start fresh and they
+ %% shouldn't notice, so log as a debug message only
+ ?DEBUG("Package index version mismatch. Current version ~p, this rebar3 expecting ~p",
+ [V, ?PACKAGE_INDEX_VERSION]),
(catch ets:delete(?PACKAGE_TABLE)),
- rebar_prv_update:hex_to_index(State)
+ new_package_table()
end;
- _ ->
- rebar_prv_update:hex_to_index(State)
+ _ ->
+ new_package_table()
end.
-deps(Name, Vsn, State) ->
- try
- deps_(Name, Vsn, State)
- catch
- _:_ ->
- handle_missing_package({Name, Vsn}, State, fun(State1) -> deps_(Name, Vsn, State1) end)
- end.
-
-deps_(Name, Vsn, State) ->
- ?MODULE:verify_table(State),
- ets:lookup_element(?PACKAGE_TABLE, {ec_cnv:to_binary(Name), ec_cnv:to_binary(Vsn)}, 2).
-
-handle_missing_package(Dep, State, Fun) ->
- case Dep of
- {Name, Vsn} ->
- ?INFO("Package ~s-~s not found. Fetching registry updates and trying again...", [Name, Vsn]);
- _ ->
- ?INFO("Package ~p not found. Fetching registry updates and trying again...", [Dep])
- end,
+handle_missing_package(PkgKey, Repo, State, Fun) ->
+ Name =
+ case PkgKey of
+ {N, Vsn, _Repo} ->
+ ?DEBUG("Package ~ts-~ts not found. Fetching registry updates for "
+ "package and trying again...", [N, Vsn]),
+ N;
+ _ ->
+ ?DEBUG("Package ~p not found. Fetching registry updates for "
+ "package and trying again...", [PkgKey]),
+ PkgKey
+ end,
- {ok, State1} = rebar_prv_update:do(State),
- try
- Fun(State1)
+ update_package(Name, Repo, State),
+ try
+ Fun(State)
catch
_:_ ->
%% Even after an update the package is still missing, time to error out
- throw(?PRV_ERROR({missing_package, Dep}))
+ throw(?PRV_ERROR({missing_package, PkgKey}))
end.
registry_dir(State) ->
CacheDir = rebar_dir:global_cache_dir(rebar_state:opts(State)),
- case rebar_state:get(State, rebar_packages_cdn, ?DEFAULT_CDN) of
- ?DEFAULT_CDN ->
- RegistryDir = filename:join([CacheDir, "hex", "default"]),
- ok = filelib:ensure_dir(filename:join(RegistryDir, "placeholder")),
- {ok, RegistryDir};
- CDN ->
- case rebar_utils:url_append_path(CDN, ?REMOTE_PACKAGE_DIR) of
- {ok, Parsed} ->
- {ok, {_, _, Host, _, Path, _}} = http_uri:parse(Parsed),
- CDNHostPath = lists:reverse(string:tokens(Host, ".")),
- CDNPath = tl(filename:split(Path)),
- RegistryDir = filename:join([CacheDir, "hex"] ++ CDNHostPath ++ CDNPath),
- ok = filelib:ensure_dir(filename:join(RegistryDir, "placeholder")),
- {ok, RegistryDir};
- _ ->
- {uri_parse_error, CDN}
- end
- end.
+ RegistryDir = filename:join([CacheDir, "hex"]),
+ case filelib:ensure_dir(filename:join(RegistryDir, "placeholder")) of
+ ok -> ok;
+ {error, Posix} when Posix == eaccess; Posix == enoent ->
+ ?ABORT("Could not write to ~p. Please ensure the path is writeable.",
+ [RegistryDir])
+ end,
+ {ok, RegistryDir}.
-package_dir(State) ->
- case registry_dir(State) of
- {ok, RegistryDir} ->
- PackageDir = filename:join([RegistryDir, "packages"]),
- ok = filelib:ensure_dir(filename:join(PackageDir, "placeholder")),
- {ok, PackageDir};
- Error ->
- Error
- end.
+-spec package_dir(rebar_hex_repos:repo(), rebar_state:t()) -> {ok, file:filename_all()}.
+package_dir(Repo, State) ->
+ {ok, RegistryDir} = registry_dir(State),
+ RepoName = maps:get(name, Repo),
+ PackageDir = filename:join([RegistryDir, rebar_utils:to_list(RepoName), "packages"]),
+ ok = filelib:ensure_dir(filename:join(PackageDir, "placeholder")),
+ {ok, PackageDir}.
-registry_checksum({pkg, Name, Vsn, _Hash}, State) ->
- try
- ?MODULE:verify_table(State),
- ets:lookup_element(?PACKAGE_TABLE, {Name, Vsn}, 3)
- catch
- _:_ ->
- throw(?PRV_ERROR({missing_package, ec_cnv:to_binary(Name), ec_cnv:to_binary(Vsn)}))
- end.
%% Hex supports use of ~> to specify the version required for a dependency.
%% Since rebar3 requires exact versions to choose from we find the highest
@@ -146,80 +164,271 @@ registry_checksum({pkg, Name, Vsn, _Hash}, State) ->
%% `~> 2.1.3-dev` | `>= 2.1.3-dev and < 2.2.0`
%% `~> 2.0` | `>= 2.0.0 and < 3.0.0`
%% `~> 2.1` | `>= 2.1.0 and < 3.0.0`
-find_highest_matching(Dep, Constraint, Table, State) ->
- find_highest_matching(undefined, undefined, Dep, Constraint, Table, State).
-
-find_highest_matching(Pkg, PkgVsn, Dep, Constraint, Table, State) ->
- try find_highest_matching_(Pkg, PkgVsn, Dep, Constraint, Table, State) of
+find_highest_matching(Dep, Constraint, Repo, Table, State) ->
+ try find_highest_matching_(Dep, Constraint, Repo, Table, State) of
none ->
- handle_missing_package(Dep, State,
+ handle_missing_package(Dep, Repo, State,
fun(State1) ->
- find_highest_matching_(Pkg, PkgVsn, Dep, Constraint, Table, State1)
+ find_highest_matching_(Dep, Constraint, Repo, Table, State1)
end);
Result ->
Result
catch
_:_ ->
- handle_missing_package(Dep, State,
+ handle_missing_package(Dep, Repo, State,
fun(State1) ->
- find_highest_matching_(Pkg, PkgVsn, Dep, Constraint, Table, State1)
+ find_highest_matching_(Dep, Constraint, Repo, Table, State1)
end)
end.
-find_highest_matching_(Pkg, PkgVsn, Dep, Constraint, Table, State) ->
- try find_all(Dep, Table, State) of
- {ok, [Vsn]} ->
- handle_single_vsn(Pkg, PkgVsn, Dep, Vsn, Constraint);
- {ok, [HeadVsn | VsnTail]} ->
- {ok, handle_vsns(Constraint, HeadVsn, VsnTail)}
- catch
- error:badarg ->
- none
- end.
-
-find_all(Dep, Table, State) ->
- ?MODULE:verify_table(State),
- try ets:lookup_element(Table, Dep, 2) of
- [Vsns] when is_list(Vsns)->
- {ok, Vsns};
+find_highest_matching_(Dep, Constraint, #{name := Repo}, Table, State) ->
+ try get_package_versions(Dep, Constraint, Repo, Table, State) of
+ [Vsn] ->
+ handle_single_vsn(Vsn, Constraint);
Vsns ->
- {ok, Vsns}
+ case handle_vsns(Constraint, Vsns) of
+ none ->
+ none;
+ FoundVsn ->
+ {ok, FoundVsn}
+ end
catch
error:badarg ->
none
end.
-handle_vsns(Constraint, HeadVsn, VsnTail) ->
+handle_vsns(Constraint, Vsns) ->
lists:foldl(fun(Version, Highest) ->
case ec_semver:pes(Version, Constraint) andalso
- ec_semver:gt(Version, Highest) of
+ (Highest =:= none orelse ec_semver:gt(Version, Highest)) of
true ->
Version;
false ->
Highest
end
- end, HeadVsn, VsnTail).
+ end, none, Vsns).
-handle_single_vsn(Pkg, PkgVsn, Dep, Vsn, Constraint) ->
+handle_single_vsn(Vsn, Constraint) ->
case ec_semver:pes(Vsn, Constraint) of
true ->
{ok, Vsn};
false ->
- case {Pkg, PkgVsn} of
- {undefined, undefined} ->
- ?WARN("Only existing version of ~s is ~s which does not match constraint ~~> ~s. "
- "Using anyway, but it is not guaranteed to work.", [Dep, Vsn, Constraint]);
- _ ->
- ?WARN("[~s:~s] Only existing version of ~s is ~s which does not match constraint ~~> ~s. "
- "Using anyway, but it is not guaranteed to work.", [Pkg, PkgVsn, Dep, Vsn, Constraint])
- end,
- {ok, Vsn}
+ none
end.
-format_error({missing_package, {Name, Vsn}}) ->
- io_lib:format("Package not found in registry: ~s-~s.", [ec_cnv:to_binary(Name), ec_cnv:to_binary(Vsn)]);
-format_error({missing_package, Dep}) ->
- io_lib:format("Package not found in registry: ~p.", [Dep]).
-
verify_table(State) ->
ets:info(?PACKAGE_TABLE, named_table) =:= true orelse load_and_verify_version(State).
+
+parse_deps(Deps) ->
+ [{maps:get(app, D, Name), {pkg, Name, Constraint, undefined}}
+ || D=#{package := Name,
+ requirement := Constraint} <- Deps].
+
+parse_checksum(<<Checksum:256/big-unsigned>>) ->
+ list_to_binary(
+ rebar_string:uppercase(
+ lists:flatten(io_lib:format("~64.16.0b", [Checksum]))));
+parse_checksum(Checksum) ->
+ Checksum.
+
+update_package(Name, RepoConfig=#{name := Repo}, State) ->
+ ?MODULE:verify_table(State),
+ try hex_repo:get_package(RepoConfig#{repo_key => maps:get(read_key, RepoConfig, <<>>)}, Name) of
+ {ok, {200, _Headers, #{releases := Releases}}} ->
+ _ = insert_releases(Name, Releases, Repo, ?PACKAGE_TABLE),
+ {ok, RegistryDir} = rebar_packages:registry_dir(State),
+ PackageIndex = filename:join(RegistryDir, ?INDEX_FILE),
+ ok = ets:tab2file(?PACKAGE_TABLE, PackageIndex);
+ {ok, {403, _Headers, <<>>}} ->
+ not_found;
+ {ok, {404, _Headers, _}} ->
+ not_found;
+ Error ->
+ ?DEBUG("Hex get_package request failed: ~p", [Error]),
+ %% TODO: add better log message. hex_core should export a format_error
+ ?WARN("Failed to update package from repo ~ts", [Repo]),
+ fail
+ catch
+ _:Exception ->
+ ?DEBUG("hex_repo:get_package failed for package ~p: ~p", [Name, Exception]),
+ fail
+ end.
+
+insert_releases(Name, Releases, Repo, Table) ->
+ [true = ets:insert(Table,
+ #package{key={Name, ec_semver:parse(Version), Repo},
+ checksum=parse_checksum(Checksum),
+ retired=maps:get(retired, Release, false),
+ dependencies=parse_deps(Dependencies)})
+ || Release=#{checksum := Checksum,
+ version := Version,
+ dependencies := Dependencies} <- Releases].
+
+-spec resolve_version(unicode:unicode_binary(), unicode:unicode_binary() | undefined,
+ binary() | undefined,
+ ets:tab(), rebar_state:t())
+ -> {error, {invalid_vsn, unicode:unicode_binary()}} |
+ not_found |
+ {ok, #package{}, map()}.
+%% if checksum is defined search for any matching repo matching pkg-vsn and checksum
+resolve_version(Dep, DepVsn, Hash, HexRegistry, State) when is_binary(Hash) ->
+ Resources = rebar_state:resources(State),
+ #{repos := RepoConfigs} = rebar_resource_v2:find_resource_state(pkg, Resources),
+ RepoNames = [RepoName || #{name := RepoName} <- RepoConfigs],
+
+ %% allow retired packages when we have a checksum
+ case get_package(Dep, DepVsn, Hash, RepoNames, HexRegistry, State) of
+ {ok, Package=#package{key={_, _, RepoName}}} ->
+ {ok, RepoConfig} = rebar_hex_repos:get_repo_config(RepoName, RepoConfigs),
+ {ok, Package, RepoConfig};
+ _ ->
+ Fun = fun(Repo) ->
+ case resolve_version_(Dep, DepVsn, Repo, HexRegistry, State) of
+ none ->
+ not_found;
+ {ok, Vsn} ->
+ get_package(Dep, Vsn, Hash, [Repo], HexRegistry, State)
+ end
+ end,
+ handle_missing_no_exception(Fun, Dep, State)
+ end;
+resolve_version(Dep, undefined, Hash, HexRegistry, State) ->
+ Fun = fun(Repo) ->
+ case highest_matching(Dep, {0,{[],[]}}, Repo, HexRegistry, State) of
+ none ->
+ not_found;
+ {ok, Vsn} ->
+ get_package(Dep, Vsn, Hash, [Repo], HexRegistry, State)
+ end
+ end,
+ handle_missing_no_exception(Fun, Dep, State);
+resolve_version(Dep, DepVsn, Hash, HexRegistry, State) ->
+ case valid_vsn(DepVsn) of
+ false ->
+ {error, {invalid_vsn, DepVsn}};
+ _ ->
+ Fun = fun(Repo) ->
+ case resolve_version_(Dep, DepVsn, Repo, HexRegistry, State) of
+ none ->
+ not_found;
+ {ok, Vsn} ->
+ get_package(Dep, Vsn, Hash, [Repo], HexRegistry, State)
+ end
+ end,
+ handle_missing_no_exception(Fun, Dep, State)
+ end.
+
+check_all_repos(Fun, RepoConfigs) ->
+ ec_lists:search(fun(#{name := R}) ->
+ Fun(R)
+ end, RepoConfigs).
+
+handle_missing_no_exception(Fun, Dep, State) ->
+ Resources = rebar_state:resources(State),
+ #{repos := RepoConfigs} = rebar_resource_v2:find_resource_state(pkg, Resources),
+
+ %% first check all repos in order for a local match
+ %% if none is found then we step through checking after updating the repo registry
+ case check_all_repos(Fun, RepoConfigs) of
+ not_found ->
+ ec_lists:search(fun(Config=#{name := R}) ->
+ case ?MODULE:update_package(Dep, Config, State) of
+ ok ->
+ Fun(R);
+ _ ->
+ not_found
+ end
+ end, RepoConfigs);
+ Result ->
+ Result
+ end.
+
+resolve_version_(Dep, DepVsn, Repo, HexRegistry, State) ->
+ case DepVsn of
+ <<"~>", Vsn/binary>> ->
+ highest_matching(Dep, rm_ws(Vsn), Repo, HexRegistry, State);
+ <<">=", Vsn/binary>> ->
+ cmp(Dep, rm_ws(Vsn), Repo, HexRegistry, State, fun ec_semver:gte/2);
+ <<">", Vsn/binary>> ->
+ cmp(Dep, rm_ws(Vsn), Repo, HexRegistry, State, fun ec_semver:gt/2);
+ <<"<=", Vsn/binary>> ->
+ cmpl(Dep, rm_ws(Vsn), Repo, HexRegistry, State, fun ec_semver:lte/2);
+ <<"<", Vsn/binary>> ->
+ cmpl(Dep, rm_ws(Vsn), Repo, HexRegistry, State, fun ec_semver:lt/2);
+ <<"==", Vsn/binary>> ->
+ {ok, Vsn};
+ Vsn ->
+ {ok, Vsn}
+ end.
+
+rm_ws(<<" ", R/binary>>) ->
+ ec_semver:parse(rm_ws(R));
+rm_ws(R) ->
+ ec_semver:parse(R).
+
+valid_vsn(Vsn) ->
+ %% Regex from https://github.com/sindresorhus/semver-regex/blob/master/index.js
+ SemVerRegExp = "v?(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)(\\.(0|[1-9][0-9]*))?"
+ "(-[0-9a-z-]+(\\.[0-9a-z-]+)*)?(\\+[0-9a-z-]+(\\.[0-9a-z-]+)*)?",
+ SupportedVersions = "^(>=?|<=?|~>|==)?\\s*" ++ SemVerRegExp ++ "$",
+ re:run(Vsn, SupportedVersions, [unicode]) =/= nomatch.
+
+highest_matching(Dep, Vsn, Repo, HexRegistry, State) ->
+ find_highest_matching_(Dep, Vsn, #{name => Repo}, HexRegistry, State).
+
+cmp(Dep, Vsn, Repo, HexRegistry, State, CmpFun) ->
+ case get_package_versions(Dep, Vsn, Repo, HexRegistry, State) of
+ [] ->
+ none;
+ Vsns ->
+ cmp_(undefined, Vsn, Vsns, CmpFun)
+ end.
+
+cmp_(undefined, MinVsn, [], _CmpFun) ->
+ {ok, MinVsn};
+cmp_(HighestDepVsn, _MinVsn, [], _CmpFun) ->
+ {ok, HighestDepVsn};
+
+cmp_(BestMatch, MinVsn, [Vsn | R], CmpFun) ->
+ case CmpFun(Vsn, MinVsn) of
+ true ->
+ cmp_(Vsn, Vsn, R, CmpFun);
+ false ->
+ cmp_(BestMatch, MinVsn, R, CmpFun)
+ end.
+
+%% We need to treat this differently since we want a version that is LOWER but
+%% the highest possible one.
+cmpl(Dep, Vsn, Repo, HexRegistry, State, CmpFun) ->
+ case get_package_versions(Dep, Vsn, Repo, HexRegistry, State) of
+ [] ->
+ none;
+ Vsns ->
+ cmpl_(undefined, Vsn, Vsns, CmpFun)
+ end.
+
+cmpl_(undefined, MaxVsn, [], _CmpFun) ->
+ {ok, MaxVsn};
+cmpl_(HighestDepVsn, _MaxVsn, [], _CmpFun) ->
+ {ok, HighestDepVsn};
+
+cmpl_(undefined, MaxVsn, [Vsn | R], CmpFun) ->
+ case CmpFun(Vsn, MaxVsn) of
+ true ->
+ cmpl_(Vsn, MaxVsn, R, CmpFun);
+ false ->
+ cmpl_(undefined, MaxVsn, R, CmpFun)
+ end;
+
+cmpl_(BestMatch, MaxVsn, [Vsn | R], CmpFun) ->
+ case CmpFun(Vsn, MaxVsn) of
+ true ->
+ case ec_semver:gte(Vsn, BestMatch) of
+ true ->
+ cmpl_(Vsn, MaxVsn, R, CmpFun);
+ false ->
+ cmpl_(BestMatch, MaxVsn, R, CmpFun)
+ end;
+ false ->
+ cmpl_(BestMatch, MaxVsn, R, CmpFun)
+ end.
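%% [Editor's note: illustrative sketch, not part of the patch] The constraint
%% forms accepted by the new resolve_version_/5 map onto ec_semver comparisons.
%% The module below is a hypothetical classifier written only to summarize that
%% dispatch; it does not exist in rebar3.
%%
%%   -module(constraint_kind_demo).
%%   -export([kind/1]).
%%
%%   kind(<<"~>", _/binary>>) -> highest_matching;  %% pessimistic bound
%%   kind(<<">=", _/binary>>) -> gte;
%%   kind(<<">", _/binary>>)  -> gt;
%%   kind(<<"<=", _/binary>>) -> lte;
%%   kind(<<"<", _/binary>>)  -> lt;
%%   kind(<<"==", _/binary>>) -> exact;
%%   kind(_)                  -> exact.             %% bare versions pass through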
diff --git a/src/rebar_paths.erl b/src/rebar_paths.erl
new file mode 100644
index 0000000..160f9fa
--- /dev/null
+++ b/src/rebar_paths.erl
@@ -0,0 +1,211 @@
+-module(rebar_paths).
+-include("rebar.hrl").
+
+-type target() :: deps | plugins.
+-type targets() :: [target(), ...].
+-export_type([target/0, targets/0]).
+-export([set_paths/2, unset_paths/2]).
+-export([clashing_apps/2]).
+
+-ifdef(TEST).
+-export([misloaded_modules/2]).
+-endif.
+
+-spec set_paths(targets(), rebar_state:t()) -> ok.
+set_paths(UserTargets, State) ->
+ Targets = normalize_targets(UserTargets),
+ GroupPaths = path_groups(Targets, State),
+ Paths = lists:append(lists:reverse([P || {_, P} <- GroupPaths])),
+ code:add_pathsa(Paths),
+ AppGroups = app_groups(Targets, State),
+ purge_and_load(AppGroups, sets:new()),
+ ok.
+
+-spec unset_paths(targets(), rebar_state:t()) -> ok.
+unset_paths(UserTargets, State) ->
+ Targets = normalize_targets(UserTargets),
+ GroupPaths = path_groups(Targets, State),
+ Paths = lists:append([P || {_, P} <- GroupPaths]),
+ [code:del_path(P) || P <- Paths],
+ purge(Paths, code:all_loaded()),
+ ok.
+
+-spec clashing_apps(targets(), rebar_state:t()) -> [{target(), [binary()]}].
+clashing_apps(Targets, State) ->
+ AppGroups = app_groups(Targets, State),
+ AppNames = [{G, sets:from_list(
+ [rebar_app_info:name(App) || App <- Apps]
+ )} || {G, Apps} <- AppGroups],
+ clashing_app_names(sets:new(), AppNames, []).
+
+%%%%%%%%%%%%%%%
+%%% PRIVATE %%%
+%%%%%%%%%%%%%%%
+
+%% The paths are to be set in the reverse order; i.e. the default
+%% path is always last when possible (minimize cases where a build
+%% tool version clashes with an app's), and put the highest priorities
+%% first.
+-spec normalize_targets(targets()) -> targets().
+normalize_targets(List) ->
+ %% Plan for the eventuality of getting values piped in
+ %% from future versions of rebar3, possibly from plugins and so on,
+ %% which means we'd risk failing kind of violently. We only support
+ %% deps and plugins
+ TmpList = lists:foldl(
+ fun(deps, [deps | _] = Acc) -> Acc;
+ (plugins, [plugins | _] = Acc) -> Acc;
+ (deps, Acc) -> [deps | Acc -- [deps]];
+ (plugins, Acc) -> [plugins | Acc -- [plugins]];
+ (_, Acc) -> Acc
+ end,
+ [],
+ List
+ ),
+ lists:reverse(TmpList).
+
+purge_and_load([], _) ->
+ ok;
+purge_and_load([{_Group, Apps}|Rest], Seen) ->
+ %% We have: a list of all applications in the current priority group,
+ %% a list of all loaded modules with their active path, and a list of
+ %% seen applications.
+ %%
+ %% We do the following:
+ %% 1. identify the apps that have not been solved yet
+ %% 2. find the paths for all apps in the current group
+ %% 3. unload and reload apps that may have changed paths in order
+ %% to get updated module lists and specs
+ %% (we ignore started apps and apps that have not run for this)
+ %% This part turns out to be the bottleneck of this module, so
+ %% to speed it up, using clash detection proves useful:
+ %% only reload apps that clashed since others are unlikely to
+ %% conflict in significant ways
+ %% 4. create a list of modules to check from that app list—only loaded
+ %% modules make sense to check.
+ %% 5. check the modules to match their currently loaded paths with
+ %% the path set from the apps in the current group; modules
+ %% that differ must be purged; others can stay
+
+ %% 1)
+ AppNames = [AppName || App <- Apps,
+ AppName <- [rebar_app_info:name(App)],
+ not sets:is_element(AppName, Seen)],
+ GoodApps = [App || AppName <- AppNames,
+ App <- Apps,
+ rebar_app_info:name(App) =:= AppName],
+ %% 2)
+ %% (no need for extra_src_dirs since those get put into ebin;
+ %% also no need for OTP libs; we want to allow overtaking them)
+ GoodAppPaths = [rebar_app_info:ebin_dir(App) || App <- GoodApps],
+ %% 3)
+ [begin
+ AtomApp = binary_to_atom(AppName, utf8),
+ %% blind load/unload won't interrupt an already-running app,
+ %% preventing odd errors, maybe!
+ case application:unload(AtomApp) of
+ ok -> application:load(AtomApp);
+ _ -> ok
+ end
+ end || AppName <- AppNames,
+ %% Shouldn't unload ourselves; rebar runs without ever
+ %% being started and unloading breaks logging!
+ AppName =/= <<"rebar">>],
+ %% 4)
+ CandidateMods = lists:append(
+ %% Start by asking the currently loaded app (if loaded)
+ %% since it would be the primary source of conflicting modules
+ [case application:get_key(AppName, modules) of
+ {ok, Mods} ->
+ Mods;
+ undefined ->
+ %% if not found, parse the app file on disk, in case
+ %% the app's modules are used without it being loaded;
+ %% invalidate the cache in case we're proceeding during
+ %% compilation steps by setting the app details to `[]', which
+ %% is its empty value; the details will then be reloaded
+ %% from disk when found
+ case rebar_app_info:app_details(rebar_app_info:app_details(App, [])) of
+ [] -> [];
+ Details -> proplists:get_value(modules, Details, [])
+ end
+ end || App <- GoodApps,
+ AppName <- [binary_to_atom(rebar_app_info:name(App), utf8)]]
+ ),
+ ModPaths = [{Mod,Path} || Mod <- CandidateMods,
+ erlang:function_exported(Mod, module_info, 0),
+ {file, Path} <- [code:is_loaded(Mod)]],
+
+ %% 5)
+ Mods = misloaded_modules(GoodAppPaths, ModPaths),
+ [purge_mod(Mod) || Mod <- Mods],
+
+ purge_and_load(Rest, sets:union(Seen, sets:from_list(AppNames))).
+
+purge(Paths, ModPaths) ->
+ SortedPaths = lists:sort(Paths),
+ lists:map(fun purge_mod/1,
+ [Mod || {Mod, Path} <- ModPaths,
+ is_list(Path), % not 'preloaded' or mocked
+ any_prefix(Path, SortedPaths)]
+ ).
+
+misloaded_modules(GoodAppPaths, ModPaths) ->
+ %% Identify paths that are invalid; i.e. app paths that cover an
+ %% app in the desired group, but are not in the desired group.
+ lists:usort(
+ [Mod || {Mod, Path} <- ModPaths,
+ is_list(Path), % not 'preloaded' or mocked
+ not any_prefix(Path, GoodAppPaths)]
+ ).
+
+any_prefix(Path, Paths) ->
+ lists:any(fun(P) -> lists:prefix(P, Path) end, Paths).
+
+%% assume paths currently set are good; only unload a module so next call
+%% uses the correctly set paths
+purge_mod(Mod) ->
+ code:soft_purge(Mod) andalso code:delete(Mod).
+
+
+%% This is a tricky O(n²) check since we want to
+%% know whether an app clashes with any of the top priority groups.
+%%
+%% For example, let's say we have `[deps, plugins]', then we want
+%% to find the plugins that clash with deps:
+%%
+%% `[{deps, [ClashingPlugins]}, {plugins, []}]'
+%%
+%% In case we'd ever have alternative or additional types, we can
+%% find all clashes from other 'groups'.
+clashing_app_names(_, [], Acc) ->
+ lists:reverse(Acc);
+clashing_app_names(PrevNames, [{G,AppNames} | Rest], Acc) ->
+ CurrentNames = sets:subtract(AppNames, PrevNames),
+ NextNames = sets:subtract(sets:union([A || {_, A} <- Rest]), PrevNames),
+ Clashes = sets:intersection(CurrentNames, NextNames),
+ NewAcc = [{G, sets:to_list(Clashes)} | Acc],
+ clashing_app_names(sets:union(PrevNames, CurrentNames), Rest, NewAcc).
+
+path_groups(Targets, State) ->
+ [{Target, get_paths(Target, State)} || Target <- Targets].
+
+app_groups(Targets, State) ->
+ [{Target, get_apps(Target, State)} || Target <- Targets].
+
+get_paths(deps, State) ->
+ rebar_state:code_paths(State, all_deps);
+get_paths(plugins, State) ->
+ rebar_state:code_paths(State, all_plugin_deps).
+
+get_apps(deps, State) ->
+ %% The code paths for deps also include the top level apps
+ %% and the extras, which we don't have here; we have to
+ %% add the apps by hand
+ case rebar_state:project_apps(State) of
+ undefined -> [];
+ List -> List
+ end ++
+ rebar_state:all_deps(State);
+get_apps(plugins, State) ->
+ rebar_state:all_plugin_deps(State).
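%% [Editor's note: illustrative sketch, not part of the patch] Typical use of
%% this module from a provider, mirroring what rebar_prv_common_test now does;
%% run_tests/1 is a hypothetical helper standing in for the provider's work.
%%
%%   do(State) ->
%%       %% dependencies take priority over plugins while the task runs
%%       rebar_paths:set_paths([deps, plugins], State),
%%       Result = run_tests(State),
%%       %% restore plugin-first ordering once the task is done
%%       rebar_paths:set_paths([plugins, deps], State),
%%       Result.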
diff --git a/src/rebar_pkg_resource.erl b/src/rebar_pkg_resource.erl
index 5817817..823b7fc 100644
--- a/src/rebar_pkg_resource.erl
+++ b/src/rebar_pkg_resource.erl
@@ -2,208 +2,280 @@
%% ex: ts=4 sw=4 et
-module(rebar_pkg_resource).
--behaviour(rebar_resource).
+-behaviour(rebar_resource_v2).
--export([lock/2
- ,download/3
- ,needs_update/2
- ,make_vsn/1]).
+-export([init/2,
+ lock/2,
+ download/4,
+ download/5,
+ needs_update/2,
+ make_vsn/2,
+ format_error/1]).
--export([request/2
- ,etag/1
- ,ssl_opts/1]).
+-ifdef(TEST).
+%% exported for test purposes
+-export([store_etag_in_cache/2]).
+-endif.
-include("rebar.hrl").
--include_lib("public_key/include/OTP-PUB-KEY.hrl").
+-include_lib("providers/include/providers.hrl").
-lock(_AppDir, Source) ->
- Source.
+-type package() :: {pkg, binary(), binary(), binary(), rebar_hex_repos:repo()}.
-needs_update(Dir, {pkg, _Name, Vsn, _Hash}) ->
- [AppInfo] = rebar_app_discover:find_apps([Dir], all),
- case rebar_app_info:original_vsn(AppInfo) =:= ec_cnv:to_list(Vsn) of
+%%==============================================================================
+%% Public API
+%%==============================================================================
+
+-spec init(atom(), rebar_state:t()) -> {ok, rebar_resource_v2:resource()}.
+init(Type, State) ->
+ {ok, Vsn} = application:get_key(rebar, vsn),
+ BaseConfig = #{http_adapter => hex_http_httpc,
+ http_user_agent_fragment =>
+ <<"(rebar3/", (list_to_binary(Vsn))/binary, ") (httpc)">>,
+ http_adapter_config => #{profile => rebar}},
+ Repos = rebar_hex_repos:from_state(BaseConfig, State),
+ Resource = rebar_resource_v2:new(Type, ?MODULE, #{repos => Repos,
+ base_config => BaseConfig}),
+ {ok, Resource}.
+
+
+
+-spec lock(AppInfo, ResourceState) -> Res when
+ AppInfo :: rebar_app_info:t(),
+ ResourceState :: rebar_resource_v2:resource_state(),
+ Res :: {atom(), string(), any(), binary()}.
+lock(AppInfo, _) ->
+ {pkg, Name, Vsn, Hash, _RepoConfig} = rebar_app_info:source(AppInfo),
+ {pkg, Name, Vsn, Hash}.
+
+%%------------------------------------------------------------------------------
+%% @doc
+%% Return true if the stored version of the pkg is older than the current
+%% version.
+%% @end
+%%------------------------------------------------------------------------------
+-spec needs_update(AppInfo, ResourceState) -> Res when
+ AppInfo :: rebar_app_info:t(),
+ ResourceState :: rebar_resource_v2:resource_state(),
+ Res :: boolean().
+needs_update(AppInfo, _) ->
+ {pkg, _Name, Vsn, _Hash, _} = rebar_app_info:source(AppInfo),
+ case rebar_utils:to_binary(rebar_app_info:original_vsn(AppInfo)) =:= rebar_utils:to_binary(Vsn) of
true ->
false;
false ->
true
end.
-download(TmpDir, Pkg={pkg, Name, Vsn, _Hash}, State) ->
- CDN = rebar_state:get(State, rebar_packages_cdn, ?DEFAULT_CDN),
- {ok, PackageDir} = rebar_packages:package_dir(State),
- Package = binary_to_list(<<Name/binary, "-", Vsn/binary, ".tar">>),
- CachePath = filename:join(PackageDir, Package),
- case rebar_utils:url_append_path(CDN, filename:join(?REMOTE_PACKAGE_DIR, Package)) of
- {ok, Url} ->
- cached_download(TmpDir, CachePath, Pkg, Url, etag(CachePath), State);
- _ ->
- {fetch_fail, Name, Vsn}
+%%------------------------------------------------------------------------------
+%% @doc
+%% Download the given pkg.
+%% @end
+%%------------------------------------------------------------------------------
+-spec download(TmpDir, AppInfo, State, ResourceState) -> Res when
+ TmpDir :: file:name(),
+ AppInfo :: rebar_app_info:t(),
+ ResourceState :: rebar_resource_v2:resource_state(),
+ State :: rebar_state:t(),
+ Res :: ok | {error,_}.
+download(TmpDir, AppInfo, State, ResourceState) ->
+ case download(TmpDir, rebar_app_info:source(AppInfo), State, ResourceState, true) of
+ ok ->
+ ok;
+ Error ->
+ {error, Error}
end.
-cached_download(TmpDir, CachePath, Pkg={pkg, Name, Vsn, _Hash}, Url, ETag, State) ->
- case request(Url, ETag) of
- {ok, cached} ->
- ?INFO("Version cached at ~s is up to date, reusing it", [CachePath]),
- serve_from_cache(TmpDir, CachePath, Pkg, State);
- {ok, Body, NewETag} ->
- ?INFO("Downloaded package, caching at ~s", [CachePath]),
- serve_from_download(TmpDir, CachePath, Pkg, NewETag, Body, State);
- error when ETag =/= false ->
- ?INFO("Download error, using cached file at ~s", [CachePath]),
- serve_from_cache(TmpDir, CachePath, Pkg, State);
- error ->
- {fetch_fail, Name, Vsn}
- end.
-
-serve_from_cache(TmpDir, CachePath, Pkg, State) ->
- {Files, Contents, Version, Meta} = extract(TmpDir, CachePath),
- case checksums(Pkg, Files, Contents, Version, Meta, State) of
- {Chk, Chk, Chk, Chk} ->
- ok = erl_tar:extract({binary, Contents}, [{cwd, TmpDir}, compressed]),
- {ok, true};
- {_Hash, Chk, Chk, Chk} ->
- ?DEBUG("Expected hash ~p does not match checksums ~p", [_Hash, Chk]),
- {unexpected_hash, CachePath, _Hash, Chk};
- {Chk, _Bin, Chk, Chk} ->
- ?DEBUG("Checksums: registry: ~p, pkg: ~p", [Chk, _Bin]),
- {failed_extract, CachePath};
- {Chk, Chk, _Reg, Chk} ->
- ?DEBUG("Checksums: registry: ~p, pkg: ~p", [_Reg, Chk]),
- {bad_registry_checksum, CachePath};
- {_Hash, _Bin, _Reg, _Tar} ->
- ?DEBUG("Checksums: expected: ~p, registry: ~p, pkg: ~p, meta: ~p", [_Hash, _Reg, _Bin, _Tar]),
- {bad_checksum, CachePath}
- end.
-
-serve_from_download(TmpDir, CachePath, Package, ETag, Binary, State) ->
- ?DEBUG("Writing ~p to cache at ~s", [Package, CachePath]),
- file:write_file(CachePath, Binary),
- case etag(CachePath) of
- ETag ->
- serve_from_cache(TmpDir, CachePath, Package, State);
- FileETag ->
- ?DEBUG("Downloaded file ~s ETag ~s doesn't match returned ETag ~s", [CachePath, ETag, FileETag]),
- {bad_download, CachePath}
+%%------------------------------------------------------------------------------
+%% @doc
+%% Download the given pkg. The etag belonging to the pkg file will be updated
+%% only if UpdateETag is true and the ETag returned from the hexpm server
+%% is different.
+%% @end
+%%------------------------------------------------------------------------------
+-spec download(TmpDir, Pkg, State, ResourceState, UpdateETag) -> Res when
+ TmpDir :: file:name(),
+ Pkg :: package(),
+ State :: rebar_state:t(),
+ ResourceState:: rebar_resource_v2:resource_state(),
+ UpdateETag :: boolean(),
+ Res :: ok | {error,_} | {unexpected_hash, string(), integer(), integer()} |
+ {fetch_fail, binary(), binary()}.
+download(TmpDir, Pkg={pkg, Name, Vsn, _Hash, Repo}, State, _ResourceState, UpdateETag) ->
+ {ok, PackageDir} = rebar_packages:package_dir(Repo, State),
+ Package = binary_to_list(<<Name/binary, "-", Vsn/binary, ".tar">>),
+ ETagFile = binary_to_list(<<Name/binary, "-", Vsn/binary, ".etag">>),
+ CachePath = filename:join(PackageDir, Package),
+ ETagPath = filename:join(PackageDir, ETagFile),
+ case cached_download(TmpDir, CachePath, Pkg, etag(CachePath, ETagPath), ETagPath, UpdateETag) of
+ {bad_registry_checksum, Expected, Found} ->
+ %% checksum comparison failed. in case this is from a modified cached package
+ %% overwrite the etag if it exists so it is not relied on again
+ store_etag_in_cache(ETagPath, <<>>),
+ ?PRV_ERROR({bad_registry_checksum, Name, Vsn, Expected, Found});
+ Result ->
+ Result
end.
-
-extract(TmpDir, CachePath) ->
- ec_file:mkdir_p(TmpDir),
- {ok, Files} = erl_tar:extract(CachePath, [memory]),
- {"contents.tar.gz", Contents} = lists:keyfind("contents.tar.gz", 1, Files),
- {"VERSION", Version} = lists:keyfind("VERSION", 1, Files),
- {"metadata.config", Meta} = lists:keyfind("metadata.config", 1, Files),
- {Files, Contents, Version, Meta}.
-
-checksums(Pkg={pkg, _Name, _Vsn, Hash}, Files, Contents, Version, Meta, State) ->
- Blob = <<Version/binary, Meta/binary, Contents/binary>>,
- <<X:256/big-unsigned>> = crypto:hash(sha256, Blob),
- BinChecksum = list_to_binary(string:to_upper(lists:flatten(io_lib:format("~64.16.0b", [X])))),
- RegistryChecksum = rebar_packages:registry_checksum(Pkg, State),
- {"CHECKSUM", TarChecksum} = lists:keyfind("CHECKSUM", 1, Files),
- {Hash, BinChecksum, RegistryChecksum, TarChecksum}.
-
-make_vsn(_) ->
+%%------------------------------------------------------------------------------
+%% @doc
+%% Implementation of rebar_resource make_vsn callback.
+%% Returns {error, string()} as this operation is not supported for pkg sources.
+%% @end
+%%------------------------------------------------------------------------------
+-spec make_vsn(AppInfo, ResourceState) -> Res when
+ AppInfo :: rebar_app_info:t(),
+ ResourceState :: rebar_resource_v2:resource_state(),
+ Res :: {'error', string()}.
+make_vsn(_, _) ->
{error, "Replacing version of type pkg not supported."}.
-request(Url, ETag) ->
- case httpc:request(get, {Url, [{"if-none-match", ETag} || ETag =/= false]++[{"User-Agent", rebar_utils:user_agent()}]},
- [{ssl, ssl_opts(Url)}, {relaxed, true}],
- [{body_format, binary}],
- rebar) of
- {ok, {{_Version, 200, _Reason}, Headers, Body}} ->
- ?DEBUG("Successfully downloaded ~s", [Url]),
- {"etag", ETag1} = lists:keyfind("etag", 1, Headers),
- {ok, Body, string:strip(ETag1, both, $")};
- {ok, {{_Version, 304, _Reason}, _Headers, _Body}} ->
- ?DEBUG("Cached copy of ~s still valid", [Url]),
+format_error({bad_registry_checksum, Name, Vsn, Expected, Found}) ->
+ io_lib:format("The checksum for package at ~ts-~ts (~ts) does not match the "
+ "checksum expected from the registry (~ts). "
+ "Run `rebar3 do unlock ~ts, update` and then try again.",
+ [Name, Vsn, Found, Expected, Name]).
+
+%%------------------------------------------------------------------------------
+%% @doc
+%% Download the pkg belonging to the given address. If the etag of the pkg
+%% is the same as what we stored in the etag file previously, return {ok, cached};
+%% if the file has changed (so the etag is not the same anymore), return
+%% {ok, Contents, NewEtag}; otherwise, if some error occurred, return error.
+%% @end
+%%------------------------------------------------------------------------------
+-spec request(rebar_hex_repos:repo(), binary(), binary(), false | binary())
+ -> {ok, cached} | {ok, binary(), binary()} | error.
+request(Config, Name, Version, ETag) ->
+ Config1 = Config#{http_etag => ETag},
+ try hex_repo:get_tarball(Config1, Name, Version) of
+ {ok, {200, #{<<"etag">> := ETag1}, Tarball}} ->
+ {ok, Tarball, ETag1};
+ {ok, {304, _Headers, _}} ->
{ok, cached};
- {ok, {{_Version, Code, _Reason}, _Headers, _Body}} ->
- ?DEBUG("Request to ~p failed: status code ~p", [Url, Code]),
+ {ok, {Code, _Headers, _Body}} ->
+ ?DEBUG("Request for package ~s-~s failed: status code ~p", [Name, Version, Code]),
error;
{error, Reason} ->
- ?DEBUG("Request to ~p failed: ~p", [Url, Reason]),
+ ?DEBUG("Request for package ~s-~s failed: ~p", [Name, Version, Reason]),
+ error
+ catch
+ _:Exception ->
+ ?DEBUG("hex_repo:get_tarball failed: ~p", [Exception]),
error
end.
-etag(Path) ->
- case file:read_file(Path) of
- {ok, Binary} ->
- <<X:128/big-unsigned-integer>> = crypto:hash(md5, Binary),
- string:to_lower(lists:flatten(io_lib:format("~32.16.0b", [X])));
+%%------------------------------------------------------------------------------
+%% @doc
+%% Read the etag belonging to the pkg file from the cache directory. The etag
+%% is stored in a separate file when the etag belonging to the package is
+%% returned from the hexpm server. The name is package-vsn.etag.
+%% @end
+%%------------------------------------------------------------------------------
+-spec etag(PackagePath, ETagPath) -> Res when
+ PackagePath :: file:name(),
+ ETagPath :: file:name(),
+ Res :: binary().
+etag(PackagePath, ETagPath) ->
+ case file:read_file(ETagPath) of
+ {ok, Bin} ->
+ %% just in case a user deleted a cached package but not its etag
+ %% verify the package is also there, and if not, ignore the etag
+ case filelib:is_file(PackagePath) of
+ true ->
+ Bin;
+ false ->
+ <<>>
+ end;
{error, _} ->
- false
+ <<>>
end.
-ssl_opts(Url) ->
- case get_ssl_config() of
- ssl_verify_enabled ->
- ssl_opts(ssl_verify_enabled, Url);
- ssl_verify_disabled ->
- [{verify, verify_none}]
- end.
-
-ssl_opts(ssl_verify_enabled, Url) ->
- case check_ssl_version() of
- true ->
- {ok, {_, _, Hostname, _, _, _}} = http_uri:parse(ec_cnv:to_list(Url)),
- VerifyFun = {fun ssl_verify_hostname:verify_fun/3, [{check_hostname, Hostname}]},
- CACerts = certifi:cacerts(),
- [{verify, verify_peer}, {depth, 2}, {cacerts, CACerts}
- ,{partial_chain, fun partial_chain/1}, {verify_fun, VerifyFun}];
- false ->
- ?WARN("Insecure HTTPS request (peer verification disabled), please update to OTP 17.4 or later", []),
- [{verify, verify_none}]
- end.
+%%------------------------------------------------------------------------------
+%% @doc
+%% Store the given etag in the .cache folder. The name is package-vsn.etag.
+%% @end
+%%------------------------------------------------------------------------------
+-spec store_etag_in_cache(File, ETag) -> Res when
+ File :: file:name(),
+ ETag :: binary(),
+ Res :: ok.
+store_etag_in_cache(Path, ETag) ->
+ _ = file:write_file(Path, ETag).
-partial_chain(Certs) ->
- Certs1 = [{Cert, public_key:pkix_decode_cert(Cert, otp)} || Cert <- Certs],
- CACerts = certifi:cacerts(),
- CACerts1 = [public_key:pkix_decode_cert(Cert, otp) || Cert <- CACerts],
-
- case ec_lists:find(fun({_, Cert}) ->
- check_cert(CACerts1, Cert)
- end, Certs1) of
- {ok, Trusted} ->
- {trusted_ca, element(1, Trusted)};
- _ ->
- unknown_ca
+%%%=============================================================================
+%%% Private functions
+%%%=============================================================================
+-spec cached_download(TmpDir, CachePath, Pkg, ETag, ETagPath, UpdateETag) -> Res when
+ TmpDir :: file:name(),
+ CachePath :: file:name(),
+ Pkg :: package(),
+ ETag :: binary(),
+ ETagPath :: file:name(),
+ UpdateETag :: boolean(),
+ Res :: ok | {unexpected_hash, integer(), integer()} | {fetch_fail, binary(), binary()}.
+cached_download(TmpDir, CachePath, Pkg={pkg, Name, Vsn, _Hash, RepoConfig}, ETag,
+ ETagPath, UpdateETag) ->
+ case request(RepoConfig, Name, Vsn, ETag) of
+ {ok, cached} ->
+ ?INFO("Version cached at ~ts is up to date, reusing it", [CachePath]),
+ serve_from_cache(TmpDir, CachePath, Pkg);
+ {ok, Body, NewETag} ->
+ ?INFO("Downloaded package, caching at ~ts", [CachePath]),
+ maybe_store_etag_in_cache(UpdateETag, ETagPath, NewETag),
+ serve_from_download(TmpDir, CachePath, Pkg, Body);
+ error when ETag =/= <<>> ->
+ store_etag_in_cache(ETagPath, ETag),
+ ?INFO("Download error, using cached file at ~ts", [CachePath]),
+ serve_from_cache(TmpDir, CachePath, Pkg);
+ error ->
+ {fetch_fail, Name, Vsn}
end.
-extract_public_key_info(Cert) ->
- ((Cert#'OTPCertificate'.tbsCertificate)#'OTPTBSCertificate'.subjectPublicKeyInfo).
-
-check_cert(CACerts, Cert) ->
- lists:any(fun(CACert) ->
- extract_public_key_info(CACert) == extract_public_key_info(Cert)
- end, CACerts).
+-spec serve_from_cache(TmpDir, CachePath, Pkg) -> Res when
+ TmpDir :: file:name(),
+ CachePath :: file:name(),
+ Pkg :: package(),
+ Res :: ok | {error,_} | {bad_registry_checksum, integer(), integer()}.
+serve_from_cache(TmpDir, CachePath, Pkg) ->
+ {ok, Binary} = file:read_file(CachePath),
+ serve_from_memory(TmpDir, Binary, Pkg).
-check_ssl_version() ->
- case application:get_key(ssl, vsn) of
- {ok, Vsn} ->
- parse_vsn(Vsn) >= {5, 3, 6};
- _ ->
- false
+-spec serve_from_memory(TmpDir, Tarball, Package) -> Res when
+ TmpDir :: file:name(),
+ Tarball :: binary(),
+ Package :: package(),
+ Res :: ok | {error,_} | {bad_registry_checksum, integer(), integer()}.
+serve_from_memory(TmpDir, Binary, {pkg, _Name, _Vsn, Hash, _RepoConfig}) ->
+ RegistryChecksum = list_to_integer(binary_to_list(Hash), 16),
+ case hex_tarball:unpack(Binary, TmpDir) of
+ {ok, #{checksum := <<Checksum:256/big-unsigned>>}} when RegistryChecksum =/= Checksum ->
+ ?DEBUG("Expected hash ~64.16.0B does not match checksum of fetched package ~64.16.0B",
+ [RegistryChecksum, Checksum]),
+ {bad_registry_checksum, RegistryChecksum, Checksum};
+ {ok, #{checksum := <<RegistryChecksum:256/big-unsigned>>}} ->
+ ok;
+ {error, Reason} ->
+ {error, {hex_tarball, Reason}}
end.
-get_ssl_config() ->
- GlobalConfigFile = rebar_dir:global_config(),
- Config = rebar_config:consult_file(GlobalConfigFile),
- case proplists:get_value(ssl_verify, Config, []) of
- false ->
- ssl_verify_disabled;
- _ ->
- ssl_verify_enabled
- end.
+-spec serve_from_download(TmpDir, CachePath, Package, Binary) -> Res when
+ TmpDir :: file:name(),
+ CachePath :: file:name(),
+ Package :: package(),
+ Binary :: binary(),
+ Res :: ok | {error,_}.
+serve_from_download(TmpDir, CachePath, Package, Binary) ->
+ ?DEBUG("Writing ~p to cache at ~ts", [Package, CachePath]),
+ file:write_file(CachePath, Binary),
+ serve_from_memory(TmpDir, Binary, Package).
-parse_vsn(Vsn) ->
- version_pad(string:tokens(Vsn, ".-")).
-
-version_pad([Major]) ->
- {list_to_integer(Major), 0, 0};
-version_pad([Major, Minor]) ->
- {list_to_integer(Major), list_to_integer(Minor), 0};
-version_pad([Major, Minor, Patch]) ->
- {list_to_integer(Major), list_to_integer(Minor), list_to_integer(Patch)};
-version_pad([Major, Minor, Patch | _]) ->
- {list_to_integer(Major), list_to_integer(Minor), list_to_integer(Patch)}.
+-spec maybe_store_etag_in_cache(UpdateETag, Path, ETag) -> Res when
+ UpdateETag :: boolean(),
+ Path :: file:name(),
+ ETag :: binary(),
+ Res :: ok.
+maybe_store_etag_in_cache(false = _UpdateETag, _Path, _ETag) ->
+ ok;
+maybe_store_etag_in_cache(true = _UpdateETag, Path, ETag) ->
+ store_etag_in_cache(Path, ETag).
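%% [Editor's note: illustrative sketch, not part of the patch] With the per-repo
%% cache layout used above, a fetched package and its stored ETag end up side by
%% side under the registry directory, e.g. (paths are hypothetical defaults):
%%
%%   ~/.cache/rebar3/hex/hexpm/packages/cowboy-2.6.0.tar
%%   ~/.cache/rebar3/hex/hexpm/packages/cowboy-2.6.0.etag
%%
%% A later download with UpdateETag=true passes the stored ETag (via the
%% http_etag config) to hex_repo:get_tarball/3; a 304 reply means the cached
%% tarball is reused instead of being fetched again.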
diff --git a/src/rebar_plugins.erl b/src/rebar_plugins.erl
index 68ba6da..2a78c6e 100644
--- a/src/rebar_plugins.erl
+++ b/src/rebar_plugins.erl
@@ -39,13 +39,18 @@ project_apps_install(State) ->
Profiles = rebar_state:current_profiles(State),
ProjectApps = rebar_state:project_apps(State),
lists:foldl(fun(Profile, StateAcc) ->
- Plugins = rebar_state:get(State, {plugins, Profile}, []),
- StateAcc1 = handle_plugins(Profile, Plugins, StateAcc),
+ StateAcc1 = case Profile of
+ default ->
+ %% default profile top level plugins
+ %% are installed in run_aux
+ StateAcc;
+ _ ->
+ Plugins = rebar_state:get(State, {plugins, Profile}, []),
+ handle_plugins(Profile, Plugins, StateAcc)
+ end,
lists:foldl(fun(AppInfo, StateAcc2) ->
- C = rebar_config:consult(rebar_app_info:dir(AppInfo)),
- AppInfo0 = rebar_app_info:update_opts(AppInfo, rebar_app_info:opts(AppInfo), C),
- Plugins2 = rebar_app_info:get(AppInfo0, {plugins, Profile}, []),
+ Plugins2 = rebar_app_info:get(AppInfo, {plugins, Profile}, []),
handle_plugins(Profile, Plugins2, StateAcc2)
end, StateAcc1, ProjectApps)
end, State, Profiles).
@@ -62,12 +67,25 @@ install(State, AppInfo) ->
State2 = lists:foldl(fun(Profile, StateAcc) ->
Plugins = rebar_app_info:get(AppInfo, {plugins, Profile}, []),
- handle_plugins(Profile, Plugins, StateAcc)
+ Plugins1 = filter_existing_plugins(Plugins, StateAcc),
+ handle_plugins(Profile, Plugins1, StateAcc)
end, State1, Profiles),
%% Reset the overrides after processing the dep
rebar_state:set(State2, overrides, StateOverrides).
+filter_existing_plugins(Plugins, State) ->
+ PluginNames = lists:zip(Plugins, rebar_state:deps_names(Plugins)),
+ AllPlugins = rebar_state:all_plugin_deps(State),
+ lists:filtermap(fun({Plugin, PluginName}) ->
+ case rebar_app_utils:find(PluginName, AllPlugins) of
+ {ok, _} ->
+ false;
+ _ ->
+ {true, Plugin}
+ end
+ end, PluginNames).
+
handle_plugins(Profile, Plugins, State) ->
handle_plugins(Profile, Plugins, State, false).
@@ -76,7 +94,6 @@ handle_plugins(Profile, Plugins, State, Upgrade) ->
Locks = rebar_state:lock(State),
DepsDir = rebar_state:get(State, deps_dir, ?DEFAULT_DEPS_DIR),
State1 = rebar_state:set(State, deps_dir, ?DEFAULT_PLUGINS_DIR),
-
%% Install each plugin individually so if one fails to install it doesn't affect the others
{_PluginProviders, State2} =
lists:foldl(fun(Plugin, {PluginAcc, StateAcc}) ->
@@ -96,8 +113,8 @@ handle_plugin(Profile, Plugin, State, Upgrade) ->
ToBuild = rebar_prv_install_deps:cull_compile(Sorted, []),
%% Add already built plugin deps to the code path
- CodePaths = [rebar_app_info:ebin_dir(A) || A <- Apps -- ToBuild],
- code:add_pathsa(CodePaths),
+ PreBuiltPaths = [rebar_app_info:ebin_dir(A) || A <- Apps] -- ToBuild,
+ code:add_pathsa(PreBuiltPaths),
%% Build plugin and its deps
[build_plugin(AppInfo, Apps, State2) || AppInfo <- ToBuild],
@@ -105,23 +122,21 @@ handle_plugin(Profile, Plugin, State, Upgrade) ->
%% Add newly built deps and plugin to code path
State3 = rebar_state:update_all_plugin_deps(State2, Apps),
NewCodePaths = [rebar_app_info:ebin_dir(A) || A <- ToBuild],
- code:add_pathsa(CodePaths),
%% Store plugin code paths so we can remove them when compiling project apps
- State4 = rebar_state:update_code_paths(State3, all_plugin_deps, CodePaths++NewCodePaths),
+ State4 = rebar_state:update_code_paths(State3, all_plugin_deps, PreBuiltPaths++NewCodePaths),
+ rebar_paths:set_paths([plugins], State4),
{plugin_providers(Plugin), State4}
catch
- C:T ->
- ?DEBUG("~p ~p ~p", [C, T, erlang:get_stacktrace()]),
+ ?WITH_STACKTRACE(C,T,S)
+ ?DEBUG("~p ~p ~p", [C, T, S]),
?WARN("Plugin ~p not available. It will not be used.", [Plugin]),
{[], State}
end.
build_plugin(AppInfo, Apps, State) ->
Providers = rebar_state:providers(State),
- %Providers1 = rebar_state:providers(rebar_app_info:state(AppInfo)),
- %rebar_app_info:state_or_new(State, AppInfo)
S = rebar_state:all_deps(State, Apps),
S1 = rebar_state:set(S, deps_dir, ?DEFAULT_PLUGINS_DIR),
rebar_prv_compile:compile(S1, Providers, AppInfo).
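%% [Editor's note: illustrative sketch, not part of the patch] The effect of
%% filter_existing_plugins/2 above: a plugin a dependency declares but which the
%% top level already built is skipped rather than fetched again, e.g.
%%
%%   %% top-level rebar.config
%%   {plugins, [rebar3_hex]}.
%%   %% a dependency's rebar.config (some_other_plugin is a made-up name)
%%   {plugins, [rebar3_hex, some_other_plugin]}.
%%
%% Only some_other_plugin would be handled when installing the dependency's
%% plugins, since rebar3_hex is already present in all_plugin_deps.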
diff --git a/src/rebar_prv_alias.erl b/src/rebar_prv_alias.erl
new file mode 100644
index 0000000..ce56f29
--- /dev/null
+++ b/src/rebar_prv_alias.erl
@@ -0,0 +1,138 @@
+%%% @doc Meta-provider that dynamically compiles providers
+%%% to run aliased commands.
+%%%
+%%% This is hackish and out-there, but this module has graduated
+%%% from a plugin at https://github.com/tsloughter/rebar_alias after
+%%% years of stability. Only some error checks were added.
+-module(rebar_prv_alias).
+
+-export([init/1]).
+-include("rebar.hrl").
+
+%% ===================================================================
+%% Public API
+%% ===================================================================
+-spec init(rebar_state:t()) -> {ok, rebar_state:t()}.
+init(State) ->
+ Aliases = rebar_state:get(State, alias, []),
+ lists:foldl(fun({Alias, Cmds}, {ok, StateAcc}) ->
+ case validate_provider(Alias, Cmds, State) of
+ true -> init_alias(Alias, Cmds, StateAcc);
+ false -> {ok, State}
+ end
+ end, {ok, State}, Aliases).
+
+init_alias(Alias, Cmds, State) ->
+ Module = list_to_atom("rebar_prv_alias_" ++ atom_to_list(Alias)),
+
+ MF = module(Module),
+ EF = exports(),
+ FF = do_func(Cmds),
+
+ {ok, _, Bin} = compile:forms([MF, EF, FF]),
+ code:load_binary(Module, "none", Bin),
+
+ Provider = providers:create([
+ {name, Alias},
+ {module, Module},
+ {bare, true},
+ {deps, []},
+ {example, example(Alias)},
+ {opts, []},
+ {short_desc, desc(Cmds)},
+ {desc, desc(Cmds)}
+ ]),
+ {ok, rebar_state:add_provider(State, Provider)}.
+
+validate_provider(Alias, Cmds, State) ->
+ %% This would be caught and prevented anyway, but the warning
+ %% is friendlier
+ case providers:get_provider(Alias, rebar_state:providers(State)) of
+ not_found ->
+ %% check for circular deps in the alias.
+ case not proplists:is_defined(Alias, Cmds) of
+ true -> true;
+ false ->
+ ?WARN("Alias ~p contains itself and would never "
+ "terminate. It will be ignored.",
+ [Alias]),
+ false
+ end;
+ _ ->
+ ?WARN("Alias ~p is already the name of a command in "
+ "the default namespace and will be ignored.",
+ [Alias]),
+ false
+ end.
+
+
+example(Alias) ->
+ "rebar3 " ++ atom_to_list(Alias).
+
+desc(Cmds) ->
+ "Equivalent to running: rebar3 do "
+ ++ rebar_string:join(lists:map(fun to_desc/1, Cmds), ",").
+
+to_desc({Cmd, Args}) when is_list(Args) ->
+ atom_to_list(Cmd) ++ " " ++ Args;
+to_desc({Namespace, Cmd}) ->
+ atom_to_list(Namespace) ++ " " ++ atom_to_list(Cmd);
+to_desc({Namespace, Cmd, Args}) ->
+ atom_to_list(Namespace) ++ " " ++ atom_to_list(Cmd) ++ " " ++ Args;
+to_desc(Cmd) ->
+ atom_to_list(Cmd).
+
+module(Name) ->
+ {attribute, 1, module, Name}.
+
+exports() ->
+ {attribute, 1, export, [{do, 1}]}.
+
+do_func(Cmds) ->
+ {function, 1, do, 1,
+ [{clause, 1,
+ [{var, 1, 'State'}],
+ [],
+ [{call, 1,
+ {remote, 1, {atom, 1, rebar_prv_do}, {atom, 1, do_tasks}},
+ [make_args(Cmds), {var, 1, 'State'}]}]}]}.
+
+make_args(Cmds) ->
+ make_list(
+ lists:map(fun make_tuple/1,
+ lists:map(fun make_arg/1, Cmds))).
+
+make_arg({Namespace, Command, Args}) when is_atom(Namespace), is_atom(Command) ->
+ {make_atom(Namespace),
+ make_atom(Command),
+ make_list([make_string(A) || A <- split_args(Args)])};
+make_arg({Namespace, Command}) when is_atom(Namespace), is_atom(Command) ->
+ {make_atom(Namespace), make_atom(Command)};
+make_arg({Cmd, Args}) ->
+ {make_string(Cmd), make_list([make_string(A) || A <- split_args(Args)])};
+make_arg(Cmd) ->
+ {make_string(Cmd), make_list([])}.
+
+make_tuple(Tuple) ->
+ {tuple, 1, tuple_to_list(Tuple)}.
+
+make_list(List) ->
+ lists:foldr(
+ fun(Elem, Acc) -> {cons, 1, Elem, Acc} end,
+ {nil, 1},
+ List).
+
+make_string(Atom) when is_atom(Atom) ->
+ make_string(atom_to_list(Atom));
+make_string(String) when is_list(String) ->
+ {string, 1, String}.
+
+make_atom(Atom) when is_atom(Atom) ->
+ {atom, 1, Atom}.
+
+%% In case someone used the long option format, the option needs to get
+%% separated from its value.
+split_args(Args) ->
+ rebar_string:lexemes(
+ lists:map(fun($=) -> 32; (C) -> C end, Args),
+ " ").
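%% [Editor's note: illustrative sketch, not part of the patch] An alias entry in
%% rebar.config that this provider would compile into a `rebar3 check` command;
%% the alias name and task list are made up for the example:
%%
%%   {alias, [{check, [xref, dialyzer, {eunit, "--cover"}, cover]}]}.
%%
%% Each element follows the shapes handled by make_arg/1: a bare task, a
%% {Task, ArgString} pair, or a {Namespace, Task} / {Namespace, Task, Args}
%% triple.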
diff --git a/src/rebar_prv_app_discovery.erl b/src/rebar_prv_app_discovery.erl
index 1954214..f5bab49 100644
--- a/src/rebar_prv_app_discovery.erl
+++ b/src/rebar_prv_app_discovery.erl
@@ -49,19 +49,19 @@ do(State) ->
-spec format_error(any()) -> iolist().
format_error({multiple_app_files, Files}) ->
- io_lib:format("Multiple app files found in one app dir: ~s", [string:join(Files, " and ")]);
+ io_lib:format("Multiple app files found in one app dir: ~ts", [rebar_string:join(Files, " and ")]);
format_error({invalid_app_file, File, Reason}) ->
case Reason of
{Line, erl_parse, Description} ->
- io_lib:format("Invalid app file ~s at line ~b: ~p",
+ io_lib:format("Invalid app file ~ts at line ~b: ~p",
[File, Line, lists:flatten(Description)]);
_ ->
- io_lib:format("Invalid app file ~s: ~p", [File, Reason])
+ io_lib:format("Invalid app file ~ts: ~p", [File, Reason])
end;
%% Provide a slightly more informative error message for consult of app file failure
format_error({rebar_file_utils, {bad_term_file, AppFile, Reason}}) ->
- io_lib:format("Error in app file ~s: ~s", [rebar_dir:make_relative_path(AppFile,
- rebar_dir:get_cwd()),
- file:format_error(Reason)]);
+ io_lib:format("Error in app file ~ts: ~ts", [rebar_dir:make_relative_path(AppFile,
+ rebar_dir:get_cwd()),
+ file:format_error(Reason)]);
format_error(Reason) ->
io_lib:format("~p", [Reason]).
diff --git a/src/rebar_prv_as.erl b/src/rebar_prv_as.erl
index b4f7ac4..562ce99 100644
--- a/src/rebar_prv_as.erl
+++ b/src/rebar_prv_as.erl
@@ -33,9 +33,11 @@ init(State) ->
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
do(State) ->
{Profiles, Tasks} = args_to_profiles_and_tasks(rebar_state:command_args(State)),
- case Profiles of
- [] ->
+ case {Profiles, Tasks} of
+ {[], _} ->
{error, "At least one profile must be specified when using `as`"};
+ {_, []} ->
+ {error, "At least one task must be specified when using `as`"};
_ ->
warn_on_empty_profile(Profiles, State),
State1 = rebar_state:apply_profiles(State, [list_to_atom(X) || X <- Profiles]),
@@ -62,7 +64,7 @@ args_to_profiles_and_tasks(Args) ->
first_profile([]) -> {[], []};
first_profile([ProfileList|Rest]) ->
- case re:split(ProfileList, ",", [{return, list}, {parts, 2}]) of
+ case re:split(ProfileList, ",", [{return, list}, {parts, 2}, unicode]) of
%% `foo, bar`
[P, ""] -> profiles(Rest, [P]);
%% `foo,bar`
@@ -73,7 +75,7 @@ first_profile([ProfileList|Rest]) ->
profiles([], Acc) -> {lists:reverse(Acc), rebar_utils:args_to_tasks([])};
profiles([ProfileList|Rest], Acc) ->
- case re:split(ProfileList, ",", [{return, list}, {parts, 2}]) of
+ case re:split(ProfileList, ",", [{return, list}, {parts, 2}, unicode]) of
%% `foo, bar`
[P, ""] -> profiles(Rest, [P|Acc]);
%% `foo,bar`
@@ -99,5 +101,5 @@ warn_on_empty_profile(Profiles, State) ->
ProjectApps = rebar_state:project_apps(State),
DefinedProfiles = rebar_state:get(State, profiles, []) ++
lists:flatten([rebar_app_info:get(AppInfo, profiles, []) || AppInfo <- ProjectApps]),
- [?WARN("No entry for profile ~s in config.", [Profile]) ||
+ [?WARN("No entry for profile ~ts in config.", [Profile]) ||
Profile <- Profiles, not(lists:keymember(list_to_atom(Profile), 1, DefinedProfiles))].
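%% [Editor's note: illustrative sketch, not part of the patch] Behaviour of the
%% extra guard added to `rebar3 as` above, roughly:
%%
%%   $ rebar3 as                     %% error: at least one profile must be specified
%%   $ rebar3 as prod                %% error (new): at least one task must be specified
%%   $ rebar3 as prod,test compile   %% ok: applies both profiles, then runs compile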
diff --git a/src/rebar_prv_bare_compile.erl b/src/rebar_prv_bare_compile.erl
index 201620a..5d3e977 100644
--- a/src/rebar_prv_bare_compile.erl
+++ b/src/rebar_prv_bare_compile.erl
@@ -29,7 +29,8 @@ init(State) ->
{example, ""},
{short_desc, ""},
{desc, ""},
- {opts, [{paths, $p, "paths", string, "Wildcard path of ebin directories to add to code path"}]}])),
+ {opts, [{paths, $p, "paths", string, "Wildcard paths of ebin directories to add to code path, separated by a colon"},
+ {separator, $s, "separator", string, "In case of multiple return paths, the separator character to use to join them."}]}])),
{ok, State1}.
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
@@ -39,12 +40,15 @@ do(State) ->
%% Add code paths from --paths to the beginning of the code path
{RawOpts, _} = rebar_state:command_parsed_args(State),
Paths = proplists:get_value(paths, RawOpts),
- CodePaths = filelib:wildcard(Paths),
- code:add_pathsa(CodePaths),
+ Sep = proplists:get_value(separator, RawOpts, " "),
+ [ code:add_pathsa(filelib:wildcard(PathWildcard))
+ || PathWildcard <- rebar_string:lexemes(Paths, Sep) ],
[AppInfo] = rebar_state:project_apps(State),
AppInfo1 = rebar_app_info:out_dir(AppInfo, rebar_dir:get_cwd()),
- rebar_prv_compile:compile(State, AppInfo1),
+
+ %% run compile in the default namespace
+ rebar_prv_compile:compile(rebar_state:namespace(State, default), AppInfo1),
rebar_utils:cleanup_code_path(OrigPath),
diff --git a/src/rebar_prv_clean.erl b/src/rebar_prv_clean.erl
index 8f31fdd..3c8a0c3 100644
--- a/src/rebar_prv_clean.erl
+++ b/src/rebar_prv_clean.erl
@@ -12,7 +12,7 @@
-include("rebar.hrl").
-define(PROVIDER, clean).
--define(DEPS, [app_discovery]).
+-define(DEPS, [app_discovery, install_deps]).
%% ===================================================================
%% Public API
@@ -44,7 +44,8 @@ do(State) ->
case All of
true ->
DepsDir = rebar_dir:deps_dir(State1),
- AllApps = rebar_app_discover:find_apps([filename:join(DepsDir, "*")], all),
+ DepsDirs = filelib:wildcard(filename:join(DepsDir, "*")),
+ AllApps = rebar_app_discover:find_apps(DepsDirs, all),
clean_apps(State1, Providers, AllApps);
false ->
ProjectApps = rebar_state:project_apps(State1),
@@ -66,11 +67,12 @@ format_error(Reason) ->
%% ===================================================================
clean_apps(State, Providers, Apps) ->
+ Compilers = rebar_state:compilers(State),
[begin
- ?INFO("Cleaning out ~s...", [rebar_app_info:name(AppInfo)]),
+ ?INFO("Cleaning out ~ts...", [rebar_app_info:name(AppInfo)]),
AppDir = rebar_app_info:dir(AppInfo),
AppInfo1 = rebar_hooks:run_all_hooks(AppDir, pre, ?PROVIDER, Providers, AppInfo, State),
- rebar_erlc_compiler:clean(AppInfo1),
+ rebar_compiler:clean(Compilers, AppInfo1),
rebar_hooks:run_all_hooks(AppDir, post, ?PROVIDER, Providers, AppInfo1, State)
end || AppInfo <- Apps].
diff --git a/src/rebar_prv_common_test.erl b/src/rebar_prv_common_test.erl
index fbd0e89..3d3bd8a 100644
--- a/src/rebar_prv_common_test.erl
+++ b/src/rebar_prv_common_test.erl
@@ -8,8 +8,11 @@
-export([init/1,
do/1,
format_error/1]).
-%% exported for test purposes, consider private
--export([compile/2, prepare_tests/1, translate_paths/2]).
+
+-ifdef(TEST).
+%% exported for test purposes
+-export([compile/2, prepare_tests/1, translate_paths/2, maybe_write_coverdata/1]).
+-endif.
-include("rebar.hrl").
-include_lib("providers/include/providers.hrl").
@@ -41,14 +44,21 @@ do(State) ->
Tests = prepare_tests(State),
case compile(State, Tests) of
%% successfully compiled apps
- {ok, S} -> do(S, Tests);
+ {ok, S} ->
+ {RawOpts, _} = rebar_state:command_parsed_args(S),
+ case proplists:get_value(compile_only, RawOpts, false) of
+ true ->
+ {ok, S};
+ false ->
+ do(S, Tests)
+ end;
%% this should look like a compiler error, not a ct error
Error -> Error
end.
do(State, Tests) ->
?INFO("Running Common Test suites...", []),
- rebar_utils:update_code(rebar_state:code_paths(State, all_deps), [soft_purge]),
+ rebar_paths:set_paths([deps, plugins], State),
%% Run ct provider prehooks
Providers = rebar_state:providers(State),
@@ -63,14 +73,14 @@ do(State, Tests) ->
ok ->
%% Run ct provider post hooks for all project apps and top level project hooks
rebar_hooks:run_project_and_app_hooks(Cwd, post, ?PROVIDER, Providers, State),
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)),
+ rebar_paths:set_paths([plugins, deps], State),
{ok, State};
Error ->
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)),
+ rebar_paths:set_paths([plugins, deps], State),
Error
end;
Error ->
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)),
+ rebar_paths:set_paths([plugins, deps], State),
Error
end.
@@ -93,14 +103,16 @@ format_error({error, Reason}) ->
format_error({error_running_tests, Reason}) ->
format_error({error, Reason});
format_error({failures_running_tests, {Failed, AutoSkipped}}) ->
- io_lib:format("Failures occured running tests: ~b", [Failed+AutoSkipped]);
+ io_lib:format("Failures occurred running tests: ~b", [Failed+AutoSkipped]);
format_error({badconfig, {Msg, {Value, Key}}}) ->
io_lib:format(Msg, [Value, Key]);
format_error({badconfig, Msg}) ->
io_lib:format(Msg, []);
format_error({multiple_errors, Errors}) ->
io_lib:format(lists:concat(["Error running tests:"] ++
- lists:map(fun(Error) -> "~n " ++ Error end, Errors)), []).
+ lists:map(fun(Error) -> "~n " ++ Error end, Errors)), []);
+format_error({error_reading_testspec, Reason}) ->
+ io_lib:format("Error reading testspec: ~p", [Reason]).
%% ===================================================================
%% Internal functions
@@ -126,7 +138,7 @@ cmdopts(State) ->
{RawOpts, _} = rebar_state:command_parsed_args(State),
%% filter out opts common_test doesn't know about and convert
%% to ct acceptable forms
- transform_opts(RawOpts, []).
+ transform_retry(transform_opts(RawOpts, []), State).
transform_opts([], Acc) -> lists:reverse(Acc);
transform_opts([{dir, Dirs}|Rest], Acc) ->
@@ -139,6 +151,8 @@ transform_opts([{testcase, Cases}|Rest], Acc) ->
transform_opts(Rest, [{testcase, split_string(Cases)}|Acc]);
transform_opts([{config, Configs}|Rest], Acc) ->
transform_opts(Rest, [{config, split_string(Configs)}|Acc]);
+transform_opts([{spec, Specs}|Rest], Acc) ->
+ transform_opts(Rest, [{spec, split_string(Specs)}|Acc]);
transform_opts([{include, Includes}|Rest], Acc) ->
transform_opts(Rest, [{include, split_string(Includes)}|Acc]);
transform_opts([{logopts, LogOpts}|Rest], Acc) ->
@@ -161,8 +175,20 @@ transform_opts([{verbose, _}|Rest], Acc) ->
transform_opts([Opt|Rest], Acc) ->
transform_opts(Rest, [Opt|Acc]).
+%% @private only retry if specified and if no other spec
+%% is given.
+transform_retry(Opts, State) ->
+ case proplists:get_value(retry, Opts, false) andalso
+ not is_any_defined([spec,dir,suite], Opts) of
+ false ->
+ Opts;
+ true ->
+ Path = filename:join([rebar_dir:base_dir(State), "logs", "retry.spec"]),
+ filelib:is_file(Path) andalso [{spec, Path}|Opts]
+ end.
+
split_string(String) ->
- string:tokens(String, [$,]).
+ rebar_string:lexemes(String, [$,]).
cfgopts(State) ->
case rebar_state:get(State, ct_opts, []) of
@@ -174,9 +200,6 @@ cfgopts(State) ->
end.
ensure_opts([], Acc) -> lists:reverse(Acc);
-ensure_opts([{test_spec, _}|Rest], Acc) ->
- ?WARN("Test specs not supported. See http://www.rebar3.org/docs/running-tests#common-test", []),
- ensure_opts(Rest, Acc);
ensure_opts([{cover, _}|Rest], Acc) ->
?WARN("Cover specs not supported. See http://www.rebar3.org/docs/running-tests#common-test", []),
ensure_opts(Rest, Acc);
@@ -204,16 +227,20 @@ add_hooks(Opts, State) ->
case {readable(State), lists:keyfind(ct_hooks, 1, Opts)} of
{false, _} ->
Opts;
- {true, false} ->
- [{ct_hooks, [cth_readable_failonly, cth_readable_shell]} | Opts];
- {true, {ct_hooks, Hooks}} ->
+ {Other, false} ->
+ [{ct_hooks, [cth_readable_failonly, readable_shell_type(Other), cth_retry]} | Opts];
+ {Other, {ct_hooks, Hooks}} ->
%% Make sure hooks are there once only.
- ReadableHooks = [cth_readable_failonly, cth_readable_shell],
- NewHooks = (Hooks -- ReadableHooks) ++ ReadableHooks,
+ ReadableHooks = [cth_readable_failonly, readable_shell_type(Other), cth_retry],
+ AllReadableHooks = [cth_readable_failonly, cth_retry,
+ cth_readable_shell, cth_readable_compact_shell],
+ NewHooks = (Hooks -- AllReadableHooks) ++ ReadableHooks,
lists:keyreplace(ct_hooks, 1, Opts, {ct_hooks, NewHooks})
end.
-select_tests(_, _, {error, _} = Error, _) -> Error;
+readable_shell_type(true) -> cth_readable_shell;
+readable_shell_type(compact) -> cth_readable_compact_shell.
+
select_tests(_, _, _, {error, _} = Error) -> Error;
select_tests(State, ProjectApps, CmdOpts, CfgOpts) ->
%% set application env if sys_config argument is provided
@@ -221,23 +248,48 @@ select_tests(State, ProjectApps, CmdOpts, CfgOpts) ->
Configs = lists:flatmap(fun(Filename) ->
rebar_file_utils:consult_config(State, Filename)
end, SysConfigs),
- [application:load(Application) || Config <- SysConfigs, {Application, _} <- Config],
+ %% NB: load the applications (from user directories too) to support OTP < 17
+ %% to our best ability.
+ rebar_paths:set_paths([deps, plugins], State),
+ [application:load(Application) || Config <- Configs, {Application, _} <- Config],
rebar_utils:reread_config(Configs),
- Merged = lists:ukeymerge(1,
- lists:ukeysort(1, CmdOpts),
- lists:ukeysort(1, CfgOpts)),
- %% make sure `dir` and/or `suite` from command line go in as
- %% a pair overriding both `dir` and `suite` from config if
- %% they exist
- Opts = case {proplists:get_value(suite, CmdOpts), proplists:get_value(dir, CmdOpts)} of
- {undefined, undefined} -> Merged;
- {_Suite, undefined} -> lists:keydelete(dir, 1, Merged);
- {undefined, _Dir} -> lists:keydelete(suite, 1, Merged);
- {_Suite, _Dir} -> Merged
- end,
+ Opts = merge_opts(CmdOpts,CfgOpts),
discover_tests(State, ProjectApps, Opts).
+%% Merge the option lists from command line and rebar.config:
+%%
+%% - Options set on the command line will replace the same options if
+%% set in rebar.config.
+%%
+%% - Special care is taken with options that select which tests to
+%% run - ANY such option on the command line will replace ALL such
+%% options in the config.
+%%
+%% Note that if 'spec' is given, common_test will ignore all 'dir',
+%% 'suite', 'group' and 'case', so there is no need to explicitly
+%% remove any options from the command line.
+%%
+%% All faulty combinations of options are also handled by
+%% common_test and are not taken into account here.
+merge_opts(CmdOpts0, CfgOpts0) ->
+ TestSelectOpts = [spec,dir,suite,group,testcase],
+ CmdOpts = lists:ukeysort(1, CmdOpts0),
+ CfgOpts1 = lists:ukeysort(1, CfgOpts0),
+ CfgOpts = case is_any_defined(TestSelectOpts,CmdOpts) of
+ false ->
+ CfgOpts1;
+ true ->
+ [Opt || Opt={K,_} <- CfgOpts1,
+ not lists:member(K,TestSelectOpts)]
+ end,
+ lists:ukeymerge(1, CmdOpts, CfgOpts).
+
+is_any_defined([Key|Keys],Opts) ->
+ proplists:is_defined(Key,Opts) orelse is_any_defined(Keys,Opts);
+is_any_defined([],_Opts) ->
+ false.
+
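A worked example of the merge semantics documented above, with hypothetical option lists: any test-selection option on the command line removes all test-selection options from the config before the key-merge, while unrelated options such as logdir survive.

    %% --suite given on the command line; dir and logdir set in ct_opts
    CmdOpts = [{suite, ["my_SUITE"]}],
    CfgOpts = [{dir, ["test"]}, {logdir, "ct_logs"}],
    %% merge_opts(CmdOpts, CfgOpts) drops {dir, ["test"]} because suite is
    %% a selection option, then key-merges with command-line precedence:
    %% => [{logdir, "ct_logs"}, {suite, ["my_SUITE"]}]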
sys_config_list(CmdOpts, CfgOpts) ->
CmdSysConfigs = split_string(proplists:get_value(sys_config, CmdOpts, "")),
case proplists:get_value(sys_config, CfgOpts, []) of
@@ -250,11 +302,10 @@ sys_config_list(CmdOpts, CfgOpts) ->
end.
discover_tests(State, ProjectApps, Opts) ->
- case {proplists:get_value(suite, Opts), proplists:get_value(dir, Opts)} of
- %% no dirs or suites defined, try using `$APP/test` and `$ROOT/test`
- %% as suites
- {undefined, undefined} -> {ok, [default_tests(State, ProjectApps)|Opts]};
- {_, _} -> {ok, Opts}
+ case is_any_defined([spec,dir,suite],Opts) of
+ %% no tests defined, try using `$APP/test` and `$ROOT/test` as dirs
+ false -> {ok, [default_tests(State, ProjectApps)|Opts]};
+ true -> {ok, Opts}
end.
default_tests(State, ProjectApps) ->
@@ -289,14 +340,9 @@ compile(State, {ok, _} = Tests) ->
compile(_State, Error) -> Error.
do_compile(State) ->
- case rebar_prv_compile:do(State) of
- %% successfully compiled apps
- {ok, S} ->
- ok = maybe_cover_compile(S),
- {ok, S};
- %% this should look like a compiler error, not an eunit error
- Error -> Error
- end.
+ {ok, S} = rebar_prv_compile:do(State),
+ ok = maybe_cover_compile(S),
+ {ok, S}.
inject_ct_state(State, {ok, Tests}) ->
Apps = rebar_state:project_apps(State),
@@ -304,8 +350,7 @@ inject_ct_state(State, {ok, Tests}) ->
{ok, {NewState, ModdedApps}} ->
test_dirs(NewState, ModdedApps, Tests);
{error, _} = Error -> Error
- end;
-inject_ct_state(_State, Error) -> Error.
+ end.
inject_ct_state(State, Tests, [App|Rest], Acc) ->
case inject(rebar_app_info:opts(App), State, Tests) of
@@ -383,31 +428,50 @@ append(A, B) -> A ++ B.
add_transforms(CTOpts, State) when is_list(CTOpts) ->
case readable(State) of
- true ->
- ReadableTransform = [{parse_transform, cth_readable_transform}],
- (CTOpts -- ReadableTransform) ++ ReadableTransform;
false ->
- CTOpts
+ CTOpts;
+ Other when Other == true; Other == compact ->
+ ReadableTransform = [{parse_transform, cth_readable_transform}],
+ (CTOpts -- ReadableTransform) ++ ReadableTransform
end;
add_transforms({error, _} = Error, _State) -> Error.
readable(State) ->
{RawOpts, _} = rebar_state:command_parsed_args(State),
case proplists:get_value(readable, RawOpts) of
- true -> true;
- false -> false;
- undefined -> rebar_state:get(State, ct_readable, true)
+ "true" -> true;
+ "false" -> false;
+ "compact" -> compact;
+ undefined -> rebar_state:get(State, ct_readable, compact)
end.
test_dirs(State, Apps, Opts) ->
- case {proplists:get_value(suite, Opts), proplists:get_value(dir, Opts)} of
- {Suites, undefined} -> set_compile_dirs(State, Apps, {suite, Suites});
- {undefined, Dirs} -> set_compile_dirs(State, Apps, {dir, Dirs});
- {Suites, Dir} when is_integer(hd(Dir)) ->
- set_compile_dirs(State, Apps, join(Suites, Dir));
- {Suites, [Dir]} when is_integer(hd(Dir)) ->
- set_compile_dirs(State, Apps, join(Suites, Dir));
- {_Suites, _Dirs} -> {error, "Only a single directory may be specified when specifying suites"}
+ case proplists:get_value(spec, Opts) of
+ undefined ->
+ case {proplists:get_value(suite, Opts), proplists:get_value(dir, Opts)} of
+ {Suites, undefined} -> set_compile_dirs(State, Apps, {suite, Suites});
+ {undefined, Dirs} -> set_compile_dirs(State, Apps, {dir, Dirs});
+ {Suites, Dir} when is_integer(hd(Dir)) ->
+ set_compile_dirs(State, Apps, join(Suites, Dir));
+ {Suites, [Dir]} when is_integer(hd(Dir)) ->
+ set_compile_dirs(State, Apps, join(Suites, Dir));
+ {_Suites, _Dirs} -> {error, "Only a single directory may be specified when specifying suites"}
+ end;
+ Spec when is_integer(hd(Spec)) ->
+ spec_test_dirs(State, Apps, [Spec]);
+ Specs ->
+ spec_test_dirs(State, Apps, Specs)
+ end.
+
+spec_test_dirs(State, Apps, Specs0) ->
+ case get_dirs_from_specs(Specs0) of
+ {ok,{Specs,SuiteDirs}} ->
+ {State1,Apps1} = set_compile_dirs1(State, Apps, {dir, SuiteDirs}),
+ {State2,Apps2} = set_compile_dirs1(State1, Apps1, {spec, Specs}),
+ [maybe_copy_spec(State2,Apps2,S) || S <- Specs],
+ {ok, rebar_state:project_apps(State2, Apps2)};
+ Error ->
+ Error
end.
join(Suite, Dir) when is_integer(hd(Suite)) ->
@@ -415,27 +479,28 @@ join(Suite, Dir) when is_integer(hd(Suite)) ->
join(Suites, Dir) ->
{suite, lists:map(fun(S) -> filename:join([Dir, S]) end, Suites)}.
-set_compile_dirs(State, Apps, {dir, Dir}) when is_integer(hd(Dir)) ->
+set_compile_dirs(State, Apps, What) ->
+ {NewState,NewApps} = set_compile_dirs1(State, Apps, What),
+ {ok, rebar_state:project_apps(NewState, NewApps)}.
+
+set_compile_dirs1(State, Apps, {dir, Dir}) when is_integer(hd(Dir)) ->
%% single directory
%% insert `Dir` into an app if relative, or the base state if not
%% app relative but relative to the root or not at all if outside
%% project scope
- {NewState, NewApps} = maybe_inject_test_dir(State, [], Apps, Dir),
- {ok, rebar_state:project_apps(NewState, NewApps)};
-set_compile_dirs(State, Apps, {dir, Dirs}) ->
+ maybe_inject_test_dir(State, [], Apps, Dir);
+set_compile_dirs1(State, Apps, {dir, Dirs}) ->
%% multiple directories
F = fun(Dir, {S, A}) -> maybe_inject_test_dir(S, [], A, Dir) end,
- {NewState, NewApps} = lists:foldl(F, {State, Apps}, Dirs),
- {ok, rebar_state:project_apps(NewState, NewApps)};
-set_compile_dirs(State, Apps, {suite, Suites}) ->
- %% suites with dir component
- Dirs = find_suite_dirs(Suites),
+ lists:foldl(F, {State, Apps}, Dirs);
+set_compile_dirs1(State, Apps, {Type, Files}) when Type==spec; Type==suite ->
+ %% specs or suites with dir component
+ Dirs = find_file_dirs(Files),
F = fun(Dir, {S, A}) -> maybe_inject_test_dir(S, [], A, Dir) end,
- {NewState, NewApps} = lists:foldl(F, {State, Apps}, Dirs),
- {ok, rebar_state:project_apps(NewState, NewApps)}.
+ lists:foldl(F, {State, Apps}, Dirs).
-find_suite_dirs(Suites) ->
- AllDirs = lists:map(fun(S) -> filename:dirname(filename:absname(S)) end, Suites),
+find_file_dirs(Files) ->
+ AllDirs = lists:map(fun(F) -> filename:dirname(filename:absname(F)) end, Files),
%% eliminate duplicates
lists:usort(AllDirs).
@@ -483,52 +548,79 @@ copy_bare_suites(From, To) ->
ok = rebar_file_utils:cp_r(SrcFiles, To),
rebar_file_utils:cp_r(DataDirs, To).
+maybe_copy_spec(State, [App|Apps], Spec) ->
+ case rebar_file_utils:path_from_ancestor(filename:dirname(Spec), rebar_app_info:dir(App)) of
+ {ok, []} ->
+ ok = rebar_file_utils:cp_r([Spec],rebar_app_info:out_dir(App));
+ {ok,_} ->
+ ok;
+ {error,badparent} ->
+ maybe_copy_spec(State, Apps, Spec)
+ end;
+maybe_copy_spec(State, [], Spec) ->
+ case rebar_file_utils:path_from_ancestor(filename:dirname(Spec), rebar_state:dir(State)) of
+ {ok, []} ->
+ ExtrasDir = filename:join([rebar_dir:base_dir(State), "extras"]),
+ ok = rebar_file_utils:cp_r([Spec],ExtrasDir);
+ _R ->
+ ok
+ end.
+
inject_test_dir(Opts, Dir) ->
%% append specified test targets to app defined `extra_src_dirs`
ExtraSrcDirs = rebar_opts:get(Opts, extra_src_dirs, []),
rebar_opts:set(Opts, extra_src_dirs, ExtraSrcDirs ++ [Dir]).
+get_dirs_from_specs(Specs) ->
+ case get_tests_from_specs(Specs) of
+ {ok,Tests} ->
+ {SpecLists,NodeRunSkipLists} = lists:unzip(Tests),
+ SpecList = lists:append(SpecLists),
+ NodeRunSkipList = lists:append(NodeRunSkipLists),
+ RunList = lists:append([R || {_,R,_} <- NodeRunSkipList]),
+ DirList = [element(1,R) || R <- RunList],
+ {ok,{SpecList,DirList}};
+ {error,Reason} ->
+ {error,{?MODULE,{error_reading_testspec,Reason}}}
+ end.
+
+get_tests_from_specs(Specs) ->
+ _ = ct_testspec:module_info(), % make sure ct_testspec is loaded
+ case erlang:function_exported(ct_testspec,get_tests,1) of
+ true ->
+ ct_testspec:get_tests(Specs);
+ false ->
+ case ct_testspec:collect_tests_from_file(Specs,true) of
+ Tests when is_list(Tests) ->
+ {ok,[{S,ct_testspec:prepare_tests(R)} || {S,R} <- Tests]};
+ Error ->
+ Error
+ end
+ end.
+
translate_paths(State, Opts) ->
- case {proplists:get_value(suite, Opts), proplists:get_value(dir, Opts)} of
- {_Suites, undefined} -> translate_suites(State, Opts, []);
- {undefined, _Dirs} -> translate_dirs(State, Opts, []);
- %% both dirs and suites are defined, only translate dir paths
- _ -> translate_dirs(State, Opts, [])
+ case proplists:get_value(spec, Opts) of
+ undefined ->
+ case {proplists:get_value(suite, Opts), proplists:get_value(dir, Opts)} of
+ {_Suites, undefined} -> translate_paths(State, suite, Opts, []);
+ {undefined, _Dirs} -> translate_paths(State, dir, Opts, []);
+ %% both dirs and suites are defined, only translate dir paths
+ _ -> translate_paths(State, dir, Opts, [])
+ end;
+ _Specs ->
+ translate_paths(State, spec, Opts, [])
end.
-translate_dirs(_State, [], Acc) -> lists:reverse(Acc);
-translate_dirs(State, [{dir, Dir}|Rest], Acc) when is_integer(hd(Dir)) ->
- %% single dir
- Apps = rebar_state:project_apps(State),
- translate_dirs(State, Rest, [{dir, translate(State, Apps, Dir)}|Acc]);
-translate_dirs(State, [{dir, Dirs}|Rest], Acc) ->
- %% multiple dirs
- Apps = rebar_state:project_apps(State),
- NewDirs = {dir, lists:map(fun(Dir) -> translate(State, Apps, Dir) end, Dirs)},
- translate_dirs(State, Rest, [NewDirs|Acc]);
-translate_dirs(State, [Test|Rest], Acc) ->
- translate_dirs(State, Rest, [Test|Acc]).
-
-translate_suites(_State, [], Acc) -> lists:reverse(Acc);
-translate_suites(State, [{suite, Suite}|Rest], Acc) when is_integer(hd(Suite)) ->
- %% single suite
+translate_paths(_State, _Type, [], Acc) -> lists:reverse(Acc);
+translate_paths(State, Type, [{Type, Val}|Rest], Acc) when is_integer(hd(Val)) ->
+ %% single file or dir
+ translate_paths(State, Type, [{Type, [Val]}|Rest], Acc);
+translate_paths(State, Type, [{Type, Files}|Rest], Acc) ->
Apps = rebar_state:project_apps(State),
- translate_suites(State, Rest, [{suite, translate_suite(State, Apps, Suite)}|Acc]);
-translate_suites(State, [{suite, Suites}|Rest], Acc) ->
- %% multiple suites
- Apps = rebar_state:project_apps(State),
- NewSuites = {suite, lists:map(fun(Suite) -> translate_suite(State, Apps, Suite) end, Suites)},
- translate_suites(State, Rest, [NewSuites|Acc]);
-translate_suites(State, [Test|Rest], Acc) ->
- translate_suites(State, Rest, [Test|Acc]).
-
-translate_suite(State, Apps, Suite) ->
- Dirname = filename:dirname(Suite),
- Basename = filename:basename(Suite),
- case Dirname of
- "." -> Suite;
- _ -> filename:join([translate(State, Apps, Dirname), Basename])
- end.
+ New = {Type, lists:map(fun(File) -> translate(State, Apps, File) end, Files)},
+ translate_paths(State, Type, Rest, [New|Acc]);
+translate_paths(State, Type, [Test|Rest], Acc) ->
+ translate_paths(State, Type, Rest, [Test|Acc]).
translate(State, [App|Rest], Path) ->
case rebar_file_utils:path_from_ancestor(Path, rebar_app_info:dir(App)) of
@@ -584,7 +676,11 @@ handle_results(_) ->
sum_results({Passed, Failed, {UserSkipped, AutoSkipped}},
{Passed2, Failed2, {UserSkipped2, AutoSkipped2}}) ->
{Passed+Passed2, Failed+Failed2,
- {UserSkipped+UserSkipped2, AutoSkipped+AutoSkipped2}}.
+ {UserSkipped+UserSkipped2, AutoSkipped+AutoSkipped2}};
+sum_results(_, {error, Reason}) ->
+ {error, Reason};
+sum_results(Unknown, _) ->
+ {error, Unknown}.
handle_quiet_results(_, {error, _} = Result) ->
handle_results(Result);
@@ -607,7 +703,10 @@ format_result({Passed, 0, {0, 0}}) ->
format_result({Passed, Failed, Skipped}) ->
Format = [format_failed(Failed), format_skipped(Skipped),
format_passed(Passed)],
- ?CONSOLE("~s", [Format]).
+ ?CONSOLE("~ts", [Format]);
+format_result(_Unknown) ->
+ %% Happens when CT itself encounters a bug
+ ok.
format_failed(0) ->
[];
@@ -636,20 +735,24 @@ maybe_write_coverdata(State) ->
true -> rebar_state:set(State, cover_enabled, true);
false -> State
end,
- rebar_prv_cover:maybe_write_coverdata(State1, ?PROVIDER).
+ Name = proplists:get_value(cover_export_name, RawOpts, ?PROVIDER),
+ rebar_prv_cover:maybe_write_coverdata(State1, Name).
ct_opts(_State) ->
- [{dir, undefined, "dir", string, help(dir)}, %% comma-seperated list
- {suite, undefined, "suite", string, help(suite)}, %% comma-seperated list
- {group, undefined, "group", string, help(group)}, %% comma-seperated list
- {testcase, undefined, "case", string, help(testcase)}, %% comma-seperated list
+ [{dir, undefined, "dir", string, help(dir)}, %% comma-separated list
+ {suite, undefined, "suite", string, help(suite)}, %% comma-separated list
+ {group, undefined, "group", string, help(group)}, %% comma-separated list
+ {testcase, undefined, "case", string, help(testcase)}, %% comma-separated list
{label, undefined, "label", string, help(label)}, %% String
- {config, undefined, "config", string, help(config)}, %% comma-seperated list
+ {config, undefined, "config", string, help(config)}, %% comma-separated list
+ {spec, undefined, "spec", string, help(spec)}, %% comma-separated list
+ {join_specs, undefined, "join_specs", boolean, help(join_specs)},
{allow_user_terms, undefined, "allow_user_terms", boolean, help(allow_user_terms)}, %% Bool
{logdir, undefined, "logdir", string, help(logdir)}, %% dir
- {logopts, undefined, "logopts", string, help(logopts)}, %% comma seperated list
+ {logopts, undefined, "logopts", string, help(logopts)}, %% comma-separated list
{verbosity, undefined, "verbosity", integer, help(verbosity)}, %% Integer
{cover, $c, "cover", {boolean, false}, help(cover)},
+ {cover_export_name, undefined, "cover_export_name", string, help(cover_export_name)},
{repeat, undefined, "repeat", integer, help(repeat)}, %% integer
{duration, undefined, "duration", string, help(duration)}, % format: HHMMSS
{until, undefined, "until", string, help(until)}, %% format: YYMoMoDD[HHMMSS]
@@ -663,14 +766,18 @@ ct_opts(_State) ->
{scale_timetraps, undefined, "scale_timetraps", boolean, help(scale_timetraps)},
{create_priv_dir, undefined, "create_priv_dir", string, help(create_priv_dir)},
{include, undefined, "include", string, help(include)},
- {readable, undefined, "readable", boolean, help(readable)},
+ {readable, undefined, "readable", string, help(readable)},
{verbose, $v, "verbose", boolean, help(verbose)},
{name, undefined, "name", atom, help(name)},
{sname, undefined, "sname", atom, help(sname)},
{setcookie, undefined, "setcookie", atom, help(setcookie)},
- {sys_config, undefined, "sys_config", string, help(sys_config)} %% comma-seperated list
+ {sys_config, undefined, "sys_config", string, help(sys_config)}, %% comma-separated list
+ {compile_only, undefined, "compile_only", boolean, help(compile_only)},
+ {retry, undefined, "retry", boolean, help(retry)}
].
+help(compile_only) ->
+ "Compile modules in the project with the test configuration but do not run the tests";
help(dir) ->
"List of additional directories containing test suites";
help(suite) ->
@@ -683,6 +790,10 @@ help(label) ->
"Test label";
help(config) ->
"List of config files";
+help(spec) ->
+ "List of test specifications";
+help(join_specs) ->
+ "Merge all test specifications and perform a single test run";
help(sys_config) ->
"List of application config files";
help(allow_user_terms) ->
@@ -695,6 +806,8 @@ help(verbosity) ->
"Verbosity";
help(cover) ->
"Generate cover data";
+help(cover_export_name) ->
+ "Base name of the coverdata file to write";
help(repeat) ->
"How often to repeat tests";
help(duration) ->
@@ -722,7 +835,7 @@ help(create_priv_dir) ->
help(include) ->
"Directories containing additional include files";
help(readable) ->
- "Shows test case names and only displays logs to shell on failures";
+ "Shows test case names and only displays logs to shell on failures (true | compact | false)";
help(verbose) ->
"Verbose output";
help(name) ->
@@ -731,5 +844,7 @@ help(sname) ->
"Gives a short name to the node";
help(setcookie) ->
"Sets the cookie if the node is distributed";
+help(retry) ->
+ "Experimental feature. If a specification for previously failing tests is found, reruns them.";
help(_) ->
"".
diff --git a/src/rebar_prv_compile.erl b/src/rebar_prv_compile.erl
index effc763..cf2bcf2 100644
--- a/src/rebar_prv_compile.erl
+++ b/src/rebar_prv_compile.erl
@@ -30,34 +30,49 @@ init(State) ->
{example, "rebar3 compile"},
{short_desc, "Compile apps .app.src and .erl files."},
{desc, "Compile apps .app.src and .erl files."},
- {opts, []}])),
+ {opts, [{deps_only, $d, "deps_only", undefined,
+ "Only compile dependencies; no project apps will be built."}]}])),
{ok, State1}.
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
do(State) ->
- DepsPaths = rebar_state:code_paths(State, all_deps),
- PluginDepsPaths = rebar_state:code_paths(State, all_plugin_deps),
- rebar_utils:remove_from_code_path(PluginDepsPaths),
- code:add_pathsa(DepsPaths),
+ IsDepsOnly = is_deps_only(State),
+ rebar_paths:set_paths([deps], State),
- ProjectApps = rebar_state:project_apps(State),
Providers = rebar_state:providers(State),
Deps = rebar_state:deps_to_build(State),
- Cwd = rebar_state:dir(State),
+ copy_and_build_apps(State, Providers, Deps),
+
+ State1 = case IsDepsOnly of
+ true ->
+ State;
+ false ->
+ handle_project_apps(Providers, State)
+ end,
+
+ rebar_paths:set_paths([plugins], State1),
+
+ {ok, State1}.
+
+is_deps_only(State) ->
+ {Args, _} = rebar_state:command_parsed_args(State),
+ proplists:get_value(deps_only, Args, false).
- build_apps(State, Providers, Deps),
+handle_project_apps(Providers, State) ->
+ Cwd = rebar_state:dir(State),
+ ProjectApps = rebar_state:project_apps(State),
{ok, ProjectApps1} = rebar_digraph:compile_order(ProjectApps),
%% Run top level hooks *before* project apps compiled but *after* deps are
rebar_hooks:run_all_hooks(Cwd, pre, ?PROVIDER, Providers, State),
- ProjectApps2 = build_apps(State, Providers, ProjectApps1),
+ ProjectApps2 = copy_and_build_project_apps(State, Providers, ProjectApps1),
State2 = rebar_state:project_apps(State, ProjectApps2),
%% projects with structures like /apps/foo,/apps/bar,/test
build_extra_dirs(State, ProjectApps2),
- State3 = update_code_paths(State2, ProjectApps2, DepsPaths),
+ State3 = update_code_paths(State2, ProjectApps2),
rebar_hooks:run_all_hooks(Cwd, post, ?PROVIDER, Providers, State2),
case rebar_state:has_all_artifacts(State3) of
@@ -66,18 +81,23 @@ do(State) ->
true ->
true
end,
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State3, default)
- ++ rebar_state:code_paths(State, all_plugin_deps)),
- {ok, State3}.
+ State3.
+
-spec format_error(any()) -> iolist().
format_error({missing_artifact, File}) ->
- io_lib:format("Missing artifact ~s", [File]);
+ io_lib:format("Missing artifact ~ts", [File]);
+format_error({bad_project_builder, Name, Type, Module}) ->
+ io_lib:format("Error building application ~s:~n Required project builder ~s function "
+ "~s:build/1 not found", [Name, Type, Module]);
+format_error({unknown_project_type, Name, Type}) ->
+ io_lib:format("Error building application ~s:~n "
+ "No project builder is configured for type ~s", [Name, Type]);
format_error(Reason) ->
io_lib:format("~p", [Reason]).
-build_apps(State, Providers, Apps) ->
+copy_and_build_apps(State, Providers, Apps) ->
[build_app(State, Providers, AppInfo) || AppInfo <- Apps].
build_app(State, Providers, AppInfo) ->
@@ -86,6 +106,19 @@ build_app(State, Providers, AppInfo) ->
copy_app_dirs(AppInfo, AppDir, OutDir),
compile(State, Providers, AppInfo).
+copy_and_build_project_apps(State, Providers, Apps) ->
+ %% Top-level apps, because of profile usage and specific orderings (i.e.
+ %% may require an include file from a profile-specific app for an extra_dirs
+ %% entry that only exists in a test context), need to be
+ %% copied and added to the path at once, and not just in compile order.
+ [copy_app_dirs(AppInfo,
+ rebar_app_info:dir(AppInfo),
+ rebar_app_info:out_dir(AppInfo))
+ || AppInfo <- Apps],
+ code:add_pathsa([rebar_app_info:ebin_dir(AppInfo) || AppInfo <- Apps]),
+ [compile(State, Providers, AppInfo) || AppInfo <- Apps].
+
+
build_extra_dirs(State, Apps) ->
BaseDir = rebar_state:dir(State),
F = fun(App) -> rebar_app_info:dir(App) == BaseDir end,
@@ -104,26 +137,47 @@ build_extra_dir(State, Dir) ->
true ->
BaseDir = filename:join([rebar_dir:base_dir(State), "extras"]),
OutDir = filename:join([BaseDir, Dir]),
- filelib:ensure_dir(filename:join([OutDir, "dummy.beam"])),
+ rebar_file_utils:ensure_dir(OutDir),
copy(rebar_state:dir(State), BaseDir, Dir),
- rebar_erlc_compiler:compile_dir(State, BaseDir, OutDir);
- false -> ok
+
+ Compilers = rebar_state:compilers(State),
+ FakeApp = rebar_app_info:new(),
+ FakeApp1 = rebar_app_info:out_dir(FakeApp, BaseDir),
+ FakeApp2 = rebar_app_info:ebin_dir(FakeApp1, OutDir),
+ Opts = rebar_state:opts(State),
+ FakeApp3 = rebar_app_info:opts(FakeApp2, Opts),
+ FakeApp4 = rebar_app_info:set(FakeApp3, src_dirs, [OutDir]),
+ rebar_compiler:compile_all(Compilers, FakeApp4);
+ false ->
+ ok
end.
compile(State, AppInfo) ->
compile(State, rebar_state:providers(State), AppInfo).
compile(State, Providers, AppInfo) ->
- ?INFO("Compiling ~s", [rebar_app_info:name(AppInfo)]),
+ ?INFO("Compiling ~ts", [rebar_app_info:name(AppInfo)]),
AppDir = rebar_app_info:dir(AppInfo),
AppInfo1 = rebar_hooks:run_all_hooks(AppDir, pre, ?PROVIDER, Providers, AppInfo, State),
AppInfo2 = rebar_hooks:run_all_hooks(AppDir, pre, ?ERLC_HOOK, Providers, AppInfo1, State),
- rebar_erlc_compiler:compile(AppInfo2),
+
+ build_app(AppInfo2, State),
+
AppInfo3 = rebar_hooks:run_all_hooks(AppDir, post, ?ERLC_HOOK, Providers, AppInfo2, State),
AppInfo4 = rebar_hooks:run_all_hooks(AppDir, pre, ?APP_HOOK, Providers, AppInfo3, State),
- case rebar_otp_app:compile(State, AppInfo4) of
+
+ %% Load plugins back for make_vsn calls in custom resources.
+ %% The rebar_otp_app compilation step is safe regarding the
+ %% overall path management, so we can just load all plugins back
+ %% in memory.
+ rebar_paths:set_paths([plugins], State),
+ AppFileCompileResult = rebar_otp_app:compile(State, AppInfo4),
+ %% Clean up after ourselves, leave things as they were with deps first
+ rebar_paths:set_paths([deps], State),
+
+ case AppFileCompileResult of
{ok, AppInfo5} ->
AppInfo6 = rebar_hooks:run_all_hooks(AppDir, post, ?APP_HOOK, Providers, AppInfo5, State),
AppInfo7 = rebar_hooks:run_all_hooks(AppDir, post, ?PROVIDER, Providers, AppInfo6, State),
@@ -137,9 +191,33 @@ compile(State, Providers, AppInfo) ->
%% Internal functions
%% ===================================================================
-update_code_paths(State, ProjectApps, DepsPaths) ->
+build_app(AppInfo, State) ->
+ case rebar_app_info:project_type(AppInfo) of
+ Type when Type =:= rebar3 ; Type =:= undefined ->
+ Compilers = rebar_state:compilers(State),
+ rebar_paths:set_paths([deps], State),
+ rebar_compiler:compile_all(Compilers, AppInfo);
+ Type ->
+ ProjectBuilders = rebar_state:project_builders(State),
+ case lists:keyfind(Type, 1, ProjectBuilders) of
+ {_, Module} ->
+ %% load plugins since that's where project builders would be
+ rebar_paths:set_paths([deps, plugins], State),
+ Res = Module:build(AppInfo),
+ rebar_paths:set_paths([deps], State),
+ case Res of
+ ok -> ok;
+ {error, Reason} -> throw({error, {Module, Reason}})
+ end;
+ _ ->
+ throw(?PRV_ERROR({unknown_project_type, rebar_app_info:name(AppInfo), Type}))
+ end
+ end.
+
+update_code_paths(State, ProjectApps) ->
ProjAppsPaths = paths_for_apps(ProjectApps),
ExtrasPaths = paths_for_extras(State, ProjectApps),
+ DepsPaths = rebar_state:code_paths(State, all_deps),
rebar_state:code_paths(State, all_deps, DepsPaths ++ ProjAppsPaths ++ ExtrasPaths).
paths_for_apps(Apps) -> paths_for_apps(Apps, []).
@@ -173,8 +251,8 @@ has_all_artifacts(AppInfo1) ->
end.
copy_app_dirs(AppInfo, OldAppDir, AppDir) ->
- case ec_cnv:to_binary(filename:absname(OldAppDir)) =/=
- ec_cnv:to_binary(filename:absname(AppDir)) of
+ case rebar_utils:to_binary(filename:absname(OldAppDir)) =/=
+ rebar_utils:to_binary(filename:absname(AppDir)) of
true ->
EbinDir = filename:join([OldAppDir, "ebin"]),
%% copy all files from ebin if it exists
@@ -201,7 +279,11 @@ copy_app_dirs(AppInfo, OldAppDir, AppDir) ->
end,
{SrcDirs, ExtraDirs} = resolve_src_dirs(rebar_app_info:opts(AppInfo)),
%% link to src_dirs to be adjacent to ebin is needed for R15 use of cover/xref
- [symlink_or_copy(OldAppDir, AppDir, Dir) || Dir <- ["priv", "include"] ++ SrcDirs],
+ %% priv/ and include/ are symlinked unconditionally so that hooks
+ %% can still write to them _after_ compilation has taken place, even
+ %% when the initial directory did not contain them
+ [symlink_or_copy(OldAppDir, AppDir, Dir) || Dir <- ["priv", "include"]],
+ [symlink_or_copy_existing(OldAppDir, AppDir, Dir) || Dir <- SrcDirs],
%% copy all extra_src_dirs as they build into themselves and linking means they
%% are shared across profiles
[copy(OldAppDir, AppDir, Dir) || Dir <- ExtraDirs];
@@ -214,6 +296,14 @@ symlink_or_copy(OldAppDir, AppDir, Dir) ->
Target = filename:join([AppDir, Dir]),
rebar_file_utils:symlink_or_copy(Source, Target).
+symlink_or_copy_existing(OldAppDir, AppDir, Dir) ->
+ Source = filename:join([OldAppDir, Dir]),
+ Target = filename:join([AppDir, Dir]),
+ case ec_file:is_dir(Source) of
+ true -> rebar_file_utils:symlink_or_copy(Source, Target);
+ false -> ok
+ end.
+
copy(OldAppDir, AppDir, Dir) ->
Source = filename:join([OldAppDir, Dir]),
Target = filename:join([AppDir, Dir]),
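The new build_app/2 dispatches on rebar_app_info:project_type/1 and looks the type up in rebar_state:project_builders(State), expecting a module that exports build/1 and returns ok or {error, Reason}. A hypothetical builder satisfying that contract (module name and make invocation are illustrative, not part of the patch; how a type gets registered is outside this hunk) could be:

    -module(my_make_builder).
    -export([build/1]).

    %% Called by rebar_prv_compile:build_app/2 for apps whose project_type
    %% matches the type this module was registered under.
    build(AppInfo) ->
        AppDir = rebar_app_info:dir(AppInfo),
        case rebar_utils:sh("make", [{cd, AppDir}, use_stdout, return_on_error]) of
            {ok, _Output} -> ok;
            {error, Reason} -> {error, Reason}
        end.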
diff --git a/src/rebar_prv_cover.erl b/src/rebar_prv_cover.erl
index 464967b..a279293 100644
--- a/src/rebar_prv_cover.erl
+++ b/src/rebar_prv_cover.erl
@@ -12,10 +12,11 @@
maybe_write_coverdata/2,
format_error/1]).
+-include_lib("providers/include/providers.hrl").
-include("rebar.hrl").
-define(PROVIDER, cover).
--define(DEPS, [app_discovery]).
+-define(DEPS, [lock]).
%% ===================================================================
%% Public API
@@ -62,6 +63,9 @@ maybe_write_coverdata(State, Task) ->
end.
-spec format_error(any()) -> iolist().
+format_error({min_coverage_failed, {PassRate, Total}}) ->
+ io_lib:format("Requiring ~p% coverage to pass. Only ~p% obtained",
+ [PassRate, Total]);
format_error(Reason) ->
io_lib:format("~p", [Reason]).
@@ -84,6 +88,15 @@ reset(State) ->
{ok, State}.
analyze(State) ->
+ %% modules have to be compiled and then cover compiled
+ %% in order for cover data to be reloaded
+ %% this may break if modules have been deleted
+ %% since code coverage was collected?
+ {ok, S} = rebar_prv_compile:do(State),
+ ok = cover_compile(S, apps),
+ do_analyze(State).
+
+do_analyze(State) ->
?INFO("Performing cover analysis...", []),
%% figure out what coverdata we have
CoverDir = cover_dir(State),
@@ -93,13 +106,13 @@ analyze(State) ->
%% redirect cover output
true = redirect_cover_output(State, CoverPid),
%% analyze!
- ok = case analyze(State, CoverFiles) of
- [] -> ok;
+ case analyze(State, CoverFiles) of
+ [] -> {ok, State};
Analysis ->
print_analysis(Analysis, verbose(State)),
- write_index(State, Analysis)
- end,
- {ok, State}.
+ write_index(State, Analysis),
+ maybe_fail_coverage(Analysis, State)
+ end.
get_all_coverdata(CoverDir) ->
ok = filelib:ensure_dir(filename:join([CoverDir, "dummy.log"])),
@@ -187,10 +200,7 @@ mod_to_filename(TaskDir, M) ->
process(Coverage) -> process(Coverage, {0, 0}).
-process([], {0, 0}) ->
- "0%";
-process([], {Cov, Not}) ->
- integer_to_list(trunc((Cov / (Cov + Not)) * 100)) ++ "%";
+process([], Acc) -> Acc;
%% line 0 is a line added by eunit and never executed so ignore it
process([{{_, 0}, _}|Rest], Acc) -> process(Rest, Acc);
process([{_, {Cov, Not}}|Rest], {Covered, NotCovered}) ->
@@ -199,56 +209,56 @@ process([{_, {Cov, Not}}|Rest], {Covered, NotCovered}) ->
print_analysis(_, false) -> ok;
print_analysis(Analysis, true) ->
{_, CoverFiles, Stats} = lists:keyfind("aggregate", 1, Analysis),
- ConsoleStats = [ {atom_to_list(M), C} || {M, C, _} <- Stats ],
- Table = format_table(ConsoleStats, CoverFiles),
+ Table = format_table(Stats, CoverFiles),
io:format("~ts", [Table]).
format_table(Stats, CoverFiles) ->
- MaxLength = max(lists:foldl(fun max_length/2, 0, Stats), 20),
+ MaxLength = lists:max([20 | lists:map(fun({M, _, _}) -> mod_length(M) end, Stats)]),
Header = header(MaxLength),
- Seperator = seperator(MaxLength),
+ Separator = separator(MaxLength),
TotalLabel = format("total", MaxLength),
- TotalCov = format(calculate_total(Stats), 8),
- [io_lib:format("~ts~n~ts~n~ts~n", [Seperator, Header, Seperator]),
- lists:map(fun({Mod, Coverage}) ->
+ TotalCov = format(calculate_total_string(Stats), 8),
+ [io_lib:format("~ts~n~ts~n~ts~n", [Separator, Header, Separator]),
+ lists:map(fun({Mod, Coverage, _}) ->
Name = format(Mod, MaxLength),
- Cov = format(Coverage, 8),
+ Cov = format(percentage_string(Coverage), 8),
io_lib:format(" | ~ts | ~ts |~n", [Name, Cov])
end, Stats),
- io_lib:format("~ts~n", [Seperator]),
+ io_lib:format("~ts~n", [Separator]),
io_lib:format(" | ~ts | ~ts |~n", [TotalLabel, TotalCov]),
- io_lib:format("~ts~n", [Seperator]),
+ io_lib:format("~ts~n", [Separator]),
io_lib:format(" coverage calculated from:~n", []),
lists:map(fun(File) ->
io_lib:format(" ~ts~n", [File])
end, CoverFiles)].
-max_length({ModName, _}, Min) ->
- Length = length(lists:flatten(ModName)),
- case Length > Min of
- true -> Length;
- false -> Min
- end.
+mod_length(Mod) when is_atom(Mod) -> mod_length(atom_to_list(Mod));
+mod_length(Mod) -> length(Mod).
header(Width) ->
[" | ", format("module", Width), " | ", format("coverage", 8), " |"].
-seperator(Width) ->
+separator(Width) ->
[" |--", io_lib:format("~*c", [Width, $-]), "--|------------|"].
format(String, Width) -> io_lib:format("~*.ts", [Width, String]).
-calculate_total(Stats) when length(Stats) =:= 0 ->
- "0%";
+calculate_total_string(Stats) ->
+ integer_to_list(calculate_total(Stats))++"%".
+
calculate_total(Stats) ->
- TotalStats = length(Stats),
- TotalCovInt = round(lists:foldl(
- fun({_Mod, Coverage, _File}, Acc) ->
- Acc + (list_to_integer(string:strip(Coverage, right, $%)) / TotalStats);
- ({_Mod, Coverage}, Acc) ->
- Acc + (list_to_integer(string:strip(Coverage, right, $%)) / TotalStats)
- end, 0, Stats)),
- integer_to_list(TotalCovInt) ++ "%".
+ percentage(lists:foldl(
+ fun({_Mod, {Cov, Not}, _File}, {CovAcc, NotAcc}) ->
+ {CovAcc + Cov, NotAcc + Not}
+ end,
+ {0, 0},
+ Stats
+ )).
+
+percentage_string(Data) -> integer_to_list(percentage(Data))++"%".
+
+percentage({_, 0}) -> 100;
+percentage({Cov, Not}) -> trunc((Cov / (Cov + Not)) * 100).
write_index(State, Coverage) ->
CoverDir = cover_dir(State),
@@ -269,7 +279,7 @@ write_index(State, Coverage) ->
write_index_section(_F, []) -> ok;
write_index_section(F, [{Section, DataFile, Mods}|Rest]) ->
%% Write the report
- ok = file:write(F, ?FMT("<h1>~s summary</h1>\n", [Section])),
+ ok = file:write(F, ?FMT("<h1>~ts summary</h1>\n", [Section])),
ok = file:write(F, "coverage calculated from:\n<ul>"),
ok = lists:foreach(fun(D) -> ok = file:write(F, io_lib:format("<li>~ts</li>", [D])) end,
DataFile),
@@ -278,14 +288,25 @@ write_index_section(F, [{Section, DataFile, Mods}|Rest]) ->
FmtLink =
fun({Mod, Cov, Report}) ->
?FMT("<tr><td><a href='~ts'>~ts</a></td><td>~ts</td>\n",
- [strip_coverdir(Report), Mod, Cov])
+ [strip_coverdir(Report), Mod, percentage_string(Cov)])
end,
lists:foreach(fun(M) -> ok = file:write(F, FmtLink(M)) end, Mods),
ok = file:write(F, ?FMT("<tr><td><strong>Total</strong></td><td>~ts</td>\n",
- [calculate_total(Mods)])),
+ [calculate_total_string(Mods)])),
ok = file:write(F, "</table>\n"),
write_index_section(F, Rest).
+maybe_fail_coverage(Analysis, State) ->
+ {_, _CoverFiles, Stats} = lists:keyfind("aggregate", 1, Analysis),
+ Total = calculate_total(Stats),
+ PassRate = min_coverage(State),
+ ?DEBUG("Comparing ~p to pass rate ~p", [Total, PassRate]),
+ if Total >= PassRate ->
+ {ok, State}
+ ; Total < PassRate ->
+ ?PRV_ERROR({min_coverage_failed, {PassRate, Total}})
+ end.
+
%% fix for r15b which doesn't put the correct path in the `source` section
%% of `module_info(compile)`
strip_coverdir([]) -> "";
@@ -294,45 +315,66 @@ strip_coverdir(File) ->
2))).
cover_compile(State, apps) ->
- Apps = filter_checkouts(rebar_state:project_apps(State)),
+ ExclApps = [rebar_utils:to_binary(A) || A <- rebar_state:get(State, cover_excl_apps, [])],
+ Apps = filter_checkouts_and_excluded(rebar_state:project_apps(State), ExclApps),
AppDirs = app_dirs(Apps),
cover_compile(State, lists:filter(fun(D) -> ec_file:is_dir(D) end, AppDirs));
cover_compile(State, Dirs) ->
- rebar_utils:update_code(rebar_state:code_paths(State, all_deps), [soft_purge]),
+ rebar_paths:set_paths([deps], State),
%% start the cover server if necessary
{ok, CoverPid} = start_cover(),
%% redirect cover output
true = redirect_cover_output(State, CoverPid),
+ ExclMods = rebar_state:get(State, cover_excl_mods, []),
lists:foreach(fun(Dir) ->
- ?DEBUG("cover compiling ~p", [Dir]),
- case catch(cover:compile_beam_directory(Dir)) of
+ case file:list_dir(Dir) of
+ {ok, Files} ->
+ ?DEBUG("cover compiling ~p", [Dir]),
+ [cover_compile_file(filename:join(Dir, File))
+ || File <- Files,
+ filename:extension(File) == ".beam",
+ not is_ignored(Dir, File, ExclMods)],
+ ok;
{error, eacces} ->
?WARN("Directory ~p not readable, modules will not be included in coverage", [Dir]);
{error, enoent} ->
?WARN("Directory ~p not found", [Dir]);
- {'EXIT', {Reason, _}} ->
- ?WARN("Cover compilation for directory ~p failed: ~p", [Dir, Reason]);
- Results ->
- %% print any warnings about modules that failed to cover compile
- lists:foreach(fun print_cover_warnings/1, lists:flatten(Results))
+ {error, Reason} ->
+ ?WARN("Directory ~p error ~p", [Dir, Reason])
end
end, Dirs),
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)),
ok.
+is_ignored(Dir, File, ExclMods) ->
+ Ignored = lists:any(fun(Excl) ->
+ File =:= atom_to_list(Excl) ++ ".beam"
+ end,
+ ExclMods),
+ Ignored andalso ?DEBUG("cover ignoring ~p ~p", [Dir, File]),
+ Ignored.
+
+cover_compile_file(FileName) ->
+ case catch(cover:compile_beam(FileName)) of
+ {error, Reason} ->
+ ?WARN("Cover compilation failed: ~p", [Reason]);
+ {ok, _} ->
+ ok
+ end.
+
app_dirs(Apps) ->
lists:foldl(fun app_ebin_dirs/2, [], Apps).
app_ebin_dirs(App, Acc) ->
[rebar_app_info:ebin_dir(App)|Acc].
-filter_checkouts(Apps) -> filter_checkouts(Apps, []).
+filter_checkouts_and_excluded(Apps, ExclApps) ->
+ filter_checkouts_and_excluded(Apps, ExclApps, []).
-filter_checkouts([], Acc) -> lists:reverse(Acc);
-filter_checkouts([App|Rest], Acc) ->
- case rebar_app_info:is_checkout(App) of
- true -> filter_checkouts(Rest, Acc);
- false -> filter_checkouts(Rest, [App|Acc])
+filter_checkouts_and_excluded([], _ExclApps, Acc) -> lists:reverse(Acc);
+filter_checkouts_and_excluded([App|Rest], ExclApps, Acc) ->
+ case rebar_app_info:is_checkout(App) orelse lists:member(rebar_app_info:name(App), ExclApps) of
+ true -> filter_checkouts_and_excluded(Rest, ExclApps, Acc);
+ false -> filter_checkouts_and_excluded(Rest, ExclApps, [App|Acc])
end.
start_cover() ->
@@ -349,16 +391,14 @@ redirect_cover_output(State, CoverPid) ->
[append]),
group_leader(F, CoverPid).
-print_cover_warnings({ok, _}) -> ok;
-print_cover_warnings({error, Error}) ->
- ?WARN("Cover compilation failed: ~p", [Error]).
-
-write_coverdata(State, Task) ->
+write_coverdata(State, Name) ->
DataDir = cover_dir(State),
ok = filelib:ensure_dir(filename:join([DataDir, "dummy.log"])),
- ExportFile = filename:join([DataDir, atom_to_list(Task) ++ ".coverdata"]),
+ ExportFile = filename:join([DataDir, rebar_utils:to_list(Name) ++ ".coverdata"]),
case cover:export(ExportFile) of
ok ->
+ %% dump accumulated coverdata after writing
+ ok = cover:reset(),
?DEBUG("Cover data written to ~p.", [ExportFile]);
{error, Reason} ->
?WARN("Cover data export failed: ~p", [Reason])
@@ -380,12 +420,23 @@ verbose(State) ->
{Verbose, _} -> Verbose
end.
+min_coverage(State) ->
+ Command = proplists:get_value(min_coverage, command_line_opts(State), undefined),
+ Config = proplists:get_value(min_coverage, config_opts(State), undefined),
+ case {Command, Config} of
+ {undefined, undefined} -> 0;
+ {undefined, Rate} -> Rate;
+ {Rate, _} -> Rate
+ end.
+
cover_dir(State) ->
filename:join([rebar_dir:base_dir(State), "cover"]).
cover_opts(_State) ->
[{reset, $r, "reset", boolean, help(reset)},
- {verbose, $v, "verbose", boolean, help(verbose)}].
+ {verbose, $v, "verbose", boolean, help(verbose)},
+ {min_coverage, $m, "min_coverage", integer, help(min_coverage)}].
help(reset) -> "Reset all coverdata.";
-help(verbose) -> "Print coverage analysis.".
+help(verbose) -> "Print coverage analysis.";
+help(min_coverage) -> "Mandate a coverage percentage required to succeed (0..100)".
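A worked example, with made-up module stats, of the change to the aggregate coverage figure: the per-module {Covered, NotCovered} counts are now pooled before the percentage is computed, instead of averaging the per-module percentage strings.

    Stats = [{mod_a, {75, 25}, "mod_a.html"},   %% 75% covered
             {mod_b, {1, 0},   "mod_b.html"}],  %% 100% covered
    %% previous behaviour: round((75 + 100) / 2) = 88%
    %% new behaviour: trunc((75 + 1) / ((75 + 25) + (1 + 0)) * 100) = 75%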
diff --git a/src/rebar_prv_deps.erl b/src/rebar_prv_deps.erl
index c865276..577a859 100644
--- a/src/rebar_prv_deps.erl
+++ b/src/rebar_prv_deps.erl
@@ -55,7 +55,7 @@ merge(Deps, SourceDeps) ->
normalize(Name) when is_binary(Name) ->
Name;
normalize(Name) when is_atom(Name) ->
- ec_cnv:to_binary(Name);
+ atom_to_binary(Name, unicode);
normalize(Dep) when is_tuple(Dep) ->
Name = element(1, Dep),
setelement(1, Dep, normalize(Name)).
@@ -87,31 +87,33 @@ display_deps(State, Deps) ->
%% packages
display_dep(_State, {Name, Vsn}) when is_list(Vsn) ->
- ?CONSOLE("~s* (package ~s)", [ec_cnv:to_binary(Name), ec_cnv:to_binary(Vsn)]);
+ ?CONSOLE("~ts* (package ~ts)", [rebar_utils:to_binary(Name), rebar_utils:to_binary(Vsn)]);
display_dep(_State, Name) when is_binary(Name) ->
- ?CONSOLE("~s* (package)", [Name]);
+ ?CONSOLE("~ts* (package)", [Name]);
display_dep(_State, {Name, Source}) when is_tuple(Source) ->
- ?CONSOLE("~s* (~s source)", [ec_cnv:to_binary(Name), type(Source)]);
+ ?CONSOLE("~ts* (~ts source)", [rebar_utils:to_binary(Name), type(Source)]);
display_dep(_State, {Name, _Vsn, Source}) when is_tuple(Source) ->
- ?CONSOLE("~s* (~s source)", [ec_cnv:to_binary(Name), type(Source)]);
+ ?CONSOLE("~ts* (~ts source)", [rebar_utils:to_binary(Name), type(Source)]);
display_dep(_State, {Name, _Vsn, Source, _Opts}) when is_tuple(Source) ->
- ?CONSOLE("~s* (~s source)", [ec_cnv:to_binary(Name), type(Source)]);
+ ?CONSOLE("~ts* (~ts source)", [rebar_utils:to_binary(Name), type(Source)]);
%% Locked
-display_dep(State, {Name, Source={pkg, _, Vsn, _}, Level}) when is_integer(Level) ->
+display_dep(State, {Name, _Source={pkg, _, Vsn}, Level}) when is_integer(Level) ->
DepsDir = rebar_dir:deps_dir(State),
- AppDir = filename:join([DepsDir, ec_cnv:to_binary(Name)]),
- NeedsUpdate = case rebar_fetch:needs_update(AppDir, Source, State) of
+ AppDir = filename:join([DepsDir, rebar_utils:to_binary(Name)]),
+ {ok, AppInfo} = rebar_app_info:discover(AppDir),
+ NeedsUpdate = case rebar_fetch:needs_update(AppInfo, State) of
true -> "*";
false -> ""
end,
- ?CONSOLE("~s~s (locked package ~s)", [Name, NeedsUpdate, Vsn]);
+ ?CONSOLE("~ts~ts (locked package ~ts)", [Name, NeedsUpdate, Vsn]);
display_dep(State, {Name, Source, Level}) when is_tuple(Source), is_integer(Level) ->
DepsDir = rebar_dir:deps_dir(State),
- AppDir = filename:join([DepsDir, ec_cnv:to_binary(Name)]),
- NeedsUpdate = case rebar_fetch:needs_update(AppDir, Source, State) of
+ AppDir = filename:join([DepsDir, rebar_utils:to_binary(Name)]),
+ {ok, AppInfo} = rebar_app_info:discover(AppDir),
+ NeedsUpdate = case rebar_fetch:needs_update(AppInfo, State) of
true -> "*";
false -> ""
end,
- ?CONSOLE("~s~s (locked ~s source)", [Name, NeedsUpdate, type(Source)]).
+ ?CONSOLE("~ts~ts (locked ~ts source)", [Name, NeedsUpdate, type(Source)]).
type(Source) when is_tuple(Source) -> element(1, Source).
diff --git a/src/rebar_prv_deps_tree.erl b/src/rebar_prv_deps_tree.erl
index c0c8bab..d7b49c5 100644
--- a/src/rebar_prv_deps_tree.erl
+++ b/src/rebar_prv_deps_tree.erl
@@ -39,27 +39,23 @@ format_error(Reason) ->
%% Internal functions
print_deps_tree(SrcDeps, Verbose, State) ->
- Resources = rebar_state:resources(State),
D = lists:foldl(fun(App, Dict) ->
Name = rebar_app_info:name(App),
Vsn = rebar_app_info:original_vsn(App),
- AppDir = rebar_app_info:dir(App),
- Vsn1 = rebar_utils:vcs_vsn(Vsn, AppDir, Resources),
+ Vsn1 = rebar_utils:vcs_vsn(App, Vsn, State),
Source = rebar_app_info:source(App),
Parent = rebar_app_info:parent(App),
dict:append_list(Parent, [{Name, Vsn1, Source}], Dict)
end, dict:new(), SrcDeps),
ProjectAppNames = [{rebar_app_info:name(App)
- ,rebar_utils:vcs_vsn(rebar_app_info:original_vsn(App), rebar_app_info:dir(App), Resources)
+ ,rebar_utils:vcs_vsn(App, rebar_app_info:original_vsn(App), State)
,project} || App <- rebar_state:project_apps(State)],
- io:setopts([{encoding, unicode}]),
case dict:find(root, D) of
{ok, Children} ->
print_children("", lists:keysort(1, Children++ProjectAppNames), D, Verbose);
error ->
print_children("", lists:keysort(1, ProjectAppNames), D, Verbose)
- end,
- io:setopts([{encoding, latin1}]).
+ end.
print_children(_, [], _, _) ->
ok;
@@ -90,7 +86,7 @@ type(Source, Verbose) when is_tuple(Source) ->
{pkg, _} ->
"hex package";
{Other, false} ->
- io_lib:format("~s repo", [Other]);
+ io_lib:format("~ts repo", [Other]);
{_, true} ->
- io_lib:format("~s", [element(2, Source)])
+ io_lib:format("~ts", [element(2, Source)])
end.
diff --git a/src/rebar_prv_dialyzer.erl b/src/rebar_prv_dialyzer.erl
index 82d2d07..585051c 100644
--- a/src/rebar_prv_dialyzer.erl
+++ b/src/rebar_prv_dialyzer.erl
@@ -47,26 +47,33 @@ desc() ->
"`plt_apps` - the strategy for determining the applications which included "
"in the PLT file, `top_level_deps` to include just the direct dependencies "
"or `all_deps` to include all nested dependencies*\n"
- "`plt_extra_apps` - a list of applications to include in the PLT file**\n"
+ "`plt_extra_apps` - a list of extra applications to include in the PLT "
+ "file\n"
+ "`plt_extra_mods` - a list of extra modules to include in the PLT file\n"
"`plt_location` - the location of the PLT file, `local` to store in the "
"profile's base directory (default) or a custom directory.\n"
- "`plt_prefix` - the prefix to the PLT file, defaults to \"rebar3\"***\n"
+ "`plt_prefix` - the prefix to the PLT file, defaults to \"rebar3\"**\n"
"`base_plt_apps` - a list of applications to include in the base "
- "PLT file****\n"
+ "PLT file***\n"
+ "`base_plt_mods` - a list of modules to include in the base "
+ "PLT file***\n"
"`base_plt_location` - the location of base PLT file, `global` to store in "
- "$HOME/.cache/rebar3 (default) or a custom directory****\n"
+ "$HOME/.cache/rebar3 (default) or a custom directory***\n"
"`base_plt_prefix` - the prefix to the base PLT file, defaults to "
- "\"rebar3\"*** ****\n"
+ "\"rebar3\"** ***\n"
+ "`exclude_apps` - a list of applications to exclude from PLT files and "
+ "success typing analysis; `plt_extra_mods` and `base_plt_mods` can add "
+ "modules from excluded applications\n"
+ "`exclude_mods` - a list of modules to exclude from PLT files and "
+ "success typing analysis\n"
"\n"
"For example, to warn on unmatched returns: \n"
"{dialyzer, [{warnings, [unmatched_returns]}]}.\n"
"\n"
"*The direct dependent applications are listed in `applications` and "
"`included_applications` of their .app files.\n"
- "**The applications in `base_plt_apps` will be added to the "
- "list. \n"
- "***PLT files are named \"<prefix>_<otp_release>_plt\".\n"
- "****The base PLT is a PLT containing the core applications often required "
+ "**PLT files are named \"<prefix>_<otp_release>_plt\".\n"
+ "***The base PLT is a PLT containing the core applications often required "
"for a project's PLT. One base PLT is created per OTP version and "
"stored in `base_plt_location`. A base PLT is used to build project PLTs."
"\n".
@@ -78,7 +85,8 @@ short_desc() ->
do(State) ->
maybe_fix_env(),
?INFO("Dialyzer starting, this may take a while...", []),
- code:add_pathsa(rebar_state:code_paths(State, all_deps)),
+ rebar_paths:unset_paths([plugins], State), % no plugins in analysis
+ rebar_paths:set_paths([deps], State),
Plt = get_plt(State),
try
@@ -90,10 +98,14 @@ do(State) ->
?PRV_ERROR({dialyzer_warnings, Warnings});
throw:{unknown_application, _} = Error ->
?PRV_ERROR(Error);
+ throw:{unknown_module, _} = Error ->
+ ?PRV_ERROR(Error);
+ throw:{duplicate_module, _, _, _} = Error ->
+ ?PRV_ERROR(Error);
throw:{output_file_error, _, _} = Error ->
?PRV_ERROR(Error)
after
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default))
+ rebar_paths:set_paths([plugins,deps], State)
end.
%% This is used to workaround dialyzer quirk discussed here
@@ -105,14 +117,18 @@ maybe_fix_env() ->
-spec format_error(any()) -> iolist().
format_error({error_processing_apps, Error}) ->
- io_lib:format("Error in dialyzing apps: ~s", [Error]);
+ io_lib:format("Error in dialyzing apps: ~ts", [Error]);
format_error({dialyzer_warnings, Warnings}) ->
- io_lib:format("Warnings occured running dialyzer: ~b", [Warnings]);
+ io_lib:format("Warnings occurred running dialyzer: ~b", [Warnings]);
format_error({unknown_application, App}) ->
- io_lib:format("Could not find application: ~s", [App]);
+ io_lib:format("Could not find application: ~ts", [App]);
+format_error({unknown_module, Mod}) ->
+ io_lib:format("Could not find module: ~ts", [Mod]);
+format_error({duplicate_module, Mod, File1, File2}) ->
+ io_lib:format("Duplicates of module ~ts: ~ts ~ts", [Mod, File1, File2]);
format_error({output_file_error, File, Error}) ->
Error1 = file:format_error(Error),
- io_lib:format("Failed to write to ~s: ~s", [File, Error1]);
+ io_lib:format("Failed to write to ~ts: ~ts", [File, Error1]);
format_error(Reason) ->
io_lib:format("~p", [Reason]).
@@ -140,7 +156,7 @@ do(State, Plt) ->
0 ->
{ok, State2};
TotalWarnings ->
- ?INFO("Warnings written to ~s", [Output]),
+ ?INFO("Warnings written to ~ts", [Output]),
throw({dialyzer_warnings, TotalWarnings})
end.
@@ -178,45 +194,45 @@ do_update_proj_plt(State, Plt, Output) ->
end.
proj_plt_files(State) ->
- BasePltApps = get_config(State, base_plt_apps, default_plt_apps()),
- PltApps = get_config(State, plt_extra_apps, []),
+ BasePltApps = base_plt_apps(State),
+ PltApps = get_config(State, plt_extra_apps, []) ++ BasePltApps,
+ BasePltMods = get_config(State, base_plt_mods, []),
+ PltMods = get_config(State, plt_extra_mods, []) ++ BasePltMods,
+ Apps = proj_apps(State),
+ DepApps = proj_deps(State),
+ get_files(State, DepApps ++ PltApps, Apps -- PltApps, PltMods, []).
+
+proj_apps(State) ->
+ [ec_cnv:to_atom(rebar_app_info:name(App)) ||
+ App <- rebar_state:project_apps(State)].
+
+proj_deps(State) ->
Apps = rebar_state:project_apps(State),
DepApps = lists:flatmap(fun rebar_app_info:applications/1, Apps),
- DepApps1 =
- case get_config(State, plt_apps, top_level_deps) of
- top_level_deps -> DepApps;
- all_deps -> collect_nested_dependent_apps(DepApps)
- end,
- get_plt_files(BasePltApps ++ PltApps ++ DepApps1, Apps).
-
-default_plt_apps() ->
- [erts,
- crypto,
- kernel,
- stdlib].
-
-get_plt_files(DepApps, Apps) ->
+ case get_config(State, plt_apps, top_level_deps) of
+ top_level_deps -> DepApps;
+ all_deps -> collect_nested_dependent_apps(DepApps)
+ end.
+
+get_files(State, Apps, SkipApps, Mods, SkipMods) ->
?INFO("Resolving files...", []),
- get_plt_files(DepApps, Apps, [], []).
+ ExcludeApps = get_config(State, exclude_apps, []),
+ Files = apps_files(Apps, ExcludeApps ++ SkipApps, dict:new()),
+ ExcludeMods = get_config(State, exclude_mods, []),
+ Files2 = mods_files(Mods, ExcludeMods ++ SkipMods, Files),
+ dict:fold(fun(_, File, Acc) -> [File | Acc] end, [], Files2).
-get_plt_files([], _, _, Files) ->
+apps_files([], _, Files) ->
Files;
-get_plt_files([AppName | DepApps], Apps, PltApps, Files) ->
- case lists:member(AppName, PltApps) orelse app_member(AppName, Apps) of
+apps_files([AppName | DepApps], SkipApps, Files) ->
+ case lists:member(AppName, SkipApps) of
true ->
- get_plt_files(DepApps, Apps, PltApps, Files);
+ apps_files(DepApps, SkipApps, Files);
false ->
- Files2 = app_files(AppName),
- ?DEBUG("~s files: ~p", [AppName, Files2]),
- get_plt_files(DepApps, Apps, [AppName | PltApps], Files2 ++ Files)
- end.
-
-app_member(AppName, Apps) ->
- case rebar_app_utils:find(ec_cnv:to_binary(AppName), Apps) of
- {ok, _App} ->
- true;
- error ->
- false
+ AppFiles = app_files(AppName),
+ ?DEBUG("~ts modules: ~p", [AppName, dict:fetch_keys(AppFiles)]),
+ Files2 = merge_files(Files, AppFiles),
+ apps_files(DepApps, [AppName | SkipApps], Files2)
end.
app_files(AppName) ->
@@ -244,9 +260,41 @@ check_ebin(EbinDir) ->
end.
ebin_files(EbinDir) ->
- Wildcard = "*" ++ code:objfile_extension(),
- [filename:join(EbinDir, File) ||
- File <- filelib:wildcard(Wildcard, EbinDir)].
+ Ext = code:objfile_extension(),
+ Wildcard = "*" ++ Ext,
+ Files = filelib:wildcard(Wildcard, EbinDir),
+ Store = fun(File, Mods) ->
+ Mod = list_to_atom(filename:basename(File, Ext)),
+ Absname = filename:join(EbinDir, File),
+ dict:store(Mod, Absname, Mods)
+ end,
+ lists:foldl(Store, dict:new(), Files).
+
+merge_files(Files1, Files2) ->
+ Duplicate = fun(Mod, File1, File2) ->
+ throw({duplicate_module, Mod, File1, File2})
+ end,
+ dict:merge(Duplicate, Files1, Files2).
+
+mods_files(Mods, SkipMods, Files) ->
+ Keep = fun(File) -> File end,
+ Ensure = fun(Mod, Acc) ->
+ case lists:member(Mod, SkipMods) of
+ true ->
+ Acc;
+ false ->
+ dict:update(Mod, Keep, mod_file(Mod), Acc)
+ end
+ end,
+ Files2 = lists:foldl(Ensure, Files, Mods),
+ lists:foldl(fun dict:erase/2, Files2, SkipMods).
+
+mod_file(Mod) ->
+ File = atom_to_list(Mod) ++ code:objfile_extension(),
+ case code:where_is_file(File) of
+ non_existing -> throw({unknown_module, Mod});
+ Absname -> Absname
+ end.
read_plt(_State, Plt) ->
Vsn = dialyzer_version(),
@@ -260,6 +308,8 @@ read_plt(_State, Plt) ->
Result;
{error, no_such_file} ->
error;
+ {error, not_valid} ->
+ error;
{error, read_error} ->
Error = io_lib:format("Could not read the PLT file ~p", [Plt]),
throw({dialyzer_error, Error})
@@ -353,9 +403,12 @@ get_base_plt(State) ->
end.
base_plt_files(State) ->
- BasePltApps = get_config(State, base_plt_apps, default_plt_apps()),
- Apps = rebar_state:project_apps(State),
- get_plt_files(BasePltApps, Apps).
+ BasePltApps = base_plt_apps(State),
+ BasePltMods = get_config(State, base_plt_mods, []),
+ get_files(State, BasePltApps, [], BasePltMods, []).
+
+base_plt_apps(State) ->
+ get_config(State, base_plt_apps, [erts, crypto, kernel, stdlib]).
update_base_plt(State, BasePlt, Output, BaseFiles) ->
case read_plt(State, BasePlt) of
@@ -392,9 +445,8 @@ succ_typings(State, Plt, Output) ->
false ->
{0, State};
_ ->
- Apps = rebar_state:project_apps(State),
?INFO("Doing success typing analysis...", []),
- Files = apps_to_files(Apps),
+ Files = proj_files(State),
succ_typings(State, Plt, Output, Files)
end.
@@ -410,14 +462,13 @@ succ_typings(State, Plt, Output, Files) ->
{init_plt, Plt}],
run_dialyzer(State, Opts, Output).
-apps_to_files(Apps) ->
- ?INFO("Resolving files...", []),
- [File || App <- Apps,
- File <- app_to_files(App)].
-
-app_to_files(App) ->
- AppName = ec_cnv:to_atom(rebar_app_info:name(App)),
- app_files(AppName).
+proj_files(State) ->
+ Apps = proj_apps(State),
+ BasePltApps = get_config(State, base_plt_apps, []),
+ PltApps = get_config(State, plt_extra_apps, []) ++ BasePltApps,
+ BasePltMods = get_config(State, base_plt_mods, []),
+ PltMods = get_config(State, plt_extra_mods, []) ++ BasePltMods,
+ get_files(State, Apps, PltApps, [], PltMods).
run_dialyzer(State, Opts, Output) ->
%% dialyzer may return callgraph warnings when get_warnings is false
@@ -428,7 +479,8 @@ run_dialyzer(State, Opts, Output) ->
{check_plt, false} |
Opts],
?DEBUG("Running dialyzer with options: ~p~n", [Opts2]),
- Warnings = format_warnings(Output, dialyzer:run(Opts2)),
+ Warnings = format_warnings(rebar_state:opts(State),
+ Output, dialyzer:run(Opts2)),
{Warnings, State};
false ->
Opts2 = [{warnings, no_warnings()},
@@ -447,14 +499,14 @@ legacy_warnings(Warnings) ->
Warnings
end.
-format_warnings(Output, Warnings) ->
- Warnings1 = rebar_dialyzer_format:format_warnings(Warnings),
+format_warnings(Opts, Output, Warnings) ->
+ Warnings1 = rebar_dialyzer_format:format_warnings(Opts, Warnings),
console_warnings(Warnings1),
file_warnings(Output, Warnings),
length(Warnings).
console_warnings(Warnings) ->
- _ = [?CONSOLE("~s", [Warning]) || Warning <- Warnings],
+ _ = [?CONSOLE("~ts", [Warning]) || Warning <- Warnings],
ok.
file_warnings(_, []) ->
@@ -514,7 +566,7 @@ collect_nested_dependent_apps(App, Seen) ->
dialyzer_version() ->
_ = application:load(dialyzer),
{ok, Vsn} = application:get_key(dialyzer, vsn),
- case string:tokens(Vsn, ".") of
+ case rebar_string:lexemes(Vsn, ".") of
[Major, Minor] ->
version_tuple(Major, Minor, "0");
[Major, Minor, Patch | _] ->
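
The rewritten PLT resolution above tracks inputs per module: apps_files/3 and mods_files/3 build a dict of module name to .beam path, and merge_files/2 throws on duplicates. It is driven by options read through get_config/3; assuming, as in the rest of this provider, that they sit under the dialyzer key of rebar.config, a rough sketch of the knobs these hunks touch is:

    {dialyzer, [{plt_apps, top_level_deps},              %% or all_deps
                {plt_extra_apps, [ssl]},
                {plt_extra_mods, [some_extra_mod]},      %% placeholder module name
                {base_plt_apps, [erts, kernel, stdlib, crypto]},
                {base_plt_mods, []},
                {exclude_mods, [some_generated_mod]}]}.  %% placeholder module name
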
diff --git a/src/rebar_prv_do.erl b/src/rebar_prv_do.erl
index f850135..5f7aa12 100644
--- a/src/rebar_prv_do.erl
+++ b/src/rebar_prv_do.erl
@@ -44,13 +44,31 @@ do(State) ->
do_tasks(Tasks, State)
end.
+-spec do_tasks(list(Task), State) -> Res when
+ Task :: {string(), string()} |
+ {string(), atom()} |
+ {atom(), atom(), string()},
+ State :: rebar_state:t(),
+ Res :: {ok, rebar_state:t()} |
+ {error, term()}.
do_tasks([], State) ->
{ok, State};
-do_tasks([{TaskStr, Args}|Tail], State) ->
+do_tasks([{TaskStr, Args} | Tail], State) when is_list(Args) ->
Task = list_to_atom(TaskStr),
State1 = rebar_state:set(State, task, Task),
State2 = rebar_state:command_args(State1, Args),
Namespace = rebar_state:namespace(State2),
+ do_task(TaskStr, Args, Tail, State, Namespace);
+do_tasks([{Namespace, Task} | Tail], State) ->
+ do_task(atom_to_list(Task), [], Tail, State, Namespace);
+do_tasks([{Namespace, Task, Args} | Tail], State)
+ when is_atom(Namespace), is_atom(Task) ->
+ do_task(atom_to_list(Task), Args, Tail, State, Namespace).
+
+do_task(TaskStr, Args, Tail, State, Namespace) ->
+ Task = list_to_atom(TaskStr),
+ State1 = rebar_state:set(State, task, Task),
+ State2 = rebar_state:command_args(State1, Args),
case Namespace of
default ->
%% The first task we hit might be a namespace!
@@ -65,7 +83,8 @@ do_tasks([{TaskStr, Args}|Tail], State) ->
_ ->
%% We're already in a non-default namespace, check the
%% task directly.
- case rebar_core:process_command(State2, Task) of
+ State3 = rebar_state:namespace(State2, Namespace),
+ case rebar_core:process_command(State3, Task) of
{ok, FinalState} when Tail =:= [] ->
{ok, FinalState};
{ok, _} ->
@@ -75,7 +94,6 @@ do_tasks([{TaskStr, Args}|Tail], State) ->
end
end.
-
-spec format_error(any()) -> iolist().
format_error(Reason) ->
io_lib:format("~p", [Reason]).
diff --git a/src/rebar_prv_edoc.erl b/src/rebar_prv_edoc.erl
index 6cefe14..c78296a 100644
--- a/src/rebar_prv_edoc.erl
+++ b/src/rebar_prv_edoc.erl
@@ -7,6 +7,7 @@
format_error/1]).
-include("rebar.hrl").
+-include_lib("providers/include/providers.hrl").
-define(PROVIDER, edoc).
-define(DEPS, [compile]).
@@ -28,30 +29,64 @@ init(State) ->
{profiles, [docs]}])),
{ok, State1}.
--spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
+-spec do(rebar_state:t()) ->
+ {ok, rebar_state:t()} | {error, string()} | {error, {module(), any()}}.
do(State) ->
- code:add_pathsa(rebar_state:code_paths(State, all_deps)),
+ rebar_paths:set_paths([deps, plugins], State),
ProjectApps = rebar_state:project_apps(State),
Providers = rebar_state:providers(State),
- EDocOpts = rebar_state:get(State, edoc_opts, []),
+ EdocOpts = rebar_state:get(State, edoc_opts, []),
+ ShouldAccPaths = not has_configured_paths(EdocOpts),
Cwd = rebar_state:dir(State),
rebar_hooks:run_all_hooks(Cwd, pre, ?PROVIDER, Providers, State),
- lists:foreach(fun(AppInfo) ->
- rebar_hooks:run_all_hooks(Cwd, pre, ?PROVIDER, Providers, AppInfo, State),
- AppName = ec_cnv:to_list(rebar_app_info:name(AppInfo)),
- ?INFO("Running edoc for ~s", [AppName]),
- AppDir = rebar_app_info:dir(AppInfo),
- ok = edoc:application(list_to_atom(AppName), AppDir, EDocOpts),
- rebar_hooks:run_all_hooks(Cwd, post, ?PROVIDER, Providers, AppInfo, State)
- end, ProjectApps),
+ Res = try
+ lists:foldl(fun(AppInfo, EdocOptsAcc) ->
+ rebar_hooks:run_all_hooks(Cwd, pre, ?PROVIDER, Providers, AppInfo, State),
+ AppName = rebar_utils:to_list(rebar_app_info:name(AppInfo)),
+ ?INFO("Running edoc for ~ts", [AppName]),
+ AppDir = rebar_app_info:dir(AppInfo),
+ AppRes = (catch edoc:application(list_to_atom(AppName), AppDir, EdocOptsAcc)),
+ rebar_hooks:run_all_hooks(Cwd, post, ?PROVIDER, Providers, AppInfo, State),
+ case {AppRes, ShouldAccPaths} of
+ {ok, true} ->
+ %% edoc wants / on all OSes
+ add_to_paths(EdocOptsAcc, AppDir++"/doc");
+ {ok, false} ->
+ EdocOptsAcc;
+ {{'EXIT', error}, _} ->
+ %% EDoc is not very descriptive
+ %% in terms of failures
+ throw({app_failed, AppName})
+ end
+ end, EdocOpts, ProjectApps)
+ catch
+ {app_failed, AppName} ->
+ {app_failed, AppName}
+ end,
rebar_hooks:run_all_hooks(Cwd, post, ?PROVIDER, Providers, State),
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)),
- {ok, State}.
+ rebar_paths:set_paths([plugins, deps], State),
+ case Res of
+ {app_failed, App} ->
+ ?PRV_ERROR({app_failed, App});
+ _ ->
+ {ok, State}
+ end.
-spec format_error(any()) -> iolist().
+format_error({app_failed, AppName}) ->
+ io_lib:format("Failed to generate documentation for app '~ts'", [AppName]);
format_error(Reason) ->
io_lib:format("~p", [Reason]).
%% ===================================================================
%% Internal functions
%% ===================================================================
+has_configured_paths(EdocOpts) ->
+ proplists:get_value(dir, EdocOpts) =/= undefined.
+
+add_to_paths([], Path) ->
+ [{doc_path, [Path]}];
+add_to_paths([{doc_path, Paths}|T], Path) ->
+ [{doc_path, [Path | Paths]} | T];
+add_to_paths([H|T], Path) ->
+ [H | add_to_paths(T, Path)].
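
When edoc_opts carries no explicit dir, each application's freshly generated doc directory is folded back into doc_path so the next application's run can resolve cross-references. Straight from the add_to_paths/2 clauses above (directory names are made up):

    add_to_paths([], "app_a/doc"),
    %% => [{doc_path, ["app_a/doc"]}]
    add_to_paths([{doc_path, ["app_a/doc"]}], "app_b/doc").
    %% => [{doc_path, ["app_b/doc", "app_a/doc"]}]
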
diff --git a/src/rebar_prv_escriptize.erl b/src/rebar_prv_escriptize.erl
index d8704f6..fceb65e 100644
--- a/src/rebar_prv_escriptize.erl
+++ b/src/rebar_prv_escriptize.erl
@@ -61,8 +61,11 @@ desc() ->
"the project's and its dependencies' BEAM files.".
do(State) ->
+ Providers = rebar_state:providers(State),
+ Cwd = rebar_state:dir(State),
+ rebar_hooks:run_project_and_app_hooks(Cwd, pre, ?PROVIDER, Providers, State),
?INFO("Building escript...", []),
- case rebar_state:get(State, escript_main_app, undefined) of
+ Res = case rebar_state:get(State, escript_main_app, undefined) of
undefined ->
case rebar_state:project_apps(State) of
[App] ->
@@ -72,18 +75,24 @@ do(State) ->
end;
Name ->
AllApps = rebar_state:all_deps(State)++rebar_state:project_apps(State),
- {ok, AppInfo} = rebar_app_utils:find(ec_cnv:to_binary(Name), AllApps),
- escriptize(State, AppInfo)
- end.
+ case rebar_app_utils:find(rebar_utils:to_binary(Name), AllApps) of
+ {ok, AppInfo} ->
+ escriptize(State, AppInfo);
+ _ ->
+ ?PRV_ERROR({bad_name, Name})
+ end
+ end,
+ rebar_hooks:run_project_and_app_hooks(Cwd, post, ?PROVIDER, Providers, State),
+ Res.
escriptize(State0, App) ->
AppName = rebar_app_info:name(App),
- AppNameStr = ec_cnv:to_list(AppName),
+ AppNameStr = rebar_utils:to_list(AppName),
%% Get the output filename for the escript -- this may include dirs
Filename = filename:join([rebar_dir:base_dir(State0), "bin",
rebar_state:get(State0, escript_name, AppName)]),
- ?DEBUG("Creating escript file ~s", [Filename]),
+ ?DEBUG("Creating escript file ~ts", [Filename]),
ok = filelib:ensure_dir(Filename),
State = rebar_state:escript_path(State0, Filename),
@@ -105,9 +114,9 @@ escriptize(State0, App) ->
EbinFiles = usort(load_files(EbinPrefix, "*", "ebin")),
ExtraFiles = usort(InclBeams ++ InclExtra),
- Files = get_nonempty(EbinFiles ++ ExtraFiles),
+ Files = get_nonempty(EbinFiles ++ (ExtraFiles -- EbinFiles)), % drop dupes
- DefaultEmuArgs = ?FMT("%%! -escript main ~s -pz ~s/~s/ebin\n",
+ DefaultEmuArgs = ?FMT("%%! -escript main ~ts -pz ~ts/~ts/ebin\n",
[AppNameStr, AppNameStr, AppNameStr]),
EscriptSections =
[ {shebang,
@@ -121,9 +130,15 @@ escriptize(State0, App) ->
throw(?PRV_ERROR({escript_creation_failed, AppName, EscriptError}))
end,
- %% Finally, update executable perms for our script
- {ok, #file_info{mode = Mode}} = file:read_file_info(Filename),
- ok = file:change_mode(Filename, Mode bor 8#00111),
+ %% Finally, update executable perms for our script on *nix or write out
+ %% script files on win32
+ case os:type() of
+ {unix, _} ->
+ {ok, #file_info{mode = Mode}} = file:read_file_info(Filename),
+ ok = file:change_mode(Filename, Mode bor 8#00111);
+ {win32, _} ->
+ write_windows_script(Filename)
+ end,
{ok, State}.
-spec format_error(any()) -> iolist().
@@ -148,7 +163,7 @@ get_apps_beams(Apps, AllApps) ->
get_apps_beams([], _, Acc) ->
Acc;
get_apps_beams([App | Rest], AllApps, Acc) ->
- case rebar_app_utils:find(ec_cnv:to_binary(App), AllApps) of
+ case rebar_app_utils:find(rebar_utils:to_binary(App), AllApps) of
{ok, App1} ->
OutDir = filename:absname(rebar_app_info:ebin_dir(App1)),
Beams = get_app_beams(App, OutDir),
@@ -179,7 +194,8 @@ load_files(Wildcard, Dir) ->
load_files(Prefix, Wildcard, Dir) ->
[read_file(Prefix, Filename, Dir)
- || Filename <- filelib:wildcard(Wildcard, Dir)].
+ || Filename <- filelib:wildcard(Wildcard, Dir),
+ not filelib:is_dir(filename:join(Dir, Filename))].
read_file(Prefix, Filename, Dir) ->
Filename1 = case Prefix of
@@ -220,7 +236,7 @@ get_nonempty(Files) ->
[{FName,FBin} || {FName,FBin} <- Files, FBin =/= <<>>].
find_deps(AppNames, AllApps) ->
- BinAppNames = [ec_cnv:to_binary(Name) || Name <- AppNames],
+ BinAppNames = [rebar_utils:to_binary(Name) || Name <- AppNames],
[ec_cnv:to_atom(Name) ||
Name <- find_deps_of_deps(BinAppNames, AllApps, BinAppNames)].
@@ -230,9 +246,11 @@ find_deps_of_deps([Name|Names], Apps, Acc) ->
?DEBUG("processing ~p", [Name]),
{ok, App} = rebar_app_utils:find(Name, Apps),
DepNames = proplists:get_value(applications, rebar_app_info:app_details(App), []),
- BinDepNames = [ec_cnv:to_binary(Dep) || Dep <- DepNames,
+ BinDepNames = [rebar_utils:to_binary(Dep) || Dep <- DepNames,
%% ignore system libs; shouldn't include them.
- not lists:prefix(code:root_dir(), code:lib_dir(Dep))]
+ DepDir <- [code:lib_dir(Dep)],
+ DepDir =:= {error, bad_name} orelse % those are all local
+ not lists:prefix(code:root_dir(), DepDir)]
-- ([Name|Names]++Acc), % avoid already seen deps
?DEBUG("new deps of ~p found to be ~p", [Name, BinDepNames]),
find_deps_of_deps(BinDepNames ++ Names, Apps, BinDepNames ++ Acc).
@@ -247,3 +265,12 @@ def(Rm, State, Key, Default) ->
rm_newline(String) ->
[C || C <- String, C =/= $\n].
+
+write_windows_script(Target) ->
+ CmdPath = unicode:characters_to_list(Target) ++ ".cmd",
+ CmdScript=
+ "@echo off\r\n"
+ "setlocal\r\n"
+ "set rebarscript=%~f0\r\n"
+ "escript.exe \"%rebarscript:.cmd=%\" %*\r\n",
+ ok = file:write_file(CmdPath, CmdScript).
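
Besides the usual chmod on *nix, escriptize now drops a .cmd shim next to the escript on win32 so it can be launched straight from cmd.exe. Assuming the default base_dir and an app (and therefore escript_name) called myapp, the result is roughly:

    _build/default/bin/myapp        %% the escript archive itself
    _build/default/bin/myapp.cmd    %% shim that re-runs it as: escript.exe "...\myapp" %*
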
diff --git a/src/rebar_prv_eunit.erl b/src/rebar_prv_eunit.erl
index 942fd10..f120926 100644
--- a/src/rebar_prv_eunit.erl
+++ b/src/rebar_prv_eunit.erl
@@ -18,6 +18,8 @@
%% we need to modify app_info state before compile
-define(DEPS, [lock]).
+-define(DEFAULT_TEST_REGEX, "^(?!\\._).*\\.erl\$").
+
%% ===================================================================
%% Public API
%% ===================================================================
@@ -52,7 +54,7 @@ do(State, Tests) ->
?INFO("Performing EUnit tests...", []),
setup_name(State),
- rebar_utils:update_code(rebar_state:code_paths(State, all_deps), [soft_purge]),
+ rebar_paths:set_paths([deps, plugins], State),
%% Run eunit provider prehooks
Providers = rebar_state:providers(State),
@@ -65,14 +67,14 @@ do(State, Tests) ->
{ok, State1} ->
%% Run eunit provider posthooks
rebar_hooks:run_project_and_app_hooks(Cwd, post, ?PROVIDER, Providers, State1),
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)),
+ rebar_paths:set_paths([plugins, deps], State),
{ok, State1};
Error ->
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)),
+ rebar_paths:set_paths([plugins, deps], State),
Error
end;
Error ->
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)),
+ rebar_paths:set_paths([plugins, deps], State),
Error
end.
@@ -81,13 +83,16 @@ run_tests(State, Tests) ->
EUnitOpts = resolve_eunit_opts(State),
?DEBUG("eunit_tests ~p", [T]),
?DEBUG("eunit_opts ~p", [EUnitOpts]),
- Result = eunit:test(T, EUnitOpts),
- ok = maybe_write_coverdata(State),
- case handle_results(Result) of
- {error, Reason} ->
- ?PRV_ERROR(Reason);
- ok ->
- {ok, State}
+ try eunit:test(T, EUnitOpts) of
+ Result ->
+ ok = maybe_write_coverdata(State),
+ case handle_results(Result) of
+ {error, Reason} ->
+ ?PRV_ERROR(Reason);
+ ok ->
+ {ok, State}
+ end
+ catch error:badarg -> ?PRV_ERROR({error, badarg})
end.
-spec format_error(any()) -> iolist().
@@ -136,7 +141,8 @@ resolve(Flag, RawOpts) -> resolve(Flag, Flag, RawOpts).
resolve(Flag, EUnitKey, RawOpts) ->
case proplists:get_value(Flag, RawOpts) of
undefined -> [];
- Args -> lists:map(fun(Arg) -> normalize(EUnitKey, Arg) end, string:tokens(Args, [$,]))
+ Args -> lists:map(fun(Arg) -> normalize(EUnitKey, Arg) end,
+ rebar_string:lexemes(Args, [$,]))
end.
normalize(Key, Value) when Key == dir; Key == file -> {Key, Value};
@@ -151,7 +157,6 @@ cfg_tests(State) ->
?PRV_ERROR({badconfig, {"Value `~p' of option `~p' must be a list", {Wrong, eunit_tests}}})
end.
-select_tests(_State, _ProjectApps, {error, _} = Error, _) -> Error;
select_tests(_State, _ProjectApps, _, {error, _} = Error) -> Error;
select_tests(State, ProjectApps, [], []) -> {ok, default_tests(State, ProjectApps)};
select_tests(_State, _ProjectApps, [], Tests) -> {ok, Tests};
@@ -174,34 +179,38 @@ set_apps([App|Rest], Acc) ->
set_modules(Apps, State) -> set_modules(Apps, State, {[], []}).
set_modules([], State, {AppAcc, TestAcc}) ->
- TestSrc = gather_src([filename:join([rebar_state:dir(State), "test"])]),
+ Regex = rebar_state:get(State, eunit_test_regex, ?DEFAULT_TEST_REGEX),
+ BareTestDir = [filename:join([rebar_state:dir(State), "test"])],
+ TestSrc = gather_src(BareTestDir, Regex),
dedupe_tests({AppAcc, TestAcc ++ TestSrc});
set_modules([App|Rest], State, {AppAcc, TestAcc}) ->
F = fun(Dir) -> filename:join([rebar_app_info:dir(App), Dir]) end,
AppDirs = lists:map(F, rebar_dir:src_dirs(rebar_app_info:opts(App), ["src"])),
- AppSrc = gather_src(AppDirs),
+ Regex = rebar_state:get(State, eunit_test_regex, ?DEFAULT_TEST_REGEX),
+ AppSrc = gather_src(AppDirs, Regex),
TestDirs = [filename:join([rebar_app_info:dir(App), "test"])],
- TestSrc = gather_src(TestDirs),
+ TestSrc = gather_src(TestDirs, Regex),
set_modules(Rest, State, {AppSrc ++ AppAcc, TestSrc ++ TestAcc}).
-gather_src(Dirs) -> gather_src(Dirs, []).
+gather_src(Dirs, Regex) -> gather_src(Dirs, Regex, []).
-gather_src([], Srcs) -> Srcs;
-gather_src([Dir|Rest], Srcs) ->
- gather_src(Rest, Srcs ++ rebar_utils:find_files(Dir, "^[^._].*\\.erl\$", true)).
+gather_src([], _Regex, Srcs) -> Srcs;
+gather_src([Dir|Rest], Regex, Srcs) ->
+ gather_src(Rest, Regex, Srcs ++ rebar_utils:find_files(Dir, Regex, true)).
dedupe_tests({AppMods, TestMods}) ->
+ UniqueTestMods = lists:usort(TestMods) -- AppMods,
%% for each module in TestMods, create a test if there is not a module
%% in AppMods that will trigger it
- F = fun(Mod) ->
- M = filename:basename(Mod, ".erl"),
- MatchesTest = fun(Dir) -> filename:basename(Dir, ".erl") ++ "_tests" == M end,
+ F = fun(TestMod) ->
+ M = filename:rootname(filename:basename(TestMod)),
+ MatchesTest = fun(AppMod) -> filename:rootname(filename:basename(AppMod)) ++ "_tests" == M end,
case lists:any(MatchesTest, AppMods) of
false -> {true, {module, list_to_atom(M)}};
true -> false
end
end,
- lists:usort(rebar_utils:filtermap(F, TestMods)).
+ rebar_utils:filtermap(F, UniqueTestMods).
inject_eunit_state(State, {ok, Tests}) ->
Apps = rebar_state:project_apps(State),
@@ -306,19 +315,14 @@ maybe_inject_test_dir(State, AppAcc, [], Dir) ->
inject_test_dir(Opts, Dir) ->
%% append specified test targets to app defined `extra_src_dirs`
- ExtraSrcDirs = rebar_dir:extra_src_dirs(Opts),
+ ExtraSrcDirs = rebar_opts:get(Opts, extra_src_dirs, []),
rebar_opts:set(Opts, extra_src_dirs, ExtraSrcDirs ++ [Dir]).
compile({error, _} = Error) -> Error;
compile(State) ->
- case rebar_prv_compile:do(State) of
- %% successfully compiled apps
- {ok, S} ->
- ok = maybe_cover_compile(S),
- {ok, S};
- %% this should look like a compiler error, not an eunit error
- Error -> Error
- end.
+ {ok, S} = rebar_prv_compile:do(State),
+ ok = maybe_cover_compile(S),
+ {ok, S}.
validate_tests(State, {ok, Tests}) ->
gather_tests(fun(Elem) -> validate(State, Elem) end, Tests, []);
@@ -448,7 +452,7 @@ translate(State, [], {dir, Dir}) ->
translate(State, [], {file, FilePath}) ->
Dir = filename:dirname(FilePath),
File = filename:basename(FilePath),
- case rebar_file_utils:path_from_ancestor(Dir, rebar_app_info:dir(State)) of
+ case rebar_file_utils:path_from_ancestor(Dir, rebar_state:dir(State)) of
{ok, Path} -> {file, filename:join([rebar_dir:base_dir(State), "extras", Path, File])};
%% not relative, leave as is
{error, badparent} -> {file, FilePath}
@@ -468,7 +472,8 @@ maybe_write_coverdata(State) ->
true -> rebar_state:set(State, cover_enabled, true);
false -> State
end,
- rebar_prv_cover:maybe_write_coverdata(State1, ?PROVIDER).
+ Name = proplists:get_value(cover_export_name, RawOpts, ?PROVIDER),
+ rebar_prv_cover:maybe_write_coverdata(State1, Name).
handle_results(ok) -> ok;
handle_results(error) ->
@@ -480,6 +485,7 @@ eunit_opts(_State) ->
[{app, undefined, "app", string, help(app)},
{application, undefined, "application", string, help(app)},
{cover, $c, "cover", boolean, help(cover)},
+ {cover_export_name, undefined, "cover_export_name", string, help(cover_export_name)},
{dir, $d, "dir", string, help(dir)},
{file, $f, "file", string, help(file)},
{module, $m, "module", string, help(module)},
@@ -491,6 +497,7 @@ eunit_opts(_State) ->
help(app) -> "Comma separated list of application test suites to run. Equivalent to `[{application, App}]`.";
help(cover) -> "Generate cover data. Defaults to false.";
+help(cover_export_name) -> "Base name of the coverdata file to write";
help(dir) -> "Comma separated list of dirs to load tests from. Equivalent to `[{dir, Dir}]`.";
help(file) -> "Comma separated list of files to load tests from. Equivalent to `[{file, File}]`.";
help(module) -> "Comma separated list of modules to load tests from. Equivalent to `[{module, Module}]`.";
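
Test discovery is now driven by the eunit_test_regex value, falling back to ?DEFAULT_TEST_REGEX, whose lookahead skips files named ._* (e.g. macOS metadata files), and cover data can be written under a custom name. A hedged sketch of both knobs:

    %% rebar.config: only gather *_tests.erl files, for example
    {eunit_test_regex, "^.*_tests\\.erl$"}.

    %% command line
    rebar3 eunit --cover --cover_export_name my_run
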
diff --git a/src/rebar_prv_get_deps.erl b/src/rebar_prv_get_deps.erl
new file mode 100644
index 0000000..020e50b
--- /dev/null
+++ b/src/rebar_prv_get_deps.erl
@@ -0,0 +1,37 @@
+%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*-
+%% ex: ts=4 sw=4 et
+
+-module(rebar_prv_get_deps).
+
+-behaviour(provider).
+
+-export([init/1,
+ do/1,
+ format_error/1]).
+
+-define(PROVIDER, 'get-deps').
+-define(DEPS, [lock]).
+
+%% ===================================================================
+%% Public API
+%% ===================================================================
+
+-spec init(rebar_state:t()) -> {ok, rebar_state:t()}.
+init(State) ->
+ Provider = providers:create([{name, ?PROVIDER},
+ {module, ?MODULE},
+ {deps, ?DEPS},
+ {bare, true},
+ {example, "rebar3 get-deps"},
+ {short_desc, "Fetch dependencies."},
+ {desc, "Fetch project dependencies."},
+ {opts, []},
+ {profiles, []}]),
+ {ok, rebar_state:add_provider(State, Provider)}.
+
+-spec do(rebar_state:t()) -> {ok, rebar_state:t()}.
+do(State) -> {ok, State}.
+
+-spec format_error(any()) -> iolist().
+format_error(Reason) ->
+ io_lib:format("~p", [Reason]). \ No newline at end of file
diff --git a/src/rebar_prv_help.erl b/src/rebar_prv_help.erl
index c028264..f34c755 100644
--- a/src/rebar_prv_help.erl
+++ b/src/rebar_prv_help.erl
@@ -41,7 +41,10 @@ do(State) ->
[Name] -> % default namespace
task_help(default, list_to_atom(Name), State);
[Namespace, Name] ->
- task_help(list_to_atom(Namespace), list_to_atom(Name), State)
+ task_help(list_to_atom(Namespace), list_to_atom(Name), State);
+ _ ->
+ {error, "Too many arguments given. " ++
+ "Usage: rebar3 help [<namespace>] <task>"}
end.
-spec format_error(any()) -> iolist().
@@ -54,7 +57,7 @@ format_error(Reason) ->
help(State) ->
?CONSOLE("Rebar3 is a tool for working with Erlang projects.~n~n", []),
OptSpecList = rebar3:global_option_spec_list(),
- getopt:usage(OptSpecList, "rebar", "", []),
+ getopt:usage(OptSpecList, "rebar3", "", []),
?CONSOLE("~nSeveral tasks are available:~n", []),
providers:help(rebar_state:providers(State)),
diff --git a/src/rebar_prv_install_deps.erl b/src/rebar_prv_install_deps.erl
index a8a7ea0..068c4c8 100644
--- a/src/rebar_prv_install_deps.erl
+++ b/src/rebar_prv_install_deps.erl
@@ -101,39 +101,48 @@ do_(State) ->
{error, Reason}
end.
+%% @doc convert a given exception's payload into an io description.
-spec format_error(any()) -> iolist().
format_error({dep_app_not_found, AppDir, AppName}) ->
- io_lib:format("Dependency failure: Application ~s not found at the top level of directory ~s", [AppName, AppDir]);
+ io_lib:format("Dependency failure: Application ~ts not found at the top level of directory ~ts", [AppName, AppDir]);
format_error({load_registry_fail, Dep}) ->
- io_lib:format("Error loading registry to resolve version of ~s. Try fixing by running 'rebar3 update'", [Dep]);
+ io_lib:format("Error loading registry to resolve version of ~ts. Try fixing by running 'rebar3 update'", [Dep]);
format_error({bad_constraint, Name, Constraint}) ->
- io_lib:format("Unable to parse version for package ~s: ~s", [Name, Constraint]);
+ io_lib:format("Unable to parse version for package ~ts: ~ts", [Name, Constraint]);
format_error({parse_dep, Dep}) ->
io_lib:format("Failed parsing dep ~p", [Dep]);
format_error({not_rebar_package, Package, Version}) ->
- io_lib:format("Package not buildable with rebar3: ~s-~s", [Package, Version]);
+ io_lib:format("Package not buildable with rebar3: ~ts-~ts", [Package, Version]);
format_error({missing_package, Package, Version}) ->
- io_lib:format("Package not found in registry: ~s-~s", [Package, Version]);
+ io_lib:format("Package not found in registry: ~ts-~ts", [Package, Version]);
format_error({missing_package, Package}) ->
- io_lib:format("Package not found in registry: ~s", [Package]);
+ io_lib:format("Package not found in registry: ~ts", [Package]);
format_error({cycles, Cycles}) ->
Prints = [["applications: ",
- [io_lib:format("~s ", [Dep]) || Dep <- Cycle],
- "depend on each other~n"]
+ [io_lib:format("~ts ", [Dep]) || Dep <- Cycle],
+ "depend on each other\n"]
|| Cycle <- Cycles],
- ["Dependency cycle(s) detected:~n", Prints];
+ ["Dependency cycle(s) detected:\n", Prints];
format_error(Reason) ->
io_lib:format("~p", [Reason]).
-%% Allows other providers to install deps in a given profile
+%% @doc Allows other providers to install deps in a given profile
%% manually, outside of what is provided by rebar3's deps tuple.
+-spec handle_deps_as_profile(Profile, State, Deps, Upgrade) -> {Apps, State} when
+ Profile :: atom(),
+ State :: rebar_state:t(),
+ Deps :: [tuple() | atom() | binary()], % TODO: meta to source() | lock()
+ Upgrade :: boolean(),
+ Apps :: [rebar_app_info:t()].
handle_deps_as_profile(Profile, State, Deps, Upgrade) ->
Locks = [],
Level = 0,
DepsDir = profile_dep_dir(State, Profile),
Deps1 = rebar_app_utils:parse_deps(DepsDir, Deps, State, Locks, Level),
ProfileLevelDeps = [{Profile, Deps1, Level}],
- handle_profile_level(ProfileLevelDeps, [], sets:new(), Upgrade, Locks, State).
+ RootSeen = sets:from_list([rebar_app_info:name(AppInfo)
+ || AppInfo <- rebar_state:project_apps(State)]),
+ handle_profile_level(ProfileLevelDeps, [], RootSeen, RootSeen, Upgrade, Locks, State).
%% ===================================================================
%% Internal functions
@@ -146,7 +155,9 @@ deps_per_profile(Profiles, Upgrade, State) ->
Deps = lists:foldl(fun(Profile, DepAcc) ->
[parsed_profile_deps(State, Profile, Level) | DepAcc]
end, [], Profiles),
- handle_profile_level(Deps, [], sets:new(), Upgrade, Locks, State).
+ RootSeen = sets:from_list([rebar_app_info:name(AppInfo)
+ || AppInfo <- rebar_state:project_apps(State)]),
+ handle_profile_level(Deps, [], RootSeen, RootSeen, Upgrade, Locks, State).
parsed_profile_deps(State, Profile, Level) ->
ParsedDeps = rebar_state:get(State, {parsed_deps, Profile}, []),
@@ -155,17 +166,27 @@ parsed_profile_deps(State, Profile, Level) ->
%% Level-order traversal of all dependencies, across profiles.
%% If profiles x,y,z are present, then the traversal will go:
%% x0, y0, z0, x1, y1, z1, ..., xN, yN, zN.
-handle_profile_level([], Apps, _Seen, _Upgrade, _Locks, State) ->
+%%
+%% There are two 'seen' sets: one for the top-level apps (`RootSeen') and
+%% one for all dependencies (`Seen'). The former is used to know when
+%% to skip the resolving of dependencies altogether (since they're already
+%% top-level apps), while the latter is used to prevent reprocessing
+%% deps more than once.
+handle_profile_level([], Apps, _RootSeen, _Seen, _Upgrade, _Locks, State) ->
{Apps, State};
-handle_profile_level([{Profile, Deps, Level} | Rest], Apps, Seen, Upgrade, Locks, State) ->
+handle_profile_level([{Profile, Deps, Level} | Rest], Apps, RootSeen, Seen, Upgrade, Locks, State) ->
+ Deps0 = [rebar_app_utils:expand_deps_sources(Dep, State)
+ || Dep <- Deps,
+ %% skip top-level apps being double-declared
+ not sets:is_element(rebar_app_info:name(Dep), RootSeen)],
{Deps1, Apps1, State1, Seen1} =
- update_deps(Profile, Level, Deps, Apps
+ update_deps(Profile, Level, Deps0, Apps
,State, Upgrade, Seen, Locks),
Deps2 = case Deps1 of
[] -> Rest;
_ -> Rest ++ [{Profile, Deps1, Level+1}]
end,
- handle_profile_level(Deps2, Apps1, sets:union(Seen, Seen1), Upgrade, Locks, State1).
+ handle_profile_level(Deps2, Apps1, RootSeen, sets:union(Seen, Seen1), Upgrade, Locks, State1).
find_cycles(Apps) ->
case rebar_digraph:compile_order(Apps) of
@@ -238,9 +259,21 @@ update_seen_dep(AppInfo, _Profile, _Level, Deps, Apps, State, Upgrade, Seen, Loc
%% If seen from lock file or user requested an upgrade
%% don't print warning about skipping
case lists:keymember(Name, 1, Locks) of
- false when Upgrade -> ok;
- false when not Upgrade -> warn_skip_deps(AppInfo, State);
- true -> ok
+ false when Upgrade ->
+ ok;
+ false when not Upgrade ->
+ {ok, SeenApp} = rebar_app_utils:find(Name, Apps),
+ Source = rebar_app_info:source(AppInfo),
+ case rebar_app_info:source(SeenApp) of
+ Source ->
+ %% dep is the same version and checksum as the one we already saw.
+ %% meaning there is no conflict, so don't warn about it.
+ skip;
+ _ ->
+ warn_skip_deps(Name, Source, State)
+ end;
+ true ->
+ ok
end,
{Deps, Apps, State, Seen}.
@@ -256,10 +289,8 @@ update_unseen_dep(AppInfo, Profile, Level, Deps, Apps, State, Upgrade, Seen, Loc
-spec handle_dep(rebar_state:t(), atom(), file:filename_all(), rebar_app_info:t(), list(), integer()) -> {rebar_app_info:t(), [rebar_app_info:t()], rebar_state:t()}.
handle_dep(State, Profile, DepsDir, AppInfo, Locks, Level) ->
Name = rebar_app_info:name(AppInfo),
- C = rebar_config:consult(rebar_app_info:dir(AppInfo)),
- AppInfo0 = rebar_app_info:update_opts(AppInfo, rebar_app_info:opts(AppInfo), C),
- AppInfo1 = rebar_app_info:apply_overrides(rebar_app_info:get(AppInfo, overrides, []), AppInfo0),
+ AppInfo1 = rebar_app_info:apply_overrides(rebar_app_info:get(AppInfo, overrides, []), AppInfo),
AppInfo2 = rebar_app_info:apply_profiles(AppInfo1, [default, prod]),
Plugins = rebar_app_info:get(AppInfo2, plugins, []),
@@ -276,34 +307,33 @@ handle_dep(State, Profile, DepsDir, AppInfo, Locks, Level) ->
AppInfo4 = rebar_app_info:deps(AppInfo3, rebar_state:deps_names(Deps)),
%% Keep all overrides from the global config and this dep when parsing its deps
- Overrides = rebar_app_info:get(AppInfo0, overrides, []),
+ Overrides = rebar_app_info:get(AppInfo, overrides, []),
Deps1 = rebar_app_utils:parse_deps(Name, DepsDir, Deps, rebar_state:set(State, overrides, Overrides)
,Locks, Level+1),
{AppInfo4, Deps1, State1}.
-spec maybe_fetch(rebar_app_info:t(), atom(), boolean(),
- sets:set(binary()), rebar_state:t()) -> {boolean(), rebar_app_info:t()}.
+ sets:set(binary()), rebar_state:t()) -> {ok, rebar_app_info:t()}.
maybe_fetch(AppInfo, Profile, Upgrade, Seen, State) ->
- AppDir = ec_cnv:to_list(rebar_app_info:dir(AppInfo)),
+ AppDir = rebar_utils:to_list(rebar_app_info:dir(AppInfo)),
%% Don't fetch dep if it exists in the _checkouts dir
case rebar_app_info:is_checkout(AppInfo) of
true ->
- {false, AppInfo};
+ {ok, AppInfo};
false ->
- case rebar_app_discover:find_app(AppInfo, AppDir, all) of
+ case rebar_app_info:is_available(AppInfo) of
false ->
- true = fetch_app(AppInfo, AppDir, State),
- maybe_symlink_default(State, Profile, AppDir, AppInfo),
- {true, rebar_app_info:valid(update_app_info(AppDir, AppInfo), false)};
- {true, AppInfo1} ->
- case sets:is_element(rebar_app_info:name(AppInfo1), Seen) of
+ AppInfo1 = fetch_app(AppInfo, State),
+ maybe_symlink_default(State, Profile, AppDir, AppInfo1),
+ {ok, rebar_app_info:is_available(rebar_app_info:valid(AppInfo1, false), true)};
+ true ->
+ case sets:is_element(rebar_app_info:name(AppInfo), Seen) of
true ->
- {false, AppInfo1};
+ {ok, AppInfo};
false ->
- maybe_symlink_default(State, Profile, AppDir, AppInfo1),
- MaybeUpgrade = maybe_upgrade(AppInfo, AppDir, Upgrade, State),
- AppInfo2 = update_app_info(AppDir, AppInfo1),
- {MaybeUpgrade, AppInfo2}
+ maybe_symlink_default(State, Profile, AppDir, AppInfo),
+ AppInfo1 = maybe_upgrade(AppInfo, AppDir, Upgrade, State),
+ {ok, AppInfo1}
end
end
end.
@@ -339,7 +369,7 @@ symlink_dep(State, From, To) ->
ok ->
RelativeFrom = make_relative_to_root(State, From),
RelativeTo = make_relative_to_root(State, To),
- ?INFO("Linking ~s to ~s", [RelativeFrom, RelativeTo]),
+ ?INFO("Linking ~ts to ~ts", [RelativeFrom, RelativeTo]),
ok;
exists ->
ok
@@ -351,55 +381,45 @@ make_relative_to_root(State, Path) when is_list(Path) ->
Root = rebar_dir:root_dir(State),
rebar_dir:make_relative_path(Path, Root).
-fetch_app(AppInfo, AppDir, State) ->
- ?INFO("Fetching ~s (~p)", [rebar_app_info:name(AppInfo),
- format_source(rebar_app_info:source(AppInfo))]),
- Source = rebar_app_info:source(AppInfo),
- true = rebar_fetch:download_source(AppDir, Source, State).
-
-format_source({pkg, Name, Vsn, _Hash}) -> {pkg, Name, Vsn};
-format_source(Source) -> Source.
-
-%% This is called after the dep has been downloaded and unpacked, if it hadn't been already.
-%% So this is the first time for newly downloaded apps that its .app/.app.src data can
-%% be read in an parsed.
-update_app_info(AppDir, AppInfo) ->
- case rebar_app_discover:find_app(AppInfo, AppDir, all) of
- {true, AppInfo1} ->
- AppInfo1;
- false ->
- throw(?PRV_ERROR({dep_app_not_found, AppDir, rebar_app_info:name(AppInfo)}))
- end.
+fetch_app(AppInfo, State) ->
+ ?INFO("Fetching ~ts (~p)", [rebar_app_info:name(AppInfo),
+ rebar_resource_v2:format_source(rebar_app_info:source(AppInfo))]),
+ rebar_fetch:download_source(AppInfo, State).
-maybe_upgrade(AppInfo, AppDir, Upgrade, State) ->
- Source = rebar_app_info:source(AppInfo),
+maybe_upgrade(AppInfo, _AppDir, Upgrade, State) ->
case Upgrade orelse rebar_app_info:is_lock(AppInfo) of
true ->
- case rebar_fetch:needs_update(AppDir, Source, State) of
+ case rebar_fetch:needs_update(AppInfo, State) of
true ->
- ?INFO("Upgrading ~s (~p)", [rebar_app_info:name(AppInfo), rebar_app_info:source(AppInfo)]),
- true = rebar_fetch:download_source(AppDir, Source, State);
+ ?INFO("Upgrading ~ts (~p)", [rebar_app_info:name(AppInfo),
+ rebar_resource_v2:format_source(rebar_app_info:source(AppInfo))]),
+ rebar_fetch:download_source(AppInfo, State);
false ->
case Upgrade of
true ->
- ?INFO("No upgrade needed for ~s", [rebar_app_info:name(AppInfo)]),
- false;
+ ?INFO("No upgrade needed for ~ts", [rebar_app_info:name(AppInfo)]),
+ AppInfo;
false ->
- false
+ AppInfo
end
end;
false ->
- false
+ AppInfo
end.
-warn_skip_deps(AppInfo, State) ->
- Msg = "Skipping ~s (from ~p) as an app of the same name "
+warn_skip_deps(Name, Source, State) ->
+ Msg = "Skipping ~ts (from ~p) as an app of the same name "
"has already been fetched",
- Args = [rebar_app_info:name(AppInfo),
- rebar_app_info:source(AppInfo)],
+ Args = [Name,
+ rebar_resource_v2:format_source(Source)],
case rebar_state:get(State, deps_error_on_conflict, false) of
- false -> ?WARN(Msg, Args);
- true -> ?ERROR(Msg, Args), ?FAIL
+ false ->
+ case rebar_state:get(State, deps_warning_on_conflict, true) of
+ true -> ?WARN(Msg, Args);
+ false -> ok
+ end;
+ true ->
+ ?ERROR(Msg, Args), ?FAIL
end.
not_needs_compile(App) ->
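
The skip warning for an already-fetched dep of the same name is now silenced when the seen app has the exact same source, and it can be tuned globally. The two flags read above are plain top-level rebar.config booleans:

    {deps_error_on_conflict, false}.   %% default: keep going on a conflicting dep
    {deps_warning_on_conflict, true}.  %% default: warn; set to false to silence
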
diff --git a/src/rebar_prv_local_install.erl b/src/rebar_prv_local_install.erl
index 1b58859..cd6a204 100644
--- a/src/rebar_prv_local_install.erl
+++ b/src/rebar_prv_local_install.erl
@@ -12,6 +12,7 @@
-export([extract_escript/2]).
-include("rebar.hrl").
+-include_lib("providers/include/providers.hrl").
-include_lib("kernel/include/file.hrl").
-define(PROVIDER, install).
@@ -54,13 +55,16 @@ do(State) ->
end.
-spec format_error(any()) -> iolist().
+format_error({non_writeable, Dir}) ->
+ io_lib:format("Could not write to ~p. Please ensure the path is writeable.",
+ [Dir]);
format_error(Reason) ->
io_lib:format("~p", [Reason]).
bin_contents(OutputDir) ->
<<"#!/usr/bin/env sh
-erl -pz ", (ec_cnv:to_binary(OutputDir))/binary,"/*/ebin +sbtu +A0 -noshell -boot start_clean -s rebar3 main $REBAR3_ERL_ARGS -extra \"$@\"
+erl -pz ", (rebar_utils:to_binary(OutputDir))/binary,"/*/ebin +sbtu +A1 -noshell -boot start_clean -s rebar3 main $REBAR3_ERL_ARGS -extra \"$@\"
">>.
extract_escript(State, ScriptPath) ->
@@ -71,25 +75,24 @@ extract_escript(State, ScriptPath) ->
%% And add a rebar3 bin script to ~/.cache/rebar3/bin
Opts = rebar_state:opts(State),
OutputDir = filename:join(rebar_dir:global_cache_dir(Opts), "lib"),
- filelib:ensure_dir(filename:join(OutputDir, "empty")),
-
- ?INFO("Extracting rebar3 libs to ~s...", [OutputDir]),
+ case filelib:ensure_dir(filename:join(OutputDir, "empty")) of
+ ok ->
+ ok;
+ {error, Posix} when Posix == eacces; Posix == enoent ->
+ throw(?PRV_ERROR({non_writeable, OutputDir}))
+ end,
+
+ ?INFO("Extracting rebar3 libs to ~ts...", [OutputDir]),
zip:extract(Archive, [{cwd, OutputDir}]),
BinDir = filename:join(rebar_dir:global_cache_dir(Opts), "bin"),
BinFile = filename:join(BinDir, "rebar3"),
filelib:ensure_dir(BinFile),
- {ok, #file_info{mode = _,
- uid = Uid,
- gid = Gid}} = file:read_file_info(ScriptPath),
-
- ?INFO("Writing rebar3 run script ~s...", [BinFile]),
+ ?INFO("Writing rebar3 run script ~ts...", [BinFile]),
file:write_file(BinFile, bin_contents(OutputDir)),
- ok = file:write_file_info(BinFile, #file_info{mode=33277,
- uid=Uid,
- gid=Gid}),
+ ok = file:write_file_info(BinFile, #file_info{mode=33277}),
- ?INFO("Add to $PATH for use: export PATH=$PATH:~s", [BinDir]),
+ ?INFO("Add to $PATH for use: export PATH=~ts:$PATH", [BinDir]),
{ok, State}.
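
extract_escript/2 unpacks the running escript's archive into the global cache and writes a small launcher. With the default cache location that ends up as roughly:

    ~/.cache/rebar3/lib/*/ebin               %% unpacked libraries
    ~/.cache/rebar3/bin/rebar3               %% launcher produced by bin_contents/1
    export PATH=~/.cache/rebar3/bin:$PATH    %% as the closing ?INFO line suggests
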
diff --git a/src/rebar_prv_local_upgrade.erl b/src/rebar_prv_local_upgrade.erl
index aa9ee44..1931d65 100644
--- a/src/rebar_prv_local_upgrade.erl
+++ b/src/rebar_prv_local_upgrade.erl
@@ -72,15 +72,15 @@ get_md5(Rebar3Path) ->
{ok, Rebar3File} = file:read_file(Rebar3Path),
Digest = crypto:hash(md5, Rebar3File),
DigestHex = lists:flatten([io_lib:format("~2.16.0B", [X]) || X <- binary_to_list(Digest)]),
- string:to_lower(DigestHex).
+ rebar_string:lowercase(DigestHex).
maybe_fetch_rebar3(Rebar3Md5) ->
TmpDir = ec_file:insecure_mkdtemp(),
TmpFile = filename:join(TmpDir, "rebar3"),
- case rebar_pkg_resource:request("https://s3.amazonaws.com/rebar3/rebar3", Rebar3Md5) of
+ case request("https://s3.amazonaws.com/rebar3/rebar3", Rebar3Md5) of
{ok, Binary, ETag} ->
file:write_file(TmpFile, Binary),
- case rebar_pkg_resource:etag(TmpFile) of
+ case etag(TmpFile) of
ETag ->
{saved, TmpFile};
_ ->
@@ -92,3 +92,38 @@ maybe_fetch_rebar3(Rebar3Md5) ->
?CONSOLE("No upgrade available", []),
up_to_date
end.
+
+etag(Path) ->
+ case file:read_file(Path) of
+ {ok, Binary} ->
+ <<X:128/big-unsigned-integer>> = crypto:hash(md5, Binary),
+ rebar_string:lowercase(lists:flatten(io_lib:format("~32.16.0b", [X])));
+ {error, _} ->
+ false
+ end.
+
+-spec request(Url, ETag) -> Res when
+ Url :: string(),
+ ETag :: false | string(),
+ Res :: 'error' | {ok, cached} | {ok, any(), string()}.
+request(Url, ETag) ->
+ HttpOptions = [{ssl, rebar_utils:ssl_opts(Url)},
+ {relaxed, true} | rebar_utils:get_proxy_auth()],
+ case httpc:request(get, {Url, [{"if-none-match", "\"" ++ ETag ++ "\""}
+ || ETag =/= false] ++
+ [{"User-Agent", rebar_utils:user_agent()}]},
+ HttpOptions, [{body_format, binary}], rebar) of
+ {ok, {{_Version, 200, _Reason}, Headers, Body}} ->
+ ?DEBUG("Successfully downloaded ~ts", [Url]),
+ {"etag", ETag1} = lists:keyfind("etag", 1, Headers),
+ {ok, Body, rebar_string:trim(ETag1, both, [$"])};
+ {ok, {{_Version, 304, _Reason}, _Headers, _Body}} ->
+ ?DEBUG("Cached copy of ~ts still valid", [Url]),
+ {ok, cached};
+ {ok, {{_Version, Code, _Reason}, _Headers, _Body}} ->
+ ?DEBUG("Request to ~p failed: status code ~p", [Url, Code]),
+ error;
+ {error, Reason} ->
+ ?DEBUG("Request to ~p failed: ~p", [Url, Reason]),
+ error
+ end.
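
The download helpers moved in from rebar_pkg_resource: the md5 of the installed escript doubles as the entity tag, so a 304 from S3 means the local copy is already current. A rough sketch of the flow (TmpFile and the hash value are placeholders):

    case request("https://s3.amazonaws.com/rebar3/rebar3", "d41d8cd98f00b204e9800998ecf8427e") of
        {ok, cached}      -> up_to_date;                     %% 304: nothing newer
        {ok, Body, _ETag} -> file:write_file(TmpFile, Body); %% 200: new build fetched
        error             -> request_failed
    end.
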
diff --git a/src/rebar_prv_lock.erl b/src/rebar_prv_lock.erl
index cbe8dfe..570c03f 100644
--- a/src/rebar_prv_lock.erl
+++ b/src/rebar_prv_lock.erl
@@ -54,12 +54,9 @@ format_error(Reason) ->
build_locks(State) ->
AllDeps = rebar_state:lock(State),
[begin
- Dir = rebar_app_info:dir(Dep),
- Source = rebar_app_info:source(Dep),
-
%% If the source is a tuple it is a source dep
%% e.g. {git, "git://github.com/ninenines/cowboy.git", "master"}
- {rebar_app_info:name(Dep)
- ,rebar_fetch:lock_source(Dir, Source, State)
- ,rebar_app_info:dep_level(Dep)}
+ {rebar_app_info:name(Dep),
+ rebar_fetch:lock_source(Dep, State),
+ rebar_app_info:dep_level(Dep)}
end || Dep <- AllDeps, not(rebar_app_info:is_checkout(Dep))].
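
build_locks/1 now hands the whole app_info record to rebar_fetch:lock_source/2, but each lock entry keeps the familiar {Name, Source, Level} shape, for example (values made up):

    {<<"cowboy">>, {git, "https://github.com/ninenines/cowboy.git", {ref, "3a9bb1..."}}, 0}
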
diff --git a/src/rebar_prv_new.erl b/src/rebar_prv_new.erl
index 064315e..c6a1e9b 100644
--- a/src/rebar_prv_new.erl
+++ b/src/rebar_prv_new.erl
@@ -60,7 +60,7 @@ do(State) ->
-spec format_error(any()) -> iolist().
format_error({consult, File, Reason}) ->
- io_lib:format("Error consulting file at ~s for reason ~p", [File, Reason]);
+ io_lib:format("Error consulting file at ~ts for reason ~p", [File, Reason]);
format_error(Reason) ->
io_lib:format("~p", [Reason]).
@@ -70,7 +70,7 @@ format_error(Reason) ->
list_templates(State) ->
lists:foldl(fun({error, {consult, File, Reason}}, Acc) ->
- ?WARN("Error consulting template file ~s for reason ~p",
+ ?WARN("Error consulting template file ~ts for reason ~p",
[File, Reason]),
Acc
; (Tpl, Acc) ->
@@ -82,7 +82,9 @@ info() ->
"Create rebar3 project based on template and vars.~n"
"~n"
"Valid command line options:~n"
- " <template> [var=foo,...]~n", []).
+ " <template> [var=foo,...]~n"
+ "~n"
+ "See available templates with: `rebar3 new help`~n", []).
strip_flags([]) -> [];
strip_flags(["-"++_|Opts]) -> strip_flags(Opts);
@@ -116,31 +118,34 @@ show_short_templates(List) ->
lists:map(fun show_short_template/1, lists:sort(List)).
show_short_template({Name, Type, _Location, Description, _Vars}) ->
- io:format("~s (~s): ~s~n",
+ io:format("~ts (~ts): ~ts~n",
[Name,
format_type(Type),
format_description(Description)]).
show_template({Name, Type, Location, Description, Vars}) ->
- io:format("~s:~n"
- "\t~s~n"
- "\tDescription: ~s~n"
- "\tVariables:~n~s~n",
+ io:format("~ts:~n"
+ "\t~ts~n"
+ "\tDescription: ~ts~n"
+ "\tVariables:~n~ts~n",
[Name,
format_type(Type, Location),
format_description(Description),
format_vars(Vars)]).
format_type(escript) -> "built-in";
+format_type(builtin) -> "built-in";
format_type(plugin) -> "plugin";
format_type(file) -> "custom".
format_type(escript, _) ->
"built-in template";
+format_type(builtin, _) ->
+ "built-in template";
format_type(plugin, Loc) ->
- io_lib:format("plugin template (~s)", [Loc]);
+ io_lib:format("plugin template (~ts)", [Loc]);
format_type(file, Loc) ->
- io_lib:format("custom template (~s)", [Loc]).
+ io_lib:format("custom template (~ts)", [Loc]).
format_description(Description) ->
case Description of
@@ -153,4 +158,4 @@ format_vars(Vars) -> [format_var(Var) || Var <- Vars].
format_var({Var, Default}) ->
io_lib:format("\t\t~p=~p~n",[Var, Default]);
format_var({Var, Default, Doc}) ->
- io_lib:format("\t\t~p=~p (~s)~n", [Var, Default, Doc]).
+ io_lib:format("\t\t~p=~p (~ts)~n", [Var, Default, Doc]).
diff --git a/src/rebar_prv_packages.erl b/src/rebar_prv_packages.erl
index 7217ab8..3e54cdc 100644
--- a/src/rebar_prv_packages.erl
+++ b/src/rebar_prv_packages.erl
@@ -15,53 +15,75 @@
-spec init(rebar_state:t()) -> {ok, rebar_state:t()}.
init(State) ->
- State1 = rebar_state:add_provider(State, providers:create([{name, ?PROVIDER},
- {module, ?MODULE},
- {bare, true},
- {deps, ?DEPS},
- {example, "rebar3 pkgs"},
- {short_desc, "List available packages."},
- {desc, info("List available packages")},
- {opts, []}])),
+ State1 = rebar_state:add_provider(State,
+ providers:create([{name, ?PROVIDER},
+ {module, ?MODULE},
+ {bare, true},
+ {deps, ?DEPS},
+ {example, "rebar3 pkgs elli"},
+ {short_desc, "List information for a package."},
+ {desc, info("List information for a package")},
+ {opts, [{package, undefined, undefined, string,
+ "Package to fetch information for."}]}])),
{ok, State1}.
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
do(State) ->
- rebar_packages:packages(State),
- case rebar_state:command_args(State) of
- [Name] ->
- print_packages(get_packages(iolist_to_binary(Name)));
- _ ->
- print_packages(sort_packages())
- end,
- {ok, State}.
+ {Args, _} = rebar_state:command_parsed_args(State),
+ case proplists:get_value(package, Args, undefined) of
+ undefined ->
+ ?PRV_ERROR(no_package_arg);
+ Name ->
+ Resources = rebar_state:resources(State),
+ #{repos := Repos} = rebar_resource_v2:find_resource_state(pkg, Resources),
+ Results = get_package(rebar_utils:to_binary(Name), Repos),
+ case lists:all(fun({_, {error, not_found}}) -> true; (_) -> false end, Results) of
+ true ->
+ ?PRV_ERROR({not_found, Name});
+ false ->
+ [print_packages(Result) || Result <- Results],
+ {ok, State}
+ end
+ end.
--spec format_error(any()) -> iolist().
-format_error(load_registry_fail) ->
- "Failed to load package regsitry. Try running 'rebar3 update' to fix".
+-spec get_package(binary(), [map()]) -> [{binary(), {ok, map()} | {error, term()}}].
+get_package(Name, Repos) ->
+ lists:foldl(fun(RepoConfig, Acc) ->
+ [{maps:get(name, RepoConfig), rebar_packages:get(RepoConfig, Name)} | Acc]
+ end, [], Repos).
-print_packages(Pkgs) ->
- orddict:map(fun(Name, Vsns) ->
- SortedVsns = lists:sort(fun(A, B) ->
- ec_semver:lte(ec_semver:parse(A)
- ,ec_semver:parse(B))
- end, Vsns),
- VsnStr = join(SortedVsns, <<", ">>),
- ?CONSOLE("~s:~n Versions: ~s~n", [Name, VsnStr])
- end, Pkgs).
-sort_packages() ->
- ets:foldl(fun({package_index_version, _}, Acc) ->
- Acc;
- ({Pkg, Vsns}, Acc) ->
- orddict:store(Pkg, Vsns, Acc);
- (_, Acc) ->
- Acc
- end, orddict:new(), ?PACKAGE_TABLE).
+-spec format_error(any()) -> iolist().
+format_error(no_package_arg) ->
+ "Missing package argument to `rebar3 pkgs` command.";
+format_error({not_found, Name}) ->
+ io_lib:format("Package ~ts not found in any repo.", [Name]);
+format_error(unknown) ->
+ "Something went wrong with fetching package metadata.".
-get_packages(Name) ->
- ets:lookup(?PACKAGE_TABLE, Name).
+print_packages({RepoName, {error, not_found}}) ->
+ ?CONSOLE("~ts: Package not found in this repo.~n", [RepoName]);
+print_packages({RepoName, {error, _}}) ->
+ ?CONSOLE("~ts: Error fetching from this repo.~n", [RepoName]);
+print_packages({RepoName, {ok, #{<<"name">> := Name,
+ <<"meta">> := Meta,
+ <<"releases">> := Releases}}}) ->
+ Description = maps:get(<<"description">>, Meta, ""),
+ Licenses = join(maps:get(<<"licenses">>, Meta, []), <<", ">>),
+ Links = join_map(maps:get(<<"links">>, Meta, []), <<"\n ">>),
+ Maintainers = join(maps:get(<<"maintainers">>, Meta, []), <<", ">>),
+ Versions = [V || #{<<"version">> := V} <- Releases],
+ VsnStr = join(Versions, <<", ">>),
+ ?CONSOLE("~ts:~n"
+ " Name: ~ts~n"
+ " Description: ~ts~n"
+ " Licenses: ~ts~n"
+ " Maintainers: ~ts~n"
+ " Links:~n ~ts~n"
+ " Versions: ~ts~n", [RepoName, Name, Description, Licenses, Maintainers, Links, VsnStr]);
+print_packages(_) ->
+ ok.
-spec join([binary()], binary()) -> binary().
join([Bin], _Sep) ->
@@ -69,6 +91,14 @@ join([Bin], _Sep) ->
join([Bin | T], Sep) ->
<<Bin/binary, Sep/binary, (join(T, Sep))/binary>>.
+-spec join_map(map(), binary()) -> binary().
+join_map(Map, Sep) ->
+ join_tuple_list(maps:to_list(Map), Sep).
+
+join_tuple_list([{K, V}], _Sep) ->
+ <<K/binary, ": ", V/binary>>;
+join_tuple_list([{K, V} | T], Sep) ->
+ <<K/binary, ": ", V/binary, Sep/binary, (join_tuple_list(T, Sep))/binary>>.
info(Description) ->
- io_lib:format("~s.~n", [Description]).
+ io_lib:format("~ts.~n", [Description]).
diff --git a/src/rebar_prv_path.erl b/src/rebar_prv_path.erl
index 4259eec..5374b0c 100644
--- a/src/rebar_prv_path.erl
+++ b/src/rebar_prv_path.erl
@@ -27,7 +27,7 @@ init(State) ->
{example, "rebar3 path"},
{short_desc, "Print paths to build dirs in current profile."},
{desc, "Print paths to build dirs in current profile."},
- {opts, eunit_opts(State)}])),
+ {opts, path_opts(State)}])),
{ok, State1}.
@@ -49,7 +49,7 @@ format_error(Reason) ->
filter_apps(RawOpts, State) ->
RawApps = proplists:get_all_values(app, RawOpts),
- Apps = lists:foldl(fun(String, Acc) -> string:tokens(String, ",") ++ Acc end, [], RawApps),
+ Apps = lists:foldl(fun(String, Acc) -> rebar_string:lexemes(String, ",") ++ Acc end, [], RawApps),
case Apps of
[] ->
ProjectDeps = project_deps(State),
@@ -75,23 +75,23 @@ paths([{src, true}|Rest], Apps, State, Acc) ->
paths([{rel, true}|Rest], Apps, State, Acc) ->
paths(Rest, Apps, State, [rel_dir(State)|Acc]).
-base_dir(State) -> io_lib:format("~s", [rebar_dir:base_dir(State)]).
-bin_dir(State) -> io_lib:format("~s/bin", [rebar_dir:base_dir(State)]).
-lib_dir(State) -> io_lib:format("~s", [rebar_dir:deps_dir(State)]).
-rel_dir(State) -> io_lib:format("~s/rel", [rebar_dir:base_dir(State)]).
+base_dir(State) -> io_lib:format("~ts", [rebar_dir:base_dir(State)]).
+bin_dir(State) -> io_lib:format("~ts/bin", [rebar_dir:base_dir(State)]).
+lib_dir(State) -> io_lib:format("~ts", [rebar_dir:deps_dir(State)]).
+rel_dir(State) -> io_lib:format("~ts/rel", [rebar_dir:base_dir(State)]).
ebin_dirs(Apps, State) ->
- lists:map(fun(App) -> io_lib:format("~s/~s/ebin", [rebar_dir:deps_dir(State), App]) end, Apps).
+ lists:map(fun(App) -> io_lib:format("~ts/~ts/ebin", [rebar_dir:deps_dir(State), App]) end, Apps).
priv_dirs(Apps, State) ->
- lists:map(fun(App) -> io_lib:format("~s/~s/priv", [rebar_dir:deps_dir(State), App]) end, Apps).
+ lists:map(fun(App) -> io_lib:format("~ts/~ts/priv", [rebar_dir:deps_dir(State), App]) end, Apps).
src_dirs(Apps, State) ->
- lists:map(fun(App) -> io_lib:format("~s/~s/src", [rebar_dir:deps_dir(State), App]) end, Apps).
+ lists:map(fun(App) -> io_lib:format("~ts/~ts/src", [rebar_dir:deps_dir(State), App]) end, Apps).
print_paths_if_exist(Paths, State) ->
{RawOpts, _} = rebar_state:command_parsed_args(State),
Sep = proplists:get_value(separator, RawOpts, " "),
RealPaths = lists:filter(fun(P) -> ec_file:is_dir(P) end, Paths),
- io:format("~s", [string:join(RealPaths, Sep)]).
+ io:format("~ts", [rebar_string:join(RealPaths, Sep)]).
project_deps(State) ->
Profiles = rebar_state:current_profiles(State),
@@ -107,7 +107,7 @@ normalize(AppName) when is_list(AppName) -> AppName;
normalize(AppName) when is_atom(AppName) -> atom_to_list(AppName);
normalize(AppName) when is_binary(AppName) -> binary_to_list(AppName).
-eunit_opts(_State) ->
+path_opts(_State) ->
[{app, undefined, "app", string, help(app)},
{base, undefined, "base", boolean, help(base)},
{bin, undefined, "bin", boolean, help(bin)},
@@ -118,7 +118,7 @@ eunit_opts(_State) ->
{src, undefined, "src", boolean, help(src)},
{rel, undefined, "rel", boolean, help(rel)}].
-help(app) -> "Comma seperated list of applications to return paths for.";
+help(app) -> "Comma separated list of applications to return paths for.";
help(base) -> "Return the `base' path of the current profile.";
help(bin) -> "Return the `bin' path of the current profile.";
help(ebin) -> "Return all `ebin' paths of the current profile's applications.";
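
path_opts/1 is the old eunit_opts/1 under a name that matches this provider; usage is unchanged, e.g.:

    rebar3 path --ebin --app myapp

which prints the matching ebin paths under the current profile's build dir, joined by the configured separator (a single space by default).
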
diff --git a/src/rebar_prv_plugins.erl b/src/rebar_prv_plugins.erl
index 7e6b88e..fba5da0 100644
--- a/src/rebar_prv_plugins.erl
+++ b/src/rebar_prv_plugins.erl
@@ -34,15 +34,19 @@ do(State) ->
GlobalConfigFile = rebar_dir:global_config(),
GlobalConfig = rebar_state:new(rebar_config:consult_file(GlobalConfigFile)),
GlobalPlugins = rebar_state:get(GlobalConfig, plugins, []),
+ GlobalSrcDirs = rebar_state:get(GlobalConfig, src_dirs, ["src"]),
GlobalPluginsDir = filename:join([rebar_dir:global_cache_dir(rebar_state:opts(State)), "plugins", "*"]),
- GlobalApps = rebar_app_discover:find_apps([GlobalPluginsDir], all),
+ GlobalApps = rebar_app_discover:find_apps([GlobalPluginsDir], GlobalSrcDirs, all, State),
display_plugins("Global plugins", GlobalApps, GlobalPlugins),
+ RebarOpts = rebar_state:opts(State),
+ SrcDirs = rebar_dir:src_dirs(RebarOpts, ["src"]),
Plugins = rebar_state:get(State, plugins, []),
- PluginsDir = filename:join(rebar_dir:plugins_dir(State), "*"),
- CheckoutsDir = filename:join(rebar_dir:checkouts_dir(State), "*"),
- Apps = rebar_app_discover:find_apps([CheckoutsDir, PluginsDir], all),
- display_plugins("Local plugins", Apps, Plugins),
+ ProjectPlugins = rebar_state:get(State, project_plugins, []),
+ PluginsDirs = filelib:wildcard(filename:join(rebar_dir:plugins_dir(State), "*")),
+ CheckoutsDirs = filelib:wildcard(filename:join(rebar_dir:checkouts_dir(State), "*")),
+ Apps = rebar_app_discover:find_apps(CheckoutsDirs++PluginsDirs, SrcDirs, all, State),
+ display_plugins("Local plugins", Apps, Plugins ++ ProjectPlugins),
{ok, State}.
-spec format_error(any()) -> iolist().
@@ -52,19 +56,19 @@ format_error(Reason) ->
display_plugins(_Header, _Apps, []) ->
ok;
display_plugins(Header, Apps, Plugins) ->
- ?CONSOLE("--- ~s ---", [Header]),
+ ?CONSOLE("--- ~ts ---", [Header]),
display_plugins(Apps, Plugins),
?CONSOLE("", []).
display_plugins(Apps, Plugins) ->
lists:foreach(fun(Plugin) ->
- Name = if is_atom(Plugin) -> ec_cnv:to_binary(Plugin);
- is_tuple(Plugin) -> ec_cnv:to_binary(element(1, Plugin))
+ Name = if is_atom(Plugin) -> atom_to_binary(Plugin, unicode);
+ is_tuple(Plugin) -> rebar_utils:to_binary(element(1, Plugin))
end,
case rebar_app_utils:find(Name, Apps) of
{ok, _App} ->
- ?CONSOLE("~s", [Name]);
+ ?CONSOLE("~ts", [Name]);
error ->
- ?DEBUG("Unable to find plugin ~s", [Name])
+ ?DEBUG("Unable to find plugin ~ts", [Name])
end
end, Plugins).
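
The local listing now also covers project_plugins; both lists come straight from rebar.config, e.g. (plugin names are just placeholders):

    {plugins, [rebar3_hex]}.
    {project_plugins, [rebar3_proper]}.
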
diff --git a/src/rebar_prv_plugins_upgrade.erl b/src/rebar_prv_plugins_upgrade.erl
index 03521c7..7420c83 100644
--- a/src/rebar_prv_plugins_upgrade.erl
+++ b/src/rebar_prv_plugins_upgrade.erl
@@ -44,7 +44,7 @@ do(State) ->
format_error(no_plugin_arg) ->
io_lib:format("Must give an installed plugin to upgrade as an argument", []);
format_error({not_found, Plugin}) ->
- io_lib:format("Plugin to upgrade not found: ~s", [Plugin]);
+ io_lib:format("Plugin to upgrade not found: ~ts", [Plugin]);
format_error(Reason) ->
io_lib:format("~p", [Reason]).
diff --git a/src/rebar_prv_report.erl b/src/rebar_prv_report.erl
index d6c8b60..73e9624 100644
--- a/src/rebar_prv_report.erl
+++ b/src/rebar_prv_report.erl
@@ -44,7 +44,7 @@ do(State) ->
{ok, Vsn} = application:get_key(rebar, vsn),
{ok, Apps} = application:get_key(rebar, applications),
[application:load(App) || App <- Apps],
- Vsns = [io_lib:format("~p: ~s~n", [App, AVsn])
+ Vsns = [io_lib:format("~p: ~ts~n", [App, AVsn])
|| App <- lists:sort(Apps),
{ok, AVsn} <- [application:get_key(App, vsn)]],
%% Show OS and versions
@@ -59,10 +59,10 @@ do(State) ->
%%
?CONSOLE(
"Rebar3 report~n"
- " version ~s~n"
- " generated at ~s~n"
+ " version ~ts~n"
+ " generated at ~ts~n"
"=================~n"
- "Please submit this along with your issue at ~s "
+ "Please submit this along with your issue at ~ts "
"(and feel free to edit out private information, if any)~n"
"-----------------~n"
"Task: ~ts~n"
@@ -75,11 +75,11 @@ do(State) ->
"Library directory: ~ts~n"
"-----------------~n"
"Loaded Applications:~n"
- "~s~n"
+ "~ts~n"
"-----------------~n"
"Escript path: ~ts~n"
"Providers:~n"
- " ~s",
+ " ~ts",
[Vsn, time_to_string(UTC),
?ISSUES_URL, Command, Task,
OS, ERTS, Root, Lib,
@@ -100,4 +100,4 @@ time_to_string({{Y,M,D},{H,Min,S}}) ->
[Y,M,D,H,Min,S])).
parse_task(Str) ->
- hd(re:split(Str, " ")).
+ hd(re:split(Str, " ", [unicode])).
diff --git a/src/rebar_prv_repos.erl b/src/rebar_prv_repos.erl
new file mode 100644
index 0000000..0515910
--- /dev/null
+++ b/src/rebar_prv_repos.erl
@@ -0,0 +1,47 @@
+%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*-
+%% ex: ts=4 sw=4 et
+
+-module(rebar_prv_repos).
+
+-behaviour(provider).
+
+-export([init/1,
+ do/1,
+ format_error/1]).
+
+-include("rebar.hrl").
+
+-define(PROVIDER, repos).
+-define(DEPS, []).
+
+%% ===================================================================
+%% Public API
+%% ===================================================================
+
+-spec init(rebar_state:t()) -> {ok, rebar_state:t()}.
+init(State) ->
+ Provider = providers:create(
+ [{name, ?PROVIDER},
+ {module, ?MODULE},
+ {bare, false},
+ {deps, ?DEPS},
+ {example, "rebar3 repos"},
+ {short_desc, "Print current package repository configuration"},
+ {desc, "Display repository configuration for debugging purpose"},
+ {opts, []}]),
+ State1 = rebar_state:add_provider(State, Provider),
+ {ok, State1}.
+
+-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
+do(State) ->
+ Resources = rebar_state:resources(State),
+ #{repos := Repos} = rebar_resource_v2:find_resource_state(pkg, Resources),
+
+ ?CONSOLE("Repos:", []),
+ %%TODO: do some formatting
+ ?CONSOLE("~p", [Repos]),
+ {ok, State}.
+
+-spec format_error(any()) -> iolist().
+format_error(Reason) ->
+ io_lib:format("~p", [Reason]).
diff --git a/src/rebar_prv_shell.erl b/src/rebar_prv_shell.erl
index b7febf8..760f0d8 100644
--- a/src/rebar_prv_shell.erl
+++ b/src/rebar_prv_shell.erl
@@ -40,6 +40,8 @@
-define(PROVIDER, shell).
-define(DEPS, [compile]).
+-dialyzer({nowarn_function, rewrite_leaders/2}).
+
%% ===================================================================
%% Public API
%% ===================================================================
@@ -75,7 +77,13 @@ init(State) ->
"A list of apps to boot before starting the "
"shell. (E.g. --apps app1,app2,app3) Defaults "
"to rebar.config {shell, [{apps, Apps}]} or "
- "relx apps if not specified."}]}
+ "relx apps if not specified."},
+ {start_clean, undefined, "start-clean", boolean,
+ "Cancel any applications in the 'apps' list "
+ "or release."},
+ {user_drv_args, undefined, "user_drv_args", string,
+ "Arguments passed to user_drv start function for "
+ "creating custom shells."}]}
])
),
{ok, State1}.
@@ -99,7 +107,9 @@ format_error(Reason) ->
shell(State) ->
setup_name(State),
setup_paths(State),
- setup_shell(),
+ ShellArgs = debug_get_value(shell_args, rebar_state:get(State, shell, []), undefined,
+ "Found user_drv args from command line option."),
+ setup_shell(ShellArgs),
maybe_run_script(State),
%% apps must be started after the change in shell because otherwise
%% their application masters never gets the new group leader (held in
@@ -117,13 +127,13 @@ shell(State) ->
info() ->
"Start a shell with project and deps preloaded similar to~n'erl -pa ebin -pa deps/*/ebin'.~n".
-setup_shell() ->
+setup_shell(ShellArgs) ->
OldUser = kill_old_user(),
%% Test for support here
- NewUser = try erlang:open_port({spawn,'tty_sl -c -e'}, []) of
+ NewUser = try erlang:open_port({spawn,"tty_sl -c -e"}, []) of
Port when is_port(Port) ->
true = port_close(Port),
- setup_new_shell()
+ setup_new_shell(ShellArgs)
catch
error:_ ->
setup_old_shell()
@@ -153,11 +163,16 @@ wait_for_port_death(N, P) ->
wait_for_port_death(N-10, P)
end.
-setup_new_shell() ->
+setup_new_shell(ShellArgs) ->
%% terminate the current user supervision structure, if any
_ = supervisor:terminate_child(kernel_sup, user),
%% start a new shell (this also starts a new user under the correct group)
- _ = user_drv:start(),
+ case ShellArgs of
+ undefined ->
+ _ = user_drv:start();
+ _ ->
+ _ = user_drv:start(ShellArgs)
+ end,
%% wait until user_drv and user have been registered (max 3 seconds)
ok = wait_until_user_started(3000),
whereis(user).
@@ -191,21 +206,28 @@ rewrite_leaders(OldUser, NewUser) ->
lists:member(proplists:get_value(group_leader, erlang:process_info(Pid)),
OldMasters)],
try
- %% enable error_logger's tty output
- error_logger:swap_handler(tty),
- %% disable the simple error_logger (which may have been added multiple
- %% times). removes at most the error_logger added by init and the
- %% error_logger added by the tty handler
- remove_error_handler(3),
- %% reset the tty handler once more for remote shells
- error_logger:swap_handler(tty)
+ case erlang:function_exported(logger, module_info, 0) of
+ false ->
+ %% Old style logger had a lock-up issue and other problems related
+ %% to group leader handling.
+ %% enable error_logger's tty output
+ error_logger:swap_handler(tty),
+ %% disable the simple error_logger (which may have been added
+ %% multiple times). removes at most the error_logger added by
+ %% init and the error_logger added by the tty handler
+ remove_error_handler(3),
+ %% reset the tty handler once more for remote shells
+ error_logger:swap_handler(tty);
+ true ->
+ %% This is no longer a problem with the logger interface
+ ok
+ end
catch
- E:R -> % may fail with custom loggers
- ?DEBUG("Logger changes failed for ~p:~p (~p)", [E,R,erlang:get_stacktrace()]),
+ ?WITH_STACKTRACE(E,R,S) % may fail with custom loggers
+ ?DEBUG("Logger changes failed for ~p:~p (~p)", [E,R,S]),
hope_for_best
end.
-
setup_paths(State) ->
%% Add deps to path
code:add_pathsa(rebar_state:code_paths(State, all_deps)),
@@ -225,9 +247,9 @@ maybe_run_script(State) ->
File = filename:absname(RelFile),
try run_script_file(File)
catch
- C:E ->
+ ?WITH_STACKTRACE(C,E,S)
?ABORT("Couldn't run shell escript ~p - ~p:~p~nStack: ~p",
- [File, C, E, erlang:get_stacktrace()])
+ [File, C, E, S])
end
end.
@@ -261,11 +283,11 @@ maybe_boot_apps(State) ->
case find_apps_to_boot(State) of
undefined ->
%% try to read in sys.config file
- ok = reread_config(State);
+ ok = reread_config([], State);
Apps ->
%% load apps, then check config, then boot them.
load_apps(Apps),
- ok = reread_config(State),
+ ok = reread_config(Apps, State),
boot_apps(Apps)
end.
@@ -294,10 +316,15 @@ find_apps_option(State) ->
{Opts, _} = rebar_state:command_parsed_args(State),
case debug_get_value(apps, Opts, no_value,
"Found shell apps from command line option.") of
- no_value -> no_value;
+ no_value ->
+ case debug_get_value(start_clean, Opts, false,
+ "Found start-clean argument to disable apps") of
+ false -> no_value;
+ true -> []
+ end;
AppsStr ->
[ list_to_atom(AppStr)
- || AppStr <- string:tokens(AppsStr, " ,:") ]
+ || AppStr <- rebar_string:lexemes(AppsStr, " ,:") ]
end.
-spec find_apps_rebar(rebar_state:t()) -> no_value | list().
@@ -312,6 +339,9 @@ find_apps_relx(State) ->
{_, _, Apps} ->
?DEBUG("Found shell apps from relx.", []),
Apps;
+ {_, _, Apps, _} ->
+ ?DEBUG("Found shell apps from relx.", []),
+ Apps;
false ->
no_value
end.
@@ -327,19 +357,44 @@ load_apps(Apps) ->
not lists:keymember(App, 1, application:loaded_applications())],
ok.
-reread_config(State) ->
+reread_config(AppsToStart, State) ->
case find_config(State) of
no_config ->
ok;
ConfigList ->
- _ = rebar_utils:reread_config(ConfigList),
+ %% This allows people who use applications that are also
+ %% depended on by rebar3 or its plugins to change their
+ %% configuration at runtime based on the configuration files.
+ %%
+ %% To do this, we stop apps that are already started before
+ %% reloading their configuration.
+ %%
+ %% We make an exception for apps that:
+ %% - are not already running
+ %% - would not be restarted (and hence would break some
+ %% compatibility with rebar3)
+ %% - are not in the config files and would see no config
+ %% changes
+ %% - are not in a blacklist, where changing their config
+ %% would be risky to the shell or the rebar3 agent
+ %% functionality (i.e. changing inets may break proxy
+ %% settings, stopping `kernel' would break everything)
+ Running = [App || {App, _, _} <- application:which_applications()],
+ BlackList = [inets, stdlib, kernel, rebar],
+ _ = [application:stop(App)
+ || Config <- ConfigList,
+ {App, _} <- Config,
+ lists:member(App, Running),
+ lists:member(App, AppsToStart),
+ not lists:member(App, BlackList)],
+ _ = rebar_utils:reread_config(ConfigList, [update_logger]),
ok
end.
boot_apps(Apps) ->
?WARN("The rebar3 shell is a development tool; to deploy "
"applications in production, consider using releases "
- "(http://www.rebar3.org/v3.0/docs/releases)", []),
+ "(http://www.rebar3.org/docs/releases)", []),
Normalized = normalize_boot_apps(Apps),
Res = [application:ensure_all_started(App) || App <- Normalized],
_ = [?INFO("Booted ~p", [App])
@@ -350,17 +405,24 @@ boot_apps(Apps) ->
ok.
normalize_load_apps([]) -> [];
+normalize_load_apps([{_App, none} | T]) -> normalize_load_apps(T);
normalize_load_apps([{App, _} | T]) -> [App | normalize_load_apps(T)];
normalize_load_apps([{App, _Vsn, load} | T]) -> [App | normalize_load_apps(T)];
+normalize_load_apps([{_App, _Vsn, none} | T]) -> normalize_load_apps(T);
+normalize_load_apps([{App, _Vsn, Operator} | T]) when is_atom(Operator) ->
+ [App | normalize_load_apps(T)];
normalize_load_apps([App | T]) when is_atom(App) -> [App | normalize_load_apps(T)].
normalize_boot_apps([]) -> [];
normalize_boot_apps([{_App, load} | T]) -> normalize_boot_apps(T);
normalize_boot_apps([{_App, _Vsn, load} | T]) -> normalize_boot_apps(T);
+normalize_boot_apps([{_App, none} | T]) -> normalize_boot_apps(T);
+normalize_boot_apps([{_App, _Vsn, none} | T]) -> normalize_boot_apps(T);
+normalize_boot_apps([{App, _Vsn, Operator} | T]) when is_atom(Operator) ->
+ [App | normalize_boot_apps(T)];
normalize_boot_apps([{App, _Vsn} | T]) -> [App | normalize_boot_apps(T)];
normalize_boot_apps([App | T]) when is_atom(App) -> [App | normalize_boot_apps(T)].
-
remove_error_handler(0) ->
?WARN("Unable to remove simple error_logger handler", []);
remove_error_handler(N) ->
diff --git a/src/rebar_prv_unlock.erl b/src/rebar_prv_unlock.erl
index 7ff0d89..6fe8bd8 100644
--- a/src/rebar_prv_unlock.erl
+++ b/src/rebar_prv_unlock.erl
@@ -48,12 +48,8 @@ do(State) ->
?PRV_ERROR({file,Reason});
{ok, _} ->
Locks = rebar_config:consult_lock_file(LockFile),
- case handle_unlocks(State, Locks, LockFile) of
- ok ->
- {ok, State};
- {error, Reason} ->
- ?PRV_ERROR({file,Reason})
- end
+ {ok, NewLocks} = handle_unlocks(State, Locks, LockFile),
+ {ok, rebar_state:set(State, {locks, default}, NewLocks)}
end.
-spec format_error(any()) -> iolist().
@@ -66,18 +62,21 @@ format_error(Reason) ->
handle_unlocks(State, Locks, LockFile) ->
{Args, _} = rebar_state:command_parsed_args(State),
- Names = parse_names(ec_cnv:to_binary(proplists:get_value(package, Args, <<"">>))),
+ Names = parse_names(rebar_utils:to_binary(proplists:get_value(package, Args, <<"">>))),
case [Lock || Lock = {Name, _, _} <- Locks, not lists:member(Name, Names)] of
[] ->
- file:delete(LockFile);
+ file:delete(LockFile),
+ {ok, []};
_ when Names =:= [] -> % implicitly all locks
- file:delete(LockFile);
+ file:delete(LockFile),
+ {ok, []};
NewLocks ->
- rebar_config:write_lock_file(LockFile, NewLocks)
+ rebar_config:write_lock_file(LockFile, NewLocks),
+ {ok, NewLocks}
end.
parse_names(Bin) ->
- case lists:usort(re:split(Bin, <<" *, *">>, [trim])) of
+ case lists:usort(re:split(Bin, <<" *, *">>, [trim, unicode])) of
[<<"">>] -> []; % nothing submitted
Other -> Other
end.
diff --git a/src/rebar_prv_update.erl b/src/rebar_prv_update.erl
index 54b135e..4c820c5 100644
--- a/src/rebar_prv_update.erl
+++ b/src/rebar_prv_update.erl
@@ -9,12 +9,6 @@
do/1,
format_error/1]).
--export([hex_to_index/1]).
-
--ifdef(TEST).
--export([cmp_/6, cmpl_/6, valid_vsn/1]).
--endif.
-
-include("rebar.hrl").
-include_lib("providers/include/providers.hrl").
@@ -39,43 +33,13 @@ init(State) ->
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
do(State) ->
- try
- case rebar_packages:registry_dir(State) of
- {ok, RegistryDir} ->
- filelib:ensure_dir(filename:join(RegistryDir, "dummy")),
- HexFile = filename:join(RegistryDir, "registry"),
- ?INFO("Updating package registry...", []),
- TmpDir = ec_file:insecure_mkdtemp(),
- TmpFile = filename:join(TmpDir, "packages.gz"),
-
- CDN = rebar_state:get(State, rebar_packages_cdn, ?DEFAULT_CDN),
- case rebar_utils:url_append_path(CDN, ?REMOTE_REGISTRY_FILE) of
- {ok, Url} ->
- ?DEBUG("Fetching registry from ~p", [Url]),
- case httpc:request(get, {Url, [{"User-Agent", rebar_utils:user_agent()}]},
- [], [{stream, TmpFile}, {sync, true}],
- rebar) of
- {ok, saved_to_file} ->
- {ok, Data} = file:read_file(TmpFile),
- Unzipped = zlib:gunzip(Data),
- ok = file:write_file(HexFile, Unzipped),
- ?INFO("Writing registry to ~s", [HexFile]),
- hex_to_index(State),
- {ok, State};
- _ ->
- ?PRV_ERROR(package_index_download)
- end;
- _ ->
- ?PRV_ERROR({package_parse_cdn, CDN})
- end;
- {uri_parse_error, CDN} ->
- ?PRV_ERROR({package_parse_cdn, CDN})
- end
- catch
- _E:C ->
- ?DEBUG("Error creating package index: ~p ~p", [C, erlang:get_stacktrace()]),
- throw(?PRV_ERROR(package_index_write))
- end.
+ Names = rebar_packages:get_all_names(State),
+ Resources = rebar_state:resources(State),
+ #{repos := RepoConfigs} = rebar_resource_v2:find_resource_state(pkg, Resources),
+ [[update_package(Name, RepoConfig, State)
+ || Name <- Names]
+ || RepoConfig <- RepoConfigs],
+ {ok, State}.
-spec format_error(any()) -> iolist().
format_error({package_parse_cdn, Uri}) ->
@@ -85,170 +49,11 @@ format_error(package_index_download) ->
format_error(package_index_write) ->
"Failed to write package index.".
-is_supported(<<"make">>) -> true;
-is_supported(<<"rebar">>) -> true;
-is_supported(<<"rebar3">>) -> true;
-is_supported(_) -> false.
-
-hex_to_index(State) ->
- {ok, RegistryDir} = rebar_packages:registry_dir(State),
- HexFile = filename:join(RegistryDir, "registry"),
- try ets:file2tab(HexFile) of
- {ok, Registry} ->
- try
- PackageIndex = filename:join(RegistryDir, "packages.idx"),
- ?INFO("Generating package index...", []),
- (catch ets:delete(?PACKAGE_TABLE)),
- ets:new(?PACKAGE_TABLE, [named_table, public]),
- ets:foldl(fun({{Pkg, PkgVsn}, [Deps, Checksum, BuildTools | _]}, _) when is_list(BuildTools) ->
- case lists:any(fun is_supported/1, BuildTools) of
- true ->
- DepsList = update_deps_list(Pkg, PkgVsn, Deps, Registry, State),
- ets:insert(?PACKAGE_TABLE, {{Pkg, PkgVsn}, DepsList, Checksum});
- false ->
- true
- end;
- (_, _) ->
- true
- end, true, Registry),
-
- ets:foldl(fun({Pkg, [[]]}, _) when is_binary(Pkg) ->
- true;
- ({Pkg, [Vsns=[_Vsn | _Rest]]}, _) when is_binary(Pkg) ->
- %% Verify the package is of the right build tool by checking if the first
- %% version exists in the table from the foldl above
- case [V || V <- Vsns, ets:member(?PACKAGE_TABLE, {Pkg, V})] of
- [] ->
- true;
- Vsns1 ->
- ets:insert(?PACKAGE_TABLE, {Pkg, Vsns1})
- end;
- (_, _) ->
- true
- end, true, Registry),
- ets:insert(?PACKAGE_TABLE, {package_index_version, ?PACKAGE_INDEX_VERSION}),
- ?INFO("Writing index to ~s", [PackageIndex]),
- ets:tab2file(?PACKAGE_TABLE, PackageIndex),
- true
- after
- catch ets:delete(Registry)
- end;
- {error, Reason} ->
- ?DEBUG("Error loading package registry: ~p", [Reason]),
- false
- catch
- _:_ ->
- fail
- end.
-
-update_deps_list(Pkg, PkgVsn, Deps, HexRegistry, State) ->
- lists:foldl(fun([Dep, DepVsn, false, _AppName | _], DepsListAcc) ->
- Dep1 = {Pkg, PkgVsn, Dep},
- case {valid_vsn(DepVsn), DepVsn} of
- %% Those are all not perfectly implemented!
- %% and doubled since spaces seem not to be
- %% enforced
- {false, Vsn} ->
- ?WARN("[~s:~s], Bad dependency version for ~s: ~s.",
- [Pkg, PkgVsn, Dep, Vsn]),
- DepsListAcc;
- {_, <<"~>", Vsn/binary>>} ->
- highest_matching(Dep1, rm_ws(Vsn), HexRegistry,
- State, DepsListAcc);
- {_, <<">=", Vsn/binary>>} ->
- cmp(Dep1, rm_ws(Vsn), HexRegistry, State,
- DepsListAcc, fun ec_semver:gte/2);
- {_, <<">", Vsn/binary>>} ->
- cmp(Dep1, rm_ws(Vsn), HexRegistry, State,
- DepsListAcc, fun ec_semver:gt/2);
- {_, <<"<=", Vsn/binary>>} ->
- cmpl(Dep1, rm_ws(Vsn), HexRegistry, State,
- DepsListAcc, fun ec_semver:lte/2);
- {_, <<"<", Vsn/binary>>} ->
- cmpl(Dep1, rm_ws(Vsn), HexRegistry, State,
- DepsListAcc, fun ec_semver:lt/2);
- {_, <<"==", Vsn/binary>>} ->
- [{Dep, Vsn} | DepsListAcc];
- {_, Vsn} ->
- [{Dep, Vsn} | DepsListAcc]
- end;
- ([_Dep, _DepVsn, true, _AppName | _], DepsListAcc) ->
- DepsListAcc
- end, [], Deps).
-
-rm_ws(<<" ", R/binary>>) ->
- rm_ws(R);
-rm_ws(R) ->
- R.
-
-valid_vsn(Vsn) ->
- %% Regepx from https://github.com/sindresorhus/semver-regex/blob/master/index.js
- SemVerRegExp = "v?(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)(\\.(0|[1-9][0-9]*))?"
- "(-[0-9a-z-]+(\\.[0-9a-z-]+)*)?(\\+[0-9a-z-]+(\\.[0-9a-z-]+)*)?",
- SupportedVersions = "^(>=?|<=?|~>|==)?\\s*" ++ SemVerRegExp ++ "$",
- re:run(Vsn, SupportedVersions) =/= nomatch.
-
-highest_matching({Pkg, PkgVsn, Dep}, Vsn, HexRegistry, State, DepsListAcc) ->
- case rebar_packages:find_highest_matching(Pkg, PkgVsn, Dep, Vsn, HexRegistry, State) of
- {ok, HighestDepVsn} ->
- [{Dep, HighestDepVsn} | DepsListAcc];
- none ->
- ?WARN("[~s:~s] Missing registry entry for package ~s. Try to fix with `rebar3 update`",
- [Pkg, PkgVsn, Dep]),
- DepsListAcc
- end.
-
-cmp({_Pkg, _PkgVsn, Dep} = Dep1, Vsn, HexRegistry, State, DepsListAcc, CmpFun) ->
- {ok, Vsns} = rebar_packages:find_all(Dep, HexRegistry, State),
- cmp_(undefined, Vsn, Vsns, DepsListAcc, Dep1, CmpFun).
-
-
-cmp_(undefined, _MinVsn, [], DepsListAcc, {Pkg, PkgVsn, Dep}, _CmpFun) ->
- ?WARN("[~s:~s] Missing registry entry for package ~s. Try to fix with `rebar3 update`",
- [Pkg, PkgVsn, Dep]),
- DepsListAcc;
-cmp_(HighestDepVsn, _MinVsn, [], DepsListAcc, {_Pkg, _PkgVsn, Dep}, _CmpFun) ->
- [{Dep, HighestDepVsn} | DepsListAcc];
-
-cmp_(BestMatch, MinVsn, [Vsn | R], DepsListAcc, Dep, CmpFun) ->
- case CmpFun(Vsn, MinVsn) of
- true ->
- cmp_(Vsn, Vsn, R, DepsListAcc, Dep, CmpFun);
- false ->
- cmp_(BestMatch, MinVsn, R, DepsListAcc, Dep, CmpFun)
- end.
-
-%% We need to treat this differently since we want a version that is LOWER but
-%% the higest possible one.
-cmpl({_Pkg, _PkgVsn, Dep} = Dep1, Vsn, HexRegistry, State, DepsListAcc, CmpFun) ->
- {ok, Vsns} = rebar_packages:find_all(Dep, HexRegistry, State),
- cmpl_(undefined, Vsn, Vsns, DepsListAcc, Dep1, CmpFun).
-
-cmpl_(undefined, _MaxVsn, [], DepsListAcc, {Pkg, PkgVsn, Dep}, _CmpFun) ->
- ?WARN("[~s:~s] Missing registry entry for package ~s. Try to fix with `rebar3 update`",
- [Pkg, PkgVsn, Dep]),
- DepsListAcc;
-
-cmpl_(HighestDepVsn, _MaxVsn, [], DepsListAcc, {_Pkg, _PkgVsn, Dep}, _CmpFun) ->
- [{Dep, HighestDepVsn} | DepsListAcc];
-
-cmpl_(undefined, MaxVsn, [Vsn | R], DepsListAcc, Dep, CmpFun) ->
- case CmpFun(Vsn, MaxVsn) of
- true ->
- cmpl_(Vsn, MaxVsn, R, DepsListAcc, Dep, CmpFun);
- false ->
- cmpl_(undefined, MaxVsn, R, DepsListAcc, Dep, CmpFun)
- end;
-cmpl_(BestMatch, MaxVsn, [Vsn | R], DepsListAcc, Dep, CmpFun) ->
- case CmpFun(Vsn, MaxVsn) of
- true ->
- case ec_semver:gte(Vsn, BestMatch) of
- true ->
- cmpl_(Vsn, MaxVsn, R, DepsListAcc, Dep, CmpFun);
- false ->
- cmpl_(BestMatch, MaxVsn, R, DepsListAcc, Dep, CmpFun)
- end;
- false ->
- cmpl_(BestMatch, MaxVsn, R, DepsListAcc, Dep, CmpFun)
+update_package(Name, RepoConfig, State) ->
+ case rebar_packages:update_package(Name, RepoConfig, State) of
+ fail ->
+ ?WARN("Failed to fetch updates for package ~ts from repo ~ts", [Name, maps:get(name, RepoConfig)]);
+ _ ->
+ ok
end.
diff --git a/src/rebar_prv_upgrade.erl b/src/rebar_prv_upgrade.erl
index 18c307b..565f342 100644
--- a/src/rebar_prv_upgrade.erl
+++ b/src/rebar_prv_upgrade.erl
@@ -32,7 +32,7 @@ init(State) ->
{deps, ?DEPS},
{example, "rebar3 upgrade [cowboy[,ranch]]"},
{short_desc, "Upgrade dependencies."},
- {desc, "Upgrade project dependecies. Mentioning no application "
+ {desc, "Upgrade project dependencies. Mentioning no application "
"will upgrade all dependencies. To upgrade specific dependencies, "
"their names can be listed in the command."},
{opts, [
@@ -43,6 +43,19 @@ init(State) ->
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
do(State) ->
+ Cwd = rebar_state:dir(State),
+ Providers = rebar_state:providers(State),
+ rebar_hooks:run_project_and_app_hooks(Cwd, pre, ?PROVIDER, Providers, State),
+ case do_(State) of
+ {ok, NewState} ->
+ rebar_hooks:run_project_and_app_hooks(Cwd, post, ?PROVIDER, Providers, NewState),
+ {ok, NewState};
+ Other ->
+ rebar_hooks:run_project_and_app_hooks(Cwd, post, ?PROVIDER, Providers, State),
+ Other
+ end.
+
+do_(State) ->
{Args, _} = rebar_state:command_parsed_args(State),
Locks = rebar_state:get(State, {locks, default}, []),
%% We have 3 sources of dependencies to upgrade from:
@@ -68,16 +81,23 @@ do(State) ->
ProfileDeps = rebar_state:get(State, {deps, default}, []),
Deps = [Dep || Dep <- TopDeps ++ ProfileDeps, % TopDeps > ProfileDeps
is_atom(Dep) orelse is_atom(element(1, Dep))],
- Names = parse_names(ec_cnv:to_binary(proplists:get_value(package, Args, <<"">>)), Locks),
+ Names = parse_names(rebar_utils:to_binary(proplists:get_value(package, Args, <<"">>)), Locks),
+
DepsDict = deps_dict(rebar_state:all_deps(State)),
- case prepare_locks(Names, Deps, Locks, [], DepsDict) of
+ AltDeps = find_non_default_deps(Deps, State),
+ FilteredNames = cull_default_names_if_profiles(Names, Deps, State),
+ case prepare_locks(FilteredNames, Deps, Locks, [], DepsDict, AltDeps) of
{error, Reason} ->
{error, Reason};
- {Locks0, _Unlocks0} ->
+ {Locks0, Unlocks0} ->
Deps0 = top_level_deps(Deps, Locks),
State1 = rebar_state:set(State, {deps, default}, Deps0),
DepsDir = rebar_prv_install_deps:profile_dep_dir(State, default),
D = rebar_app_utils:parse_deps(root, DepsDir, Deps0, State1, Locks0, 0),
+
+ %% first update the package index for the packages to be upgraded
+ update_pkg_deps(Unlocks0, D, State1),
+
State2 = rebar_state:set(State1, {parsed_deps, default}, D),
State3 = rebar_state:set(State2, {locks, default}, Locks0),
State4 = rebar_state:set(State3, upgrade, true),
@@ -100,14 +120,44 @@ do(State) ->
format_error({unknown_dependency, Name}) ->
io_lib:format("Dependency ~ts not found", [Name]);
format_error({transitive_dependency, Name}) ->
- io_lib:format("Dependency ~ts is transient and cannot be safely upgraded. "
+ io_lib:format("Dependency ~ts is transitive and cannot be safely upgraded. "
"Promote it to your top-level rebar.config file to upgrade it.",
[Name]);
format_error(Reason) ->
io_lib:format("~p", [Reason]).
+%% fetch updates for package deps that have been unlocked for upgrade
+update_pkg_deps([], _, _) ->
+ ok;
+update_pkg_deps([{Name, _, _} | Rest], AppInfos, State) ->
+ case rebar_app_utils:find(Name, AppInfos) of
+ {ok, AppInfo} ->
+ Source = rebar_app_info:source(AppInfo),
+ case element(1, Source) of
+ pkg ->
+ Resources = rebar_state:resources(State),
+ #{repos := RepoConfigs} = rebar_resource_v2:find_resource_state(pkg, Resources),
+ PkgName = element(2, Source),
+ [update_package(PkgName, RepoConfig, State) || RepoConfig <- RepoConfigs];
+ _ ->
+ skip
+ end;
+ _ ->
+ %% this should be impossible...
+ skip
+ end,
+ update_pkg_deps(Rest, AppInfos, State).
+
+update_package(Name, RepoConfig, State) ->
+ case rebar_packages:update_package(Name, RepoConfig, State) of
+ fail ->
+ ?WARN("Failed to fetch updates for package ~ts from repo ~ts", [Name, maps:get(name, RepoConfig)]);
+ _ ->
+ ok
+ end.
+
parse_names(Bin, Locks) ->
- case lists:usort(re:split(Bin, <<" *, *">>, [trim])) of
+ case lists:usort(re:split(Bin, <<" *, *">>, [trim, unicode])) of
%% Nothing submitted, use *all* apps
[<<"">>] -> [Name || {Name, _, 0} <- Locks];
[] -> [Name || {Name, _, 0} <- Locks];
@@ -115,20 +165,45 @@ parse_names(Bin, Locks) ->
Other -> Other
end.
-prepare_locks([], _, Locks, Unlocks, _Dict) ->
+%% Find alternative deps in non-default profiles since they may
+%% need to be passed through (they are never locked)
+find_non_default_deps(Deps, State) ->
+ AltProfiles = rebar_state:current_profiles(State) -- [default],
+ AltProfileDeps = lists:append([
+ rebar_state:get(State, {deps, Profile}, []) || Profile <- AltProfiles]
+ ),
+ [Dep || Dep <- AltProfileDeps,
+ is_atom(Dep) orelse is_atom(element(1, Dep))
+ andalso not lists:member(Dep, Deps)].
+
+%% If any alt profiles are used, remove the default profiles from
+%% the upgrade list and warn about it.
+cull_default_names_if_profiles(Names, Deps, State) ->
+ case rebar_state:current_profiles(State) of
+ [default] ->
+ Names;
+ _ ->
+ ?INFO("Dependencies in the default profile will not be upgraded", []),
+ lists:filter(fun(Name) ->
+ AtomName = binary_to_atom(Name, utf8),
+ rebar_utils:tup_find(AtomName, Deps) == false
+ end, Names)
+ end.
+
+prepare_locks([], _, Locks, Unlocks, _Dict, _AltDeps) ->
{Locks, Unlocks};
-prepare_locks([Name|Names], Deps, Locks, Unlocks, Dict) ->
+prepare_locks([Name|Names], Deps, Locks, Unlocks, Dict, AltDeps) ->
AtomName = binary_to_atom(Name, utf8),
case lists:keyfind(Name, 1, Locks) of
{_, _, 0} = Lock ->
case rebar_utils:tup_find(AtomName, Deps) of
false ->
- ?WARN("Dependency ~s has been removed and will not be upgraded", [Name]),
- prepare_locks(Names, Deps, Locks, Unlocks, Dict);
+ ?WARN("Dependency ~ts has been removed and will not be upgraded", [Name]),
+ prepare_locks(Names, Deps, Locks, Unlocks, Dict, AltDeps);
Dep ->
{Source, NewLocks, NewUnlocks} = prepare_lock(Dep, Lock, Locks, Dict),
prepare_locks(Names, Deps, NewLocks,
- [{Name, Source, 0} | NewUnlocks ++ Unlocks], Dict)
+ [{Name, Source, 0} | NewUnlocks ++ Unlocks], Dict, AltDeps)
end;
{_, _, Level} = Lock when Level > 0 ->
case rebar_utils:tup_find(AtomName, Deps) of
@@ -137,10 +212,15 @@ prepare_locks([Name|Names], Deps, Locks, Unlocks, Dict) ->
Dep -> % Dep has been promoted
{Source, NewLocks, NewUnlocks} = prepare_lock(Dep, Lock, Locks, Dict),
prepare_locks(Names, Deps, NewLocks,
- [{Name, Source, 0} | NewUnlocks ++ Unlocks], Dict)
+ [{Name, Source, 0} | NewUnlocks ++ Unlocks], Dict, AltDeps)
end;
false ->
- ?PRV_ERROR({unknown_dependency, Name})
+ case rebar_utils:tup_find(AtomName, AltDeps) of
+ false ->
+ ?PRV_ERROR({unknown_dependency, Name});
+ _ -> % non-default profile dependency found, pass through
+ prepare_locks(Names, Deps, Locks, Unlocks, Dict, AltDeps)
+ end
end.
prepare_lock(Dep, Lock, Locks, Dict) ->
@@ -149,7 +229,7 @@ prepare_lock(Dep, Lock, Locks, Dict) ->
{Name, _, Src} -> {Name, Src};
_ when is_atom(Dep) ->
%% version-free package. Must unlock whatever matches in locks
- {_, Vsn, _} = lists:keyfind(ec_cnv:to_binary(Dep), 1, Locks),
+ {_, Vsn, _} = lists:keyfind(rebar_utils:to_binary(Dep), 1, Locks),
{Dep, Vsn}
end,
Children = all_children(Name1, Dict),
@@ -165,7 +245,7 @@ unlock_children(Children, Locks) ->
unlock_children(_, [], Locks, Unlocks) ->
{Locks, Unlocks};
unlock_children(Children, [App = {Name,_,_} | Apps], Locks, Unlocks) ->
- case lists:member(ec_cnv:to_binary(Name), Children) of
+ case lists:member(rebar_utils:to_binary(Name), Children) of
true ->
unlock_children(Children, Apps, Locks, [App | Unlocks]);
false ->
@@ -183,7 +263,7 @@ all_children(Name, Dict) ->
lists:flatten(all_children_(Name, Dict)).
all_children_(Name, Dict) ->
- case dict:find(ec_cnv:to_binary(Name), Dict) of
+ case dict:find(rebar_utils:to_binary(Name), Dict) of
{ok, Children} ->
[Children | [all_children_(Child, Dict) || Child <- Children]];
error ->
diff --git a/src/rebar_prv_xref.erl b/src/rebar_prv_xref.erl
index 45badd3..3c987b4 100644
--- a/src/rebar_prv_xref.erl
+++ b/src/rebar_prv_xref.erl
@@ -36,7 +36,7 @@ init(State) ->
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
do(State) ->
- code:add_pathsa(rebar_state:code_paths(State, all_deps)),
+ rebar_paths:set_paths([deps], State),
XrefChecks = prepare(State),
XrefIgnores = rebar_state:get(State, xref_ignores, []),
%% Run xref checks
@@ -47,7 +47,6 @@ do(State) ->
QueryChecks = rebar_state:get(State, xref_queries, []),
QueryResults = lists:foldl(fun check_query/2, [], QueryChecks),
stopped = xref:stop(xref),
- rebar_utils:cleanup_code_path(rebar_state:code_paths(State, default)),
case XrefResults =:= [] andalso QueryResults =:= [] of
true ->
{ok, State};
@@ -70,7 +69,7 @@ short_desc() ->
desc() ->
io_lib:format(
- "~s~n"
+ "~ts~n"
"~n"
"Valid rebar.config options:~n"
" ~p~n"
@@ -97,8 +96,11 @@ prepare(State) ->
rebar_state:get(State, xref_warnings, false)},
{verbose, rebar_log:is_verbose(State)}]),
- [{ok, _} = xref:add_directory(xref, rebar_app_info:ebin_dir(App))
- || App <- rebar_state:project_apps(State)],
+ [{ok, _} = xref:add_directory(xref, Dir)
+ || App <- rebar_state:project_apps(State),
+ %% the directory may not exist in rare cases of a compile
+ %% hook of a dep running xref prior to the full job being done
+ Dir <- [rebar_app_info:ebin_dir(App)], filelib:is_dir(Dir)],
%% Get list of xref checks we want to run
ConfXrefChecks = rebar_state:get(State, xref_checks,
@@ -158,14 +160,23 @@ get_xref_ignorelist(Mod, XrefCheck) ->
%% And create a flat {M,F,A} list
lists:foldl(
fun({F, A}, Acc) -> [{Mod,F,A} | Acc];
- ({M, F, A}, Acc) -> [{M,F,A} | Acc]
+ ({M, F, A}, Acc) -> [{M,F,A} | Acc];
+ (M, Acc) when is_atom(M) -> [M | Acc]
end, [], lists:flatten([IgnoreXref, BehaviourCallbacks])).
keyall(Key, List) ->
lists:flatmap(fun({K, L}) when Key =:= K -> L; (_) -> [] end, List).
get_behaviour_callbacks(exports_not_used, Attributes) ->
- [B:behaviour_info(callbacks) || B <- keyall(behaviour, Attributes)];
+ lists:map(fun(Mod) ->
+ try
+ Mod:behaviour_info(callbacks)
+ catch
+ error:undef ->
+ ?WARN("Behaviour ~p is used but cannot be found.", [Mod]),
+ []
+ end
+ end, keyall(behaviour, Attributes) ++ keyall(behavior, Attributes));
get_behaviour_callbacks(_XrefCheck, _Attributes) ->
[].
@@ -185,14 +196,15 @@ filter_xref_results(XrefCheck, XrefIgnores, XrefResults) ->
end, SearchModules),
[Result || Result <- XrefResults,
- not lists:member(parse_xref_result(Result), Ignores)].
+ not lists:member(element(1, Result), Ignores)
+ andalso not lists:member(parse_xref_result(Result), Ignores)].
display_results(XrefResults, QueryResults) ->
[lists:map(fun display_xref_results_for_type/1, XrefResults),
lists:map(fun display_query_result/1, QueryResults)].
display_query_result({Query, Answer, Value}) ->
- io_lib:format("Query ~s~n answer ~p~n did not match ~p~n",
+ io_lib:format("Query ~ts~n answer ~p~n did not match ~p~n",
[Query, Answer, Value]).
display_xref_results_for_type({Type, XrefResults}) ->
@@ -213,37 +225,37 @@ display_xref_result_fun(Type) ->
end,
case Type of
undefined_function_calls ->
- io_lib:format("~sWarning: ~s calls undefined function ~s (Xref)\n",
+ io_lib:format("~tsWarning: ~ts calls undefined function ~ts (Xref)\n",
[Source, SMFA, TMFA]);
undefined_functions ->
- io_lib:format("~sWarning: ~s is undefined function (Xref)\n",
+ io_lib:format("~tsWarning: ~ts is undefined function (Xref)\n",
[Source, SMFA]);
locals_not_used ->
- io_lib:format("~sWarning: ~s is unused local function (Xref)\n",
+ io_lib:format("~tsWarning: ~ts is unused local function (Xref)\n",
[Source, SMFA]);
exports_not_used ->
- io_lib:format("~sWarning: ~s is unused export (Xref)\n",
+ io_lib:format("~tsWarning: ~ts is unused export (Xref)\n",
[Source, SMFA]);
deprecated_function_calls ->
- io_lib:format("~sWarning: ~s calls deprecated function ~s (Xref)\n",
+ io_lib:format("~tsWarning: ~ts calls deprecated function ~ts (Xref)\n",
[Source, SMFA, TMFA]);
deprecated_functions ->
- io_lib:format("~sWarning: ~s is deprecated function (Xref)\n",
+ io_lib:format("~tsWarning: ~ts is deprecated function (Xref)\n",
[Source, SMFA]);
Other ->
- io_lib:format("~sWarning: ~s - ~s xref check: ~s (Xref)\n",
+ io_lib:format("~tsWarning: ~ts - ~ts xref check: ~ts (Xref)\n",
[Source, SMFA, TMFA, Other])
end
end.
format_mfa({M, F, A}) ->
- ?FMT("~s:~s/~w", [M, F, A]).
+ ?FMT("~ts:~ts/~w", [M, F, A]).
format_mfa_source(MFA) ->
case find_mfa_source(MFA) of
{module_not_found, function_not_found} -> "";
- {Source, function_not_found} -> ?FMT("~s: ", [Source]);
- {Source, Line} -> ?FMT("~s:~w: ", [Source, Line])
+ {Source, function_not_found} -> ?FMT("~ts: ", [Source]);
+ {Source, Line} -> ?FMT("~ts:~w: ", [Source, Line])
end.
%%
@@ -269,12 +281,21 @@ find_mfa_source({M, F, A}) ->
end.
find_function_source(M, F, A, Bin) ->
- AbstractCode = beam_lib:chunks(Bin, [abstract_code]),
- {ok, {M, [{abstract_code, {raw_abstract_v1, Code}}]}} = AbstractCode,
+ ChunksLookup = beam_lib:chunks(Bin, [abstract_code]),
+ {ok, {M, [{abstract_code, AbstractCodeLookup}]}} = ChunksLookup,
+ case AbstractCodeLookup of
+ no_abstract_code ->
+ % There isn't much else we can do at this point
+ {module_not_found, function_not_found};
+ {raw_abstract_v1, AbstractCode} ->
+ find_function_source_in_abstract_code(F, A, AbstractCode)
+ end.
+
+find_function_source_in_abstract_code(F, A, AbstractCode) ->
%% Extract the original source filename from the abstract code
- [{attribute, 1, file, {Source, _}} | _] = Code,
+ [{attribute, _, file, {Source, _}} | _] = AbstractCode,
%% Extract the line number for a given function def
- Fn = [E || E <- Code,
+ Fn = [E || E <- AbstractCode,
safe_element(1, E) == function,
safe_element(3, E) == F,
safe_element(4, E) == A],
diff --git a/src/rebar_relx.erl b/src/rebar_relx.erl
index 5c653a3..431e1bc 100644
--- a/src/rebar_relx.erl
+++ b/src/rebar_relx.erl
@@ -6,6 +6,10 @@
-export([do/4,
format_error/1]).
+-ifdef(TEST).
+-export([merge_overlays/1]).
+-endif.
+
-include("rebar.hrl").
%% ===================================================================
@@ -23,22 +27,25 @@ do(Module, Command, Provider, State) ->
LibDirs = rebar_utils:filtermap(fun ec_file:exists/1,
[rebar_dir:checkouts_dir(State), DepsDir | ProjectAppDirs]),
OutputDir = filename:join(rebar_dir:base_dir(State), ?DEFAULT_RELEASE_DIR),
- AllOptions = string:join([Command | Options], " "),
+ AllOptions = rebar_string:join([Command | Options], " "),
Cwd = rebar_state:dir(State),
Providers = rebar_state:providers(State),
+ RebarOpts = rebar_state:opts(State),
+ ErlOpts = rebar_opts:erl_opts(RebarOpts),
rebar_hooks:run_project_and_app_hooks(Cwd, pre, Provider, Providers, State),
try
case rebar_state:get(State, relx, []) of
[] ->
relx:main([{lib_dirs, LibDirs}
,{caller, api}
- ,{log_level, LogLevel} | output_dir(OutputDir, Options)], AllOptions);
+ ,{log_level, LogLevel} | output_dir(OutputDir, Options)] ++ ErlOpts, AllOptions);
Config ->
- Config1 = merge_overlays(Config),
+ Config1 = [{overlay_vars, [{base_dir, rebar_dir:base_dir(State)}]}
+ | merge_overlays(Config)],
relx:main([{lib_dirs, LibDirs}
,{config, Config1}
,{caller, api}
- ,{log_level, LogLevel} | output_dir(OutputDir, Options)], AllOptions)
+ ,{log_level, LogLevel} | output_dir(OutputDir, Options)] ++ ErlOpts, AllOptions)
end,
rebar_hooks:run_project_and_app_hooks(Cwd, post, Provider, Providers, State),
{ok, State}
@@ -62,5 +69,5 @@ merge_overlays(Config) ->
(_) -> false
end, Config),
%% Have profile overlay entries come before others to match how profiles work elsewhere
- NewOverlay = lists:reverse(lists:flatmap(fun({overlay, Overlay}) -> Overlay end, Overlays)),
+ NewOverlay = lists:flatmap(fun({overlay, Overlay}) -> Overlay end, lists:reverse(Overlays)),
[{overlay, NewOverlay} | Others].
diff --git a/src/rebar_resource.erl b/src/rebar_resource.erl
index cdce7a8..a3a8edb 100644
--- a/src/rebar_resource.erl
+++ b/src/rebar_resource.erl
@@ -2,23 +2,53 @@
%% ex: ts=4 sw=4 et
-module(rebar_resource).
--export([]).
+-export([new/3,
+ lock/2,
+ download/4,
+ needs_update/2,
+ make_vsn/2]).
--export_type([resource/0
- ,type/0
- ,location/0
- ,ref/0]).
+-export_type([source/0,
+ type/0,
+ location/0,
+ ref/0]).
--type resource() :: {type(), location(), ref()}.
+-include("rebar.hrl").
+
+-type source() :: {type(), location(), ref()} | {type(), location(), ref(), binary()}.
-type type() :: atom().
-type location() :: string().
-type ref() :: any().
-callback lock(file:filename_all(), tuple()) ->
- rebar_resource:resource().
+ source().
-callback download(file:filename_all(), tuple(), rebar_state:t()) ->
{tarball, file:filename_all()} | {ok, any()} | {error, any()}.
-callback needs_update(file:filename_all(), tuple()) ->
boolean().
-callback make_vsn(file:filename_all()) ->
{plain, string()} | {error, string()}.
+
+-spec new(type(), module(), term()) -> rebar_resource_v2:resource().
+new(Type, Module, State) ->
+ #resource{type=Type,
+ module=Module,
+ state=State,
+ implementation=?MODULE}.
+
+lock(Module, AppInfo) ->
+ Module:lock(rebar_app_info:dir(AppInfo), rebar_app_info:source(AppInfo)).
+
+download(Module, TmpDir, AppInfo, State) ->
+ case Module:download(TmpDir, rebar_app_info:source(AppInfo), State) of
+ {ok, _} ->
+ ok;
+ Error ->
+ Error
+ end.
+
+needs_update(Module, AppInfo) ->
+ Module:needs_update(rebar_app_info:dir(AppInfo), rebar_app_info:source(AppInfo)).
+
+make_vsn(Module, AppInfo) ->
+ Module:make_vsn(rebar_app_info:dir(AppInfo)).
diff --git a/src/rebar_resource_v2.erl b/src/rebar_resource_v2.erl
new file mode 100644
index 0000000..f032f6e
--- /dev/null
+++ b/src/rebar_resource_v2.erl
@@ -0,0 +1,147 @@
+%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*-
+%% ex: ts=4 sw=4 et
+-module(rebar_resource_v2).
+
+-export([new/3,
+ find_resource_state/2,
+ format_source/1,
+ lock/2,
+ download/3,
+ needs_update/2,
+ make_vsn/3,
+ format_error/1]).
+
+-export_type([resource/0,
+ source/0,
+ type/0,
+ location/0,
+ ref/0,
+ resource_state/0]).
+
+-include("rebar.hrl").
+-include_lib("providers/include/providers.hrl").
+
+-type resource() :: #resource{}.
+-type source() :: {type(), location(), ref()} | {type(), location(), ref(), binary()}.
+-type type() :: atom().
+-type location() :: string().
+-type ref() :: any().
+-type resource_state() :: term().
+
+-callback init(type(), rebar_state:t()) -> {ok, resource()}.
+-callback lock(rebar_app_info:t(), resource_state()) -> source().
+-callback download(file:filename_all(), rebar_app_info:t(), resource_state(), rebar_state:t()) ->
+ ok | {error, any()}.
+-callback needs_update(rebar_app_info:t(), resource_state()) -> boolean().
+-callback make_vsn(rebar_app_info:t(), resource_state()) ->
+ {plain, string()} | {error, string()}.
+
+-spec new(type(), module(), term()) -> resource().
+new(Type, Module, State) ->
+ #resource{type=Type,
+ module=Module,
+ state=State,
+ implementation=?MODULE}.
+
+-spec find_resource(type(), [resource()]) -> {ok, resource()} | {error, not_found}.
+find_resource(Type, Resources) ->
+ case ec_lists:find(fun(#resource{type=T}) -> T =:= Type end, Resources) of
+ error when is_atom(Type) ->
+ case code:which(Type) of
+ non_existing ->
+ {error, not_found};
+ _ ->
+ {ok, rebar_resource:new(Type, Type, #{})}
+ end;
+ error ->
+ {error, not_found};
+ {ok, Resource} ->
+ {ok, Resource}
+ end.
+
+find_resource_state(Type, Resources) ->
+ case lists:keyfind(Type, #resource.type, Resources) of
+ false ->
+ {error, not_found};
+ #resource{state=State} ->
+ State
+ end.
+
+format_source({pkg, Name, Vsn, _Hash, _}) -> {pkg, Name, Vsn};
+format_source(Source) -> Source.
+
+lock(AppInfo, State) ->
+ resource_run(lock, rebar_app_info:source(AppInfo), [AppInfo], State).
+
+resource_run(Function, Source, Args, State) ->
+ Resources = rebar_state:resources(State),
+ case get_resource_type(Source, Resources) of
+ {ok, #resource{type=_,
+ module=Module,
+ state=ResourceState,
+ implementation=?MODULE}} ->
+ erlang:apply(Module, Function, Args++[ResourceState]);
+ {ok, #resource{type=_,
+ module=Module,
+ state=_,
+ implementation=rebar_resource}} ->
+ erlang:apply(rebar_resource, Function, [Module | Args])
+ end.
+
+download(TmpDir, AppInfo, State) ->
+ resource_run(download, rebar_app_info:source(AppInfo), [TmpDir, AppInfo, State], State).
+
+needs_update(AppInfo, State) ->
+ resource_run(needs_update, rebar_app_info:source(AppInfo), [AppInfo], State).
+
+%% this is a special case since it is used for project apps as well, not just deps
+make_vsn(AppInfo, VcsType, State) ->
+ Resources = rebar_state:resources(State),
+ case is_resource_type(VcsType, Resources) of
+ true ->
+ case find_resource(VcsType, Resources) of
+ {ok, #resource{type=_,
+ module=Module,
+ state=ResourceState,
+ implementation=?MODULE}} ->
+ Module:make_vsn(AppInfo, ResourceState);
+ {ok, #resource{type=_,
+ module=Module,
+ state=_,
+ implementation=rebar_resource}} ->
+ rebar_resource:make_vsn(Module, AppInfo)
+ end;
+ false ->
+ unknown
+ end.
+
+format_error({no_resource, Location, Type}) ->
+ io_lib:format("Cannot handle dependency ~ts.~n"
+ " No module found for resource type ~p.", [Location, Type]);
+format_error({no_resource, Source}) ->
+ io_lib:format("Cannot handle dependency ~ts.~n"
+ " No module found for unknown resource type.", [Source]).
+
+is_resource_type(Type, Resources) ->
+ lists:any(fun(#resource{type=T}) -> T =:= Type end, Resources).
+
+-spec get_resource_type(term(), [resource()]) -> {ok, resource()}.
+get_resource_type({Type, Location}, Resources) ->
+ get_resource(Type, Location, Resources);
+get_resource_type({Type, Location, _}, Resources) ->
+ get_resource(Type, Location, Resources);
+get_resource_type({Type, _, _, Location}, Resources) ->
+ get_resource(Type, Location, Resources);
+get_resource_type(Location={Type, _, _, _, _}, Resources) ->
+ get_resource(Type, Location, Resources);
+get_resource_type(Source, _) ->
+ throw(?PRV_ERROR({no_resource, Source})).
+
+-spec get_resource(type(), term(), [resource()]) -> {ok, resource()}.
+get_resource(Type, Location, Resources) ->
+ case find_resource(Type, Resources) of
+ {error, not_found} ->
+ throw(?PRV_ERROR({no_resource, Location, Type}));
+ {ok, Resource} ->
+ {ok, Resource}
+ end.
diff --git a/src/rebar_state.erl b/src/rebar_state.erl
index bdd4aeb..31d3a08 100644
--- a/src/rebar_state.erl
+++ b/src/rebar_state.erl
@@ -38,6 +38,12 @@
to_list/1,
+ compilers/1, compilers/2,
+ prepend_compilers/2, append_compilers/2,
+
+ project_builders/1, add_project_builder/3,
+
+ create_resources/2, set_resources/2,
resources/1, resources/2, add_resource/2,
providers/1, providers/2, add_provider/2,
allow_provider_overrides/1, allow_provider_overrides/2
@@ -59,12 +65,14 @@
command_args = [],
command_parsed_args = {[], []},
- current_app :: rebar_app_info:t(),
+ current_app :: undefined | rebar_app_info:t(),
project_apps = [] :: [rebar_app_info:t()],
deps_to_build = [] :: [rebar_app_info:t()],
all_plugin_deps = [] :: [rebar_app_info:t()],
all_deps = [] :: [rebar_app_info:t()],
+ compilers = [] :: [{compiler_type(), extension(), extension(), compile_fun()}],
+ project_builders = [] :: [{rebar_app_info:project_type(), module()}],
resources = [],
providers = [],
allow_provider_overrides = false :: boolean()}).
@@ -73,28 +81,30 @@
-type t() :: #state_t{}.
+-type compiler_type() :: atom().
+-type extension() :: string().
+-type compile_fun() :: fun(([file:filename()], rebar_app_info:t(), list()) -> ok).
+
-spec new() -> t().
new() ->
- BaseState = base_state(),
+ BaseState = base_state(dict:new()),
BaseState#state_t{dir = rebar_dir:get_cwd()}.
-spec new(list()) -> t().
new(Config) when is_list(Config) ->
- BaseState = base_state(),
Opts = base_opts(Config),
- BaseState#state_t { dir = rebar_dir:get_cwd(),
- default = Opts,
- opts = Opts }.
+ BaseState = base_state(Opts),
+ BaseState#state_t{dir=rebar_dir:get_cwd(),
+ default=Opts}.
-spec new(t() | atom(), list()) -> t().
new(Profile, Config) when is_atom(Profile)
, is_list(Config) ->
- BaseState = base_state(),
Opts = base_opts(Config),
- BaseState#state_t { dir = rebar_dir:get_cwd(),
- current_profiles = [Profile],
- default = Opts,
- opts = Opts };
+ BaseState = base_state(Opts),
+ BaseState#state_t{dir = rebar_dir:get_cwd(),
+ current_profiles = [Profile],
+ default = Opts};
new(ParentState=#state_t{}, Config) ->
%% Load terms from rebar.config, if it exists
Dir = rebar_dir:get_cwd(),
@@ -129,20 +139,15 @@ deps_from_config(Dir, Config) ->
[{{locks, default}, D}, {{deps, default}, Deps}]
end.
-base_state() ->
- case application:get_env(rebar, resources) of
- undefined ->
- Resources = [];
- {ok, Resources} ->
- Resources
- end,
- #state_t{resources=Resources}.
+base_state(Opts) ->
+ #state_t{opts=Opts}.
base_opts(Config) ->
Deps = proplists:get_value(deps, Config, []),
Plugins = proplists:get_value(plugins, Config, []),
ProjectPlugins = proplists:get_value(project_plugins, Config, []),
- Terms = [{{deps, default}, Deps}, {{plugins, default}, Plugins}, {{project_plugins, default}, ProjectPlugins} | Config],
+ Terms = [{{deps, default}, Deps}, {{plugins, default}, Plugins},
+ {{project_plugins, default}, ProjectPlugins} | Config],
true = rebar_config:verify_config_format(Terms),
dict:from_list(Terms).
@@ -182,7 +187,7 @@ all(_, []) ->
all(Dir, [File|Artifacts]) ->
case filelib:is_regular(filename:join(Dir, File)) of
false ->
- ?DEBUG("Missing artifact ~s", [filename:join(Dir, File)]),
+ ?DEBUG("Missing artifact ~ts", [filename:join(Dir, File)]),
{false, File};
true ->
all(Dir, Artifacts)
@@ -257,12 +262,15 @@ apply_profiles(State, Profile) when not is_list(Profile) ->
apply_profiles(State, [default]) ->
State;
apply_profiles(State=#state_t{default = Defaults, current_profiles=CurrentProfiles}, Profiles) ->
+ ProvidedProfiles = lists:prefix([default|Profiles], CurrentProfiles),
AppliedProfiles = case Profiles of
%% Head of list global profile is special, only for use by rebar3
%% It does not clash if a user does `rebar3 as global...` but when
%% it is the head we must make sure not to prepend `default`
[global | _] ->
Profiles;
+ _ when ProvidedProfiles ->
+ deduplicate(CurrentProfiles);
_ ->
deduplicate(CurrentProfiles ++ Profiles)
end,
@@ -302,9 +310,9 @@ dir(State=#state_t{}, Dir) ->
deps_names(Deps) when is_list(Deps) ->
lists:map(fun(Dep) when is_tuple(Dep) ->
- ec_cnv:to_binary(element(1, Dep));
+ rebar_utils:to_binary(element(1, Dep));
(Dep) when is_atom(Dep) ->
- ec_cnv:to_binary(Dep)
+ rebar_utils:to_binary(Dep)
end, Deps);
deps_names(State) ->
Deps = rebar_state:get(State, deps, []),
@@ -356,18 +364,80 @@ namespace(#state_t{namespace=Namespace}) ->
namespace(State=#state_t{}, Namespace) ->
State#state_t{namespace=Namespace}.
--spec resources(t()) -> [{rebar_resource:type(), module()}].
+-spec resources(t()) -> [{rebar_resource_v2:type(), module()}].
resources(#state_t{resources=Resources}) ->
Resources.
--spec resources(t(), [{rebar_resource:type(), module()}]) -> t().
-resources(State, NewResources) ->
- State#state_t{resources=NewResources}.
+-spec set_resources(t(), [{rebar_resource_v2:type(), module()}]) -> t().
+set_resources(State, Resources) ->
+ State#state_t{resources=Resources}.
--spec add_resource(t(), {rebar_resource:type(), module()}) -> t().
-add_resource(State=#state_t{resources=Resources}, Resource) ->
+-spec resources(t(), [{rebar_resource_v2:type(), module()}]) -> t().
+resources(State, NewResources) ->
+ lists:foldl(fun(Resource, StateAcc) ->
+ add_resource(StateAcc, Resource)
+ end, State, NewResources).
+
+-spec add_resource(t(), {rebar_resource_v2:type(), module()}) -> t().
+add_resource(State=#state_t{resources=Resources}, {ResourceType, ResourceModule}) ->
+ _ = code:ensure_loaded(ResourceModule),
+ Resource = case erlang:function_exported(ResourceModule, init, 2) of
+ true ->
+ case ResourceModule:init(ResourceType, State) of
+ {ok, R=#resource{}} ->
+ R;
+ _ ->
+ %% init didn't return a resource
+ %% must be an old resource
+ warn_old_resource(ResourceModule),
+ rebar_resource:new(ResourceType,
+ ResourceModule,
+ #{})
+ end;
+ false ->
+ %% no init, must be initial implementation
+ warn_old_resource(ResourceModule),
+ rebar_resource:new(ResourceType,
+ ResourceModule,
+ #{})
+ end,
State#state_t{resources=[Resource | Resources]}.
+warn_old_resource(ResourceModule) ->
+ ?WARN("Using custom resource ~s that implements a deprecated api. "
+ "It should be upgraded to rebar_resource_v2.", [ResourceModule]).
+
+compilers(#state_t{compilers=Compilers}) ->
+ Compilers.
+
+prepend_compilers(State=#state_t{compilers=Compilers}, NewCompilers) ->
+ State#state_t{compilers=NewCompilers++Compilers}.
+
+append_compilers(State=#state_t{compilers=Compilers}, NewCompilers) ->
+ State#state_t{compilers=Compilers++NewCompilers}.
+
+compilers(State, Compilers) ->
+ State#state_t{compilers=Compilers}.
+
+project_builders(#state_t{project_builders=ProjectBuilders}) ->
+ ProjectBuilders.
+
+add_project_builder(State=#state_t{project_builders=ProjectBuilders}, Type, Module) ->
+ _ = code:ensure_loaded(Module),
+ case erlang:function_exported(Module, build, 1) of
+ true ->
+ State#state_t{project_builders=[{Type, Module} | ProjectBuilders]};
+ false ->
+ ?WARN("Unable to add project builder for type ~s, required function ~s:build/1 not found.",
+ [Type, Module]),
+ State
+ end.
+
+create_resources(Resources, State) ->
+ lists:foldl(fun(R, StateAcc) ->
+ add_resource(StateAcc, R)
+ end, State, Resources).
+
providers(#state_t{providers=Providers}) ->
Providers.
@@ -391,7 +461,7 @@ add_provider(State=#state_t{providers=Providers, allow_provider_overrides=false}
case {providers:impl(P), providers:namespace(P)} of
{Name, Namespace} ->
?DEBUG("Not adding provider ~p ~p from module ~p because it already exists from module ~p",
- [Namespace, Name, providers:module(P), Module]),
+ [Namespace, Name, Module, providers:module(P)]),
true;
_ ->
false
@@ -415,26 +485,33 @@ create_logic_providers(ProviderModules, State0) ->
end
end, State0, ProviderModules)
catch
- C:T ->
- ?DEBUG("~p: ~p ~p", [C, T, erlang:get_stacktrace()]),
- ?CRASHDUMP("~p: ~p~n~p~n~n~p", [C, T, erlang:get_stacktrace(), State0]),
+ ?WITH_STACKTRACE(C,T,S)
+ ?DEBUG("~p: ~p ~p", [C, T, S]),
+ ?CRASHDUMP("~p: ~p~n~p~n~n~p", [C, T, S, State0]),
throw({error, "Failed creating providers. Run with DEBUG=1 for stacktrace or consult rebar3.crashdump."})
end.
to_list(#state_t{} = State) ->
Fields = record_info(fields, state_t),
Values = tl(tuple_to_list(State)),
- DictSz = tuple_size(dict:new()),
- lists:zip(Fields, [reformat(I, DictSz) || I <- Values]).
-
-reformat({K,V}, DSz) when is_list(V) ->
- {K, [reformat(I, DSz) || I <- V]};
-reformat(V, DSz) when is_tuple(V), element(1,V) =:= dict, tuple_size(V) =:= DSz ->
- [reformat(I, DSz) || I <- dict:to_list(V)];
-reformat({K,V}, DSz) when is_tuple(V), element(1,V) =:= dict, tuple_size(V) =:= DSz ->
- {K, [reformat(I, DSz) || I <- dict:to_list(V)]};
-reformat(Other, _DSz) ->
- Other.
+ lists:zip(Fields, [reformat(I) || I <- Values]).
+
+reformat({K,V}) when is_list(V) ->
+ {K, [reformat(I) || I <- V]};
+reformat({K,V}) ->
+ try
+ {K, [reformat(I) || I <- dict:to_list(V)]}
+ catch
+ error:{badrecord,dict} ->
+ {K,V}
+ end;
+reformat(V) ->
+ try
+ [reformat(I) || I <- dict:to_list(V)]
+ catch
+ error:{badrecord,dict} ->
+ V
+ end.
%% ===================================================================
%% Internal functions
diff --git a/src/rebar_string.erl b/src/rebar_string.erl
new file mode 100644
index 0000000..d03b14e
--- /dev/null
+++ b/src/rebar_string.erl
@@ -0,0 +1,44 @@
+%%% @doc Compatibility module for string functionality
+%%% for pre- and post-unicode support.
+-module(rebar_string).
+-export([join/2, split/2, lexemes/2, trim/3, uppercase/1, lowercase/1, chr/2]).
+
+-ifdef(unicode_str).
+
+%% string:join/2 copy; string:join/2 is getting obsoleted
+%% and replaced by lists:join/2, but lists:join/2 is too new
+%% for version support (only appeared in 19.0) so it cannot be
+%% used. Instead we just adopt join/2 locally and hope it works
+%% for most unicode use cases anyway.
+join([], Sep) when is_list(Sep) ->
+ [];
+join([H|T], Sep) ->
+ H ++ lists:append([Sep ++ X || X <- T]).
+
+split(Str, SearchPattern) -> string:split(Str, SearchPattern).
+lexemes(Str, SepList) -> string:lexemes(Str, SepList).
+trim(Str, Direction, Cluster=[_]) -> string:trim(Str, Direction, Cluster).
+uppercase(Str) -> string:uppercase(Str).
+lowercase(Str) -> string:lowercase(Str).
+
+chr(S, C) when is_integer(C) -> chr(S, C, 1).
+chr([C|_Cs], C, I) -> I;
+chr([_|Cs], C, I) -> chr(Cs, C, I+1);
+chr([], _C, _I) -> 0.
+-else.
+
+join(Strings, Separator) -> string:join(Strings, Separator).
+split(Str, SearchPattern) when is_list(Str) -> string:split(Str, SearchPattern);
+split(Str, SearchPattern) when is_binary(Str) -> binary:split(Str, SearchPattern).
+lexemes(Str, SepList) -> string:tokens(Str, SepList).
+trim(Str, Direction, [Char]) ->
+ Dir = case Direction of
+ both -> both;
+ leading -> left;
+ trailing -> right
+ end,
+ string:strip(Str, Dir, Char).
+uppercase(Str) -> string:to_upper(Str).
+lowercase(Str) -> string:to_lower(Str).
+chr(Str, Char) -> string:chr(Str, Char).
+-endif.
diff --git a/src/rebar_templater.erl b/src/rebar_templater.erl
index 2f33bfc..929ca47 100644
--- a/src/rebar_templater.erl
+++ b/src/rebar_templater.erl
@@ -33,7 +33,7 @@
-include("rebar.hrl").
--define(TEMPLATE_RE, "^[^._].*\\.template\$").
+-define(TEMPLATE_RE, "^(?!\\._).*\\.template\$").
%% ===================================================================
%% Public API
@@ -120,7 +120,8 @@ default_author_and_email() ->
{ok, Name} ->
case rebar_utils:sh("git config --global user.email", [return_on_error]) of
{ok, Email} ->
- {string:strip(Name, both, $\n), string:strip(Email, both, $\n)};
+ {rebar_string:trim(Name, both, "\n"),
+ rebar_string:trim(Email, both, "\n")};
{error, _} ->
%% Use neither if one doesn't exist
{"Anonymous", "anonymous@example.org"}
@@ -129,7 +130,7 @@ default_author_and_email() ->
%% Ok, try mecurial
case rebar_utils:sh("hg showconfig ui.username", [return_on_error]) of
{ok, NameEmail} ->
- case re:run(NameEmail, "^(.*) <(.*)>$", [{capture, [1,2], list}]) of
+ case re:run(NameEmail, "^(.*) <(.*)>$", [{capture, [1,2], list}, unicode]) of
{match, [Name, Email]} ->
{Name, Email};
_ ->
@@ -169,7 +170,7 @@ maybe_warn_about_name(Vars) ->
invalid ->
?WARN("The 'name' variable is often associated with Erlang "
"module names and/or file names. The value submitted "
- "(~s) isn't an unquoted Erlang atom. Templates "
+ "(~ts) isn't an unquoted Erlang atom. Templates "
"generated may contain errors.",
[Name]);
valid ->
@@ -189,7 +190,7 @@ validate_atom(Str) ->
%% Run template instructions one at a time.
execute_template([], _, {Template,_,_}, _, _) ->
- ?DEBUG("Template ~s applied", [Template]),
+ ?DEBUG("Template ~ts applied", [Template]),
ok;
%% We can't execute the description
execute_template([{description, _} | Terms], Files, Template, Vars, Force) ->
@@ -242,7 +243,7 @@ execute_template([{template, From, To} | Terms], Files, {Template, Type, Cwd}, V
execute_template(Terms, Files, {Template, Type, Cwd}, Vars, Force);
%% Unknown
execute_template([Instruction|Terms], Files, Tpl={Template,_,_}, Vars, Force) ->
- ?WARN("Unknown template instruction ~p in template ~s",
+ ?WARN("Unknown template instruction ~p in template ~ts",
[Instruction, Template]),
execute_template(Terms, Files, Tpl, Vars, Force).
@@ -267,8 +268,8 @@ find_templates(State) ->
PluginTemplates = find_plugin_templates(State),
{MainTemplates, Files} =
case rebar_state:escript_path(State) of
- undefined ->
- {find_priv_templates(State), []};
+ undefined -> % running in local install
+ {find_localinstall_templates(State), []};
_ ->
%% Cache the files since we'll potentially need to walk it several times
%% over the course of a run.
@@ -305,13 +306,12 @@ cache_escript_files(State) ->
find_escript_templates(Files) ->
[{escript, Name}
|| {Name, _Bin} <- Files,
- re:run(Name, ?TEMPLATE_RE, [{capture, none}]) == match].
+ re:run(Name, ?TEMPLATE_RE, [{capture, none}, unicode]) == match].
-find_priv_templates(State) ->
- OtherTemplates = rebar_utils:find_files(code:priv_dir(rebar), ?TEMPLATE_RE),
- HomeFiles = rebar_utils:find_files(rebar_dir:template_dir(State),
- ?TEMPLATE_RE, true), % recursive
- [{file, F} || F <- OtherTemplates ++ HomeFiles].
+find_localinstall_templates(_State) ->
+ Templates = rebar_utils:find_files(code:priv_dir(rebar), ?TEMPLATE_RE),
+ %% Pretend we're still running escripts; should work transparently.
+ [{builtin, F} || F <- Templates].
%% Fetch template indexes that sit on disk in the user's HOME
find_disk_templates(State) ->
@@ -326,7 +326,7 @@ find_other_templates(State) ->
undefined ->
[];
TemplateDir ->
- rebar_utils:find_files(TemplateDir, ?TEMPLATE_RE)
+ rebar_utils:find_files(TemplateDir, ?TEMPLATE_RE, true) % recursive
end.
%% Fetch template indexes that sit on disk in plugins
@@ -335,8 +335,19 @@ find_plugin_templates(State) ->
|| App <- rebar_state:all_plugin_deps(State),
Priv <- [rebar_app_info:priv_dir(App)],
Priv =/= undefined,
+ File <- rebar_utils:find_files(Priv, ?TEMPLATE_RE)]
+ ++ %% and add global plugins too
+ [{plugin, File}
+ || PSource <- rebar_state:get(State, {plugins, global}, []),
+ Plugin <- [plugin_provider(PSource)],
+ is_atom(Plugin),
+ Priv <- [code:priv_dir(Plugin)],
+ Priv =/= undefined,
File <- rebar_utils:find_files(Priv, ?TEMPLATE_RE)].
+plugin_provider(P) when is_atom(P) -> P;
+plugin_provider(T) when is_tuple(T) -> element(1, T).
+
%% Take an existing list of templates and tag them by name the way
%% the user would enter it from the CLI
tag_names(List) ->
@@ -354,13 +365,17 @@ prioritize_templates([{Name, Type, File} | Rest], Valid) ->
?DEBUG("Skipping template ~p, due to presence of a built-in "
"template with the same name", [Name]),
prioritize_templates(Rest, Valid);
+ {_, builtin, _} ->
+ ?DEBUG("Skipping template ~p, due to presence of a built-in "
+ "template with the same name", [Name]),
+ prioritize_templates(Rest, Valid);
{_, plugin, _} ->
?DEBUG("Skipping template ~p, due to presence of a plugin "
"template with the same name", [Name]),
prioritize_templates(Rest, Valid);
{_, file, _} ->
?DEBUG("Skipping template ~p, due to presence of a custom "
- "template at ~s", [Name, File]),
+ "template at ~ts", [Name, File]),
prioritize_templates(Rest, Valid)
end.
@@ -369,6 +384,9 @@ prioritize_templates([{Name, Type, File} | Rest], Valid) ->
load_file(Files, escript, Name) ->
{Name, Bin} = lists:keyfind(Name, 1, Files),
Bin;
+load_file(_Files, builtin, Name) ->
+ {ok, Bin} = file:read_file(Name),
+ Bin;
load_file(_Files, plugin, Name) ->
{ok, Bin} = file:read_file(Name),
Bin;
@@ -412,10 +430,10 @@ write_file(Output, Data, Force) ->
ok = filelib:ensure_dir(Output),
case {Force, FileExists} of
{true, true} ->
- ?INFO("Writing ~s (forcibly overwriting)",
+ ?INFO("Writing ~ts (forcibly overwriting)",
[Output]);
_ ->
- ?INFO("Writing ~s", [Output])
+ ?INFO("Writing ~ts", [Output])
end,
case file:write_file(Output, Data) of
ok ->
@@ -432,4 +450,4 @@ write_file(Output, Data, Force) ->
%% Render a binary to a string, using mustache and the specified context
%%
render(Bin, Context) ->
- bbmustache:render(ec_cnv:to_binary(Bin), Context, [{key_type, atom}]).
+ bbmustache:render(rebar_utils:to_binary(Bin), Context, [{key_type, atom}]).
diff --git a/src/rebar_utils.erl b/src/rebar_utils.erl
index aa9e268..11add61 100644
--- a/src/rebar_utils.erl
+++ b/src/rebar_utils.erl
@@ -37,8 +37,9 @@
escript_foldl/3,
find_files/2,
find_files/3,
+ find_files_in_dirs/3,
+ find_source/3,
beam_to_mod/1,
- beam_to_mod/2,
erl_to_mod/1,
beams/1,
find_executable/1,
@@ -55,6 +56,8 @@
get_arch/0,
wordsize/0,
deps_to_binary/1,
+ to_binary/1,
+ to_list/1,
tup_dedup/1,
tup_umerge/2,
tup_sort/1,
@@ -70,12 +73,17 @@
info_useless/2,
list_dir/1,
user_agent/0,
- reread_config/1]).
+ reread_config/1, reread_config/2,
+ get_proxy_auth/0,
+ is_list_of_strings/1,
+ ssl_opts/1]).
+
%% for internal use only
-export([otp_release/0]).
-include("rebar.hrl").
+-include_lib("public_key/include/OTP-PUB-KEY.hrl").
-define(ONE_LEVEL_INDENT, " ").
-define(APP_NAME_INDEX, 2).
@@ -95,6 +103,12 @@ sort_deps(Deps) ->
droplast(L) ->
lists:reverse(tl(lists:reverse(L))).
+%% @doc filtermap takes a function that acts as a predicate, a mapping
+%% function, or both, and returns the elements that were kept or remapped.
+-spec filtermap(F, [In]) -> [Out] when
+ F :: fun((In) -> boolean() | {true, Out}),
+ In :: term(),
+ Out :: term().
filtermap(F, [Hd|Tail]) ->
case F(Hd) of
true ->
@@ -107,18 +121,23 @@ filtermap(F, [Hd|Tail]) ->
filtermap(F, []) when is_function(F, 1) -> [].
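%% Illustrative shell session for the contract above (a sketch; it assumes
%% filtermap/2 is reachable as rebar_utils:filtermap/2): `true' keeps the
%% element as-is, `{true, Val}' replaces it, and `false' drops it.
%%   1> rebar_utils:filtermap(fun(X) when X rem 2 =:= 0 -> {true, X * 10};
%%                               (_) -> false
%%                            end, [1, 2, 3, 4]).
%%   [20,40]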
is_arch(ArchRegex) ->
- case re:run(get_arch(), ArchRegex, [{capture, none}]) of
+ case re:run(get_arch(), ArchRegex, [{capture, none}, unicode]) of
match ->
true;
nomatch ->
false
end.
+%% @doc returns the system architecture, in strings like
+%% `"19.0.4-x86_64-unknown-linux-gnu-64"'.
+-spec get_arch() -> string().
get_arch() ->
Words = wordsize(),
otp_release() ++ "-"
++ erlang:system_info(system_architecture) ++ "-" ++ Words.
+%% @doc returns the size of a word on the system, as a string
+-spec wordsize() -> string().
wordsize() ->
try erlang:system_info({wordsize, external}) of
Val ->
@@ -129,7 +148,7 @@ wordsize() ->
end.
sh_send(Command0, String, Options0) ->
- ?INFO("sh_send info:\n\tcwd: ~p\n\tcmd: ~s < ~s\n",
+ ?INFO("sh_send info:\n\tcwd: ~p\n\tcmd: ~ts < ~ts\n",
[rebar_dir:get_cwd(), Command0, String]),
?DEBUG("\topts: ~p\n", [Options0]),
@@ -137,7 +156,7 @@ sh_send(Command0, String, Options0) ->
Options = [expand_sh_flag(V)
|| V <- proplists:compact(Options0 ++ DefaultOptions)],
- Command = lists:flatten(patch_on_windows(Command0, proplists:get_value(env, Options, []))),
+ Command = lists:flatten(patch_on_windows(Command0, proplists:get_value(env, Options0, []))),
PortSettings = proplists:get_all_values(port_settings, Options) ++
[exit_status, {line, 16384}, use_stdio, stderr_to_stdout, hide],
Port = open_port({spawn, Command}, PortSettings),
@@ -158,7 +177,7 @@ sh_send(Command0, String, Options0) ->
%% Val = string() | false
%%
sh(Command0, Options0) ->
- ?DEBUG("sh info:\n\tcwd: ~p\n\tcmd: ~s\n", [rebar_dir:get_cwd(), Command0]),
+ ?DEBUG("sh info:\n\tcwd: ~p\n\tcmd: ~ts\n", [rebar_dir:get_cwd(), Command0]),
?DEBUG("\topts: ~p\n", [Options0]),
DefaultOptions = [{use_stdout, false}, debug_and_abort_on_error],
@@ -168,10 +187,10 @@ sh(Command0, Options0) ->
ErrorHandler = proplists:get_value(error_handler, Options),
OutputHandler = proplists:get_value(output_handler, Options),
- Command = lists:flatten(patch_on_windows(Command0, proplists:get_value(env, Options, []))),
+ Command = lists:flatten(patch_on_windows(Command0, proplists:get_value(env, Options0, []))),
PortSettings = proplists:get_all_values(port_settings, Options) ++
[exit_status, {line, 16384}, use_stdio, stderr_to_stdout, hide, eof],
- ?DEBUG("Port Cmd: ~s\nPort Opts: ~p\n", [Command, PortSettings]),
+ ?DEBUG("Port Cmd: ~ts\nPort Opts: ~p\n", [Command, PortSettings]),
Port = open_port({spawn, Command}, PortSettings),
try
@@ -188,6 +207,12 @@ sh(Command0, Options0) ->
find_files(Dir, Regex) ->
find_files(Dir, Regex, true).
+find_files_in_dirs([], _Regex, _Recursive) ->
+ [];
+find_files_in_dirs([Dir | T], Regex, Recursive) ->
+ find_files(Dir, Regex, Recursive) ++ find_files_in_dirs(T, Regex, Recursive).
+
+
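%% Minimal sketch of the helper above (directory names and pattern are
%% illustrative): it simply concatenates a recursive find_files/3 per directory.
%%   rebar_utils:find_files_in_dirs(["src", "include"], "\\.hrl$", true).
%%   %% -> every .hrl file found under src/ and include/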
find_files(Dir, Regex, Recursive) ->
filelib:fold_files(Dir, Regex, Recursive,
fun(F, Acc) -> [F | Acc] end, []).
@@ -219,7 +244,7 @@ deprecated(Old, New, When) ->
<<"WARNING: deprecated ~p option used~n"
"Option '~p' has been deprecated~n"
"in favor of '~p'.~n"
- "'~p' will be removed ~s.~n">>,
+ "'~p' will be removed ~ts.~n">>,
[Old, Old, New, Old, When]).
%% for use by `do` task
@@ -231,11 +256,17 @@ args_to_tasks(Args) -> new_task(Args, []).
deps_to_binary([]) ->
[];
deps_to_binary([{Name, _, Source} | T]) ->
- [{ec_cnv:to_binary(Name), Source} | deps_to_binary(T)];
+ [{to_binary(Name), Source} | deps_to_binary(T)];
deps_to_binary([{Name, Source} | T]) ->
- [{ec_cnv:to_binary(Name), Source} | deps_to_binary(T)];
+ [{to_binary(Name), Source} | deps_to_binary(T)];
deps_to_binary([Name | T]) ->
- [ec_cnv:to_binary(Name) | deps_to_binary(T)].
+ [to_binary(Name) | deps_to_binary(T)].
+
+to_binary(A) when is_atom(A) -> atom_to_binary(A, unicode);
+to_binary(Str) -> unicode:characters_to_binary(Str).
+
+to_list(A) when is_atom(A) -> atom_to_list(A);
+to_list(Str) -> unicode:characters_to_list(Str).
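%% Quick sketch of the two converters above, which replace the ec_cnv calls;
%% atoms and unicode strings are both handled:
%%   1> rebar_utils:to_binary(relx).
%%   <<"relx">>
%%   2> rebar_utils:to_list(<<"rebar3">>).
%%   "rebar3"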
tup_dedup(List) ->
tup_dedup_(tup_sort(List)).
@@ -370,7 +401,7 @@ compare({Priority, A}, {Secondary, B}) when not is_tuple(A), is_tuple(B) ->
%% Implements wc -l functionality used to determine the patch count from git output
line_count(PatchLines) ->
- Tokenized = string:tokens(PatchLines, "\n"),
+ Tokenized = rebar_string:lexemes(PatchLines, "\n"),
{ok, length(Tokenized)}.
check_min_otp_version(undefined) ->
@@ -383,10 +414,10 @@ check_min_otp_version(MinOtpVersion) ->
case ParsedVsn >= ParsedMin of
true ->
- ?DEBUG("~s satisfies the requirement for minimum OTP version ~s",
+ ?DEBUG("~ts satisfies the requirement for minimum OTP version ~ts",
[OtpRelease, MinOtpVersion]);
false ->
- ?ABORT("OTP release ~s or later is required. Version in use: ~s",
+ ?ABORT("OTP release ~ts or later is required. Version in use: ~ts",
[MinOtpVersion, OtpRelease])
end.
@@ -402,28 +433,103 @@ check_blacklisted_otp_versions(BlacklistedRegexes) ->
abort_if_blacklisted(BlacklistedRegex, OtpRelease) ->
case re:run(OtpRelease, BlacklistedRegex, [{capture, none}]) of
match ->
- ?ABORT("OTP release ~s matches blacklisted version ~s",
+ ?ABORT("OTP release ~ts matches blacklisted version ~ts",
[OtpRelease, BlacklistedRegex]);
nomatch ->
- ?DEBUG("~s does not match blacklisted OTP version ~s",
+ ?DEBUG("~ts does not match blacklisted OTP version ~ts",
[OtpRelease, BlacklistedRegex])
end.
user_agent() ->
{ok, Vsn} = application:get_key(rebar, vsn),
- ?FMT("Rebar/~s (OTP/~s)", [Vsn, otp_release()]).
+ ?FMT("Rebar/~ts (OTP/~ts)", [Vsn, otp_release()]).
reread_config(ConfigList) ->
+ %% Default to not re-configuring the logger for now;
+ %% this can leak logs in CT redirection when setting up hooks
+ %% for example. If we want to turn it on by default, we may
+ %% want to disable it in CT at the same time or figure out a
+ %% way to silence it.
+ %% The same pattern may apply to other tasks, so let's enable
+ %% case-by-case.
+ reread_config(ConfigList, []).
+
+reread_config(ConfigList, Opts) ->
+ UpdateLoggerConfig = erlang:function_exported(logger, module_info, 0) andalso
+ proplists:get_value(update_logger, Opts, false),
+ %% NB: we attempt to mimic -config here, which survives app reload,
+ %% hence {persistent, true}.
+ SetEnv = case version_tuple(?MODULE:otp_release()) of
+ {X, _, _} when X =< 17 ->
+ fun application:set_env/3;
+ _ ->
+ fun (App, Key, Val) -> application:set_env(App, Key, Val, [{persistent, true}]) end
+ end,
try
- [application:set_env(Application, Key, Val)
+ Res =
+ [SetEnv(Application, Key, Val)
|| Config <- ConfigList,
{Application, Items} <- Config,
- {Key, Val} <- Items]
+ {Key, Val} <- Items],
+ case UpdateLoggerConfig of
+ true -> reread_logger_config();
+ false -> ok
+ end,
+ Res
catch _:_ ->
?ERROR("The configuration file submitted could not be read "
"and will be ignored.", [])
end.
+%% @private since the kernel app is already booted, re-reading its config
+%% requires doing some magic to dynamically patch running handlers to
+%% deal with the current value.
+reread_logger_config() ->
+ KernelCfg = application:get_all_env(kernel),
+ LogCfg = proplists:get_value(logger, KernelCfg),
+ case LogCfg of
+ undefined ->
+ ok;
+ _ ->
+ %% Extract and apply settings related to primary configuration
+ %% -- primary config is used for settings shared across handlers
+ LogLvlPrimary = proplists:get_value(logger_info, KernelCfg, all),
+ {FilterDefault, Filters} =
+ case lists:keyfind(filters, 1, KernelCfg) of
+ false -> {log, []};
+ {filters, FoundDef, FoundFilter} -> {FoundDef, FoundFilter}
+ end,
+ Primary = #{level => LogLvlPrimary,
+ filter_default => FilterDefault,
+ filters => Filters},
+ %% Load the correct handlers based on their individual config.
+ [case Id of
+ default -> logger:update_handler_config(Id, Cfg);
+ _ -> logger:add_handler(Id, Mod, Cfg)
+ end || {handler, Id, Mod, Cfg} <- LogCfg],
+ logger:set_primary_config(Primary),
+ ok
+ end.
+
+
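%% Opt-in sketch for the behaviour described above (config values are
%% hypothetical): logger handlers are only touched when the caller asks.
%%   Configs = [[{kernel, [{logger_level, info}]}]],
%%   rebar_utils:reread_config(Configs),                           %% app env only
%%   rebar_utils:reread_config(Configs, [{update_logger, true}]).  %% env + logger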
+%% @doc Given env. variable `FOO' we want to expand all references to
+%% it in `InStr'. References can have two forms: `$FOO' and `${FOO}'.
+%% The end of form `$FOO' is delimited with whitespace or EOL.
+-spec expand_env_variable(string(), string(), term()) -> string().
+expand_env_variable(InStr, VarName, RawVarValue) ->
+ case rebar_string:chr(InStr, $$) of
+ 0 ->
+ %% No variables to expand
+ InStr;
+ _ ->
+ ReOpts = [global, unicode, {return, list}],
+ VarValue = re:replace(RawVarValue, "\\\\", "\\\\\\\\", ReOpts),
+ %% Use a regex to match/replace:
+ %% Given variable "FOO": match $FOO\s | $FOOeol | ${FOO}
+ RegEx = io_lib:format("\\\$(~ts(\\W|$)|{~ts})", [VarName, VarName]),
+ re:replace(InStr, RegEx, [VarValue, "\\2"], ReOpts)
+ end.
+
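%% Worked example of the expansion rules above (values are illustrative);
%% both the `$FOO' and `${FOO}' forms are rewritten, the rest is untouched:
%%   1> rebar_utils:expand_env_variable("cc -o $BIN ${BIN}.o", "BIN", "demo").
%%   "cc -o demo demo.o"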
%% ====================================================================
%% Internal functions
%% ====================================================================
@@ -436,7 +542,7 @@ version_tuple(OtpRelease) ->
{match, [_Full, Maj]} ->
{list_to_integer(Maj), 0, 0};
nomatch ->
- ?ABORT("Minimum OTP release unable to be parsed: ~s", [OtpRelease])
+ ?ABORT("Minimum OTP release unable to be parsed: ~ts", [OtpRelease])
end.
otp_release() ->
@@ -459,11 +565,10 @@ otp_release1(Rel) ->
%% It's fine to rely on the binary module here because we can
%% be sure that it's available when the otp_release string does
%% not begin with $R.
- Size = byte_size(Vsn),
%% The shortest vsn string consists of at least two digits
%% followed by "\n". Therefore, it's safe to assume Size >= 3.
- case binary:part(Vsn, {Size, -3}) of
- <<"**\n">> ->
+ case binary:match(Vsn, <<"**">>) of
+ {Pos, _} ->
%% The OTP documentation mentions that a system patched
%% using the otp_patch_apply tool available to licensed
%% customers will leave a '**' suffix in the version as a
@@ -472,9 +577,9 @@ otp_release1(Rel) ->
%% drop the suffix, given for all intents and purposes, we
%% cannot obtain relevant information from it as far as
%% tooling is concerned.
- binary:bin_to_list(Vsn, {0, Size - 3});
- _ ->
- binary:bin_to_list(Vsn, {0, Size - 1})
+ binary:bin_to_list(Vsn, {0, Pos});
+ nomatch ->
+ rebar_string:trim(binary:bin_to_list(Vsn), trailing, "\n")
end
end.
@@ -489,30 +594,11 @@ patch_on_windows(Cmd, Env) ->
end, Cmd, Env),
%% Remove left-over vars
re:replace(Cmd1, "\\\$\\w+|\\\${\\w+}", "",
- [global, {return, list}]);
+ [global, {return, list}, unicode]);
_ ->
Cmd
end.
-%%
-%% Given env. variable FOO we want to expand all references to
-%% it in InStr. References can have two forms: $FOO and ${FOO}
-%% The end of form $FOO is delimited with whitespace or eol
-%%
-expand_env_variable(InStr, VarName, RawVarValue) ->
- case string:chr(InStr, $$) of
- 0 ->
- %% No variables to expand
- InStr;
- _ ->
- ReOpts = [global, unicode, {return, list}],
- VarValue = re:replace(RawVarValue, "\\\\", "\\\\\\\\", ReOpts),
- %% Use a regex to match/replace:
- %% Given variable "FOO": match $FOO\s | $FOOeol | ${FOO}
- RegEx = io_lib:format("\\\$(~s(\\W|$)|{~s})", [VarName, VarName]),
- re:replace(InStr, RegEx, [VarValue, "\\2"], ReOpts)
- end.
-
expand_sh_flag(return_on_error) ->
{error_handler,
fun(_Command, Err) ->
@@ -534,7 +620,7 @@ expand_sh_flag(use_stdout) ->
{output_handler,
fun(Line, Acc) ->
%% Line already has a newline so don't use ?CONSOLE which adds one
- io:format("~s", [Line]),
+ io:format("~ts", [Line]),
[Line | Acc]
end};
expand_sh_flag({use_stdout, false}) ->
@@ -557,23 +643,23 @@ log_msg_and_abort(Message) ->
-spec debug_log_msg_and_abort(string()) -> err_handler().
debug_log_msg_and_abort(Message) ->
fun(Command, {Rc, Output}) ->
- ?DEBUG("sh(~s)~n"
+ ?DEBUG("sh(~ts)~n"
"failed with return code ~w and the following output:~n"
- "~s", [Command, Rc, Output]),
+ "~ts", [Command, Rc, Output]),
?ABORT(Message, [])
end.
-spec log_and_abort(string(), {integer(), string()}) -> no_return().
log_and_abort(Command, {Rc, Output}) ->
- ?ABORT("sh(~s)~n"
+ ?ABORT("sh(~ts)~n"
"failed with return code ~w and the following output:~n"
- "~s", [Command, Rc, Output]).
+ "~ts", [Command, Rc, Output]).
-spec debug_and_abort(string(), {integer(), string()}) -> no_return().
debug_and_abort(Command, {Rc, Output}) ->
- ?DEBUG("sh(~s)~n"
+ ?DEBUG("sh(~ts)~n"
"failed with return code ~w and the following output:~n"
- "~s", [Command, Rc, Output]),
+ "~ts", [Command, Rc, Output]),
throw(rebar_abort).
sh_loop(Port, Fun, Acc) ->
@@ -592,10 +678,6 @@ sh_loop(Port, Fun, Acc) ->
end
end.
-beam_to_mod(Dir, Filename) ->
- [Dir | Rest] = filename:split(Filename),
- list_to_atom(filename:basename(string:join(Rest, "."), ".beam")).
-
beam_to_mod(Filename) ->
list_to_atom(filename:basename(Filename, ".beam")).
@@ -633,36 +715,40 @@ escript_foldl(Fun, Acc, File) ->
Error
end.
-vcs_vsn(Vcs, Dir, Resources) ->
- case vcs_vsn_cmd(Vcs, Dir, Resources) of
+%% TODO: this is just for rebar3_hex and maybe other plugins
+%% but eventually it should be dropped
+vcs_vsn(OriginalVsn, Dir, Resources) when is_list(Dir) ,
+ is_list(Resources) ->
+ ?WARN("Using deprecated rebar_utils:vcs_vsn/3. Please upgrade your plugins.", []),
+ FakeState = rebar_state:new(),
+ {ok, AppInfo} = rebar_app_info:new(fake, OriginalVsn, Dir),
+ vcs_vsn(AppInfo, OriginalVsn,
+ rebar_state:set_resources(FakeState, Resources));
+vcs_vsn(AppInfo, Vcs, State) ->
+ case vcs_vsn_cmd(AppInfo, Vcs, State) of
{plain, VsnString} ->
VsnString;
{cmd, CmdString} ->
- vcs_vsn_invoke(CmdString, Dir);
+ vcs_vsn_invoke(CmdString, rebar_app_info:dir(AppInfo));
unknown ->
?ABORT("vcs_vsn: Unknown vsn format: ~p", [Vcs]);
{error, Reason} ->
- ?ABORT("vcs_vsn: ~s", [Reason])
+ ?ABORT("vcs_vsn: ~ts", [Reason])
end.
%% Temp work around for repos like relx that use "semver"
-vcs_vsn_cmd(Vsn, _, _) when is_binary(Vsn) ->
+vcs_vsn_cmd(_, Vsn, _) when is_binary(Vsn) ->
{plain, Vsn};
-vcs_vsn_cmd(VCS, Dir, Resources) when VCS =:= semver ; VCS =:= "semver" ->
- vcs_vsn_cmd(git, Dir, Resources);
-vcs_vsn_cmd({cmd, _Cmd}=Custom, _, _) ->
+vcs_vsn_cmd(AppInfo, VCS, State) when VCS =:= semver ; VCS =:= "semver" ->
+ vcs_vsn_cmd(AppInfo, git, State);
+vcs_vsn_cmd(_AppInfo, {cmd, _Cmd}=Custom, _) ->
Custom;
-vcs_vsn_cmd(VCS, Dir, Resources) when is_atom(VCS) ->
- case find_resource_module(VCS, Resources) of
- {ok, Module} ->
- Module:make_vsn(Dir);
- {error, _} ->
- unknown
- end;
-vcs_vsn_cmd(VCS, Dir, Resources) when is_list(VCS) ->
+vcs_vsn_cmd(AppInfo, VCS, State) when is_atom(VCS) ->
+ rebar_resource_v2:make_vsn(AppInfo, VCS, State);
+vcs_vsn_cmd(AppInfo, VCS, State) when is_list(VCS) ->
try list_to_existing_atom(VCS) of
AVCS ->
- case vcs_vsn_cmd(AVCS, Dir, Resources) of
+ case vcs_vsn_cmd(AppInfo, AVCS, State) of
unknown -> {plain, VCS};
Other -> Other
end
@@ -675,20 +761,7 @@ vcs_vsn_cmd(_, _, _) ->
vcs_vsn_invoke(Cmd, Dir) ->
{ok, VsnString} = rebar_utils:sh(Cmd, [{cd, Dir}, {use_stdout, false}]),
- string:strip(VsnString, right, $\n).
-
-find_resource_module(Type, Resources) ->
- case lists:keyfind(Type, 1, Resources) of
- false ->
- case code:which(Type) of
- non_existing ->
- {error, unknown};
- _ ->
- {ok, Type}
- end;
- {Type, Module} ->
- {ok, Module}
- end.
+ rebar_string:trim(VsnString, trailing, "\n").
%% @doc indent to the level specified
-spec indent(non_neg_integer()) -> iolist().
@@ -735,11 +808,20 @@ remove_from_code_path(Paths) ->
ok;
{ok, Modules} ->
application:unload(App),
- [begin code:purge(M), code:delete(M) end || M <- Modules]
+ [case erlang:check_process_code(self(), M) of
+ false ->
+ code:purge(M), code:delete(M);
+ _ ->
+ ?DEBUG("~p can't purge ~p safely, doing a soft purge", [self(), M]),
+ code:soft_purge(M) andalso code:delete(M)
+ end || M <- Modules]
end,
code:del_path(Path)
- end, Paths).
+ end, lists:usort(Paths)).
+%% @doc Revert to only having the beams necessary for running rebar3 and
+%% plugins in the path
+-spec cleanup_code_path([string()]) -> true | {error, term()}.
cleanup_code_path(OrigPath) ->
CurrentPath = code:get_path(),
AddedPaths = CurrentPath -- OrigPath,
@@ -756,7 +838,7 @@ cleanup_code_path(OrigPath) ->
new_task([], Acc) -> lists:reverse(Acc);
new_task([TaskList|Rest], Acc) ->
- case re:split(TaskList, ",", [{return, list}, {parts, 2}]) of
+ case re:split(TaskList, ",", [{return, list}, {parts, 2}, unicode]) of
%% `do` consumes all remaining args
["do" = Task] ->
lists:reverse([{Task, Rest}|Acc]);
@@ -783,7 +865,7 @@ arg_or_flag(["-" ++ _ = Flag|Rest], [{Task, Args}|Acc]) ->
end;
%% an argument or a sequence of arguments
arg_or_flag([ArgList|Rest], [{Task, Args}|Acc]) ->
- case re:split(ArgList, ",", [{return, list}, {parts, 2}]) of
+ case re:split(ArgList, ",", [{return, list}, {parts, 2}, unicode]) of
%% single arg terminated by a comma
[Arg, ""] -> new_task(Rest, [{Task,
lists:reverse([Arg|Args])}|Acc]);
@@ -817,8 +899,17 @@ set_httpc_options(_, []) ->
ok;
set_httpc_options(Scheme, Proxy) ->
- {ok, {_, _, Host, Port, _, _}} = http_uri:parse(Proxy),
- httpc:set_options([{Scheme, {{Host, Port}, []}}], rebar).
+ URI = normalise_proxy(Scheme, Proxy),
+ {ok, {_, UserInfo, Host, Port, _, _}} = http_uri:parse(URI),
+ httpc:set_options([{Scheme, {{Host, Port}, []}}], rebar),
+ set_proxy_auth(UserInfo).
+
+normalise_proxy(Scheme, URI) ->
+ case re:run(URI, "://", [unicode]) of
+ nomatch when Scheme =:= https_proxy -> "https://" ++ URI;
+ nomatch when Scheme =:= proxy -> "http://" ++ URI;
+ _ -> URI
+ end.
url_append_path(Url, ExtraPath) ->
case http_uri:parse(Url) of
@@ -833,15 +924,18 @@ url_append_path(Url, ExtraPath) ->
escape_chars(Str) when is_atom(Str) ->
escape_chars(atom_to_list(Str));
escape_chars(Str) ->
- re:replace(Str, "([ ()?`!$&;])", "\\\\&", [global, {return, list}]).
+ re:replace(Str, "([ ()?`!$&;\"\'])", "\\\\&",
+ [global, {return, list}, unicode]).
%% "escape inside these"
escape_double_quotes(Str) ->
- re:replace(Str, "([\"\\\\`!$&*;])", "\\\\&", [global, {return, list}]).
+ re:replace(Str, "([\"\\\\`!$&*;])", "\\\\&",
+ [global, {return, list}, unicode]).
%% "escape inside these" but allow *
escape_double_quotes_weak(Str) ->
- re:replace(Str, "([\"\\\\`!$&;])", "\\\\&", [global, {return, list}]).
+ re:replace(Str, "([\"\\\\`!$&;])", "\\\\&",
+ [global, {return, list}, unicode]).
info_useless(Old, New) ->
[?INFO("App ~ts is no longer needed and can be deleted.", [Name])
@@ -857,3 +951,212 @@ list_dir(Dir) ->
true -> file:list_dir_all(Dir);
false -> file:list_dir(Dir)
end.
+
+set_proxy_auth([]) ->
+ ok;
+set_proxy_auth(UserInfo) ->
+ [Username, Password] = re:split(UserInfo, ":",
+ [{return, list}, {parts,2}, unicode]),
+ %% password may contain url encoded characters, need to decode them first
+ application:set_env(rebar, proxy_auth, [{proxy_auth, {Username, http_uri:decode(Password)}}]).
+
+get_proxy_auth() ->
+ case application:get_env(rebar, proxy_auth) of
+ undefined -> [];
+ {ok, ProxyAuth} -> ProxyAuth
+ end.
+
+-spec rebar_utils:is_list_of_strings(term()) -> boolean().
+is_list_of_strings(List) when not is_list(hd(List)) ->
+ false;
+is_list_of_strings(List) when is_list(hd(List)) ->
+ true;
+is_list_of_strings(List) when is_list(List) ->
+ true.
+
+%%------------------------------------------------------------------------------
+%% @doc
+%% Return the SSL options adequate for the project based on
+%% its configuration, including for validation of certs.
+%% @end
+%%------------------------------------------------------------------------------
+-spec ssl_opts(Url) -> Res when
+ Url :: string(),
+ Res :: proplists:proplist().
+ssl_opts(Url) ->
+ case get_ssl_config() of
+ ssl_verify_enabled ->
+ ssl_opts(ssl_verify_enabled, Url);
+ ssl_verify_disabled ->
+ [{verify, verify_none}]
+ end.
+
+%%------------------------------------------------------------------------------
+%% @doc
+%% Return the SSL options adequate for the project based on
+%% its configuration, including for validation of certs.
+%% @end
+%%------------------------------------------------------------------------------
+-spec ssl_opts(Enabled, Url) -> Res when
+ Enabled :: atom(),
+ Url :: string(),
+ Res :: proplists:proplist().
+ssl_opts(ssl_verify_enabled, Url) ->
+ case check_ssl_version() of
+ true ->
+ {ok, {_, _, Hostname, _, _, _}} =
+ http_uri:parse(rebar_utils:to_list(Url)),
+ VerifyFun = {fun ssl_verify_hostname:verify_fun/3,
+ [{check_hostname, Hostname}]},
+ CACerts = certifi:cacerts(),
+ [{verify, verify_peer}, {depth, 2}, {cacerts, CACerts},
+ {partial_chain, fun partial_chain/1}, {verify_fun, VerifyFun}];
+ false ->
+ ?WARN("Insecure HTTPS request (peer verification disabled), "
+ "please update to OTP 17.4 or later", []),
+ [{verify, verify_none}]
+ end.
+
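%% Hedged usage sketch (URL and request are illustrative only): the proplist
%% returned by ssl_opts/1 is intended for the `ssl' option of an httpc request.
%%   SslOpts = rebar_utils:ssl_opts("https://repo.hex.pm"),
%%   httpc:request(get, {"https://repo.hex.pm/packages", []},
%%                 [{ssl, SslOpts}], []).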
+-spec partial_chain(Certs) -> Res when
+ Certs :: list(any()),
+ Res :: unknown_ca | {trusted_ca, any()}.
+partial_chain(Certs) ->
+ Certs1 = [{Cert, public_key:pkix_decode_cert(Cert, otp)} || Cert <- Certs],
+ CACerts = certifi:cacerts(),
+ CACerts1 = [public_key:pkix_decode_cert(Cert, otp) || Cert <- CACerts],
+ case ec_lists:find(fun({_, Cert}) ->
+ check_cert(CACerts1, Cert)
+ end, Certs1) of
+ {ok, Trusted} ->
+ {trusted_ca, element(1, Trusted)};
+ _ ->
+ unknown_ca
+ end.
+
+-spec extract_public_key_info(Cert) -> Res when
+ Cert :: #'OTPCertificate'{tbsCertificate::#'OTPTBSCertificate'{}},
+ Res :: any().
+extract_public_key_info(Cert) ->
+ ((Cert#'OTPCertificate'.tbsCertificate)#'OTPTBSCertificate'.subjectPublicKeyInfo).
+
+-spec check_cert(CACerts, Cert) -> Res when
+ CACerts :: list(any()),
+ Cert :: any(),
+ Res :: boolean().
+check_cert(CACerts, Cert) ->
+ lists:any(fun(CACert) ->
+ extract_public_key_info(CACert) == extract_public_key_info(Cert)
+ end, CACerts).
+
+-spec check_ssl_version() ->
+ boolean().
+check_ssl_version() ->
+ case application:get_key(ssl, vsn) of
+ {ok, Vsn} ->
+ parse_vsn(Vsn) >= {5, 3, 6};
+ _ ->
+ false
+ end.
+
+-spec get_ssl_config() ->
+ ssl_verify_disabled | ssl_verify_enabled.
+get_ssl_config() ->
+ GlobalConfigFile = rebar_dir:global_config(),
+ Config = rebar_config:consult_file(GlobalConfigFile),
+ case proplists:get_value(ssl_verify, Config, []) of
+ false ->
+ ssl_verify_disabled;
+ _ ->
+ ssl_verify_enabled
+ end.
+
+-spec parse_vsn(Vsn) -> Res when
+ Vsn :: string(),
+ Res :: {integer(), integer(), integer()}.
+parse_vsn(Vsn) ->
+ version_pad(rebar_string:lexemes(Vsn, ".-")).
+
+-spec version_pad(list(nonempty_string())) -> Res when
+ Res :: {integer(), integer(), integer()}.
+version_pad([Major]) ->
+ {list_to_integer(Major), 0, 0};
+version_pad([Major, Minor]) ->
+ {list_to_integer(Major), list_to_integer(Minor), 0};
+version_pad([Major, Minor, Patch]) ->
+ {list_to_integer(Major), list_to_integer(Minor), list_to_integer(Patch)};
+version_pad([Major, Minor, Patch | _]) ->
+ {list_to_integer(Major), list_to_integer(Minor), list_to_integer(Patch)}.
+
+
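%% How the private version helpers above normalize strings (illustrative):
%%   parse_vsn("9.2")     %% -> {9,2,0}
%%   parse_vsn("5.3.6")   %% -> {5,3,6}
%%   parse_vsn("5.3.6.1") %% -> {5,3,6}  (extra segments are ignored)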
+-ifdef(filelib_find_source).
+find_source(Filename, Dir, Rules) ->
+ filelib:find_source(Filename, Dir, Rules).
+-else.
+%% Looks for a file relative to a given directory
+
+-type find_file_rule() :: {ObjDirSuffix::string(), SrcDirSuffix::string()}.
+
+%% Looks for a source file relative to the object file name and directory
+
+-type find_source_rule() :: {ObjExtension::string(), SrcExtension::string(),
+ [find_file_rule()]}.
+
+keep_suffix_search_rules(Rules) ->
+ [T || {_,_,_}=T <- Rules].
+
+-spec find_source(file:filename(), file:filename(), [find_source_rule()]) ->
+ {ok, file:filename()} | {error, not_found}.
+find_source(Filename, Dir, Rules) ->
+ try_suffix_rules(keep_suffix_search_rules(Rules), Filename, Dir).
+
+try_suffix_rules(Rules, Filename, Dir) ->
+ Ext = filename:extension(Filename),
+ try_suffix_rules(Rules, filename:rootname(Filename, Ext), Dir, Ext).
+
+try_suffix_rules([{Ext,Src,Rules}|Rest], Root, Dir, Ext)
+ when is_list(Src), is_list(Rules) ->
+ case try_dir_rules(add_local_search(Rules), Root ++ Src, Dir) of
+ {ok, File} -> {ok, File};
+ _Other ->
+ try_suffix_rules(Rest, Root, Dir, Ext)
+ end;
+try_suffix_rules([_|Rest], Root, Dir, Ext) ->
+ try_suffix_rules(Rest, Root, Dir, Ext);
+try_suffix_rules([], _Root, _Dir, _Ext) ->
+ {error, not_found}.
+
+%% ensuring we check the directory of the object file before any other directory
+add_local_search(Rules) ->
+ Local = {"",""},
+ [Local] ++ lists:filter(fun (X) -> X =/= Local end, Rules).
+
+try_dir_rules([{From, To}|Rest], Filename, Dir)
+ when is_list(From), is_list(To) ->
+ case try_dir_rule(Dir, Filename, From, To) of
+ {ok, File} -> {ok, File};
+ error -> try_dir_rules(Rest, Filename, Dir)
+ end;
+try_dir_rules([], _Filename, _Dir) ->
+ {error, not_found}.
+
+try_dir_rule(Dir, Filename, From, To) ->
+ case lists:suffix(From, Dir) of
+ true ->
+ NewDir = lists:sublist(Dir, 1, length(Dir)-length(From))++To,
+ Src = filename:join(NewDir, Filename),
+ case filelib:is_regular(Src) of
+ true -> {ok, Src};
+ false -> find_regular_file(filelib:wildcard(Src))
+ end;
+ false ->
+ error
+ end.
+
+find_regular_file([]) ->
+ error;
+find_regular_file([File|Files]) ->
+ case filelib:is_regular(File) of
+ true -> {ok, File};
+ false -> find_regular_file(Files)
+ end.
+-endif.
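%% Illustrative call against the fallback above (paths and rule are
%% hypothetical); a beam under ebin/ resolves to its source under src/:
%%   rebar_utils:find_source("foo.beam", "/proj/ebin",
%%                           [{".beam", ".erl", [{"ebin", "src"}]}]).
%%   %% -> {ok, "/proj/src/foo.erl"} when that file exists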