diff options
Diffstat (limited to 'src')
29 files changed, 1586 insertions, 726 deletions
diff --git a/src/rebar.erl b/src/rebar.erl index 2d9fe04..a43da5f 100644 --- a/src/rebar.erl +++ b/src/rebar.erl @@ -87,7 +87,9 @@ run(["help"|RawCmds]) when RawCmds =/= [] -> run(["help"]) -> help(); run(["info"|_]) -> - help(); + %% Catch calls to 'rebar info' to avoid treating plugins' info/2 functions + %% as commands. + ?CONSOLE("Command 'info' not understood or not applicable~n", []); run(["version"]) -> ok = load_rebar_app(), %% Display vsn and build time info @@ -178,22 +180,45 @@ run_aux(BaseConfig, Commands) -> %% help() -> OptSpecList = option_spec_list(), - getopt:usage(OptSpecList, "rebar", - "[var=value,...] <command,...>", - [{"var=value", "rebar global variables (e.g. force=1)"}, - {"command", "Command to run (e.g. compile)"}]), + rebar_getopt:usage(OptSpecList, "rebar", + "[var=value,...] <command,...>", + [{"var=value", "rebar global variables (e.g. force=1)"}, + {"command", "Command to run (e.g. compile)"}]), + + ?CONSOLE("To see a list of built-in commands, execute rebar -c.~n~n", []), ?CONSOLE( "Type 'rebar help <CMD1> <CMD2>' for help on specific commands." "~n~n", []), ?CONSOLE( + "rebar allows you to abbreviate the command to run:~n" + "$ rebar co # same as rebar compile~n" + "$ rebar eu # same as rebar eunit~n" + "$ rebar g-d # same as rebar get-deps~n" + "$ rebar x eu # same as rebar xref eunit~n" + "$ rebar l-d # same as rebar list-deps~n" + "$ rebar l-d l-t # same as rebar list-deps list-templates~n" + "$ rebar list-d l-te # same as rebar list-deps list-templates~n" + "~n", []), + ?CONSOLE( "Core rebar.config options:~n" " ~p~n" " ~p~n" " ~p~n" " ~p~n" " ~p~n" - " ~p~n", + " ~p~n" + " ~p~n" + " ~p~n" + " ~p~n" + " ~p~n" + "Core command line options:~n" + " apps=app1,app2 (specify apps to process)~n" + " skip_apps=app1,app2 (specify apps to skip)~n", [ + {recursive_cmds, []}, + {require_erts_vsn, ".*"}, + {require_otp_vsn, ".*"}, + {require_min_otp_vsn, ".*"}, {lib_dirs, []}, {sub_dirs, ["dir1", "dir2"]}, {plugins, [plugin1, plugin2]}, @@ -215,7 +240,7 @@ help() -> parse_args(RawArgs) -> %% Parse getopt options OptSpecList = option_spec_list(), - case getopt:parse(OptSpecList, RawArgs) of + case rebar_getopt:parse(OptSpecList, RawArgs) of {ok, Args} -> Args; {error, {Reason, Data}} -> @@ -240,31 +265,49 @@ save_options(Config, {Options, NonOptArgs}) -> Config3 = rebar_config:set_xconf(Config2, keep_going, proplists:get_bool(keep_going, Options)), + %% Setup flag to enable recursive application of commands + Config4 = rebar_config:set_xconf(Config3, recursive, + proplists:get_bool(recursive, Options)), + %% Set global variables based on getopt options - Config4 = set_global_flag(Config3, Options, force), - Config5 = case proplists:get_value(jobs, Options, ?DEFAULT_JOBS) of + Config5 = set_global_flag(Config4, Options, force), + Config6 = case proplists:get_value(jobs, Options, ?DEFAULT_JOBS) of ?DEFAULT_JOBS -> - Config4; + Config5; Jobs -> - rebar_config:set_global(Config4, jobs, Jobs) + rebar_config:set_global(Config5, jobs, Jobs) end, %% Filter all the flags (i.e. strings of form key=value) from the %% command line arguments. What's left will be the commands to run. - {Config6, RawCmds} = filter_flags(Config5, NonOptArgs, []), - {Config6, unabbreviate_command_names(RawCmds)}. + {Config7, RawCmds} = filter_flags(Config6, NonOptArgs, []), + {Config7, unabbreviate_command_names(RawCmds)}. 
%% %% set log level based on getopt option %% set_log_level(Config, Options) -> - LogLevel = case proplists:get_all_values(verbose, Options) of - [] -> - rebar_log:default_level(); - Verbosities -> - lists:last(Verbosities) - end, - rebar_config:set_global(Config, verbose, LogLevel). + {IsVerbose, Level} = + case proplists:get_bool(quiet, Options) of + true -> + {false, rebar_log:error_level()}; + false -> + DefaultLevel = rebar_log:default_level(), + case proplists:get_all_values(verbose, Options) of + [] -> + {false, DefaultLevel}; + Verbosities -> + {true, DefaultLevel + lists:last(Verbosities)} + end + end, + + case IsVerbose of + true -> + Config1 = rebar_config:set_xconf(Config, is_verbose, true), + rebar_config:set_global(Config1, verbose, Level); + false -> + rebar_config:set_global(Config, verbose, Level) + end. %% %% show version information and halt @@ -317,50 +360,60 @@ show_info_maybe_halt(O, Opts, F) -> %% commands() -> S = <<" -clean Clean -compile Compile sources +clean Clean +compile Compile sources + +escriptize Generate escript archive -escriptize Generate escript archive +create template= [var=foo,...] Create skel based on template and vars +create-app [appid=myapp] Create simple app skel +create-lib [libid=mylib] Create simple lib skel +create-node [nodeid=mynode] Create simple node skel +list-templates List available templates -create template= [var=foo,...] Create skel based on template and vars -create-app [appid=myapp] Create simple app skel -create-node [nodeid=mynode] Create simple node skel -list-templates List available templates +doc Generate Erlang program documentation -doc Generate Erlang program documentation +prepare-deps Run 'rebar -r get-deps compile' +refresh-deps Run 'rebar -r update-deps compile' -check-deps Display to be fetched dependencies -get-deps Fetch dependencies -update-deps Update fetched dependencies -delete-deps Delete fetched dependencies -list-deps List dependencies +check-deps Display to be fetched dependencies +get-deps Fetch dependencies +update-deps Update fetched dependencies +delete-deps Delete fetched dependencies +list-deps List dependencies -generate [dump_spec=0/1] Build release with reltool -overlay Run reltool overlays only +generate [dump_spec=0/1] Build release with reltool +overlay Run reltool overlays only generate-upgrade previous_release=path Build an upgrade package generate-appups previous_release=path Generate appup files -eunit [suites=foo] Run eunit tests in foo.erl and - test/foo_tests.erl - [suites=foo] [tests=bar] Run specific eunit tests [first test name - starting with 'bar' in foo.erl and - test/foo_tests.erl] - [tests=bar] For every existing suite, run the first - test whose name starts with bar and, if - no such test exists, run the test whose - name starts with bar in the suite's - _tests module +eunit [suite[s]=foo] Run EUnit tests in foo.erl and + test/foo_tests.erl + [suite[s]=foo] [test[s]=bar] Run specific EUnit tests [first test + name starting with 'bar' in foo.erl + and test/foo_tests.erl] + [test[s]=bar] For every existing suite, run the first + test whose name starts with bar and, if + no such test exists, run the test whose + name starts with bar in the suite's + _tests module. + [random_suite_order=true] Run tests in a random order, either + [random_suite_order=Seed] with a random seed for the PRNG, or a + specific one. 
+ +ct [suite[s]=] [case=] Run common_test suites -ct [suites=] [case=] Run common_test suites +qc Test QuickCheck properties -qc Test QuickCheck properties +xref Run cross reference analysis -xref Run cross reference analysis +shell Start a shell similar to + 'erl -pa ebin -pa deps/*/ebin' -help Show the program options -version Show version information +help Show the program options +version Show version information ">>, io:put_chars(S). @@ -375,12 +428,12 @@ option_spec_list() -> JobsHelp = io_lib:format( "Number of concurrent workers a command may use. Default: ~B", [Jobs]), - VerboseHelp = "Verbosity level (-v, -vv, -vvv, --verbose 3). Default: 0", [ %% {Name, ShortOpt, LongOpt, ArgSpec, HelpMsg} {help, $h, "help", undefined, "Show the program options"}, {commands, $c, "commands", undefined, "Show available commands"}, - {verbose, $v, "verbose", integer, VerboseHelp}, + {verbose, $v, "verbose", integer, "Verbosity level (-v, -vv)"}, + {quiet, $q, "quiet", boolean, "Quiet, only print error messages"}, {version, $V, "version", undefined, "Show version information"}, {force, $f, "force", undefined, "Force"}, {defines, $D, undefined, string, "Define compiler macro"}, @@ -388,7 +441,9 @@ option_spec_list() -> {config, $C, "config", string, "Rebar config file to use"}, {profile, $p, "profile", undefined, "Profile this run of rebar"}, {keep_going, $k, "keep-going", undefined, - "Keep running after a command fails"} + "Keep running after a command fails"}, + {recursive, $r, "recursive", boolean, + "Apply commands to subdirs and dependencies"} ]. %% @@ -423,6 +478,7 @@ command_names() -> "compile", "create", "create-app", + "create-lib", "create-node", "ct", "delete-deps", @@ -436,7 +492,9 @@ command_names() -> "help", "list-deps", "list-templates", + "prepare-deps", "qc", + "refresh-deps", "update-deps", "overlay", "shell", diff --git a/src/rebar.hrl b/src/rebar.hrl new file mode 100644 index 0000000..b19fdd3 --- /dev/null +++ b/src/rebar.hrl @@ -0,0 +1,14 @@ +%% TODO: rename FAIL to ABORT once we require at least R13B04 for +%% building rebar. Macros with different arity were not supported by the +%% compiler before 13B04. +-define(FAIL, rebar_utils:abort()). +-define(ABORT(Str, Args), rebar_utils:abort(Str, Args)). + +-define(CONSOLE(Str, Args), io:format(Str, Args)). + +-define(DEBUG(Str, Args), rebar_log:log(debug, Str, Args)). +-define(INFO(Str, Args), rebar_log:log(info, Str, Args)). +-define(WARN(Str, Args), rebar_log:log(warn, Str, Args)). +-define(ERROR(Str, Args), rebar_log:log(standard_error, error, Str, Args)). + +-define(FMT(Str, Args), lists:flatten(io_lib:format(Str, Args))). diff --git a/src/rebar_app_utils.erl b/src/rebar_app_utils.erl index 8158eb6..a2484e1 100644 --- a/src/rebar_app_utils.erl +++ b/src/rebar_app_utils.erl @@ -163,15 +163,6 @@ consult_app_file(Filename) -> false -> file:consult(Filename); true -> - %% TODO: EXPERIMENTAL For now let's warn the user if a - %% script is going to be run. - case filelib:is_regular([Filename, ".script"]) of - true -> - ?CONSOLE("NOTICE: Using experimental *.app.src.script " - "functionality on ~s ~n", [Filename]); - _ -> - ok - end, rebar_config:consult_file(Filename) end. 
diff --git a/src/rebar_appups.erl b/src/rebar_appups.erl index 722f161..a51c30d 100644 --- a/src/rebar_appups.erl +++ b/src/rebar_appups.erl @@ -50,6 +50,8 @@ PrevRelPath = rebar_rel_utils:get_previous_release_path(Config), OldVerPath = filename:join([TargetParentDir, PrevRelPath]), + ModDeps = rebar_config:get(Config, module_deps, []), + %% Get the new and old release name and versions {Name, _Ver} = rebar_rel_utils:get_reltool_release_info(ReltoolConfig), NewVerPath = filename:join([TargetParentDir, Name]), @@ -77,7 +79,7 @@ UpgradeApps = genappup_which_apps(Upgraded, AppUpApps), %% Generate appup files for upgraded apps - generate_appup_files(NewVerPath, OldVerPath, UpgradeApps), + generate_appup_files(NewVerPath, OldVerPath, ModDeps, UpgradeApps), {ok, Config1}. @@ -139,9 +141,9 @@ genappup_which_apps(UpgradedApps, [First|Rest]) -> genappup_which_apps(Apps, []) -> Apps. -generate_appup_files(NewVerPath, OldVerPath, [{_App, {undefined, _}}|Rest]) -> - generate_appup_files(NewVerPath, OldVerPath, Rest); -generate_appup_files(NewVerPath, OldVerPath, [{App, {OldVer, NewVer}}|Rest]) -> +generate_appup_files(NewVerPath, OldVerPath, ModDeps, [{_App, {undefined, _}}|Rest]) -> + generate_appup_files(NewVerPath, OldVerPath, ModDeps, Rest); +generate_appup_files(NewVerPath, OldVerPath, ModDeps, [{App, {OldVer, NewVer}}|Rest]) -> OldEbinDir = filename:join([OldVerPath, "lib", atom_to_list(App) ++ "-" ++ OldVer, "ebin"]), NewEbinDir = filename:join([NewVerPath, "lib", @@ -150,9 +152,14 @@ generate_appup_files(NewVerPath, OldVerPath, [{App, {OldVer, NewVer}}|Rest]) -> {AddedFiles, DeletedFiles, ChangedFiles} = beam_lib:cmp_dirs(NewEbinDir, OldEbinDir), + ChangedNames = [list_to_atom(file_to_name(F)) || {F, _} <- ChangedFiles], + ModDeps1 = [{N, [M1 || M1 <- M, lists:member(M1, ChangedNames)]} + || {N, M} <- ModDeps], + Added = [generate_instruction(added, File) || File <- AddedFiles], Deleted = [generate_instruction(deleted, File) || File <- DeletedFiles], - Changed = [generate_instruction(changed, File) || File <- ChangedFiles], + Changed = [generate_instruction(changed, ModDeps1, File) + || File <- ChangedFiles], Inst = lists:append([Added, Deleted, Changed]), @@ -164,8 +171,8 @@ generate_appup_files(NewVerPath, OldVerPath, [{App, {OldVer, NewVer}}|Rest]) -> OldVer, Inst, OldVer])), ?CONSOLE("Generated appup for ~p~n", [App]), - generate_appup_files(NewVerPath, OldVerPath, Rest); -generate_appup_files(_, _, []) -> + generate_appup_files(NewVerPath, OldVerPath, ModDeps, Rest); +generate_appup_files(_, _, _, []) -> ?CONSOLE("Appup generation complete~n", []). generate_instruction(added, File) -> @@ -173,25 +180,27 @@ generate_instruction(added, File) -> {add_module, Name}; generate_instruction(deleted, File) -> Name = list_to_atom(file_to_name(File)), - {delete_module, Name}; -generate_instruction(changed, {File, _}) -> + {delete_module, Name}. + +generate_instruction(changed, ModDeps, {File, _}) -> {ok, {Name, List}} = beam_lib:chunks(File, [attributes, exports]), Behavior = get_behavior(List), CodeChange = is_code_change(List), - generate_instruction_advanced(Name, Behavior, CodeChange). + Deps = proplists:get_value(Name, ModDeps, []), + generate_instruction_advanced(Name, Behavior, CodeChange, Deps). 
-generate_instruction_advanced(Name, undefined, undefined) -> +generate_instruction_advanced(Name, undefined, undefined, Deps) -> %% Not a behavior or code change, assume purely functional - {load_module, Name}; -generate_instruction_advanced(Name, [supervisor], _) -> + {load_module, Name, Deps}; +generate_instruction_advanced(Name, [supervisor], _, _) -> %% Supervisor {update, Name, supervisor}; -generate_instruction_advanced(Name, _, code_change) -> +generate_instruction_advanced(Name, _, code_change, Deps) -> %% Includes code_change export - {update, Name, {advanced, []}}; -generate_instruction_advanced(Name, _, _) -> + {update, Name, {advanced, []}, Deps}; +generate_instruction_advanced(Name, _, _, Deps) -> %% Anything else - {load_module, Name}. + {load_module, Name, Deps}. get_behavior(List) -> Attributes = proplists:get_value(attributes, List), diff --git a/src/rebar_base_compiler.erl b/src/rebar_base_compiler.erl index a0dec30..1957070 100644 --- a/src/rebar_base_compiler.erl +++ b/src/rebar_base_compiler.erl @@ -49,7 +49,7 @@ run(Config, FirstFiles, RestFiles, CompileFn) -> Jobs = rebar:get_jobs(Config), ?DEBUG("Starting ~B compile worker(s)~n", [Jobs]), Pids = [spawn_monitor(F) || _I <- lists:seq(1,Jobs)], - compile_queue(Pids, RestFiles) + compile_queue(Config, Pids, RestFiles) end. run(Config, FirstFiles, SourceDir, SourceExt, TargetDir, TargetExt, @@ -139,27 +139,31 @@ compile_each([Source | Rest], Config, CompileFn) -> skipped -> ?INFO("Skipped ~s\n", [Source]); Error -> + ?CONSOLE("Compiling ~s failed:\n", + [maybe_absname(Config, Source)]), maybe_report(Error), ?DEBUG("Compilation failed: ~p\n", [Error]), ?FAIL end, compile_each(Rest, Config, CompileFn). -compile_queue([], []) -> +compile_queue(_Config, [], []) -> ok; -compile_queue(Pids, Targets) -> +compile_queue(Config, Pids, Targets) -> receive {next, Worker} -> case Targets of [] -> Worker ! empty, - compile_queue(Pids, Targets); + compile_queue(Config, Pids, Targets); [Source | Rest] -> Worker ! {compile, Source}, - compile_queue(Pids, Rest) + compile_queue(Config, Pids, Rest) end; - {fail, Error} -> + {fail, {_, {source, Source}}=Error} -> + ?CONSOLE("Compiling ~s failed:\n", + [maybe_absname(Config, Source)]), maybe_report(Error), ?DEBUG("Worker compilation failed: ~p\n", [Error]), ?FAIL; @@ -167,20 +171,20 @@ compile_queue(Pids, Targets) -> {compiled, Source, Warnings} -> report(Warnings), ?CONSOLE("Compiled ~s\n", [Source]), - compile_queue(Pids, Targets); + compile_queue(Config, Pids, Targets); {compiled, Source} -> ?CONSOLE("Compiled ~s\n", [Source]), - compile_queue(Pids, Targets); + compile_queue(Config, Pids, Targets); {skipped, Source} -> ?INFO("Skipped ~s\n", [Source]), - compile_queue(Pids, Targets); + compile_queue(Config, Pids, Targets); {'DOWN', Mref, _, Pid, normal} -> ?DEBUG("Worker exited cleanly\n", []), Pids2 = lists:delete({Pid, Mref}, Pids), - compile_queue(Pids2, Targets); + compile_queue(Config, Pids2, Targets); {'DOWN', _Mref, _, _Pid, Info} -> ?DEBUG("Worker failed: ~p\n", [Info]), @@ -202,8 +206,7 @@ compile_worker(QueuePid, Config, CompileFn) -> QueuePid ! {skipped, Source}, compile_worker(QueuePid, Config, CompileFn); Error -> - QueuePid ! {fail, [{error, Error}, - {source, Source}]}, + QueuePid ! {fail, {{error, Error}, {source, Source}}}, ok end; @@ -224,7 +227,7 @@ format_warnings(Config, Source, Warnings, Opts) -> end, format_errors(Config, Source, Prefix, Warnings). 
-maybe_report([{error, {error, _Es, _Ws}=ErrorsAndWarnings}, {source, _}]) -> +maybe_report({{error, {error, _Es, _Ws}=ErrorsAndWarnings}, {source, _}}) -> maybe_report(ErrorsAndWarnings); maybe_report([{error, E}, {source, S}]) -> report(["unexpected error compiling " ++ S, io_lib:fwrite("~n~p~n", [E])]); @@ -239,12 +242,7 @@ report(Messages) -> format_errors(Config, _MainSource, Extra, Errors) -> [begin - AbsSource = case rebar_utils:processing_base_dir(Config) of - true -> - Source; - false -> - filename:absname(Source) - end, + AbsSource = maybe_absname(Config, Source), [format_error(AbsSource, Extra, Desc) || Desc <- Descs] end || {Source, Descs} <- Errors]. @@ -258,3 +256,11 @@ format_error(AbsSource, Extra, {Line, Mod, Desc}) -> format_error(AbsSource, Extra, {Mod, Desc}) -> ErrorDesc = Mod:format_error(Desc), ?FMT("~s: ~s~s~n", [AbsSource, Extra, ErrorDesc]). + +maybe_absname(Config, Filename) -> + case rebar_utils:processing_base_dir(Config) of + true -> + Filename; + false -> + filename:absname(Filename) + end. diff --git a/src/rebar_config.erl b/src/rebar_config.erl index 461de5d..1c90d22 100644 --- a/src/rebar_config.erl +++ b/src/rebar_config.erl @@ -31,7 +31,7 @@ get_all/2, set/3, set_global/3, get_global/3, - is_verbose/1, + is_recursive/1, save_env/3, get_env/2, reset_envs/1, set_skip_dir/2, is_skip_dir/2, reset_skip_dirs/1, clean_config/2, @@ -39,13 +39,21 @@ -include("rebar.hrl"). +-ifdef(namespaced_types). +% dict:dict() exists starting from Erlang 17. +-type rebar_dict() :: dict:dict(). +-else. +% dict() has been obsoleted in Erlang 17 and deprecated in 18. +-type rebar_dict() :: dict(). +-endif. + -record(config, { dir :: file:filename(), opts = [] :: list(), - globals = new_globals() :: dict(), - envs = new_env() :: dict(), + globals = new_globals() :: rebar_dict(), + envs = new_env() :: rebar_dict(), %% cross-directory/-command config - skip_dirs = new_skip_dirs() :: dict(), - xconf = new_xconf() :: dict() }). + skip_dirs = new_skip_dirs() :: rebar_dict(), + xconf = new_xconf() :: rebar_dict() }). -export_type([config/0]). @@ -110,9 +118,8 @@ get_global(Config, Key, Default) -> Value end. -is_verbose(Config) -> - DefaulLevel = rebar_log:default_level(), - get_global(Config, verbose, DefaulLevel) > DefaulLevel. +is_recursive(Config) -> + get_xconf(Config, recursive, false). consult_file(File) -> case filename:extension(File) of diff --git a/src/rebar_core.erl b/src/rebar_core.erl index 631cef2..212365b 100644 --- a/src/rebar_core.erl +++ b/src/rebar_core.erl @@ -88,7 +88,7 @@ process_commands([Command | Rest], ParentConfig) -> %% path from inside a subdirectory. true = rebar_utils:expand_code_path(), {ParentConfig2, _DirSet} = process_dir(rebar_utils:get_cwd(), - ParentConfig1, Command, + Command, ParentConfig1, sets:new()), case get_operations(ParentConfig2) of Operations -> @@ -117,71 +117,110 @@ process_commands([Command | Rest], ParentConfig) -> end, process_commands(Rest, ParentConfig4). -process_dir(Dir, ParentConfig, Command, DirSet) -> +process_dir(Dir, Command, ParentConfig, DirSet) -> case filelib:is_dir(Dir) of false -> ?WARN("Skipping non-existent sub-dir: ~p\n", [Dir]), {ParentConfig, DirSet}; - true -> + WouldCd = would_cd_into_dir(Dir, Command, ParentConfig), ok = file:set_cwd(Dir), Config = maybe_load_local_config(Dir, ParentConfig), %% Save the current code path and then update it with - %% lib_dirs. Children inherit parents code path, but we - %% also want to ensure that we restore everything to pristine + %% lib_dirs. 
Children inherit parents code path, but we also + %% want to ensure that we restore everything to pristine %% condition after processing this child CurrentCodePath = update_code_path(Config), - %% Get the list of processing modules and check each one against - %% CWD to see if it's a fit -- if it is, use that set of modules - %% to process this dir. + %% Get the list of processing modules and check each one + %% against CWD to see if it's a fit -- if it is, use that + %% set of modules to process this dir. {ok, AvailModuleSets} = application:get_env(rebar, modules), ModuleSet = choose_module_set(AvailModuleSets, Dir), - skip_or_process_dir(ModuleSet, Config, CurrentCodePath, - Dir, Command, DirSet) + skip_or_process_dir(Dir, Command, Config, DirSet, CurrentCodePath, + ModuleSet, WouldCd) end. -skip_or_process_dir({[], undefined}=ModuleSet, Config, CurrentCodePath, - Dir, Command, DirSet) -> - process_dir1(Dir, Command, DirSet, Config, CurrentCodePath, ModuleSet); -skip_or_process_dir({_, ModuleSetFile}=ModuleSet, Config, CurrentCodePath, - Dir, Command, DirSet) -> - case lists:suffix(".app.src", ModuleSetFile) - orelse lists:suffix(".app", ModuleSetFile) of +would_cd_into_dir(Dir, Command, Config) -> + case would_cd_into_dir1(Dir, Command, Config) of + true -> + would_cd; + false -> + would_not_cd + end. + +would_cd_into_dir1(Dir, Command, Config) -> + rebar_utils:processing_base_dir(Config, Dir) orelse + rebar_config:is_recursive(Config) orelse + is_recursive_command(Command, Config) orelse + is_generate_in_rel_dir(Command, Dir). + +%% Check whether the command is part of the built-in (or extended via +%% rebar.config) list of default-recursive commands. +is_recursive_command(Command, Config) -> + {ok, AppCmds} = application:get_env(rebar, recursive_cmds), + ConfCmds = rebar_config:get_local(Config, recursive_cmds, []), + RecursiveCmds = AppCmds ++ ConfCmds, + lists:member(Command, RecursiveCmds). + +%% If the directory we're about to process contains +%% reltool.config[.script] and the command to be applied is +%% 'generate', then it's safe to process. We do this to retain the +%% behavior of specifying {sub_dirs, ["rel"]} and have "rebar generate" +%% pick up rel/reltool.config[.script]. Without this workaround you'd +%% have to run "rebar -r generate" (which you don't want to do if you +%% have deps or other sub_dirs) or "cd rel && rebar generate". +is_generate_in_rel_dir(generate, Dir) -> + case rebar_rel_utils:is_rel_dir(Dir) of + {true, _} -> + true; + false -> + false + end; +is_generate_in_rel_dir(_, _) -> + false. + +skip_or_process_dir(Dir, Command, Config, DirSet, CurrentCodePath, + {[], undefined}=ModuleSet, WouldCd) -> + process_dir1(Dir, Command, Config, DirSet, CurrentCodePath, ModuleSet, + WouldCd); +skip_or_process_dir(Dir, Command, Config, DirSet, CurrentCodePath, + {_, File}=ModuleSet, WouldCd) -> + case lists:suffix(".app.src", File) + orelse lists:suffix(".app", File) of true -> %% .app or .app.src file, check if is_skipped_app - skip_or_process_dir1(ModuleSetFile, ModuleSet, - Config, CurrentCodePath, Dir, - Command, DirSet); + skip_or_process_dir1(Dir, Command, Config, DirSet, CurrentCodePath, + ModuleSet, WouldCd, File); false -> %% not an app dir, no need to consider apps=/skip_apps= - process_dir1(Dir, Command, DirSet, Config, - CurrentCodePath, ModuleSet) + process_dir1(Dir, Command, Config, DirSet, CurrentCodePath, + ModuleSet, WouldCd) end. 
-skip_or_process_dir1(AppFile, ModuleSet, Config, CurrentCodePath, - Dir, Command, DirSet) -> +skip_or_process_dir1(Dir, Command, Config, DirSet, CurrentCodePath, ModuleSet, + WouldCd, AppFile) -> case rebar_app_utils:is_skipped_app(Config, AppFile) of {Config1, {true, _SkippedApp}} when Command == 'update-deps' -> %% update-deps does its own app skipping. Unfortunately there's no %% way to signal this to rebar_core, so we have to explicitly do it %% here... Otherwise if you use app=, it'll skip the toplevel %% directory and nothing will be updated. - process_dir1(Dir, Command, DirSet, Config1, - CurrentCodePath, ModuleSet); + process_dir1(Dir, Command, Config1, DirSet, CurrentCodePath, + ModuleSet, WouldCd); {Config1, {true, SkippedApp}} -> ?DEBUG("Skipping app: ~p~n", [SkippedApp]), - Config2 = increment_operations(Config1), - {Config2, DirSet}; + {increment_operations(Config1), DirSet}; {Config1, false} -> - process_dir1(Dir, Command, DirSet, Config1, - CurrentCodePath, ModuleSet) + process_dir1(Dir, Command, Config1, DirSet, CurrentCodePath, + ModuleSet, WouldCd) end. -process_dir1(Dir, Command, DirSet, Config, CurrentCodePath, - {DirModules, ModuleSetFile}) -> +process_dir1(Dir, Command, Config, DirSet, CurrentCodePath, + {DirModules, File}, WouldCd) -> Config0 = rebar_config:set_xconf(Config, current_command, Command), + %% Get the list of modules for "any dir". This is a catch-all list %% of modules that are processed in addition to modules associated %% with this directory type. These any_dir modules are processed @@ -192,8 +231,7 @@ process_dir1(Dir, Command, DirSet, Config, CurrentCodePath, %% Invoke 'preprocess' on the modules -- this yields a list of other %% directories that should be processed _before_ the current one. - {Config1, Predirs} = acc_modules(Modules, preprocess, Config0, - ModuleSetFile), + {Config1, Predirs} = acc_modules(Modules, preprocess, Config0, File), %% Remember associated pre-dirs (used for plugin lookup) PredirsAssoc = remember_cwd_predirs(Dir, Predirs), @@ -201,55 +239,33 @@ process_dir1(Dir, Command, DirSet, Config, CurrentCodePath, %% Get the list of plug-in modules from rebar.config. These %% modules may participate in preprocess and postprocess. {ok, PluginModules} = plugin_modules(Config1, PredirsAssoc), + AllModules = Modules ++ PluginModules, - {Config2, PluginPredirs} = acc_modules(PluginModules, preprocess, - Config1, ModuleSetFile), + {Config2, PluginPredirs} = acc_modules(PluginModules, preprocess, Config1, + File), AllPredirs = Predirs ++ PluginPredirs, ?DEBUG("Predirs: ~p\n", [AllPredirs]), - {Config3, DirSet2} = process_each(AllPredirs, Command, Config2, - ModuleSetFile, DirSet), + {Config3, DirSet2} = process_each(AllPredirs, Command, Config2, DirSet, + File), %% Make sure the CWD is reset properly; processing the dirs may have %% caused it to change ok = file:set_cwd(Dir), - %% Check that this directory is not on the skip list - Config7 = case rebar_config:is_skip_dir(Config3, Dir) of - true -> - %% Do not execute the command on the directory, as some - %% module has requested a skip on it. 
- ?INFO("Skipping ~s in ~s\n", [Command, Dir]), - Config3; - - false -> - %% Check for and get command specific environments - {Config4, Env} = setup_envs(Config3, Modules), - - %% Execute any before_command plugins on this directory - Config5 = execute_pre(Command, PluginModules, - Config4, ModuleSetFile, Env), - - %% Execute the current command on this directory - Config6 = execute(Command, Modules ++ PluginModules, - Config5, ModuleSetFile, Env), - - %% Execute any after_command plugins on this directory - execute_post(Command, PluginModules, - Config6, ModuleSetFile, Env) - end, + %% Maybe apply command to Dir + Config4 = maybe_execute(Dir, Command, Config3, Modules, PluginModules, + AllModules, File, WouldCd), %% Mark the current directory as processed DirSet3 = sets:add_element(Dir, DirSet2), %% Invoke 'postprocess' on the modules. This yields a list of other %% directories that should be processed _after_ the current one. - {Config8, Postdirs} = acc_modules(Modules ++ PluginModules, postprocess, - Config7, ModuleSetFile), + {Config5, Postdirs} = acc_modules(AllModules, postprocess, Config4, File), ?DEBUG("Postdirs: ~p\n", [Postdirs]), - Res = process_each(Postdirs, Command, Config8, - ModuleSetFile, DirSet3), + Res = process_each(Postdirs, Command, Config5, DirSet3, File), %% Make sure the CWD is reset properly; processing the dirs may have %% caused it to change @@ -262,6 +278,33 @@ process_dir1(Dir, Command, DirSet, Config, CurrentCodePath, %% Return the updated {config, dirset} as result Res. +maybe_execute(Dir, Command, Config, Modules, PluginModules, AllModules, File, + would_cd) -> + %% Check that this directory is not on the skip list + case rebar_config:is_skip_dir(Config, Dir) of + true -> + %% Do not execute the command on the directory, as some + %% module has requested a skip on it. + ?INFO("Skipping ~s in ~s\n", [Command, Dir]), + Config; + + false -> + %% Check for and get command specific environments + {Config1, Env} = setup_envs(Config, Modules), + + %% Execute any before_command plugins on this directory + Config2 = execute_pre(Command, PluginModules, Config1, File, Env), + + %% Execute the current command on this directory + Config3 = execute(Command, AllModules, Config2, File, Env), + + %% Execute any after_command plugins on this directory + execute_post(Command, PluginModules, Config3, File, Env) + end; +maybe_execute(_Dir, _Command, Config, _Modules, _PluginModules, _AllModules, + _File, would_not_cd) -> + Config. 
+ remember_cwd_predirs(Cwd, Predirs) -> Store = fun(Dir, Dict) -> case dict:find(Dir, Dict) of @@ -292,21 +335,21 @@ maybe_load_local_config(Dir, ParentConfig) -> %% Given a list of directories and a set of previously processed directories, %% process each one we haven't seen yet %% -process_each([], _Command, Config, _ModuleSetFile, DirSet) -> +process_each([], _Command, Config, DirSet, _File) -> %% reset cached (setup_env) envs Config1 = rebar_config:reset_envs(Config), {Config1, DirSet}; -process_each([Dir | Rest], Command, Config, ModuleSetFile, DirSet) -> +process_each([Dir | Rest], Command, Config, DirSet, File) -> case sets:is_element(Dir, DirSet) of true -> ?DEBUG("Skipping ~s; already processed!\n", [Dir]), - process_each(Rest, Command, Config, ModuleSetFile, DirSet); + process_each(Rest, Command, Config, DirSet, File); false -> - {Config1, DirSet2} = process_dir(Dir, Config, Command, DirSet), + {Config1, DirSet2} = process_dir(Dir, Command, Config, DirSet), Config2 = rebar_config:clean_config(Config, Config1), %% reset cached (setup_env) envs Config3 = rebar_config:reset_envs(Config2), - process_each(Rest, Command, Config3, ModuleSetFile, DirSet2) + process_each(Rest, Command, Config3, DirSet2, File) end. %% @@ -340,20 +383,21 @@ execute_post(Command, Modules, Config, ModuleFile, Env) -> execute_plugin_hook(Hook, Command, Modules, Config, ModuleFile, Env) -> HookFunction = list_to_atom(Hook ++ atom_to_list(Command)), - execute(HookFunction, Modules, Config, ModuleFile, Env). + execute(HookFunction, hook, Modules, Config, ModuleFile, Env). %% %% Execute a command across all applicable modules %% execute(Command, Modules, Config, ModuleFile, Env) -> + execute(Command, not_a_hook, Modules, Config, ModuleFile, Env). + +execute(Command, Type, Modules, Config, ModuleFile, Env) -> case select_modules(Modules, Command, []) of [] -> - Cmd = atom_to_list(Command), - case lists:prefix("pre_", Cmd) - orelse lists:prefix("post_", Cmd) of - true -> + case Type of + hook -> ok; - false -> + not_a_hook -> ?WARN("'~p' command does not apply to directory ~s\n", [Command, rebar_utils:get_cwd()]) end, @@ -452,8 +496,9 @@ run_modules([Module | Rest], Command, Config, File) -> {Module, Error} end. -apply_hooks(Mode, Config, Command, Env) -> +apply_hooks(Mode, Config, Command, Env0) -> Hooks = rebar_config:get_local(Config, Mode, []), + Env = rebar_utils:patch_env(Config, Env0), lists:foreach(fun apply_hook/1, [{Env, Hook} || Hook <- Hooks, element(1, Hook) =:= Command orelse @@ -502,6 +547,8 @@ acc_modules([Module | Rest], Command, Config, File, Acc) -> %% plugin_modules(Config, PredirsAssoc) -> Modules = lists:flatten(rebar_config:get_all(Config, plugins)), + ?DEBUG("Plugins requested while processing ~s: ~p~n", + [rebar_utils:get_cwd(), Modules]), plugin_modules(Config, PredirsAssoc, ulist(Modules)). ulist(L) -> @@ -543,6 +590,7 @@ plugin_modules(Config, PredirsAssoc, FoundModules, MissingModules) -> load_plugin_modules(Config, PredirsAssoc, Modules) -> Cwd = rebar_utils:get_cwd(), PluginDirs = get_all_plugin_dirs(Config, Cwd, PredirsAssoc), + ?DEBUG("Plugin dirs for ~s:~n~p~n", [Cwd, PluginDirs]), %% Find relevant sources in base_dir and plugin_dir Erls = string:join([atom_to_list(M)++"\\.erl" || M <- Modules], "|"), @@ -558,7 +606,9 @@ load_plugin_modules(Config, PredirsAssoc, Modules) -> {Loaded, NotLoaded}. get_all_plugin_dirs(Config, Cwd, PredirsAssoc) -> - get_plugin_dir(Config, Cwd) ++ get_base_plugin_dirs(Cwd, PredirsAssoc). 
+ [rebar_utils:get_cwd()] + ++ get_plugin_dir(Config, Cwd) + ++ get_base_plugin_dirs(Cwd, PredirsAssoc). get_plugin_dir(Config, Cwd) -> case rebar_config:get_local(Config, plugin_dir, undefined) of @@ -577,7 +627,7 @@ get_base_plugin_dirs(Cwd, PredirsAssoc) -> [filename:join(Dir, "plugins") || Dir <- get_plugin_base_dirs(Cwd, PredirsAssoc)]. -%% @doc PredirsAssoc is a dictionary of plugindir -> 'parent' pairs +%% @doc PredirsAssoc is a dictionary of plugindir -> 'parent' pairs. %% 'parent' in this case depends on plugin; therefore we have to give %% all plugins that Cwd ('parent' in this case) depends on. get_plugin_base_dirs(Cwd, PredirsAssoc) -> diff --git a/src/rebar_cover_utils.erl b/src/rebar_cover_utils.erl new file mode 100644 index 0000000..3195fe2 --- /dev/null +++ b/src/rebar_cover_utils.erl @@ -0,0 +1,261 @@ +%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*- +%% ex: ts=4 sw=4 et +%% ------------------------------------------------------------------- +%% +%% rebar: Erlang Build Tools +%% +%% Copyright (c) 2009, 2010 Dave Smith (dizzyd@dizzyd.com) +%% Copyright (c) 2013 Andras Horvath (andras.horvath@erlang-solutions.com) +%% +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. +%% ------------------------------------------------------------------- +-module(rebar_cover_utils). + +%% for internal use only +-export([init/3, + perform_cover/4, + close/1, + exit/0]). + +-include("rebar.hrl"). + +%% ==================================================================== +%% Internal functions +%% ==================================================================== + +perform_cover(Config, BeamFiles, SrcModules, TargetDir) -> + perform_cover(rebar_config:get(Config, cover_enabled, false), + Config, BeamFiles, SrcModules, TargetDir). + +perform_cover(false, _Config, _BeamFiles, _SrcModules, _TargetDir) -> + ok; +perform_cover(true, Config, BeamFiles, SrcModules, TargetDir) -> + analyze(Config, BeamFiles, SrcModules, TargetDir). + +close(not_enabled) -> + ok; +close(F) -> + ok = file:close(F). + +exit() -> + cover:stop(). + +init(false, _BeamFiles, _TargetDir) -> + {ok, not_enabled}; +init(true, BeamFiles, TargetDir) -> + %% Attempt to start the cover server, then set its group leader to + %% TargetDir/cover.log, so all cover log messages will go there instead of + %% to stdout. If the cover server is already started, we'll kill that + %% server and start a new one in order not to inherit a polluted + %% cover_server state. 
+ {ok, CoverPid} = case whereis(cover_server) of + undefined -> + cover:start(); + _ -> + cover:stop(), + cover:start() + end, + + {ok, F} = OkOpen = file:open( + filename:join([TargetDir, "cover.log"]), + [write]), + + group_leader(F, CoverPid), + + ?INFO("Cover compiling ~s\n", [rebar_utils:get_cwd()]), + + Compiled = [{Beam, cover:compile_beam(Beam)} || Beam <- BeamFiles], + case [Module || {_, {ok, Module}} <- Compiled] of + [] -> + %% No modules compiled successfully...fail + ?ERROR("Cover failed to compile any modules; aborting.~n", []), + ?FAIL; + _ -> + %% At least one module compiled successfully + + %% It's not an error for cover compilation to fail partially, + %% but we do want to warn about them + PrintWarning = + fun(Beam, Desc) -> + ?CONSOLE("Cover compilation warning for ~p: ~p", + [Beam, Desc]) + end, + _ = [PrintWarning(Beam, Desc) || {Beam, {error, Desc}} <- Compiled], + OkOpen + end; +init(Config, BeamFiles, TargetDir) -> + init(rebar_config:get(Config, cover_enabled, false), BeamFiles, TargetDir). + +analyze(_Config, [], _SrcModules, _TargetDir) -> + ok; +analyze(Config, FilteredModules, SrcModules, TargetDir) -> + %% Generate coverage info for all the cover-compiled modules + Coverage = lists:flatten([analyze_mod(M) + || M <- FilteredModules, + cover:is_compiled(M) =/= false]), + + %% Write index of coverage info + write_index(lists:sort(Coverage), SrcModules, TargetDir), + + %% Write coverage details for each file + lists:foreach( + fun({M, _, _}) -> + {ok, _} = cover:analyze_to_file(M, + cover_file(M, TargetDir), + [html]) + end, Coverage), + + Index = filename:join([rebar_utils:get_cwd(), TargetDir, "index.html"]), + ?CONSOLE("Cover analysis: ~s\n", [Index]), + + %% Export coverage data, if configured + case rebar_config:get(Config, cover_export_enabled, false) of + true -> + export_coverdata(TargetDir); + false -> + ok + end, + + %% Print coverage report, if configured + case rebar_config:get(Config, cover_print_enabled, false) of + true -> + print_coverage(lists:sort(Coverage)); + false -> + ok + end. + +analyze_mod(Module) -> + case cover:analyze(Module, coverage, module) of + {ok, {Module, {Covered, NotCovered}}} -> + %% Modules that include the eunit header get an implicit + %% test/0 fun, which cover considers a runnable line, but + %% eunit:test(TestRepresentation) never calls. Decrement + %% NotCovered in this case. + [align_notcovered_count(Module, Covered, NotCovered, + is_eunitized(Module))]; + {error, Reason} -> + ?ERROR("Cover analyze failed for ~p: ~p ~p\n", + [Module, Reason, code:which(Module)]), + [] + end. + +is_eunitized(Mod) -> + has_eunit_test_fun(Mod) andalso + has_header(Mod, "include/eunit.hrl"). + +has_eunit_test_fun(Mod) -> + [F || {exports, Funs} <- Mod:module_info(), + {F, 0} <- Funs, F =:= test] =/= []. + +has_header(Mod, Header) -> + Mod1 = case code:which(Mod) of + cover_compiled -> + {file, File} = cover:is_compiled(Mod), + File; + non_existing -> Mod; + preloaded -> Mod; + L -> L + end, + {ok, {_, [{abstract_code, {_, AC}}]}} = + beam_lib:chunks(Mod1, [abstract_code]), + [F || {attribute, 1, file, {F, 1}} <- AC, + string:str(F, Header) =/= 0] =/= []. + +align_notcovered_count(Module, Covered, NotCovered, false) -> + {Module, Covered, NotCovered}; +align_notcovered_count(Module, Covered, NotCovered, true) -> + {Module, Covered, NotCovered - 1}. 
+ +write_index(Coverage, SrcModules, TargetDir) -> + {ok, F} = file:open(filename:join([TargetDir, "index.html"]), [write]), + ok = file:write(F, "<!DOCTYPE HTML><html>\n" + "<head><meta charset=\"utf-8\">" + "<title>Coverage Summary</title></head>\n" + "<body>\n"), + IsSrcCoverage = fun({Mod,_C,_N}) -> lists:member(Mod, SrcModules) end, + {SrcCoverage, TestCoverage} = lists:partition(IsSrcCoverage, Coverage), + write_index_section(F, "Source", SrcCoverage), + write_index_section(F, "Test", TestCoverage), + ok = file:write(F, "</body></html>"), + ok = file:close(F). + +write_index_section(_F, _SectionName, []) -> + ok; +write_index_section(F, SectionName, Coverage) -> + %% Calculate total coverage + {Covered, NotCovered} = lists:foldl(fun({_Mod, C, N}, {CAcc, NAcc}) -> + {CAcc + C, NAcc + N} + end, {0, 0}, Coverage), + TotalCoverage = percentage(Covered, NotCovered), + + %% Write the report + ok = file:write(F, ?FMT("<h1>~s Summary</h1>\n", [SectionName])), + ok = file:write(F, ?FMT("<h3>Total: ~s</h3>\n", [TotalCoverage])), + ok = file:write(F, "<table><tr><th>Module</th><th>Coverage %</th></tr>\n"), + + FmtLink = + fun(Module, Cov, NotCov) -> + ?FMT("<tr><td><a href='~s.COVER.html'>~s</a></td><td>~s</td>\n", + [Module, Module, percentage(Cov, NotCov)]) + end, + lists:foreach(fun({Module, Cov, NotCov}) -> + ok = file:write(F, FmtLink(Module, Cov, NotCov)) + end, Coverage), + ok = file:write(F, "</table>\n"). + +print_coverage(Coverage) -> + {Covered, NotCovered} = lists:foldl(fun({_Mod, C, N}, {CAcc, NAcc}) -> + {CAcc + C, NAcc + N} + end, {0, 0}, Coverage), + TotalCoverage = percentage(Covered, NotCovered), + + %% Determine the longest module name for right-padding + Width = lists:foldl(fun({Mod, _, _}, Acc) -> + case length(atom_to_list(Mod)) of + N when N > Acc -> + N; + _ -> + Acc + end + end, 0, Coverage) * -1, + + %% Print the output the console + ?CONSOLE("~nCode Coverage:~n", []), + lists:foreach(fun({Mod, C, N}) -> + ?CONSOLE("~*s : ~3s~n", + [Width, Mod, percentage(C, N)]) + end, Coverage), + ?CONSOLE("~n~*s : ~s~n", [Width, "Total", TotalCoverage]). + +cover_file(Module, TargetDir) -> + filename:join([TargetDir, atom_to_list(Module) ++ ".COVER.html"]). + +export_coverdata(TargetDir) -> + ExportFile = filename:join(TargetDir, "cover.coverdata"), + case cover:export(ExportFile) of + ok -> + ?CONSOLE("Coverdata export: ~s~n", [ExportFile]); + {error, Reason} -> + ?ERROR("Coverdata export failed: ~p~n", [Reason]) + end. + +percentage(0, 0) -> + "not executed"; +percentage(Cov, NotCov) -> + integer_to_list(trunc((Cov / (Cov + NotCov)) * 100)) ++ "%". 
diff --git a/src/rebar_ct.erl b/src/rebar_ct.erl index 74ae618..c075e8c 100644 --- a/src/rebar_ct.erl +++ b/src/rebar_ct.erl @@ -101,7 +101,7 @@ run_test(TestDir, LogDir, Config, _File) -> {Cmd, RawLog} = make_cmd(TestDir, LogDir, Config), ?DEBUG("ct_run cmd:~n~p~n", [Cmd]), clear_log(LogDir, RawLog), - Output = case rebar_config:is_verbose(Config) of + Output = case rebar_log:is_verbose(Config) of false -> " >> " ++ RawLog ++ " 2>&1"; true -> @@ -172,7 +172,7 @@ check_log(Config,RawLog,Fun) -> %% Show the log if it hasn't already been shown because verbose was on show_log(Config, RawLog) -> ?CONSOLE("Showing log\n", []), - case rebar_config:is_verbose(Config) of + case rebar_log:is_verbose(Config) of false -> {ok, Contents} = file:read_file(RawLog), ?CONSOLE("~s", [Contents]); @@ -210,7 +210,7 @@ make_cmd(TestDir, RawLogDir, Config) -> CodeDirs = [io_lib:format("\"~s\"", [Dir]) || Dir <- [EbinDir|NonLibCodeDirs]], CodePathString = string:join(CodeDirs, " "), - Cmd = case get_ct_specs(Cwd) of + Cmd = case get_ct_specs(Config, Cwd) of undefined -> ?FMT("~s" " -pa ~s" @@ -260,8 +260,8 @@ build_name(Config) -> get_extra_params(Config) -> rebar_config:get_local(Config, ct_extra_params, ""). -get_ct_specs(Cwd) -> - case collect_glob(Cwd, ".*\.test\.spec\$") of +get_ct_specs(Config, Cwd) -> + case collect_glob(Config, Cwd, ".*\.test\.spec\$") of [] -> undefined; [Spec] -> " -spec " ++ Spec; @@ -275,31 +275,38 @@ get_cover_config(Config, Cwd) -> false -> ""; true -> - case collect_glob(Cwd, ".*cover\.spec\$") of + case collect_glob(Config, Cwd, ".*cover\.spec\$") of [] -> ?DEBUG("No cover spec found: ~s~n", [Cwd]), ""; [Spec] -> - ?DEBUG("Found cover file ~w~n", [Spec]), + ?DEBUG("Found cover file ~s~n", [Spec]), " -cover " ++ Spec; Specs -> ?ABORT("Multiple cover specs found: ~p~n", [Specs]) end end. -collect_glob(Cwd, Glob) -> - filelib:fold_files(Cwd, Glob, true, fun collect_files/2, []). - -collect_files(F, Acc) -> - %% Ignore any specs under the deps/ directory. Do this pulling - %% the dirname off the the F and then splitting it into a list. - Parts = filename:split(filename:dirname(F)), - case lists:member("deps", Parts) of - true -> - Acc; % There is a directory named "deps" in path - false -> - [F | Acc] % No "deps" directory in path - end. +collect_glob(Config, Cwd, Glob) -> + {true, Deps} = rebar_deps:get_deps_dir(Config), + CwdParts = filename:split(Cwd), + filelib:fold_files(Cwd, Glob, true, fun(F, Acc) -> + %% Ignore any specs under the deps/ directory. Do this pulling + %% the dirname off the F and then splitting it into a list. + Parts = filename:split(filename:dirname(F)), + Parts2 = remove_common_prefix(Parts, CwdParts), + case lists:member(Deps, Parts2) of + true -> + Acc; % There is a directory named "deps" in path + false -> + [F | Acc] % No "deps" directory in path + end + end, []). + +remove_common_prefix([H1|T1], [H1|T2]) -> + remove_common_prefix(T1, T2); +remove_common_prefix(L1, _) -> + L1. get_ct_config_file(TestDir) -> Config = filename:join(TestDir, "test.config"), @@ -320,7 +327,7 @@ get_config_file(TestDir) -> end. get_suites(Config, TestDir) -> - case rebar_config:get_global(Config, suites, undefined) of + case get_suites(Config) of undefined -> " -dir " ++ TestDir; Suites -> @@ -329,6 +336,14 @@ get_suites(Config, TestDir) -> string:join([" -suite"] ++ Suites2, " ") end. +get_suites(Config) -> + case rebar_config:get_global(Config, suites, undefined) of + undefined -> + rebar_config:get_global(Config, suite, undefined); + Suites -> + Suites + end. 
+ find_suite_path(Suite, TestDir) -> Path = filename:join(TestDir, Suite ++ "_SUITE.erl"), case filelib:is_regular(Path) of diff --git a/src/rebar_deps.erl b/src/rebar_deps.erl index 2e305d5..bd94921 100644 --- a/src/rebar_deps.erl +++ b/src/rebar_deps.erl @@ -40,6 +40,7 @@ %% for internal use only -export([info/2]). +-export([get_deps_dir/1]). -record(dep, { dir, app, @@ -277,7 +278,8 @@ info_help(Description) -> {app_name, ".*", {svn, "svn://svn.example.org/url"}}, {app_name, ".*", {bzr, "https://www.example.org/url", "Rev"}}, {app_name, ".*", {fossil, "https://www.example.org/url"}}, - {app_name, ".*", {fossil, "https://www.example.org/url", "Vsn"}}]} + {app_name, ".*", {fossil, "https://www.example.org/url", "Vsn"}}, + {app_name, ".*", {p4, "//depot/subdir/app_dir"}}]} ]). %% Added because of trans deps, @@ -304,7 +306,7 @@ get_deps_dir(Config) -> get_deps_dir(Config, ""). get_deps_dir(Config, App) -> - BaseDir = rebar_config:get_xconf(Config, base_dir, []), + BaseDir = rebar_utils:base_dir(Config), DepsDir = get_shared_deps_dir(Config, "deps"), {true, filename:join([BaseDir, DepsDir, App])}. @@ -507,6 +509,40 @@ use_source(Config, Dep, Count) -> use_source(Config, Dep#dep { dir = TargetDir }, Count-1) end. +-record(p4_settings, { + client=undefined, + transport="tcp4:perforce:1666", + username, + password + }). +init_p4_settings(Basename) -> + #p4_settings{client = + case inet:gethostname() of + {ok,HostName} -> + HostName ++ "-" + ++ os:getenv("USER") ++ "-" + ++ Basename + ++ "-Rebar-automated-download" + end}. + +download_source(AppDir, {p4, Url}) -> + download_source(AppDir, {p4, Url, "#head"}); +download_source(AppDir, {p4, Url, Rev}) -> + download_source(AppDir, {p4, Url, Rev, init_p4_settings(filename:basename(AppDir))}); +download_source(AppDir, {p4, Url, _Rev, Settings}) -> + ok = filelib:ensure_dir(AppDir), + rebar_utils:sh_send("p4 client -i", + ?FMT("Client: ~s~n" + ++"Description: generated by Rebar~n" + ++"Root: ~s~n" + ++"View:~n" + ++" ~s/... //~s/...~n", + [Settings#p4_settings.client, + AppDir, + Url, + Settings#p4_settings.client]), + []), + rebar_utils:sh(?FMT("p4 -c ~s sync -f", [Settings#p4_settings.client]), []); download_source(AppDir, {hg, Url, Rev}) -> ok = filelib:ensure_dir(AppDir), rebar_utils:sh(?FMT("hg clone -U ~s ~s", [Url, filename:basename(AppDir)]), @@ -573,6 +609,8 @@ update_source(Config, Dep) -> Dep end. +update_source1(AppDir, Args) when element(1, Args) =:= p4 -> + download_source(AppDir, Args); update_source1(AppDir, {git, Url}) -> update_source1(AppDir, {git, Url, {branch, "HEAD"}}); update_source1(AppDir, {git, Url, ""}) -> @@ -696,7 +734,7 @@ source_engine_avail(Source) -> source_engine_avail(Name, Source) when Name == hg; Name == git; Name == svn; Name == bzr; Name == rsync; - Name == fossil -> + Name == fossil; Name == p4 -> case vcs_client_vsn(Name) >= required_vcs_client_vsn(Name) of true -> true; @@ -717,6 +755,7 @@ vcs_client_vsn(Path, VsnArg, VsnRegex) -> false end. +required_vcs_client_vsn(p4) -> {2013, 1}; required_vcs_client_vsn(hg) -> {1, 1}; required_vcs_client_vsn(git) -> {1, 5}; required_vcs_client_vsn(bzr) -> {2, 0}; @@ -724,6 +763,9 @@ required_vcs_client_vsn(svn) -> {1, 6}; required_vcs_client_vsn(rsync) -> {2, 0}; required_vcs_client_vsn(fossil) -> {1, 0}. +vcs_client_vsn(p4) -> + vcs_client_vsn(rebar_utils:find_executable("p4"), " -V", + "Rev\\. 
.*/(\\d+)\\.(\\d)/"); vcs_client_vsn(hg) -> vcs_client_vsn(rebar_utils:find_executable("hg"), " --version", "version (\\d+).(\\d+)"); @@ -743,6 +785,8 @@ vcs_client_vsn(fossil) -> vcs_client_vsn(rebar_utils:find_executable("fossil"), " version", "version (\\d+).(\\d+)"). +has_vcs_dir(p4, _) -> + true; has_vcs_dir(git, Dir) -> filelib:is_dir(filename:join(Dir, ".git")); has_vcs_dir(hg, Dir) -> @@ -760,6 +804,8 @@ has_vcs_dir(_, _) -> print_source(#dep{app=App, source=Source}) -> ?CONSOLE("~s~n", [format_source(App, Source)]). +format_source(App, {p4, Url}) -> + format_source(App, {p4, Url, "#head"}); format_source(App, {git, Url}) -> ?FMT("~p BRANCH ~s ~s", [App, "HEAD", Url]); format_source(App, {git, Url, ""}) -> diff --git a/src/rebar_dia_compiler.erl b/src/rebar_dia_compiler.erl index f81c734..ba9d159 100644 --- a/src/rebar_dia_compiler.erl +++ b/src/rebar_dia_compiler.erl @@ -75,8 +75,8 @@ compile_dia(Source, Target, Config) -> case diameter_dict_util:parse({path, Source}, []) of {ok, Spec} -> FileName = dia_filename(Source, Spec), - diameter_codegen:from_dict(FileName, Spec, Opts, erl), - diameter_codegen:from_dict(FileName, Spec, Opts, hrl), + _ = diameter_codegen:from_dict(FileName, Spec, Opts, erl), + _ = diameter_codegen:from_dict(FileName, Spec, Opts, hrl), HrlFile = filename:join("src", FileName ++ ".hrl"), case filelib:is_regular(HrlFile) of true -> diff --git a/src/rebar_erlc_compiler.erl b/src/rebar_erlc_compiler.erl index dbefa4a..5f541d9 100644 --- a/src/rebar_erlc_compiler.erl +++ b/src/rebar_erlc_compiler.erl @@ -36,6 +36,25 @@ -include("rebar.hrl"). -include_lib("stdlib/include/erl_compile.hrl"). +-define(ERLCINFO_VSN, 1). +-define(ERLCINFO_FILE, "erlcinfo"). +-type erlc_info_v() :: {digraph:vertex(), term()} | 'false'. +-type erlc_info_e() :: {digraph:vertex(), digraph:vertex()}. +-type erlc_info() :: {list(erlc_info_v()), list(erlc_info_e())}. +-record(erlcinfo, + { + vsn = ?ERLCINFO_VSN :: pos_integer(), + info = {[], []} :: erlc_info() + }). + +-ifdef(namespaced_types). +% digraph:digraph() exists starting from Erlang 17. +-type rebar_digraph() :: digraph:digraph(). +-else. +% digraph() has been obsoleted in Erlang 17 and deprecated in 18. +-type rebar_digraph() :: digraph(). +-endif. + %% =================================================================== %% Public API %% =================================================================== @@ -90,7 +109,7 @@ compile(Config, _AppFile) -> doterl_compile(Config, "ebin"). -spec clean(rebar_config:config(), file:filename()) -> 'ok'. -clean(_Config, _AppFile) -> +clean(Config, _AppFile) -> MibFiles = rebar_utils:find_files("mibs", "^.*\\.mib\$"), MIBs = [filename:rootname(filename:basename(MIB)) || MIB <- MibFiles], rebar_file_utils:delete_each( @@ -103,6 +122,9 @@ clean(_Config, _AppFile) -> [ binary_to_list(iolist_to_binary(re:replace(F, "\\.[x|y]rl$", ".erl"))) || F <- YrlFiles ]), + %% Delete the build graph, if any + rebar_file_utils:rm_rf(erlcinfo_file(Config)), + %% Erlang compilation is recursive, so it's possible that we have a nested %% directory structure in ebin with .beam files within. As such, we want %% to scan whatever is left in the ebin/ directory for sub-dirs which @@ -120,24 +142,26 @@ test_compile(Config, Cmd, OutDir) -> %% Obtain all the test modules for inclusion in the compile stage. 
TestErls = rebar_utils:find_files("test", ".*\\.erl\$"), + ErlOpts = rebar_utils:erl_opts(Config), + {Config1, ErlOpts1} = test_compile_config_and_opts(Config, ErlOpts, Cmd), + %% Copy source files to eunit dir for cover in case they are not directly %% in src but in a subdirectory of src. Cover only looks in cwd and ../src %% for source files. Also copy files from src_dirs. - ErlOpts = rebar_utils:erl_opts(Config), - - SrcDirs = rebar_utils:src_dirs(proplists:append_values(src_dirs, ErlOpts)), + SrcDirs = rebar_utils:src_dirs(proplists:append_values(src_dirs, ErlOpts1)), SrcErls = lists:foldl( fun(Dir, Acc) -> Files = rebar_utils:find_files(Dir, ".*\\.erl\$"), lists:append(Acc, Files) end, [], SrcDirs), - %% If it is not the first time rebar eunit is executed, there will be source - %% files already present in OutDir. Since some SCMs (like Perforce) set - %% the source files as being read only (unless they are checked out), we - %% need to be sure that the files already present in OutDir are writable - %% before doing the copy. This is done here by removing any file that was - %% already present before calling rebar_file_utils:cp_r. + %% If it is not the first time rebar eunit or rebar qc is executed, + %% there will be source files already present in OutDir. Since some + %% SCMs (like Perforce) set the source files as being read only (unless + %% they are checked out), we need to be sure that the files already + %% present in OutDir are writable before doing the copy. This is done + %% here by removing any file that was already present before calling + %% rebar_file_utils:cp_r. %% Get the full path to a file that was previously copied in OutDir ToCleanUp = fun(F, Acc) -> @@ -157,8 +181,7 @@ test_compile(Config, Cmd, OutDir) -> %% Compile erlang code to OutDir, using a tweaked config %% with appropriate defines for eunit, and include all the test modules %% as well. - ok = doterl_compile(test_compile_config(Config, ErlOpts, Cmd), - OutDir, TestErls), + ok = doterl_compile(Config1, OutDir, TestErls, ErlOpts1), {ok, SrcErls}. @@ -202,21 +225,22 @@ info_help(Description) -> {yrl_first_files, []} ]). -test_compile_config(Config, ErlOpts, Cmd) -> +test_compile_config_and_opts(Config, ErlOpts, Cmd) -> {Config1, TriqOpts} = triq_opts(Config), {Config2, PropErOpts} = proper_opts(Config1), {Config3, EqcOpts} = eqc_opts(Config2), OptsAtom = list_to_atom(Cmd ++ "_compile_opts"), - EunitOpts = rebar_config:get_list(Config3, OptsAtom, []), + TestOpts = rebar_config:get_list(Config3, OptsAtom, []), Opts0 = [{d, 'TEST'}] ++ - ErlOpts ++ EunitOpts ++ TriqOpts ++ PropErOpts ++ EqcOpts, + ErlOpts ++ TestOpts ++ TriqOpts ++ PropErOpts ++ EqcOpts, Opts = [O || O <- Opts0, O =/= no_debug_info], Config4 = rebar_config:set(Config3, erl_opts, Opts), FirstFilesAtom = list_to_atom(Cmd ++ "_first_files"), FirstErls = rebar_config:get_list(Config4, FirstFilesAtom, []), - rebar_config:set(Config4, erl_first_files, FirstErls). + Config5 = rebar_config:set(Config4, erl_first_files, FirstErls), + {Config5, Opts}. triq_opts(Config) -> {NewConfig, IsAvail} = is_lib_avail(Config, is_triq_avail, triq, @@ -257,125 +281,281 @@ is_lib_avail(Config, DictKey, Mod, Hrl, Name) -> -spec doterl_compile(rebar_config:config(), file:filename()) -> 'ok'. doterl_compile(Config, OutDir) -> - doterl_compile(Config, OutDir, []). - -doterl_compile(Config, OutDir, MoreSources) -> - FirstErls = rebar_config:get_list(Config, erl_first_files, []), ErlOpts = rebar_utils:erl_opts(Config), + doterl_compile(Config, OutDir, [], ErlOpts). 
+ +doterl_compile(Config, OutDir, MoreSources, ErlOpts) -> + ErlFirstFiles = rebar_config:get_list(Config, erl_first_files, []), ?DEBUG("erl_opts ~p~n", [ErlOpts]), %% Support the src_dirs option allowing multiple directories to %% contain erlang source. This might be used, for example, should %% eunit tests be separated from the core application source. SrcDirs = rebar_utils:src_dirs(proplists:append_values(src_dirs, ErlOpts)), RestErls = [Source || Source <- gather_src(SrcDirs, []) ++ MoreSources, - not lists:member(Source, FirstErls)], - - %% Split RestErls so that parse_transforms and behaviours are instead added - %% to erl_first_files, parse transforms first. - %% This should probably be somewhat combined with inspect_epp - [ParseTransforms, Behaviours, OtherErls] = - lists:foldl(fun(F, [A, B, C]) -> - case compile_priority(F) of - parse_transform -> - [[F | A], B, C]; - behaviour -> - [A, [F | B], C]; - callback -> - [A, [F | B], C]; - _ -> - [A, B, [F | C]] - end - end, [[], [], []], RestErls), - - NewFirstErls = FirstErls ++ ParseTransforms ++ Behaviours, - + not lists:member(Source, ErlFirstFiles)], %% Make sure that ebin/ exists and is on the path ok = filelib:ensure_dir(filename:join("ebin", "dummy.beam")), CurrPath = code:get_path(), true = code:add_path(filename:absname("ebin")), OutDir1 = proplists:get_value(outdir, ErlOpts, OutDir), - rebar_base_compiler:run(Config, NewFirstErls, OtherErls, - fun(S, C) -> - internal_erl_compile(C, S, OutDir1, ErlOpts) - end), + G = init_erlcinfo(Config, RestErls), + %% Split RestErls so that files which are depended on are treated + %% like erl_first_files. + {OtherFirstErls, OtherErls} = + lists:partition( + fun(F) -> + Children = get_children(G, F), + log_files(?FMT("Files dependent on ~s", [F]), Children), + + case erls(Children) of + [] -> + %% There are no files dependent on this file. + false; + _ -> + %% There are some files dependent on the file. + %% Thus the file has higher priority + %% and should be compiled in the first place. + true + end + end, RestErls), + %% Dependencies of OtherFirstErls that must be compiled first. + OtherFirstErlsDeps = lists:flatmap( + fun(Erl) -> erls(get_parents(G, Erl)) end, + OtherFirstErls), + %% NOTE: In case the way we retrieve OtherFirstErlsDeps or merge + %% it with OtherFirstErls does not result in the correct compile + %% priorities, or the method in use proves to be too slow for + %% certain projects, consider using a more elaborate method (maybe + %% digraph_utils) or alternatively getting and compiling the .erl + %% parents of an individual Source in internal_erl_compile. By not + %% handling this in internal_erl_compile, we also avoid extra + %% needs_compile/2 calls. + FirstErls = ErlFirstFiles ++ uo_merge(OtherFirstErlsDeps, OtherFirstErls), + ?DEBUG("Files to compile first: ~p~n", [FirstErls]), + rebar_base_compiler:run( + Config, FirstErls, OtherErls, + fun(S, C) -> + internal_erl_compile(C, S, OutDir1, ErlOpts, G) + end), true = code:set_path(CurrPath), ok. +%% +%% Return all .erl files from a list of files +%% +erls(Files) -> + [Erl || Erl <- Files, filename:extension(Erl) =:= ".erl"]. + +%% +%% Return a list without duplicates while preserving order +%% +ulist(L) -> + ulist(L, []). + +ulist([H|T], Acc) -> + case lists:member(H, T) of + true -> + ulist(T, Acc); + false -> + ulist(T, [H|Acc]) + end; +ulist([], Acc) -> + lists:reverse(Acc). 
+ +%% +%% Merge two lists without duplicates while preserving order +%% +uo_merge(L1, L2) -> + lists:foldl(fun(E, Acc) -> u_add_element(E, Acc) end, ulist(L1), L2). + +u_add_element(Elem, [Elem|_]=Set) -> Set; +u_add_element(Elem, [E1|Set]) -> [E1|u_add_element(Elem, Set)]; +u_add_element(Elem, []) -> [Elem]. + -spec include_path(file:filename(), rebar_config:config()) -> [file:filename(), ...]. include_path(Source, Config) -> ErlOpts = rebar_config:get(Config, erl_opts, []), - ["include", filename:dirname(Source)] - ++ proplists:get_all_values(i, ErlOpts). - --spec inspect(file:filename(), - [file:filename(), ...]) -> {string(), [string()]}. -inspect(Source, IncludePath) -> - ModuleDefault = filename:basename(Source, ".erl"), - case epp:open(Source, IncludePath) of - {ok, Epp} -> - inspect_epp(Epp, Source, ModuleDefault, []); - {error, Reason} -> - ?DEBUG("Failed to inspect ~s: ~p\n", [Source, Reason]), - {ModuleDefault, []} - end. - --spec inspect_epp(pid(), file:filename(), file:filename(), - [string()]) -> {string(), [string()]}. -inspect_epp(Epp, Source, Module, Includes) -> - case epp:parse_erl_form(Epp) of - {ok, {attribute, _, module, ModInfo}} -> - ActualModuleStr = - case ModInfo of - %% Typical module name, single atom - ActualModule when is_atom(ActualModule) -> - atom_to_list(ActualModule); - %% Packag-ized module name, list of atoms - ActualModule when is_list(ActualModule) -> - string:join([atom_to_list(P) || - P <- ActualModule], "."); - %% Parameterized module name, single atom - {ActualModule, _} when is_atom(ActualModule) -> - atom_to_list(ActualModule); - %% Parameterized and packagized module name, list of atoms - {ActualModule, _} when is_list(ActualModule) -> - string:join([atom_to_list(P) || - P <- ActualModule], ".") - end, - inspect_epp(Epp, Source, ActualModuleStr, Includes); - {ok, {attribute, 1, file, {Module, 1}}} -> - inspect_epp(Epp, Source, Module, Includes); - {ok, {attribute, 1, file, {Source, 1}}} -> - inspect_epp(Epp, Source, Module, Includes); - {ok, {attribute, 1, file, {IncFile, 1}}} -> - inspect_epp(Epp, Source, Module, [IncFile | Includes]); - {eof, _} -> - epp:close(Epp), - {Module, Includes}; - _ -> - inspect_epp(Epp, Source, Module, Includes) - end. + lists:usort(["include", filename:dirname(Source)] + ++ proplists:get_all_values(i, ErlOpts)). -spec needs_compile(file:filename(), file:filename(), [string()]) -> boolean(). -needs_compile(Source, Target, Hrls) -> +needs_compile(Source, Target, Parents) -> TargetLastMod = filelib:last_modified(Target), lists:any(fun(I) -> TargetLastMod < filelib:last_modified(I) end, - [Source] ++ Hrls). + [Source] ++ Parents). + +check_erlcinfo(_Config, #erlcinfo{vsn=?ERLCINFO_VSN}) -> + ok; +check_erlcinfo(Config, #erlcinfo{vsn=Vsn}) -> + ?ABORT("~s file version is incompatible. expected: ~b got: ~b~n", + [erlcinfo_file(Config), ?ERLCINFO_VSN, Vsn]); +check_erlcinfo(Config, _) -> + ?ABORT("~s file is invalid. Please delete before next run.~n", + [erlcinfo_file(Config)]). + +erlcinfo_file(Config) -> + filename:join([rebar_utils:base_dir(Config), ".rebar", ?ERLCINFO_FILE]). + +init_erlcinfo(Config, Erls) -> + G = restore_erlcinfo(Config), + %% Get a unique list of dirs based on the source files' locations. + %% This is used for finding files in sub dirs of the configured + %% src_dirs. For example, src/sub_dir/foo.erl. 
+ Dirs = sets:to_list(lists:foldl( + fun(Erl, Acc) -> + Dir = filename:dirname(Erl), + sets:add_element(Dir, Acc) + end, sets:new(), Erls)), + Updates = [update_erlcinfo(G, Erl, include_path(Erl, Config) ++ Dirs) + || Erl <- Erls], + Modified = lists:member(modified, Updates), + ok = store_erlcinfo(G, Config, Modified), + G. + +update_erlcinfo(G, Source, Dirs) -> + case digraph:vertex(G, Source) of + {_, LastUpdated} -> + case filelib:last_modified(Source) of + 0 -> + %% The file doesn't exist anymore, + %% erase it from the graph. + %% All the edges will be erased automatically. + digraph:del_vertex(G, Source), + modified; + LastModified when LastUpdated < LastModified -> + modify_erlcinfo(G, Source, Dirs), + modified; + _ -> + unmodified + end; + false -> + modify_erlcinfo(G, Source, Dirs), + modified + end. + +modify_erlcinfo(G, Source, Dirs) -> + {ok, Fd} = file:open(Source, [read]), + Incls = parse_attrs(Fd, []), + AbsIncls = expand_file_names(Incls, Dirs), + ok = file:close(Fd), + LastUpdated = {date(), time()}, + digraph:add_vertex(G, Source, LastUpdated), + lists:foreach( + fun(Incl) -> + update_erlcinfo(G, Incl, Dirs), + digraph:add_edge(G, Source, Incl) + end, AbsIncls). + +restore_erlcinfo(Config) -> + File = erlcinfo_file(Config), + G = digraph:new(), + case file:read_file(File) of + {ok, Data} -> + try binary_to_term(Data) of + Erlcinfo -> + ok = check_erlcinfo(Config, Erlcinfo), + #erlcinfo{info=ErlcInfo} = Erlcinfo, + {Vs, Es} = ErlcInfo, + lists:foreach( + fun({V, LastUpdated}) -> + digraph:add_vertex(G, V, LastUpdated) + end, Vs), + lists:foreach( + fun({V1, V2}) -> + digraph:add_edge(G, V1, V2) + end, Es) + catch + error:badarg -> + ?ERROR( + "Failed (binary_to_term) to restore rebar info file." + " Discard file.~n", []), + ok + end; + _Err -> + ok + end, + G. + +store_erlcinfo(_G, _Config, _Modified = false) -> + ok; +store_erlcinfo(G, Config, _Modified) -> + Vs = lists:map( + fun(V) -> + digraph:vertex(G, V) + end, digraph:vertices(G)), + Es = lists:flatmap( + fun({V, _}) -> + lists:map( + fun(E) -> + {_, V1, V2, _} = digraph:edge(G, E), + {V1, V2} + end, digraph:out_edges(G, V)) + end, Vs), + File = erlcinfo_file(Config), + ok = filelib:ensure_dir(File), + Data = term_to_binary(#erlcinfo{info={Vs, Es}}, [{compressed, 9}]), + file:write_file(File, Data). + +%% NOTE: If, for example, one of the entries in Files, refers to +%% gen_server.erl, that entry will be dropped. It is dropped because +%% such an entry usually refers to the beam file, and we don't pass a +%% list of OTP src dirs for finding gen_server.erl's full path. Also, +%% if gen_server.erl was modified, it's not rebar's task to compile a +%% new version of the beam file. Therefore, it's reasonable to drop +%% such entries. Also see process_attr(behaviour, Form, Includes). +-spec expand_file_names([file:filename()], + [file:filename()]) -> [file:filename()]. +expand_file_names(Files, Dirs) -> + %% We check if Files exist by itself or within the directories + %% listed in Dirs. + %% Return the list of files matched. + lists:flatmap( + fun(Incl) -> + case filelib:is_regular(Incl) of + true -> + [Incl]; + false -> + lists:flatmap( + fun(Dir) -> + FullPath = filename:join(Dir, Incl), + case filelib:is_regular(FullPath) of + true -> + [FullPath]; + false -> + [] + end + end, Dirs) + end + end, Files). + +-spec get_parents(rebar_digraph(), file:filename()) -> [file:filename()]. +get_parents(G, Source) -> + %% Return all files which the Source depends upon. + digraph_utils:reachable_neighbours([Source], G). 
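The erlcinfo graph records an edge from each source file to every file it includes or otherwise depends on, so get_parents/2 answers "what does this file depend on", while get_children/2 (defined just below) answers "which files depend on it". A rough shell sketch with made-up file names, using the same digraph_utils calls as the patch:

    G = digraph:new(),
    [digraph:add_vertex(G, V) || V <- ["src/a.erl", "include/a.hrl", "src/b.erl"]],
    digraph:add_edge(G, "src/a.erl", "include/a.hrl"),  %% a.erl -include()s a.hrl
    digraph:add_edge(G, "src/b.erl", "src/a.erl"),      %% b.erl uses a.erl (e.g. a behaviour)
    %% dependencies of b.erl, i.e. its "parents" (in some order): "src/a.erl", "include/a.hrl"
    digraph_utils:reachable_neighbours(["src/b.erl"], G),
    %% files dependent on a.erl, i.e. its "children": "src/b.erl"
    digraph_utils:reaching_neighbours(["src/a.erl"], G).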
+ +-spec get_children(rebar_digraph(), file:filename()) -> [file:filename()]. +get_children(G, Source) -> + %% Return all files dependent on the Source. + digraph_utils:reaching_neighbours([Source], G). -spec internal_erl_compile(rebar_config:config(), file:filename(), - file:filename(), list()) -> 'ok' | 'skipped'. -internal_erl_compile(Config, Source, Outdir, ErlOpts) -> + file:filename(), list(), + rebar_digraph()) -> 'ok' | 'skipped'. +internal_erl_compile(Config, Source, OutDir, ErlOpts, G) -> %% Determine the target name and includes list by inspecting the source file - {Module, Hrls} = inspect(Source, include_path(Source, Config)), + Module = filename:basename(Source, ".erl"), + Parents = get_parents(G, Source), + log_files(?FMT("Dependencies of ~s", [Source]), Parents), %% Construct the target filename - Target = filename:join([Outdir | string:tokens(Module, ".")]) ++ ".beam", + Target = filename:join([OutDir | string:tokens(Module, ".")]) ++ ".beam", ok = filelib:ensure_dir(Target), %% If the file needs compilation, based on last mod date of includes or %% the target - case needs_compile(Source, Target, Hrls) of + case needs_compile(Source, Target, Parents) of true -> Opts = [{outdir, filename:dirname(Target)}] ++ ErlOpts ++ [{i, "include"}, return], @@ -463,40 +643,97 @@ delete_dir(Dir, Subdirs) -> lists:foreach(fun(D) -> delete_dir(D, dirs(D)) end, Subdirs), file:del_dir(Dir). --spec compile_priority(file:filename()) -> 'normal' | 'behaviour' | - 'callback' | - 'parse_transform'. -compile_priority(File) -> - case epp_dodger:parse_file(File) of - {error, _} -> - normal; % couldn't parse the file, default priority - {ok, Trees} -> - F2 = fun({tree,arity_qualifier,_, - {arity_qualifier,{tree,atom,_,behaviour_info}, - {tree,integer,_,1}}}, _) -> - behaviour; - ({tree,arity_qualifier,_, - {arity_qualifier,{tree,atom,_,parse_transform}, - {tree,integer,_,2}}}, _) -> - parse_transform; - (_, Acc) -> - Acc - end, - - F = fun({tree, attribute, _, - {attribute, {tree, atom, _, export}, - [{tree, list, _, {list, List, none}}]}}, Acc) -> - lists:foldl(F2, Acc, List); - ({tree, attribute, _, - {attribute, {tree, atom, _, callback},_}}, _Acc) -> - callback; - (_, Acc) -> - Acc - end, +parse_attrs(Fd, Includes) -> + case io:parse_erl_form(Fd, "") of + {ok, Form, _Line} -> + case erl_syntax:type(Form) of + attribute -> + NewIncludes = process_attr(Form, Includes), + parse_attrs(Fd, NewIncludes); + _ -> + parse_attrs(Fd, Includes) + end; + {eof, _} -> + Includes; + _Err -> + parse_attrs(Fd, Includes) + end. + +process_attr(Form, Includes) -> + try + AttrName = erl_syntax:atom_value(erl_syntax:attribute_name(Form)), + process_attr(AttrName, Form, Includes) + catch _:_ -> + %% TODO: We should probably try to be more specific here + %% and not suppress all errors. + Includes + end. 
- lists:foldl(F, normal, Trees) +process_attr(import, Form, Includes) -> + case erl_syntax_lib:analyze_import_attribute(Form) of + {Mod, _Funs} -> + [atom_to_list(Mod) ++ ".erl"|Includes]; + Mod -> + [atom_to_list(Mod) ++ ".erl"|Includes] + end; +process_attr(file, Form, Includes) -> + {File, _} = erl_syntax_lib:analyze_file_attribute(Form), + [File|Includes]; +process_attr(include, Form, Includes) -> + [FileNode] = erl_syntax:attribute_arguments(Form), + File = erl_syntax:string_value(FileNode), + [File|Includes]; +process_attr(include_lib, Form, Includes) -> + [FileNode] = erl_syntax:attribute_arguments(Form), + RawFile = erl_syntax:string_value(FileNode), + File = maybe_expand_include_lib_path(RawFile), + [File|Includes]; +process_attr(behaviour, Form, Includes) -> + [FileNode] = erl_syntax:attribute_arguments(Form), + File = erl_syntax:atom_name(FileNode) ++ ".erl", + [File|Includes]; +process_attr(compile, Form, Includes) -> + [Arg] = erl_syntax:attribute_arguments(Form), + case erl_syntax:concrete(Arg) of + {parse_transform, Mod} -> + [atom_to_list(Mod) ++ ".erl"|Includes]; + {core_transform, Mod} -> + [atom_to_list(Mod) ++ ".erl"|Includes]; + L when is_list(L) -> + lists:foldl( + fun({parse_transform, M}, Acc) -> + [atom_to_list(M) ++ ".erl"|Acc]; + ({core_transform, M}, Acc) -> + [atom_to_list(M) ++ ".erl"|Acc]; + (_, Acc) -> + Acc + end, Includes, L) + end. + +%% Given the filename from an include_lib attribute, if the path +%% exists, return unmodified, or else get the absolute ERL_LIBS +%% path. +maybe_expand_include_lib_path(File) -> + case filelib:is_regular(File) of + true -> + File; + false -> + expand_include_lib_path(File) end. +%% Given a path like "stdlib/include/erl_compile.hrl", return +%% "OTP_INSTALL_DIR/lib/erlang/lib/stdlib-x.y.z/include/erl_compile.hrl". +%% Usually a simple [Lib, SubDir, File1] = filename:split(File) should +%% work, but to not crash when an unusual include_lib path is used, +%% utilize more elaborate logic. +expand_include_lib_path(File) -> + File1 = filename:basename(File), + Split = filename:split(filename:dirname(File)), + Lib = hd(Split), + SubDir = filename:join(tl(Split)), + Dir = code:lib_dir(list_to_atom(Lib), list_to_atom(SubDir)), + filename:join(Dir, File1). + %% %% Ensure all files in a list are present and abort if one is missing %% @@ -509,3 +746,13 @@ check_file(File) -> false -> ?ABORT("File ~p is missing, aborting\n", [File]); true -> File end. + +%% Print prefix followed by list of files. If the list is empty, print +%% on the same line, otherwise use a separate line. +log_files(Prefix, Files) -> + case Files of + [] -> + ?DEBUG("~s: ~p~n", [Prefix, Files]); + _ -> + ?DEBUG("~s:~n~p~n", [Prefix, Files]) + end. diff --git a/src/rebar_erlydtl_compiler.erl b/src/rebar_erlydtl_compiler.erl index 6172879..10387f5 100644 --- a/src/rebar_erlydtl_compiler.erl +++ b/src/rebar_erlydtl_compiler.erl @@ -89,7 +89,7 @@ %% {doc_root, "src"}, {module_ext, "_dtl"} %% ], %% [ -%% {doc_root, "templates", {module_ext, ""}, {source_ext, ".html"} +%% {doc_root, "templates"}, {module_ext, ""}, {source_ext, ".html"} %% ] %% ]}. -module(rebar_erlydtl_compiler). 
@@ -178,7 +178,7 @@ compile_dtl(Config, Source, Target, DtlOpts) -> ?ERROR("~n===============================================~n" " You need to install erlydtl to compile DTL templates~n" " Download the latest tarball release from github~n" - " http://code.google.com/p/erlydtl/~n" + " https://github.com/erlydtl/erlydtl/releases~n" " and install it into your erlang library dir~n" "===============================================~n~n", []), ?FAIL; @@ -194,15 +194,22 @@ compile_dtl(Config, Source, Target, DtlOpts) -> do_compile(Config, Source, Target, DtlOpts) -> %% TODO: Check last mod on target and referenced DTLs here.. + %% erlydtl >= 0.8.1 does not use the extra indirection using the + %% compiler_options. Kept for backward compatibility with older + %% versions of erlydtl. + CompilerOptions = option(compiler_options, DtlOpts), + + Sorted = proplists:unfold( + lists:sort( + [{out_dir, option(out_dir, DtlOpts)}, + {doc_root, option(doc_root, DtlOpts)}, + {custom_tags_dir, option(custom_tags_dir, DtlOpts)}, + {compiler_options, CompilerOptions} + |CompilerOptions])), + %% ensure that doc_root and out_dir are defined, %% using defaults if necessary - Opts = lists:ukeymerge(1, - DtlOpts, - lists:sort( - [{out_dir, option(out_dir, DtlOpts)}, - {doc_root, option(doc_root, DtlOpts)}, - {custom_tags_dir, option(custom_tags_dir, DtlOpts)}, - {compiler_options, option(compiler_options, DtlOpts)}])), + Opts = lists:ukeymerge(1, DtlOpts, Sorted), ?INFO("Compiling \"~s\" -> \"~s\" with options:~n ~s~n", [Source, Target, io_lib:format("~p", [Opts])]), case erlydtl:compile(Source, @@ -210,13 +217,21 @@ do_compile(Config, Source, Target, DtlOpts) -> Opts) of ok -> ok; + {ok, _Mod} -> + ok; + {ok, _Mod, Ws} -> + rebar_base_compiler:ok_tuple(Config, Source, Ws); + {ok, _Mod, _Bin, Ws} -> + rebar_base_compiler:ok_tuple(Config, Source, Ws); error -> rebar_base_compiler:error_tuple(Config, Source, [], [], Opts); {error, {_File, _Msgs} = Error} -> rebar_base_compiler:error_tuple(Config, Source, [Error], [], Opts); {error, Msg} -> Es = [{Source, [{erlydtl_parser, Msg}]}], - rebar_base_compiler:error_tuple(Config, Source, Es, [], Opts) + rebar_base_compiler:error_tuple(Config, Source, Es, [], Opts); + {error, Es, Ws} -> + rebar_base_compiler:error_tuple(Config, Source, Es, Ws, Opts) end. 
module_name(Target) -> diff --git a/src/rebar_eunit.erl b/src/rebar_eunit.erl index d39b1a2..a5b7b00 100644 --- a/src/rebar_eunit.erl +++ b/src/rebar_eunit.erl @@ -84,8 +84,7 @@ eunit(Config, _AppFile) -> ok = ensure_dirs(), %% Save code path CodePath = setup_code_path(), - CompileOnly = rebar_utils:get_experimental_global(Config, compile_only, - false), + CompileOnly = rebar_config:get_global(Config, compile_only, false), {ok, SrcErls} = rebar_erlc_compiler:test_compile(Config, "eunit", ?EUNIT_DIR), case CompileOnly of @@ -121,12 +120,16 @@ info_help(Description) -> " ~p~n" " ~p~n" "Valid command line options:~n" - " suites=\"foo,bar\" (Run tests in foo.erl, test/foo_tests.erl and~n" + " suite[s]=\"foo,bar\" (Run tests in foo.erl, test/foo_tests.erl and~n" " tests in bar.erl, test/bar_tests.erl)~n" - " tests=\"baz\" (For every existing suite, run the first test whose~n" + " test[s]=\"baz\" (For every existing suite, run the first test whose~n" " name starts with bar and, if no such test exists,~n" " run the test whose name starts with bar in the~n" - " suite's _tests module)~n", + " suite's _tests module)~n" + " random_suite_order=true (Run tests in random order)~n" + " random_suite_order=Seed (Run tests in random order,~n" + " with the PRNG seeded with Seed)~n" + " compile_only=true (Compile but do not run tests)", [ Description, {eunit_opts, []}, @@ -150,24 +153,24 @@ run_eunit(Config, CodePath, SrcErls) -> AllBeamFiles), OtherBeamFiles = TestBeamFiles -- [filename:rootname(N) ++ "_tests.beam" || N <- AllBeamFiles], - ModuleBeamFiles = BeamFiles ++ OtherBeamFiles, + ModuleBeamFiles = randomize_suites(Config, BeamFiles ++ OtherBeamFiles), - %% Get modules to be run in eunit + %% Get matching tests and modules AllModules = [rebar_utils:beam_to_mod(?EUNIT_DIR, N) || N <- AllBeamFiles], - {SuitesProvided, FilteredModules} = filter_suites(Config, AllModules), - - %% Get matching tests - Tests = get_tests(Config, SuitesProvided, ModuleBeamFiles, FilteredModules), + {Tests, FilteredModules} = + get_tests_and_modules(Config, ModuleBeamFiles, AllModules), SrcModules = [rebar_utils:erl_to_mod(M) || M <- SrcErls], - {ok, CoverLog} = cover_init(Config, ModuleBeamFiles), + {ok, CoverLog} = rebar_cover_utils:init(Config, ModuleBeamFiles, + eunit_dir()), StatusBefore = status_before_eunit(), EunitResult = perform_eunit(Config, Tests), - perform_cover(Config, FilteredModules, SrcModules), - cover_close(CoverLog), + rebar_cover_utils:perform_cover(Config, FilteredModules, SrcModules, + eunit_dir()), + rebar_cover_utils:close(CoverLog), case proplists:get_value(reset_after_eunit, get_eunit_opts(Config), true) of @@ -179,7 +182,7 @@ run_eunit(Config, CodePath, SrcErls) -> %% Stop cover to clean the cover_server state. This is important if we want %% eunit+cover to not slow down when analyzing many Erlang modules. - ok = cover:stop(), + ok = rebar_cover_utils:exit(), case EunitResult of ok -> @@ -211,69 +214,149 @@ setup_code_path() -> CodePath. %% -%% == filter suites == +%% == get matching tests == %% +get_tests_and_modules(Config, ModuleBeamFiles, AllModules) -> + SelectedSuites = get_selected_suites(Config, AllModules), + {Tests, QualifiedTests} = get_qualified_and_unqualified_tests(Config), + Modules = get_test_modules(SelectedSuites, Tests, + QualifiedTests, ModuleBeamFiles), + FilteredModules = get_matching_modules(AllModules, Modules, QualifiedTests), + MatchedTests = get_matching_tests(Modules, Tests, QualifiedTests), + {MatchedTests, FilteredModules}. 
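Given the parsing above, suites are a comma-separated list and tests may be either bare name prefixes or qualified as module:function. Hypothetical invocations (a usage sketch, not taken from the patch):

    $ rebar eunit suites=foo,bar
    $ rebar eunit tests=encode            (prefix match in every selected suite)
    $ rebar eunit tests=foo_tests:decode  (qualified test: module and function)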
-filter_suites(Config, Modules) -> - RawSuites = rebar_config:get_global(Config, suites, ""), - SuitesProvided = RawSuites =/= "", +%% +%% == get suites specified via 'suites' option == +%% +get_selected_suites(Config, Modules) -> + RawSuites = get_suites(Config), Suites = [list_to_atom(Suite) || Suite <- string:tokens(RawSuites, ",")], - {SuitesProvided, filter_suites1(Modules, Suites)}. - -filter_suites1(Modules, []) -> - Modules; -filter_suites1(Modules, Suites) -> [M || M <- Suites, lists:member(M, Modules)]. +get_suites(Config) -> + case rebar_config:get_global(Config, suites, "") of + "" -> + rebar_config:get_global(Config, suite, ""); + Suites -> + Suites + end. + +get_qualified_and_unqualified_tests(Config) -> + RawFunctions = rebar_utils:get_experimental_global(Config, tests, ""), + FunctionNames = [FunctionName || + FunctionName <- string:tokens(RawFunctions, ",")], + get_qualified_and_unqualified_tests1(FunctionNames, [], []). + +get_qualified_and_unqualified_tests1([], Functions, QualifiedFunctions) -> + {Functions, QualifiedFunctions}; +get_qualified_and_unqualified_tests1([TestName|TestNames], Functions, + QualifiedFunctions) -> + case string:tokens(TestName, ":") of + [TestName] -> + Function = list_to_atom(TestName), + get_qualified_and_unqualified_tests1( + TestNames, [Function|Functions], QualifiedFunctions); + [ModuleName, FunctionName] -> + M = list_to_atom(ModuleName), + F = list_to_atom(FunctionName), + get_qualified_and_unqualified_tests1(TestNames, Functions, + [{M, F}|QualifiedFunctions]); + _ -> + ?ABORT("Unsupported test function specification: ~s~n", [TestName]) + end. + +%% Provide modules which are to be searched for tests. +%% Several scenarios are possible: %% -%% == get matching tests == +%% == randomize suites == %% -get_tests(Config, SuitesProvided, ModuleBeamFiles, FilteredModules) -> - Modules = case SuitesProvided of - false -> - %% No specific suites have been provided, use - %% ModuleBeamFiles which filters out "*_tests" modules - %% so eunit won't doubly run them and cover only - %% calculates coverage on production code. However, - %% keep "*_tests" modules that are not automatically - %% included by eunit. - %% - %% From 'Primitives' in the EUnit User's Guide - %% http://www.erlang.org/doc/apps/eunit/chapter.html - %% "In addition, EUnit will also look for another - %% module whose name is ModuleName plus the suffix - %% _tests, and if it exists, all the tests from that - %% module will also be added. (If ModuleName already - %% contains the suffix _tests, this is not done.) E.g., - %% the specification {module, mymodule} will run all - %% tests in the modules mymodule and mymodule_tests. - %% Typically, the _tests module should only contain - %% test cases that use the public interface of the main - %% module (and no other code)." - [rebar_utils:beam_to_mod(?EUNIT_DIR, N) || - N <- ModuleBeamFiles]; - true -> - %% Specific suites have been provided, return the - %% filtered modules - FilteredModules - end, - get_matching_tests(Config, Modules). 
-get_matching_tests(Config, Modules) -> - RawFunctions = rebar_utils:get_experimental_global(Config, tests, ""), - Tests = [list_to_atom(F1) || F1 <- string:tokens(RawFunctions, ",")], - case Tests of - [] -> +randomize_suites(Config, Modules) -> + case rebar_config:get_global(Config, random_suite_order, undefined) of + undefined -> Modules; - Functions -> - case get_matching_tests1(Modules, Functions, []) of - [] -> - []; - RawTests -> - make_test_primitives(RawTests) + "true" -> + Seed = crypto:rand_uniform(1, 65535), + randomize_suites1(Modules, Seed); + String -> + try list_to_integer(String) of + Seed -> + randomize_suites1(Modules, Seed) + catch + error:badarg -> + ?ERROR("Bad random seed provided: ~p~n", [String]), + ?FAIL end end. +randomize_suites1(Modules, Seed) -> + _ = random:seed(35, Seed, 1337), + ?CONSOLE("Randomizing suite order with seed ~b~n", [Seed]), + [X||{_,X} <- lists:sort([{random:uniform(), M} || M <- Modules])]. + +%% +%% == get matching tests == +%% 1) Specific tests have been provided and/or +%% no unqualified tests have been specified and +%% there were some qualified tests, then we can search for +%% functions in specified suites (or in empty set of suites). +%% +%% 2) Neither specific suites nor qualified test names have been +%% provided use ModuleBeamFiles which filters out "*_tests" +%% modules so EUnit won't doubly run them and cover only +%% calculates coverage on production code. However, +%% keep "*_tests" modules that are not automatically +%% included by EUnit. +%% +%% From 'Primitives' in the EUnit User's Guide +%% http://www.erlang.org/doc/apps/eunit/chapter.html +%% "In addition, EUnit will also look for another +%% module whose name is ModuleName plus the suffix +%% _tests, and if it exists, all the tests from that +%% module will also be added. (If ModuleName already +%% contains the suffix _tests, this is not done.) E.g., +%% the specification {module, mymodule} will run all +%% tests in the modules mymodule and mymodule_tests. +%% Typically, the _tests module should only contain +%% test cases that use the public interface of the main +%% module (and no other code)." +get_test_modules(SelectedSuites, Tests, QualifiedTests, ModuleBeamFiles) -> + SuitesProvided = SelectedSuites =/= [], + OnlyQualifiedTestsProvided = QualifiedTests =/= [] andalso Tests =:= [], + if + SuitesProvided orelse OnlyQualifiedTestsProvided -> + SelectedSuites; + true -> + [rebar_utils:beam_to_mod(?EUNIT_DIR, N) || + N <- ModuleBeamFiles] + end. + +get_matching_modules(AllModules, Modules, QualifiedTests) -> + ModuleFilterMapper = + fun({M, _}) -> + case lists:member(M, AllModules) of + true -> {true, M}; + _-> false + end + end, + ModulesFromQualifiedTests = lists:zf(ModuleFilterMapper, QualifiedTests), + lists:usort(Modules ++ ModulesFromQualifiedTests). + +get_matching_tests(Modules, [], []) -> + Modules; +get_matching_tests(Modules, [], QualifiedTests) -> + FilteredQualifiedTests = filter_qualified_tests(Modules, QualifiedTests), + lists:merge(Modules, make_test_primitives(FilteredQualifiedTests)); +get_matching_tests(Modules, Tests, QualifiedTests) -> + AllTests = lists:merge(QualifiedTests, + get_matching_tests1(Modules, Tests, [])), + make_test_primitives(AllTests). + +filter_qualified_tests(Modules, QualifiedTests) -> + TestsFilter = fun({Module, _Function}) -> + lists:all(fun(M) -> M =/= Module end, Modules) end, + lists:filter(TestsFilter, QualifiedTests). 
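Because randomize_suites1/2 prints the seed it used, a failure seen with a random suite order can be reproduced by passing that seed back in. For example (the seed value here is hypothetical):

    $ rebar eunit random_suite_order=true     %% picks a random seed and prints it
    $ rebar eunit random_suite_order=28415    %% re-runs with the printed seed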
+ get_matching_tests1([], _Functions, TestFunctions) -> TestFunctions; @@ -408,7 +491,7 @@ perform_eunit(Config, Tests) -> get_eunit_opts(Config) -> %% Enable verbose in eunit if so requested.. - BaseOpts = case rebar_config:is_verbose(Config) of + BaseOpts = case rebar_log:is_verbose(Config) of true -> [verbose]; false -> @@ -418,226 +501,6 @@ get_eunit_opts(Config) -> BaseOpts ++ rebar_config:get_list(Config, eunit_opts, []). %% -%% == code coverage == -%% - -perform_cover(Config, BeamFiles, SrcModules) -> - perform_cover(rebar_config:get(Config, cover_enabled, false), - Config, BeamFiles, SrcModules). - -perform_cover(false, _Config, _BeamFiles, _SrcModules) -> - ok; -perform_cover(true, Config, BeamFiles, SrcModules) -> - cover_analyze(Config, BeamFiles, SrcModules). - -cover_analyze(_Config, [], _SrcModules) -> - ok; -cover_analyze(Config, FilteredModules, SrcModules) -> - %% Generate coverage info for all the cover-compiled modules - Coverage = lists:flatten([cover_analyze_mod(M) - || M <- FilteredModules, - cover:is_compiled(M) =/= false]), - - %% Write index of coverage info - cover_write_index(lists:sort(Coverage), SrcModules), - - %% Write coverage details for each file - lists:foreach(fun({M, _, _}) -> - {ok, _} = cover:analyze_to_file(M, cover_file(M), - [html]) - end, Coverage), - - Index = filename:join([rebar_utils:get_cwd(), ?EUNIT_DIR, "index.html"]), - ?CONSOLE("Cover analysis: ~s\n", [Index]), - - %% Export coverage data, if configured - case rebar_config:get(Config, cover_export_enabled, false) of - true -> - cover_export_coverdata(); - false -> - ok - end, - - %% Print coverage report, if configured - case rebar_config:get(Config, cover_print_enabled, false) of - true -> - cover_print_coverage(lists:sort(Coverage)); - false -> - ok - end. - -cover_close(not_enabled) -> - ok; -cover_close(F) -> - ok = file:close(F). - -cover_init(false, _BeamFiles) -> - {ok, not_enabled}; -cover_init(true, BeamFiles) -> - %% Attempt to start the cover server, then set its group leader to - %% .eunit/cover.log, so all cover log messages will go there instead of - %% to stdout. If the cover server is already started, we'll kill that - %% server and start a new one in order not to inherit a polluted - %% cover_server state. - {ok, CoverPid} = case whereis(cover_server) of - undefined -> - cover:start(); - _ -> - cover:stop(), - cover:start() - end, - - {ok, F} = OkOpen = file:open( - filename:join([?EUNIT_DIR, "cover.log"]), - [write]), - - group_leader(F, CoverPid), - - ?INFO("Cover compiling ~s\n", [rebar_utils:get_cwd()]), - - Compiled = [{Beam, cover:compile_beam(Beam)} || Beam <- BeamFiles], - case [Module || {_, {ok, Module}} <- Compiled] of - [] -> - %% No modules compiled successfully...fail - ?ERROR("Cover failed to compile any modules; aborting.~n", []), - ?FAIL; - _ -> - %% At least one module compiled successfully - - %% It's not an error for cover compilation to fail partially, - %% but we do want to warn about them - PrintWarning = - fun(Beam, Desc) -> - ?CONSOLE("Cover compilation warning for ~p: ~p", - [Beam, Desc]) - end, - _ = [PrintWarning(Beam, Desc) || {Beam, {error, Desc}} <- Compiled], - OkOpen - end; -cover_init(Config, BeamFiles) -> - cover_init(rebar_config:get(Config, cover_enabled, false), BeamFiles). 
- -cover_analyze_mod(Module) -> - case cover:analyze(Module, coverage, module) of - {ok, {Module, {Covered, NotCovered}}} -> - %% Modules that include the eunit header get an implicit - %% test/0 fun, which cover considers a runnable line, but - %% eunit:test(TestRepresentation) never calls. Decrement - %% NotCovered in this case. - [align_notcovered_count(Module, Covered, NotCovered, - is_eunitized(Module))]; - {error, Reason} -> - ?ERROR("Cover analyze failed for ~p: ~p ~p\n", - [Module, Reason, code:which(Module)]), - [] - end. - -is_eunitized(Mod) -> - has_eunit_test_fun(Mod) andalso - has_header(Mod, "include/eunit.hrl"). - -has_eunit_test_fun(Mod) -> - [F || {exports, Funs} <- Mod:module_info(), - {F, 0} <- Funs, F =:= test] =/= []. - -has_header(Mod, Header) -> - Mod1 = case code:which(Mod) of - cover_compiled -> - {file, File} = cover:is_compiled(Mod), - File; - non_existing -> Mod; - preloaded -> Mod; - L -> L - end, - {ok, {_, [{abstract_code, {_, AC}}]}} = beam_lib:chunks(Mod1, - [abstract_code]), - [F || {attribute, 1, file, {F, 1}} <- AC, - string:str(F, Header) =/= 0] =/= []. - -align_notcovered_count(Module, Covered, NotCovered, false) -> - {Module, Covered, NotCovered}; -align_notcovered_count(Module, Covered, NotCovered, true) -> - {Module, Covered, NotCovered - 1}. - -cover_write_index(Coverage, SrcModules) -> - {ok, F} = file:open(filename:join([?EUNIT_DIR, "index.html"]), [write]), - ok = file:write(F, "<!DOCTYPE HTML><html>\n" - "<head><meta charset=\"utf-8\">" - "<title>Coverage Summary</title></head>\n" - "<body>\n"), - IsSrcCoverage = fun({Mod,_C,_N}) -> lists:member(Mod, SrcModules) end, - {SrcCoverage, TestCoverage} = lists:partition(IsSrcCoverage, Coverage), - cover_write_index_section(F, "Source", SrcCoverage), - cover_write_index_section(F, "Test", TestCoverage), - ok = file:write(F, "</body></html>"), - ok = file:close(F). - -cover_write_index_section(_F, _SectionName, []) -> - ok; -cover_write_index_section(F, SectionName, Coverage) -> - %% Calculate total coverage - {Covered, NotCovered} = lists:foldl(fun({_Mod, C, N}, {CAcc, NAcc}) -> - {CAcc + C, NAcc + N} - end, {0, 0}, Coverage), - TotalCoverage = percentage(Covered, NotCovered), - - %% Write the report - ok = file:write(F, ?FMT("<h1>~s Summary</h1>\n", [SectionName])), - ok = file:write(F, ?FMT("<h3>Total: ~s</h3>\n", [TotalCoverage])), - ok = file:write(F, "<table><tr><th>Module</th><th>Coverage %</th></tr>\n"), - - FmtLink = - fun(Module, Cov, NotCov) -> - ?FMT("<tr><td><a href='~s.COVER.html'>~s</a></td><td>~s</td>\n", - [Module, Module, percentage(Cov, NotCov)]) - end, - lists:foreach(fun({Module, Cov, NotCov}) -> - ok = file:write(F, FmtLink(Module, Cov, NotCov)) - end, Coverage), - ok = file:write(F, "</table>\n"). - -cover_print_coverage(Coverage) -> - {Covered, NotCovered} = lists:foldl(fun({_Mod, C, N}, {CAcc, NAcc}) -> - {CAcc + C, NAcc + N} - end, {0, 0}, Coverage), - TotalCoverage = percentage(Covered, NotCovered), - - %% Determine the longest module name for right-padding - Width = lists:foldl(fun({Mod, _, _}, Acc) -> - case length(atom_to_list(Mod)) of - N when N > Acc -> - N; - _ -> - Acc - end - end, 0, Coverage) * -1, - - %% Print the output the console - ?CONSOLE("~nCode Coverage:~n", []), - lists:foreach(fun({Mod, C, N}) -> - ?CONSOLE("~*s : ~3s~n", - [Width, Mod, percentage(C, N)]) - end, Coverage), - ?CONSOLE("~n~*s : ~s~n", [Width, "Total", TotalCoverage]). - -cover_file(Module) -> - filename:join([?EUNIT_DIR, atom_to_list(Module) ++ ".COVER.html"]). 
- -cover_export_coverdata() -> - ExportFile = filename:join(eunit_dir(), "eunit.coverdata"), - case cover:export(ExportFile) of - ok -> - ?CONSOLE("Coverdata export: ~s~n", [ExportFile]); - {error, Reason} -> - ?ERROR("Coverdata export failed: ~p~n", [Reason]) - end. - -percentage(0, 0) -> - "not executed"; -percentage(Cov, NotCov) -> - integer_to_list(trunc((Cov / (Cov + NotCov)) * 100)) ++ "%". - -%% %% == reset_after_eunit == %% diff --git a/src/rebar_file_utils.erl b/src/rebar_file_utils.erl index fcd9c5e..9ddbf27 100644 --- a/src/rebar_file_utils.erl +++ b/src/rebar_file_utils.erl @@ -46,7 +46,7 @@ rm_rf(Target) -> {unix, _} -> EscTarget = escape_spaces(Target), {ok, []} = rebar_utils:sh(?FMT("rm -rf ~s", [EscTarget]), - [{use_stdout, false}, return_on_error]), + [{use_stdout, false}, abort_on_error]), ok; {win32, _} -> Filelist = filelib:wildcard(Target), @@ -67,7 +67,7 @@ cp_r(Sources, Dest) -> SourceStr = string:join(EscSources, " "), {ok, []} = rebar_utils:sh(?FMT("cp -R ~s \"~s\"", [SourceStr, Dest]), - [{use_stdout, false}, return_on_error]), + [{use_stdout, false}, abort_on_error]), ok; {win32, _} -> lists:foreach(fun(Src) -> ok = cp_r_win32(Src,Dest) end, Sources), @@ -81,7 +81,7 @@ mv(Source, Dest) -> EscSource = escape_spaces(Source), EscDest = escape_spaces(Dest), {ok, []} = rebar_utils:sh(?FMT("mv ~s ~s", [EscSource, EscDest]), - [{use_stdout, false}, return_on_error]), + [{use_stdout, false}, abort_on_error]), ok; {win32, _} -> {ok, R} = rebar_utils:sh( diff --git a/src/getopt.erl b/src/rebar_getopt.erl index f9852fb..79b871d 100644 --- a/src/getopt.erl +++ b/src/rebar_getopt.erl @@ -8,10 +8,11 @@ %%% a copy of the New BSD license with this software. If not, it can be %%% retrieved from: http://www.opensource.org/licenses/bsd-license.php %%%------------------------------------------------------------------- --module(getopt). +-module(rebar_getopt). -author('juanjo@comellas.org'). --export([parse/2, usage/2, usage/3, usage/4, tokenize/1]). +-export([parse/2, check/2, parse_and_check/2, format_error/2, + usage/2, usage/3, usage/4, tokenize/1]). -export([usage_cmd_line/2]). -define(LINE_LENGTH, 75). @@ -57,11 +58,52 @@ -export_type([arg_type/0, arg_value/0, arg_spec/0, simple_option/0, compound_option/0, option/0, option_spec/0]). -%% @doc Parse the command line options and arguments returning a list of tuples -%% and/or atoms using the Erlang convention for sending options to a -%% function. +%% @doc Parse the command line options and arguments returning a list of tuples +%% and/or atoms using the Erlang convention for sending options to a +%% function. Additionally perform check if all required options (the ones +%% without default values) are present. The function is a combination of +%% two calls: parse/2 and check/2. +-spec parse_and_check([option_spec()], string() | [string()]) -> + {ok, {[option()], [string()]}} | {error, {Reason :: atom(), Data :: term()}}. +parse_and_check(OptSpecList, CmdLine) when is_list(OptSpecList), is_list(CmdLine) -> + case parse(OptSpecList, CmdLine) of + {ok, {Opts, _}} = Result -> + case check(OptSpecList, Opts) of + ok -> Result; + Error -> Error + end; + Error -> + Error + end. + +%% @doc Check the parsed command line arguments returning ok if all required +%% options (i.e. that don't have defaults) are present, and returning +%% error otherwise. +-spec check([option_spec()], [option()]) -> + ok | {error, {Reason :: atom(), Option :: atom()}}. 
+check(OptSpecList, ParsedOpts) when is_list(OptSpecList), is_list(ParsedOpts) -> + try + RequiredOpts = [Name || {Name, _, _, Arg, _} <- OptSpecList, + not is_tuple(Arg) andalso Arg =/= undefined], + lists:foreach(fun (Option) -> + case proplists:is_defined(Option, ParsedOpts) of + true -> + ok; + false -> + throw({error, {missing_required_option, Option}}) + end + end, RequiredOpts) + catch + _:Error -> + Error + end. + + +%% @doc Parse the command line options and arguments returning a list of tuples +%% and/or atoms using the Erlang convention for sending options to a +%% function. -spec parse([option_spec()], string() | [string()]) -> - {ok, {[option()], [string()]}} | {error, {Reason :: atom(), Data :: any()}}. + {ok, {[option()], [string()]}} | {error, {Reason :: atom(), Data :: term()}}. parse(OptSpecList, CmdLine) when is_list(CmdLine) -> try Args = if @@ -101,6 +143,24 @@ parse(OptSpecList, OptAcc, ArgAcc, _ArgPos, []) -> {ok, {lists:reverse(append_default_options(OptSpecList, OptAcc)), lists:reverse(ArgAcc)}}. +%% @doc Format the error code returned by prior call to parse/2 or check/2. +-spec format_error([option_spec()], {error, {Reason :: atom(), Data :: term()}} | + {Reason :: term(), Data :: term()}) -> string(). +format_error(OptSpecList, {error, Reason}) -> + format_error(OptSpecList, Reason); +format_error(OptSpecList, {missing_required_option, Name}) -> + {_Name, Short, Long, _Type, _Help} = lists:keyfind(Name, 1, OptSpecList), + lists:flatten(["missing required option: -", [Short], " (", to_string(Long), ")"]); +format_error(_OptSpecList, {invalid_option, OptStr}) -> + lists:flatten(["invalid option: ", to_string(OptStr)]); +format_error(_OptSpecList, {invalid_option_arg, {Name, Arg}}) -> + lists:flatten(["option \'", to_string(Name) ++ "\' has invalid argument: ", to_string(Arg)]); +format_error(_OptSpecList, {invalid_option_arg, OptStr}) -> + lists:flatten(["invalid option argument: ", to_string(OptStr)]); +format_error(_OptSpecList, {Reason, Data}) -> + lists:flatten([to_string(Reason), " ", to_string(Data)]). + + %% @doc Parse a long option, add it to the option accumulator and continue %% parsing the rest of the arguments recursively. %% A long option can have the following syntax: @@ -698,7 +758,7 @@ format_usage_line(_MaxOptionLength, _MaxLineLength, {_OptionLength, OptionText, %% @doc Wrap a text line converting it into several text lines so that the -%% length of each one of them is never over HelpLength characters. +%% length of each one of them is never over Length characters. -spec wrap_text_line(Length :: non_neg_integer(), Text :: string()) -> [string()]. wrap_text_line(Length, Text) -> wrap_text_line(Length, Text, [], 0, []). @@ -730,7 +790,7 @@ default_arg_value_to_string(Value) when is_binary(Value) -> default_arg_value_to_string(Value) when is_integer(Value) -> integer_to_list(Value); default_arg_value_to_string(Value) when is_float(Value) -> - float_to_list(Value); + lists:flatten(io_lib:format("~w", [Value])); default_arg_value_to_string(Value) -> Value. @@ -832,7 +892,7 @@ get_env_var(Prefix, Suffix, []) -> Prefix ++ Suffix. --spec line_length() -> non_neg_integer(). +-spec line_length() -> 0..?LINE_LENGTH. line_length() -> case io:columns() of {ok, Columns} when Columns < ?LINE_LENGTH -> @@ -840,3 +900,15 @@ line_length() -> _ -> ?LINE_LENGTH end. + + +-spec to_string(term()) -> string(). 
+to_string(List) when is_list(List) -> + case io_lib:printable_list(List) of + true -> List; + false -> io_lib:format("~p", [List]) + end; +to_string(Atom) when is_atom(Atom) -> + atom_to_list(Atom); +to_string(Value) -> + io_lib:format("~p", [Value]). diff --git a/src/rebar_log.erl b/src/rebar_log.erl index 4108c9c..ba25332 100644 --- a/src/rebar_log.erl +++ b/src/rebar_log.erl @@ -27,8 +27,17 @@ -module(rebar_log). -export([init/1, - set_level/1, default_level/0, - log/3]). + set_level/1, + error_level/0, + default_level/0, + log/3, + log/4, + is_verbose/1]). + +-define(ERROR_LEVEL, 0). +-define(WARN_LEVEL, 1). +-define(INFO_LEVEL, 2). +-define(DEBUG_LEVEL, 3). %% =================================================================== %% Public API @@ -37,35 +46,39 @@ init(Config) -> Verbosity = rebar_config:get_global(Config, verbose, default_level()), case valid_level(Verbosity) of - 0 -> set_level(error); - 1 -> set_level(warn); - 2 -> set_level(info); - 3 -> set_level(debug) + ?ERROR_LEVEL -> set_level(error); + ?WARN_LEVEL -> set_level(warn); + ?INFO_LEVEL -> set_level(info); + ?DEBUG_LEVEL -> set_level(debug) end. set_level(Level) -> ok = application:set_env(rebar, log_level, Level). log(Level, Str, Args) -> + log(standard_io, Level, Str, Args). + +log(Device, Level, Str, Args) -> {ok, LogLevel} = application:get_env(rebar, log_level), case should_log(LogLevel, Level) of true -> - io:format(log_prefix(Level) ++ Str, Args); + io:format(Device, log_prefix(Level) ++ Str, Args); false -> ok end. -default_level() -> error_level(). +error_level() -> ?ERROR_LEVEL. +default_level() -> ?WARN_LEVEL. + +is_verbose(Config) -> + rebar_config:get_xconf(Config, is_verbose, false). %% =================================================================== %% Internal functions %% =================================================================== valid_level(Level) -> - erlang:max(error_level(), erlang:min(Level, debug_level())). - -error_level() -> 0. -debug_level() -> 3. + erlang:max(?ERROR_LEVEL, erlang:min(Level, ?DEBUG_LEVEL)). should_log(debug, _) -> true; should_log(info, debug) -> false; diff --git a/src/rebar_metacmds.erl b/src/rebar_metacmds.erl new file mode 100644 index 0000000..6e223bd --- /dev/null +++ b/src/rebar_metacmds.erl @@ -0,0 +1,56 @@ +%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*- +%% ex: ts=4 sw=4 et +%% ------------------------------------------------------------------- +%% +%% rebar: Erlang Build Tools +%% +%% Copyright (c) 2013-2014 Tuncer Ayaz +%% +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. +%% ------------------------------------------------------------------- +-module(rebar_metacmds). + +-export(['prepare-deps'/2, + 'refresh-deps'/2]). + +%% for internal use only +-export([info/2]). + +-include("rebar.hrl"). + +%% =================================================================== +%% Public API +%% =================================================================== +'prepare-deps'(Config, _AppFile) -> + rebar:run(enable_recursion(Config), ["get-deps", "compile"]). + +'refresh-deps'(Config, _AppFile) -> + rebar:run(enable_recursion(Config), ["update-deps", "compile"]). + +%% =================================================================== +%% Internal functions +%% =================================================================== + +info(help, 'prepare-deps') -> + ?CONSOLE("Meta command to run 'rebar -r get-deps compile'.~n", []); +info(help, 'refresh-deps') -> + ?CONSOLE("Meta command to run 'rebar -r update-deps compile'.~n", []). + +enable_recursion(Config) -> + rebar_config:set_xconf(Config, recursive, true). diff --git a/src/mustache.erl b/src/rebar_mustache.erl index f6963cd..9016c0f 100644 --- a/src/mustache.erl +++ b/src/rebar_mustache.erl @@ -23,7 +23,7 @@ %% See the README at http://github.com/mojombo/mustache.erl for additional %% documentation and usage examples. --module(mustache). %% v0.1.0 +-module(rebar_mustache). %% v0.1.0 -author("Tom Preston-Werner"). -export([compile/1, compile/2, render/1, render/2, render/3, get/2, get/3, escape/1, start/1]). @@ -31,6 +31,8 @@ section_re = undefined, tag_re = undefined}). +-define(MUSTACHE_STR, "rebar_mustache"). + compile(Body) when is_list(Body) -> State = #mstate{}, CompiledTemplate = pre_compile(Body, State), @@ -108,7 +110,7 @@ compile_section(Name, Content, State) -> Mod = State#mstate.mod, Result = compiler(Content, State), "fun() -> " ++ - "case mustache:get(" ++ Name ++ ", Ctx, " ++ atom_to_list(Mod) ++ ") of " ++ + "case " ++ ?MUSTACHE_STR ++ ":get(" ++ Name ++ ", Ctx, " ++ atom_to_list(Mod) ++ ") of " ++ "\"true\" -> " ++ Result ++ "; " ++ "\"false\" -> " ++ @@ -143,10 +145,10 @@ tag_kind(T, {K0, K1}) -> compile_tag(none, Content, State) -> Mod = State#mstate.mod, - "mustache:escape(mustache:get(" ++ Content ++ ", Ctx, " ++ atom_to_list(Mod) ++ "))"; + ?MUSTACHE_STR ++ ":escape(" ++ ?MUSTACHE_STR ++ ":get(" ++ Content ++ ", Ctx, " ++ atom_to_list(Mod) ++ "))"; compile_tag("{", Content, State) -> Mod = State#mstate.mod, - "mustache:get(" ++ Content ++ ", Ctx, " ++ atom_to_list(Mod) ++ ")"; + ?MUSTACHE_STR ++ ":get(" ++ Content ++ ", Ctx, " ++ atom_to_list(Mod) ++ ")"; compile_tag("!", _Content, _State) -> "[]". diff --git a/src/rebar_port_compiler.erl b/src/rebar_port_compiler.erl index 0abb044..fec8e04 100644 --- a/src/rebar_port_compiler.erl +++ b/src/rebar_port_compiler.erl @@ -498,10 +498,20 @@ erts_dir() -> os_env() -> ReOpts = [{return, list}, {parts, 2}, unicode], - Os = [list_to_tuple(re:split(S, "=", ReOpts)) || S <- os:getenv()], + Os = [list_to_tuple(re:split(S, "=", ReOpts)) || + S <- lists:filter(fun discard_deps_vars/1, os:getenv())], %% Drop variables without a name (win32) [T1 || {K, _V} = T1 <- Os, K =/= []]. 
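The meta commands added in rebar_metacmds above simply re-enter rebar with recursion enabled, so on the command line they behave like the documented one-liners (usage sketch):

    $ rebar prepare-deps    %% same as: rebar -r get-deps compile
    $ rebar refresh-deps    %% same as: rebar -r update-deps compile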
+%% +%% To avoid having multiple repetitions of the same environment variables +%% (ERL_LIBS), avoid exporting any variables that may cause conflict with +%% those exported by the rebar_deps module (ERL_LIBS, REBAR_DEPS_DIR) +%% +discard_deps_vars("ERL_LIBS=" ++ _Value) -> false; +discard_deps_vars("REBAR_DEPS_DIR=" ++ _Value) -> false; +discard_deps_vars(_Var) -> true. + select_compile_template(drv, Compiler) -> select_compile_drv_template(Compiler); select_compile_template(exe, Compiler) -> diff --git a/src/rebar_qc.erl b/src/rebar_qc.erl index 53a6f52..e08833b 100644 --- a/src/rebar_qc.erl +++ b/src/rebar_qc.erl @@ -4,7 +4,7 @@ %% %% rebar: Erlang Build Tools %% -%% Copyright (c) 2011-2012 Tuncer Ayaz +%% Copyright (c) 2011-2014 Tuncer Ayaz %% %% Permission is hereby granted, free of charge, to any person obtaining a copy %% of this software and associated documentation files (the "Software"), to deal @@ -67,11 +67,22 @@ info(help, qc) -> "Valid rebar.config options:~n" " {qc_opts, [{qc_mod, module()}, Options]}~n" " ~p~n" - " ~p~n", + " ~p~n" + " ~p~n" + " ~p~n" + " ~p~n" + "Valid command line options:~n" + " compile_only=true (Compile but do not test properties)", [ {qc_compile_opts, []}, - {qc_first_files, []} - ]). + {qc_first_files, []}, + {cover_enabled, false}, + {cover_print_enabled, false}, + {cover_export_enabled, false} + ]); +info(help, clean) -> + Description = ?FMT("Delete QuickCheck test dir (~s)", [?QC_DIR]), + ?CONSOLE("~s.~n", [Description]). -define(TRIQ_MOD, triq). -define(EQC_MOD, eqc). @@ -142,26 +153,44 @@ run(Config, QC, QCOpts) -> ok = ensure_dirs(), CodePath = setup_codepath(), - CompileOnly = rebar_utils:get_experimental_global(Config, compile_only, - false), + CompileOnly = rebar_config:get_global(Config, compile_only, false), %% Compile erlang code to ?QC_DIR, using a tweaked config %% with appropriate defines, and include all the test modules %% as well. - {ok, _SrcErls} = rebar_erlc_compiler:test_compile(Config, "qc", ?QC_DIR), + {ok, SrcErls} = rebar_erlc_compiler:test_compile(Config, "qc", ?QC_DIR), case CompileOnly of "true" -> true = code:set_path(CodePath), ?CONSOLE("Compiled modules for qc~n", []); false -> - run1(QC, QCOpts, CodePath) + run1(QC, QCOpts, Config, CodePath, SrcErls) end. -run1(QC, QCOpts, CodePath) -> +run1(QC, QCOpts, Config, CodePath, SrcErls) -> + + AllBeamFiles = rebar_utils:beams(?QC_DIR), + AllModules = [rebar_utils:beam_to_mod(?QC_DIR, N) + || N <- AllBeamFiles], + PropMods = find_prop_mods(), + FilteredModules = AllModules -- PropMods, + + SrcModules = [rebar_utils:erl_to_mod(M) || M <- SrcErls], + + {ok, CoverLog} = rebar_cover_utils:init(Config, AllBeamFiles, qc_dir()), + TestModule = fun(M) -> qc_module(QC, QCOpts, M) end, - case lists:flatmap(TestModule, find_prop_mods()) of + QCResult = lists:flatmap(TestModule, PropMods), + + rebar_cover_utils:perform_cover(Config, FilteredModules, SrcModules, + qc_dir()), + rebar_cover_utils:close(CoverLog), + ok = rebar_cover_utils:exit(), + + true = code:set_path(CodePath), + + case QCResult of [] -> - true = code:set_path(CodePath), ok; Errors -> ?ABORT("One or more QC properties didn't hold true:~n~p~n", diff --git a/src/rebar_rel_utils.erl b/src/rebar_rel_utils.erl index 085dbd9..5d99948 100644 --- a/src/rebar_rel_utils.erl +++ b/src/rebar_rel_utils.erl @@ -37,6 +37,7 @@ get_rel_file_path/2, load_config/2, get_sys_tuple/1, + get_excl_lib_tuple/1, get_target_dir/2, get_root_dir/2, get_target_parent_dir/2]). @@ -144,6 +145,13 @@ get_sys_tuple(ReltoolConfig) -> end. 
%% +%% Look for the {excl_lib, ...} tuple in sys tuple of the reltool.config file. +%% Without this present, return false. +%% +get_excl_lib_tuple(ReltoolConfig) -> + lists:keyfind(excl_lib, 1, element(2, get_sys_tuple(ReltoolConfig))). + +%% %% Look for {target_dir, TargetDir} in the reltool config file; if none is %% found, use the name of the release as the default target directory. %% diff --git a/src/rebar_reltool.erl b/src/rebar_reltool.erl index 9f9488e..fdaa7e0 100644 --- a/src/rebar_reltool.erl +++ b/src/rebar_reltool.erl @@ -147,15 +147,12 @@ process_overlay(Config, ReltoolConfig) -> OverlayVars1), %% Finally, overlay the files specified by the overlay section - case lists:keyfind(overlay, 1, ReltoolConfig) of - {overlay, Overlay} when is_list(Overlay) -> + case overlay_files(ReltoolConfig) of + [] -> + ok; + Overlay -> execute_overlay(Overlay, OverlayVars, rebar_utils:get_cwd(), - TargetDir); - false -> - ?INFO("No {overlay, [...]} found in reltool.config.\n", []); - _ -> - ?ABORT("{overlay, [...]} entry in reltool.config " - "must be a list.\n", []) + TargetDir) end. %% @@ -292,6 +289,26 @@ dump_spec(Config, Spec) -> end. +overlay_files(ReltoolConfig) -> + Original = case lists:keyfind(overlay, 1, ReltoolConfig) of + {overlay, Overlay} when is_list(Overlay) -> + Overlay; + false -> + ?INFO("No {overlay, [...]} found in reltool.config.\n", []), + []; + _ -> + ?ABORT("{overlay, [...]} entry in reltool.config " + "must be a list.\n", []) + end, + SlimAddition = case rebar_rel_utils:get_excl_lib_tuple(ReltoolConfig) of + {excl_lib, otp_root} -> + [{create, "releases/{{rel_vsn}}/runner_script.data", + "slim\n"}]; + false -> + [] + end, + Original ++ SlimAddition. + %% TODO: Merge functionality here with rebar_templater execute_overlay([], _Vars, _BaseDir, _TargetDir) -> diff --git a/src/rebar_require_vsn.erl b/src/rebar_require_vsn.erl index 385f55c..af805c8 100644 --- a/src/rebar_require_vsn.erl +++ b/src/rebar_require_vsn.erl @@ -34,7 +34,8 @@ eunit/2]). %% for internal use only --export([info/2]). +-export([info/2, + version_tuple/2]). %% =================================================================== %% Public API @@ -110,7 +111,7 @@ check_versions(Config) -> end. version_tuple(OtpRelease, Type) -> - case re:run(OtpRelease, "R(\\d+)B?-?(\\d+)?", [{capture, all, list}]) of + case re:run(OtpRelease, "R?(\\d+)B?-?(\\d+)?", [{capture, all, list}]) of {match, [_Full, Maj, Min]} -> {list_to_integer(Maj), list_to_integer(Min)}; {match, [_Full, Maj]} -> diff --git a/src/rebar_shell.erl b/src/rebar_shell.erl index 2dbf4a0..348e540 100644 --- a/src/rebar_shell.erl +++ b/src/rebar_shell.erl @@ -30,27 +30,40 @@ -include("rebar.hrl"). --export([shell/2]). +-export([shell/2, info/2]). + +%% NOTE: +%% this is an attempt to replicate `erl -pa ./ebin -pa deps/*/ebin`. it is +%% mostly successful but does stop and then restart the user io system to get +%% around issues with rebar being an escript and starting in `noshell` mode. +%% it also lacks the ctrl-c interrupt handler that `erl` features. ctrl-c will +%% immediately kill the script. ctrl-g, however, works fine shell(_Config, _AppFile) -> - ?CONSOLE("NOTICE: Using experimental 'shell' command~n", []), - %% backwards way to say we only want this executed - %% for the "top level" directory - case is_deps_dir(rebar_utils:get_cwd()) of - false -> - true = code:add_pathz(rebar_utils:ebin_dir()), - user_drv:start(), - %% this call never returns (until user quits shell) - shell:server(false, false); - true -> - ok - end, - ok. 
+ true = code:add_pathz(rebar_utils:ebin_dir()), + %% terminate the current user + ok = supervisor:terminate_child(kernel_sup, user), + %% start a new shell (this also starts a new user under the correct group) + user_drv:start(), + %% enable error_logger's tty output + ok = error_logger:swap_handler(tty), + %% disable the simple error_logger (which may have been added multiple + %% times). removes at most the error_logger added by init and the + %% error_logger added by the tty handler + ok = remove_error_handler(3), + %% this call never returns (until user quits shell) + timer:sleep(infinity). + +info(help, shell) -> + ?CONSOLE( + "Start a shell with project and deps preloaded similar to~n" + "'erl -pa ebin -pa deps/*/ebin'.~n", + []). -is_deps_dir(Dir) -> - case lists:reverse(filename:split(Dir)) of - [_, "deps" | _] -> - true; - _V -> - false - end. +remove_error_handler(0) -> + ?WARN("Unable to remove simple error_logger handler~n", []); +remove_error_handler(N) -> + case gen_event:delete_handler(error_logger, error_logger, []) of + {error, module_not_found} -> ok; + {error_logger, _} -> remove_error_handler(N-1) + end.
\ No newline at end of file diff --git a/src/rebar_templater.erl b/src/rebar_templater.erl index 279c53d..4abf404 100644 --- a/src/rebar_templater.erl +++ b/src/rebar_templater.erl @@ -27,6 +27,7 @@ -module(rebar_templater). -export(['create-app'/2, + 'create-lib'/2, 'create-node'/2, 'list-templates'/2, create/2]). @@ -50,6 +51,10 @@ %% Alias for create w/ template=simpleapp create1(Config, "simpleapp"). +'create-lib'(Config, _File) -> + %% Alias for create w/ template=simplelib + create1(Config, "simplelib"). + 'create-node'(Config, _File) -> %% Alias for create w/ template=simplenode create1(Config, "simplenode"). @@ -98,7 +103,7 @@ render(Bin, Context) -> ReOpts = [global, {return, list}], Str0 = re:replace(Bin, "\\\\", "\\\\\\", ReOpts), Str1 = re:replace(Str0, "\"", "\\\\\"", ReOpts), - mustache:render(Str1, Context). + rebar_mustache:render(Str1, Context). %% =================================================================== %% Internal functions @@ -116,6 +121,12 @@ info(help, 'create-app') -> "~n" "Valid command line options:~n" " [appid=myapp]~n", []); +info(help, 'create-lib') -> + ?CONSOLE( + "Create simple lib skel.~n" + "~n" + "Valid command line options:~n" + " [libid=mylib]~n", []); info(help, 'create-node') -> ?CONSOLE( "Create simple node skel.~n" @@ -234,7 +245,8 @@ find_disk_templates(Config) -> HomeFiles = rebar_utils:find_files(filename:join([os:getenv("HOME"), ".rebar", "templates"]), ?TEMPLATE_RE), - LocalFiles = rebar_utils:find_files(".", ?TEMPLATE_RE), + Recursive = rebar_config:is_recursive(Config), + LocalFiles = rebar_utils:find_files(".", ?TEMPLATE_RE, Recursive), [{file, F} || F <- OtherTemplates ++ HomeFiles ++ LocalFiles]. find_other_templates(Config) -> diff --git a/src/rebar_upgrade.erl b/src/rebar_upgrade.erl index 5814e51..3a38a08 100644 --- a/src/rebar_upgrade.erl +++ b/src/rebar_upgrade.erl @@ -87,7 +87,8 @@ info(help, 'generate-upgrade') -> ?CONSOLE("Build an upgrade package.~n" "~n" "Valid command line options:~n" - " previous_release=path~n", + " previous_release=path~n" + " target_dir=target_dir (optional)~n", []). run_checks(Config, OldVerPath, ReltoolConfig) -> @@ -97,10 +98,7 @@ run_checks(Config, OldVerPath, ReltoolConfig) -> {Name, Ver} = rebar_rel_utils:get_reltool_release_info(ReltoolConfig), - NewVerPath = - filename:join( - [rebar_rel_utils:get_target_parent_dir(Config, ReltoolConfig), - Name]), + NewVerPath = rebar_rel_utils:get_target_dir(Config, ReltoolConfig), true = rebar_utils:prop_check(filelib:is_dir(NewVerPath), "Release directory doesn't exist (~p)~n", [NewVerPath]), diff --git a/src/rebar_utils.erl b/src/rebar_utils.erl index 618427f..fa35fed 100644 --- a/src/rebar_utils.erl +++ b/src/rebar_utils.erl @@ -31,6 +31,7 @@ get_arch/0, wordsize/0, sh/2, + sh_send/3, find_files/2, find_files/3, now_str/0, ensure_dir/1, @@ -52,7 +53,9 @@ erl_opts/1, src_dirs/1, ebin_dir/0, - processing_base_dir/1, processing_base_dir/2]). + base_dir/1, + processing_base_dir/1, processing_base_dir/2, + patch_env/2]). -include("rebar.hrl"). @@ -86,6 +89,24 @@ wordsize() -> integer_to_list(8 * erlang:system_info(wordsize)) end. 
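The new create-lib command mirrors create-app but uses the simplelib template and an optional libid, e.g. (library name is hypothetical):

    $ rebar create-lib libid=mylib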
diff --git a/src/rebar_utils.erl b/src/rebar_utils.erl
index 618427f..fa35fed 100644
--- a/src/rebar_utils.erl
+++ b/src/rebar_utils.erl
@@ -31,6 +31,7 @@
          get_arch/0,
          wordsize/0,
          sh/2,
+         sh_send/3,
          find_files/2, find_files/3,
          now_str/0,
          ensure_dir/1,
@@ -52,7 +53,9 @@
          erl_opts/1,
          src_dirs/1,
          ebin_dir/0,
-         processing_base_dir/1, processing_base_dir/2]).
+         base_dir/1,
+         processing_base_dir/1, processing_base_dir/2,
+         patch_env/2]).
 
 -include("rebar.hrl").
 
@@ -86,6 +89,24 @@ wordsize() ->
             integer_to_list(8 * erlang:system_info(wordsize))
     end.
 
+sh_send(Command0, String, Options0) ->
+    ?INFO("sh_send info:\n\tcwd: ~p\n\tcmd: ~s < ~s\n", [get_cwd(), Command0, String]),
+    ?DEBUG("\topts: ~p\n", [Options0]),
+
+    DefaultOptions = [use_stdout, abort_on_error],
+    Options = [expand_sh_flag(V)
+               || V <- proplists:compact(Options0 ++ DefaultOptions)],
+
+    Command = patch_on_windows(Command0, proplists:get_value(env, Options, [])),
+    PortSettings = proplists:get_all_values(port_settings, Options) ++
+        [exit_status, {line, 16384}, use_stdio, stderr_to_stdout, hide],
+    Port = open_port({spawn, Command}, PortSettings),
+
+    %% allow us to send some data to the shell command's STDIN
+    %% Erlang doesn't let us get any reply after sending an EOF, though...
+    Port ! {self(), {command, String}},
+    port_close(Port).
+
 %%
 %% Options = [Option] -- defaults to [use_stdout, abort_on_error]
 %% Option = ErrorOption | OutputOption | {cd, string()} | {env, Env}
@@ -307,12 +328,33 @@ src_dirs(SrcDirs) ->
 ebin_dir() ->
     filename:join(get_cwd(), "ebin").
 
+base_dir(Config) ->
+    rebar_config:get_xconf(Config, base_dir).
+
 processing_base_dir(Config) ->
     Cwd = rebar_utils:get_cwd(),
     processing_base_dir(Config, Cwd).
 
 processing_base_dir(Config, Dir) ->
-    Dir =:= rebar_config:get_xconf(Config, base_dir).
+    AbsDir = filename:absname(Dir),
+    AbsDir =:= base_dir(Config).
+
+%% @doc Returns the list of environment variables including 'REBAR' which points to the
+%% rebar executable used to execute the currently running command. The environment is
+%% not modified if rebar was invoked programmatically.
+-spec patch_env(rebar_config:config(), [{string(), string()}]) -> [{string(), string()}].
+patch_env(Config, []) ->
+    % if we reached an empty list the env did not contain the REBAR variable
+    case rebar_config:get_xconf(Config, escript, "") of
+        "" -> % rebar was invoked programmatically
+            [];
+        Path ->
+            [{"REBAR", Path}]
+    end;
+patch_env(_Config, [{"REBAR", _} | _]=All) ->
+    All;
+patch_env(Config, [E | Rest]) ->
+    [E | patch_env(Config, Rest)].
 
 %% ====================================================================
 %% Internal functions
@@ -394,8 +436,9 @@ log_msg_and_abort(Message) ->
 
 -spec log_and_abort(string(), {integer(), string()}) -> no_return().
 log_and_abort(Command, {Rc, Output}) ->
-    ?ABORT("~s failed with error: ~w and output:~n~s~n",
-           [Command, Rc, Output]).
+    ?ABORT("sh(~s)~n"
+           "failed with return code ~w and the following output:~n"
+           "~s~n", [Command, Rc, Output]).
 
 sh_loop(Port, Fun, Acc) ->
     receive
@@ -474,6 +517,7 @@ vcs_vsn_1(Vcs, Dir) ->
     end.
 
 vcs_vsn_cmd(git) -> "git describe --always --tags";
+vcs_vsn_cmd(p4)  -> "echo #head";
 vcs_vsn_cmd(hg)  -> "hg identify -i";
 vcs_vsn_cmd(bzr) -> "bzr revno";
 vcs_vsn_cmd(svn) -> "svnversion";
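The new patch_env/2 above walks an environment list and appends a {"REBAR", Path} entry only when none is present, leaving the list untouched for programmatic invocations. A self-contained mirror of that logic, with a made-up path standing in for the escript path rebar records in its config (module, function, and path are illustrative, not part of the patch):

    -module(env_patch_demo).
    -export([main/0]).

    %% Append {"REBAR", Path} only if the list does not already carry it.
    main() ->
        Env = [{"PATH", "/usr/bin"}, {"LANG", "C"}],
        io:format("~p~n", [ensure_rebar(Env, "/usr/local/bin/rebar")]).

    ensure_rebar([], Path)                    -> [{"REBAR", Path}];
    ensure_rebar([{"REBAR", _} | _] = All, _) -> All;
    ensure_rebar([E | Rest], Path)            -> [E | ensure_rebar(Rest, Path)].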
diff --git a/src/rebar_xref.erl b/src/rebar_xref.erl
index eaf6d03..16e8cc4 100644
--- a/src/rebar_xref.erl
+++ b/src/rebar_xref.erl
@@ -51,7 +51,7 @@ xref(Config, _) ->
     xref:set_default(xref, [{warnings,
                              rebar_config:get(Config, xref_warnings, false)},
-                            {verbose, rebar_config:is_verbose(Config)}]),
+                            {verbose, rebar_log:is_verbose(Config)}]),
 
     {ok, _} = xref:add_directory(xref, "ebin"),
 
@@ -103,9 +103,11 @@ info(help, xref) ->
        "Valid rebar.config options:~n"
        "  ~p~n"
        "  ~p~n"
+       "  ~p~n"
        "  ~p~n",
        [
        {xref_warnings, false},
+       {xref_extra_paths,[]},
        {xref_checks, [undefined_function_calls, undefined_functions,
                       locals_not_used, exports_not_used,
                       deprecated_function_calls, deprecated_functions]},
@@ -144,8 +146,9 @@ code_path(Config) ->
     %% functions, even though those functions are present as part
     %% of compilation. H/t to @dluna. Long term we should tie more
     %% properly into the overall compile code path if possible.
-    BaseDir = rebar_config:get_xconf(Config, base_dir),
+    BaseDir = rebar_utils:base_dir(Config),
     [P || P <- code:get_path() ++
+             rebar_config:get(Config, xref_extra_paths, []) ++
              [filename:join(BaseDir, filename:join(SubDir, "ebin"))
               || SubDir <- rebar_config:get(Config, sub_dirs, [])],
           filelib:is_dir(P)].
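The xref hunks add an xref_extra_paths option whose directories are appended to the code path used during analysis, alongside the existing xref_warnings and xref_checks settings listed in the help text. A hedged example of how these options might sit together in a project's rebar.config (the directory names are illustrative):

    {xref_warnings, false}.
    {xref_extra_paths, ["test", "deps/somelib/ebin"]}.
    {xref_checks, [undefined_function_calls, undefined_functions,
                   locals_not_used, deprecated_function_calls]}.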