Clean up code
This commit is contained in:
parent 7710ab0d9f
commit 63de05d914
26 changed files with 521 additions and 383 deletions
@@ -52,7 +52,8 @@ main(Args) ->
{error, failed} ->
halt(1);
Error ->
%% Nothing should percolate up from rebar_core; dump this error to console
%% Nothing should percolate up from rebar_core;
%% Dump this error to console
io:format("Uncaught error in rebar_core: ~p\n", [Error]),
halt(1)
end.

@@ -88,7 +89,8 @@ run_aux(Commands) ->
%% Determine the location of the rebar executable; important for pulling
%% resources out of the escript
rebar_config:set_global(escript, filename:absname(escript:script_name())),
?DEBUG("Rebar location: ~p\n", [rebar_config:get_global(escript, undefined)]),
?DEBUG("Rebar location: ~p\n",
[rebar_config:get_global(escript, undefined)]),

%% Note the top-level directory for reference
rebar_config:set_global(base_dir, filename:absname(rebar_utils:get_cwd())),

@@ -153,7 +155,8 @@ parse_args(Args) ->
%%
version() ->
{ok, Vsn} = application:get_key(rebar, vsn),
?CONSOLE("rebar version: ~s date: ~s vcs: ~s\n", [Vsn, ?BUILD_TIME, ?VCS_INFO]).
?CONSOLE("rebar version: ~s date: ~s vcs: ~s\n",
[Vsn, ?BUILD_TIME, ?VCS_INFO]).


%%

@@ -239,17 +242,17 @@ version Show version information
option_spec_list() ->
Jobs = rebar_config:get_jobs(),
JobsHelp = io_lib:format(
"Number of concurrent workers a command may use. Default: ~B",
[Jobs]),
"Number of concurrent workers a command may use. Default: ~B",
[Jobs]),
[
%% {Name, ShortOpt, LongOpt, ArgSpec, HelpMsg}
{help, $h, "help", undefined, "Show the program options"},
{commands, $c, "commands", undefined, "Show available commands"},
{verbose, $v, "verbose", undefined, "Be verbose about what gets done"},
{version, $V, "version", undefined, "Show version information"},
{force, $f, "force", undefined, "Force"},
{jobs, $j, "jobs", integer, JobsHelp},
{config, $C, "config", string, "Rebar config file to use"}
{help, $h, "help", undefined, "Show the program options"},
{commands, $c, "commands", undefined, "Show available commands"},
{verbose, $v, "verbose", undefined, "Be verbose about what gets done"},
{version, $V, "version", undefined, "Show version information"},
{force, $f, "force", undefined, "Force"},
{jobs, $j, "jobs", integer, JobsHelp},
{config, $C, "config", string, "Rebar config file to use"}
].

%%

@@ -303,7 +306,7 @@ get_command_name_candidates(Command) ->
%% * "create-a" matches create-app
%% * "c-app" matches create-app
Candidates = [Candidate || Candidate <- command_names(),
is_command_name_candidate(Command, Candidate)],
is_command_name_candidate(Command, Candidate)],
%% Is there a complete match? If so return only that, return a
%% list of candidates otherwise
case lists:member(Command, Candidates) of

@@ -85,12 +85,12 @@ compile_abnfc(Source, _Target, Config) ->
case abnfc_is_present() of
false ->
?CONSOLE(
"~n===============================================~n"
" You need to install abnfc to compile ABNF grammars~n"
" Download the latest tarball release from github~n"
" https://github.com/nygge/abnfc~n"
" and install it into your erlang library dir~n"
"===============================================~n~n", []),
<<"~n===============================================~n"
" You need to install abnfc to compile ABNF grammars~n"
" Download the latest tarball release from github~n"
" https://github.com/nygge/abnfc~n"
" and install it into your erlang library dir~n"
"===============================================~n~n">>, []),
?FAIL;
true ->
AbnfcOpts = abnfc_opts(Config),

@@ -98,7 +98,7 @@ compile_abnfc(Source, _Target, Config) ->
Opts = [noobj,
{o, option(out_dir, AbnfcOpts)},
{mod, filename:basename(Source, SourceExt) ++
option(module_ext, AbnfcOpts)}],
option(module_ext, AbnfcOpts)}],
case abnfc:file(Source, Opts) of
ok -> ok;
Error ->

@@ -52,7 +52,8 @@ run(Config, FirstFiles, RestFiles, CompileFn) ->
compile_queue(Pids, RestFiles)
end.

run(Config, FirstFiles, SourceDir, SourceExt, TargetDir, TargetExt, Compile3Fn) ->
run(Config, FirstFiles, SourceDir, SourceExt, TargetDir, TargetExt,
Compile3Fn) ->
run(Config, FirstFiles, SourceDir, SourceExt, TargetDir, TargetExt,
Compile3Fn, [check_last_mod]).

@@ -73,7 +74,8 @@ run(Config, FirstFiles, SourceDir, SourceExt, TargetDir, TargetExt,

run(Config, FirstFiles, RestFiles,
fun(S, C) ->
Target = target_file(S, SourceDir, SourceExt, TargetDir, TargetExt),
Target = target_file(S, SourceDir, SourceExt,
TargetDir, TargetExt),
simple_compile_wrapper(S, Target, Compile3Fn, C, CheckLastMod)
end).

@@ -50,10 +50,10 @@ skip_dir(Dir) ->

is_skip_dir(Dir) ->
case erlang:get({skip_dir, Dir}) of
undefined ->
false;
true ->
true
undefined ->
false;
true ->
true
end.

skip_dirs() ->

@@ -76,7 +76,8 @@ process_commands([Command | Rest]) ->
lists:foreach(fun (D) -> erlang:erase({skip_dir, D}) end, skip_dirs()),
Operations = erlang:get(operations),

_ = process_dir(rebar_utils:get_cwd(), rebar_config:new(), Command, sets:new()),
_ = process_dir(rebar_utils:get_cwd(), rebar_config:new(),
Command, sets:new()),
case erlang:get(operations) of
Operations ->
%% This command didn't do anything

@@ -108,7 +109,8 @@ process_dir(Dir, ParentConfig, Command, DirSet) ->
%% CWD to see if it's a fit -- if it is, use that set of modules
%% to process this dir.
{ok, AvailModuleSets} = application:get_env(rebar, modules),
{DirModules, ModuleSetFile} = choose_module_set(AvailModuleSets, Dir),
{DirModules, ModuleSetFile} = choose_module_set(AvailModuleSets,
Dir),

%% Get the list of modules for "any dir". This is a catch-all list
%% of modules that are processed in addition to modules associated

@@ -122,7 +124,8 @@ process_dir(Dir, ParentConfig, Command, DirSet) ->
%% directories that should be processed _before_ the current one.
Predirs = acc_modules(Modules, preprocess, Config, ModuleSetFile),
?DEBUG("Predirs: ~p\n", [Predirs]),
DirSet2 = process_each(Predirs, Command, Config, ModuleSetFile, DirSet),
DirSet2 = process_each(Predirs, Command, Config,
ModuleSetFile, DirSet),

%% Make sure the CWD is reset properly; processing the dirs may have
%% caused it to change

@@ -131,27 +134,30 @@ process_dir(Dir, ParentConfig, Command, DirSet) ->
%% Check that this directory is not on the skip list
case is_skip_dir(Dir) of
true ->
%% Do not execute the command on the directory, as some module
%% as requested a skip on it.
%% Do not execute the command on the directory, as some
%% module as requested a skip on it.
?INFO("Skipping ~s in ~s\n", [Command, Dir]);

false ->
%% Get the list of plug-in modules from rebar.config. These modules are
%% processed LAST and do not participate in preprocess.
%% Get the list of plug-in modules from rebar.config. These
%% modules are processed LAST and do not participate
%% in preprocess.
{ok, PluginModules} = plugin_modules(Config),

%% Execute the current command on this directory
execute(Command, Modules ++ PluginModules, Config, ModuleSetFile)
execute(Command, Modules ++ PluginModules,
Config, ModuleSetFile)
end,

%% Mark the current directory as processed
DirSet3 = sets:add_element(Dir, DirSet2),

%% Invoke 'postprocess' on the modules -- this yields a list of other
%% Invoke 'postprocess' on the modules. This yields a list of other
%% directories that should be processed _after_ the current one.
Postdirs = acc_modules(Modules, postprocess, Config, ModuleSetFile),
?DEBUG("Postdirs: ~p\n", [Postdirs]),
DirSet4 = process_each(Postdirs, Command, Config, ModuleSetFile, DirSet3),
DirSet4 = process_each(Postdirs, Command, Config,
ModuleSetFile, DirSet3),

%% Make sure the CWD is reset properly; processing the dirs may have
%% caused it to change

@@ -220,11 +226,13 @@ execute(Command, Modules, Config, ModuleFile) ->
Dir = rebar_utils:get_cwd(),
?CONSOLE("==> ~s (~s)\n", [filename:basename(Dir), Command]),

%% Increment the count of operations, since some module responds to this command
%% Increment the count of operations, since some module
%% responds to this command
erlang:put(operations, erlang:get(operations) + 1),

%% Run the available modules
case catch(run_modules(TargetModules, Command, Config, ModuleFile)) of
case catch(run_modules(TargetModules, Command,
Config, ModuleFile)) of
ok ->
ok;
{error, failed} ->

@@ -165,21 +165,22 @@ get_cover_config(Config, Cwd) ->
false ->
"";
true ->
case filelib:fold_files(Cwd, ".*cover\.spec\$", true, fun collect_ct_specs/2, []) of
[] ->
?DEBUG("No cover spec found: ~s~n", [Cwd]),
"";
[Spec] ->
?DEBUG("Found cover file ~w~n", [Spec]),
" -cover " ++ Spec;
Specs ->
?ABORT("Multiple cover specs found: ~p~n", [Specs])
end
end.
case filelib:fold_files(Cwd, ".*cover\.spec\$",
true, fun collect_ct_specs/2, []) of
[] ->
?DEBUG("No cover spec found: ~s~n", [Cwd]),
"";
[Spec] ->
?DEBUG("Found cover file ~w~n", [Spec]),
" -cover " ++ Spec;
Specs ->
?ABORT("Multiple cover specs found: ~p~n", [Specs])
end
end.

collect_ct_specs(F, Acc) ->
%% Ignore any specs under the deps/ directory. Do this pulling the dirname off the
%% the F and then splitting it into a list.
%% Ignore any specs under the deps/ directory. Do this pulling
%% the dirname off the the F and then splitting it into a list.
Parts = filename:split(filename:dirname(F)),
case lists:member("deps", Parts) of
true ->

@@ -47,9 +47,9 @@
%% ===================================================================

preprocess(Config, _) ->
%% Side effect to set deps_dir globally for all dependencies from top level down.
%% Means the root deps_dir is honoured or the default used globally
%% since it will be set on the first time through here
%% Side effect to set deps_dir globally for all dependencies from
%% top level down. Means the root deps_dir is honoured or the default
%% used globally since it will be set on the first time through here
set_global_deps_dir(Config, rebar_config:get_global(deps_dir, [])),

%% Get the list of deps for the current working directory and identify those

@@ -69,8 +69,8 @@ preprocess(Config, _) ->
case rebar_config:get_global(skip_deps, false) of
"true" ->
lists:foreach(fun (#dep{dir = Dir}) ->
rebar_core:skip_dir(Dir)
end, AvailableDeps);
rebar_core:skip_dir(Dir)
end, AvailableDeps);
_ ->
ok
end,

@@ -100,9 +100,9 @@ compile(Config, AppFile) ->
ok;
{_, MissingDeps} ->
lists:foreach(fun (#dep{app=App, vsn_regex=Vsn, source=Src}) ->
?CONSOLE("Dependency not available: ~p-~s (~p)\n",
[App, Vsn, Src])
end, MissingDeps),
?CONSOLE("Dependency not available: "
"~p-~s (~p)\n", [App, Vsn, Src])
end, MissingDeps),
?FAIL
end.

@@ -148,7 +148,8 @@ compile(Config, AppFile) ->
%% need all deps in same dir and should be the one set by the root rebar.config
%% Sets a default if root config has no deps_dir set
set_global_deps_dir(Config, []) ->
rebar_config:set_global(deps_dir, rebar_config:get_local(Config, deps_dir, "deps"));
rebar_config:set_global(deps_dir,
rebar_config:get_local(Config, deps_dir, "deps"));
set_global_deps_dir(_Config, _DepsDir) ->
ok.

@@ -253,13 +254,14 @@ is_app_available(App, VsnRegex, Path) ->
{expected, VsnRegex}, {has, Vsn}}}}
end;
OtherApp ->
?WARN("~s has application id ~p; expected ~p\n", [AppFile, OtherApp, App]),
?WARN("~s has application id ~p; expected ~p\n",
[AppFile, OtherApp, App]),
{false, {name_mismatch,
{AppFile, {expected, App}, {has, OtherApp}}}}
end;
false ->
?WARN("Expected ~s to be an app dir (containing ebin/*.app), but no .app found.\n",
[Path]),
?WARN("Expected ~s to be an app dir (containing ebin/*.app), "
"but no .app found.\n", [Path]),
{false, {missing_app_file, Path}}
end.

@@ -267,12 +269,14 @@ use_source(Dep) ->
use_source(Dep, 3).

use_source(Dep, 0) ->
?ABORT("Failed to acquire source from ~p after 3 tries.\n", [Dep#dep.source]);
?ABORT("Failed to acquire source from ~p after 3 tries.\n",
[Dep#dep.source]);
use_source(Dep, Count) ->
case filelib:is_dir(Dep#dep.dir) of
true ->
%% Already downloaded -- verify the versioning matches up with our regex
case is_app_available(Dep#dep.app, Dep#dep.vsn_regex, Dep#dep.dir) of
%% Already downloaded -- verify the versioning matches the regex
case is_app_available(Dep#dep.app,
Dep#dep.vsn_regex, Dep#dep.dir) of
{true, _} ->
Dir = filename:join(Dep#dep.dir, "ebin"),
ok = filelib:ensure_dir(filename:join(Dir, "dummy")),

@@ -284,7 +288,7 @@ use_source(Dep, Count) ->
%% The app that was downloaded doesn't match up (or had
%% errors or something). For the time being, abort.
?ABORT("Dependency dir ~s failed application validation "
"with reason:~n~p.\n", [Dep#dep.dir, Reason])
"with reason:~n~p.\n", [Dep#dep.dir, Reason])
end;
false ->
?CONSOLE("Pulling ~p from ~p\n", [Dep#dep.app, Dep#dep.source]),

@@ -335,7 +339,8 @@ update_source(Dep) ->
update_source(AppDir, Dep#dep.source),
Dep;
false ->
?WARN("Skipping update for ~p: no VCS directory available!\n", [Dep]),
?WARN("Skipping update for ~p: "
"no VCS directory available!\n", [Dep]),
Dep
end.

@@ -390,13 +395,17 @@ required_scm_client_vsn(bzr) -> {2, 0};
required_scm_client_vsn(svn) -> {1, 6}.

scm_client_vsn(hg) ->
scm_client_vsn(rebar_utils:find_executable("hg"), " --version", "version (\\d+).(\\d+)");
scm_client_vsn(rebar_utils:find_executable("hg"), " --version",
"version (\\d+).(\\d+)");
scm_client_vsn(git) ->
scm_client_vsn(rebar_utils:find_executable("git"), " --version", "git version (\\d+).(\\d+)");
scm_client_vsn(rebar_utils:find_executable("git"), " --version",
"git version (\\d+).(\\d+)");
scm_client_vsn(bzr) ->
scm_client_vsn(rebar_utils:find_executable("bzr"), " --version", "Bazaar \\(bzr\\) (\\d+).(\\d+)");
scm_client_vsn(rebar_utils:find_executable("bzr"), " --version",
"Bazaar \\(bzr\\) (\\d+).(\\d+)");
scm_client_vsn(svn) ->
scm_client_vsn(rebar_utils:find_executable("svn"), " --version", "svn, version (\\d+).(\\d+)").
scm_client_vsn(rebar_utils:find_executable("svn"), " --version",
"svn, version (\\d+).(\\d+)").

has_vcs_dir(git, Dir) ->
filelib:is_dir(filename:join(Dir, ".git"));

@@ -31,15 +31,19 @@
%% <li>build-plt (essentially "dialyzer --build_plt -r <app_dirs>")</li>
%% <li>check-plt (essentially "dialyzer --check_plt")</li>
%% </ul>
%% A single option <code>plt</code> can be presented in the <code>dialyzer_opts</code>
%% options in <code>rebar.config</code>. If it is present, it is used as the PLT for the
%% supported commands. Should it not be present, then the default is <code>$HOME/.dialyzer_plt</code>.
%% @reference <a href="http://user.it.uu.se/~kostis/Papers/bugs05.pdf">Experience from developing the Dialyzer:
%% A static analysis tool detecting defects in Erlang applications</a>
%% @reference <a href="http://user.it.uu.se/~kostis/Papers/contracts.pdf">A Language for Specifying Type
%% Contracts in Erlang and its Interaction with Success Typings</a>
%% @reference <a href="http://user.it.uu.se/~kostis/Papers/wrangler.pdf">Gradual Typing of Erlang
%% Programs: A Wrangler Experience</a>
%% A single option <code>plt</code> can be presented in the
%% <code>dialyzer_opts</code> options in <code>rebar.config</code>. If it
%% is present, it is used as the PLT for the supported commands. Should it
%% not be present, then the default is <code>$HOME/.dialyzer_plt</code>.
%%
%% @reference <a href="http://user.it.uu.se/~kostis/Papers/bugs05.pdf">
%% Experience from developing the Dialyzer: A static analysis tool detecting
%% defects in Erlang applications</a>
%% @reference <a href="http://user.it.uu.se/~kostis/Papers/contracts.pdf">
%% A Language for Specifying Type Contracts in Erlang and its Interaction
%% with Success Typings</a>
%% @reference <a href="http://user.it.uu.se/~kostis/Papers/wrangler.pdf">Gradual
%% Typing of Erlang Programs: A Wrangler Experience</a>
%% @copyright 2010 Dave Smith
%% -------------------------------------------------------------------
-module(rebar_dialyzer).

@@ -67,12 +71,12 @@ dialyze(Config, File) ->
dialyzer_opts,
[])),
DialyzerOpts0 = case FromSrc of
true ->
[{files_rec, ["src"]}, {init_plt, Plt},
{from, src_code}];
false ->
[{files_rec, ["ebin"]}, {init_plt, Plt}]
end,
true ->
[{files_rec, ["src"]}, {init_plt, Plt},
{from, src_code}];
false ->
[{files_rec, ["ebin"]}, {init_plt, Plt}]
end,
WarnOpts = warnings(Config),
DialyzerOpts = case WarnOpts of
[] -> DialyzerOpts0;

@@ -150,7 +154,7 @@ app_dirs(Apps) ->
-spec output_warnings(Warnings::[warning()]) -> 'ok'.
output_warnings(Warnings) ->
lists:foreach(fun(Warning) ->
?CONSOLE("~s", [dialyzer:format_warning(Warning)])
?CONSOLE("~s", [dialyzer:format_warning(Warning)])
end, Warnings).

%% @doc If the plt option is present in rebar.config return its value, otherwise

@@ -27,10 +27,12 @@
%% @author Dave Smith <dizzyd@dizzyd.com>
%% @doc rebar_edoc supports the following command:
%% <ul>
%% <li>doc (essentially erl -noshell -run edoc_run application "'$(<app_name>)'"
%% <li>doc (essentially erl -noshell -run edoc_run application
%% "'$(<app_name>)'"
%% '"."' '[<options>]')</li>
%% </ul>
%% EDoc options can be given in the <code>edoc_opts</code> option in <code>rebar.config</code>.
%% EDoc options can be given in the <code>edoc_opts</code> option in
%% <code>rebar.config</code>.
%% @copyright 2010 Dave Smith
%% -------------------------------------------------------------------
-module(rebar_edoc).

@ -70,19 +70,19 @@
|
|||
-spec compile(Config::#config{}, AppFile::string()) -> 'ok'.
|
||||
compile(Config, _AppFile) ->
|
||||
rebar_base_compiler:run(Config,
|
||||
check_files(rebar_config:get_local(Config,
|
||||
xrl_first_files, [])),
|
||||
check_files(rebar_config:get_local(
|
||||
Config, xrl_first_files, [])),
|
||||
"src", ".xrl", "src", ".erl",
|
||||
fun compile_xrl/3),
|
||||
rebar_base_compiler:run(Config,
|
||||
check_files(rebar_config:get_local(Config,
|
||||
yrl_first_files, [])),
|
||||
check_files(rebar_config:get_local(
|
||||
Config, yrl_first_files, [])),
|
||||
"src", ".yrl", "src", ".erl",
|
||||
fun compile_yrl/3),
|
||||
doterl_compile(Config, "ebin"),
|
||||
rebar_base_compiler:run(Config,
|
||||
check_files(rebar_config:get_local(Config,
|
||||
mib_first_files, [])),
|
||||
check_files(rebar_config:get_local(
|
||||
Config, mib_first_files, [])),
|
||||
"mibs", ".mib", "priv/mibs", ".bin",
|
||||
fun compile_mib/3).
|
||||
|
||||
|
@ -132,19 +132,20 @@ doterl_compile(Config, OutDir, MoreSources) ->
|
|||
RestErls = [Source || Source <- gather_src(SrcDirs, []) ++ MoreSources,
|
||||
not lists:member(Source, FirstErls)],
|
||||
|
||||
% Split RestErls so that parse_transforms and behaviours are instead added
|
||||
% to erl_first_files, parse transforms first.
|
||||
% This should probably be somewhat combined with inspect_epp
|
||||
[ParseTransforms, Behaviours, OtherErls] = lists:foldl(fun(F, [A, B, C]) ->
|
||||
case compile_priority(F) of
|
||||
parse_transform ->
|
||||
[[F | A], B, C];
|
||||
behaviour ->
|
||||
[A, [F | B], C];
|
||||
_ ->
|
||||
[A, B, [F | C]]
|
||||
end
|
||||
end, [[], [], []], RestErls),
|
||||
%% Split RestErls so that parse_transforms and behaviours are instead added
|
||||
%% to erl_first_files, parse transforms first.
|
||||
%% This should probably be somewhat combined with inspect_epp
|
||||
[ParseTransforms, Behaviours, OtherErls] =
|
||||
lists:foldl(fun(F, [A, B, C]) ->
|
||||
case compile_priority(F) of
|
||||
parse_transform ->
|
||||
[[F | A], B, C];
|
||||
behaviour ->
|
||||
[A, [F | B], C];
|
||||
_ ->
|
||||
[A, B, [F | C]]
|
||||
end
|
||||
end, [[], [], []], RestErls),
|
||||
|
||||
NewFirstErls = FirstErls ++ ParseTransforms ++ Behaviours,
|
||||
|
||||
|
@ -167,9 +168,11 @@ doterl_compile(Config, OutDir, MoreSources) ->
|
|||
-spec include_path(Source::string(), Config::#config{}) -> [string(), ...].
|
||||
include_path(Source, Config) ->
|
||||
ErlOpts = rebar_config:get(Config, erl_opts, []),
|
||||
["include", filename:dirname(Source)] ++ proplists:get_all_values(i, ErlOpts).
|
||||
["include", filename:dirname(Source)]
|
||||
++ proplists:get_all_values(i, ErlOpts).
|
||||
|
||||
-spec inspect(Source::string(), IncludePath::[string(),...]) -> {string(), [string()]}.
|
||||
-spec inspect(Source::string(),
|
||||
IncludePath::[string(),...]) -> {string(), [string()]}.
|
||||
inspect(Source, IncludePath) ->
|
||||
ModuleDefault = filename:basename(Source, ".erl"),
|
||||
case epp:open(Source, IncludePath) of
|
||||
|
@ -180,7 +183,8 @@ inspect(Source, IncludePath) ->
|
|||
{ModuleDefault, []}
|
||||
end.
|
||||
|
||||
-spec inspect_epp(Epp::pid(), Source::string(), Module::string(), Includes::[string()]) -> {string(), [string()]}.
|
||||
-spec inspect_epp(Epp::pid(), Source::string(), Module::string(),
|
||||
Includes::[string()]) -> {string(), [string()]}.
|
||||
inspect_epp(Epp, Source, Module, Includes) ->
|
||||
case epp:parse_erl_form(Epp) of
|
||||
{ok, {attribute, _, module, ModInfo}} ->
|
||||
|
@ -190,13 +194,15 @@ inspect_epp(Epp, Source, Module, Includes) ->
|
|||
ActualModuleStr = atom_to_list(ActualModule);
|
||||
%% Packag-ized module name, list of atoms
|
||||
ActualModule when is_list(ActualModule) ->
|
||||
ActualModuleStr = string:join([atom_to_list(P) || P <- ActualModule], ".");
|
||||
ActualModuleStr = string:join([atom_to_list(P) ||
|
||||
P <- ActualModule], ".");
|
||||
%% Parameterized module name, single atom
|
||||
{ActualModule, _} when is_atom(ActualModule) ->
|
||||
ActualModuleStr = atom_to_list(ActualModule);
|
||||
%% Parameterized and packagized module name, list of atoms
|
||||
{ActualModule, _} when is_list(ActualModule) ->
|
||||
ActualModuleStr = string:join([atom_to_list(P) || P <- ActualModule], ".")
|
||||
ActualModuleStr = string:join([atom_to_list(P) ||
|
||||
P <- ActualModule], ".")
|
||||
end,
|
||||
inspect_epp(Epp, Source, ActualModuleStr, Includes);
|
||||
{ok, {attribute, 1, file, {Module, 1}}} ->
|
||||
|
@ -212,14 +218,16 @@ inspect_epp(Epp, Source, Module, Includes) ->
|
|||
inspect_epp(Epp, Source, Module, Includes)
|
||||
end.
|
||||
|
||||
-spec needs_compile(Source::string(), Target::string(), Hrls::[string()]) -> boolean().
|
||||
-spec needs_compile(Source::string(), Target::string(),
|
||||
Hrls::[string()]) -> boolean().
|
||||
needs_compile(Source, Target, Hrls) ->
|
||||
TargetLastMod = filelib:last_modified(Target),
|
||||
lists:any(fun(I) -> TargetLastMod < filelib:last_modified(I) end,
|
||||
[Source] ++ Hrls).
|
||||
|
||||
-spec internal_erl_compile(Source::string(), Config::#config{},
|
||||
Outdir::string(), ErlOpts::list()) -> 'ok' | 'skipped'.
|
||||
Outdir::string(),
|
||||
ErlOpts::list()) -> 'ok' | 'skipped'.
|
||||
internal_erl_compile(Source, Config, Outdir, ErlOpts) ->
|
||||
%% Determine the target name and includes list by inspecting the source file
|
||||
{Module, Hrls} = inspect(Source, include_path(Source, Config)),
|
||||
|
@ -238,7 +246,8 @@ internal_erl_compile(Source, Config, Outdir, ErlOpts) ->
|
|||
{ok, _, []} ->
|
||||
ok;
|
||||
{ok, _, _Warnings} ->
|
||||
%% We got at least one warning -- if fail_on_warning is in options, fail
|
||||
%% We got at least one warning -- if fail_on_warning
|
||||
%% is in options, fail
|
||||
case lists:member(fail_on_warning, Opts) of
|
||||
true ->
|
||||
%% remove target to prevent overlooking this failure
|
||||
|
@ -254,7 +263,8 @@ internal_erl_compile(Source, Config, Outdir, ErlOpts) ->
|
|||
skipped
|
||||
end.
|
||||
|
||||
-spec compile_mib(Source::string(), Target::string(), Config::#config{}) -> 'ok'.
|
||||
-spec compile_mib(Source::string(), Target::string(),
|
||||
Config::#config{}) -> 'ok'.
|
||||
compile_mib(Source, Target, Config) ->
|
||||
ok = rebar_utils:ensure_dir(Target),
|
||||
Opts = [{outdir, "priv/mibs"}, {i, ["priv/mibs"]}] ++
|
||||
|
@ -266,13 +276,15 @@ compile_mib(Source, Target, Config) ->
|
|||
?FAIL
|
||||
end.
|
||||
|
||||
-spec compile_xrl(Source::string(), Target::string(), Config::#config{}) -> 'ok'.
|
||||
-spec compile_xrl(Source::string(), Target::string(),
|
||||
Config::#config{}) -> 'ok'.
|
||||
compile_xrl(Source, Target, Config) ->
|
||||
Opts = [{scannerfile, Target}, {return, true}
|
||||
|rebar_config:get(Config, xrl_opts, [])],
|
||||
compile_xrl_yrl(Source, Target, Opts, leex).
|
||||
|
||||
-spec compile_yrl(Source::string(), Target::string(), Config::#config{}) -> 'ok'.
|
||||
-spec compile_yrl(Source::string(), Target::string(),
|
||||
Config::#config{}) -> 'ok'.
|
||||
compile_yrl(Source, Target, Config) ->
|
||||
Opts = [{parserfile, Target}, {return, true}
|
||||
|rebar_config:get(Config, yrl_opts, [])],
|
||||
|
@ -315,7 +327,8 @@ src_dirs(SrcDirs) ->
|
|||
dirs(Dir) ->
|
||||
[F || F <- filelib:wildcard(filename:join([Dir, "*"])), filelib:is_dir(F)].
|
||||
|
||||
-spec delete_dir(Dir::string(), Subdirs::[string()]) -> 'ok' | {'error', atom()}.
|
||||
-spec delete_dir(Dir::string(),
|
||||
Subdirs::[string()]) -> 'ok' | {'error', atom()}.
|
||||
delete_dir(Dir, []) ->
|
||||
file:del_dir(Dir);
|
||||
delete_dir(Dir, Subdirs) ->
|
||||
|
@ -330,23 +343,24 @@ compile_priority(File) ->
|
|||
normal; % couldn't parse the file, default priority
|
||||
{ok, Trees} ->
|
||||
F2 = fun({tree,arity_qualifier,_,
|
||||
{arity_qualifier,{tree,atom,_,behaviour_info},
|
||||
{tree,integer,_,1}}}, _) ->
|
||||
behaviour;
|
||||
({tree,arity_qualifier,_,
|
||||
{arity_qualifier,{tree,atom,_,parse_transform},
|
||||
{tree,integer,_,2}}}, _) ->
|
||||
parse_transform;
|
||||
(_, Acc) ->
|
||||
Acc
|
||||
end,
|
||||
{arity_qualifier,{tree,atom,_,behaviour_info},
|
||||
{tree,integer,_,1}}}, _) ->
|
||||
behaviour;
|
||||
({tree,arity_qualifier,_,
|
||||
{arity_qualifier,{tree,atom,_,parse_transform},
|
||||
{tree,integer,_,2}}}, _) ->
|
||||
parse_transform;
|
||||
(_, Acc) ->
|
||||
Acc
|
||||
end,
|
||||
|
||||
F = fun({tree, attribute, _, {attribute, {tree, atom, _, export},
|
||||
[{tree, list, _, {list, List, none}}]}}, Acc) ->
|
||||
lists:foldl(F2, Acc, List);
|
||||
(_, Acc) ->
|
||||
Acc
|
||||
end,
|
||||
F = fun({tree, attribute, _,
|
||||
{attribute, {tree, atom, _, export},
|
||||
[{tree, list, _, {list, List, none}}]}}, Acc) ->
|
||||
lists:foldl(F2, Acc, List);
|
||||
(_, Acc) ->
|
||||
Acc
|
||||
end,
|
||||
|
||||
lists:foldl(F, normal, Trees)
|
||||
end.
|
||||
|
|
|
@@ -83,8 +83,10 @@
compile(Config, _AppFile) ->
DtlOpts = erlydtl_opts(Config),
rebar_base_compiler:run(Config, [],
option(doc_root, DtlOpts), option(source_ext, DtlOpts),
option(out_dir, DtlOpts), option(module_ext, DtlOpts) ++ ".beam",
option(doc_root, DtlOpts),
option(source_ext, DtlOpts),
option(out_dir, DtlOpts),
option(module_ext, DtlOpts) ++ ".beam",
fun compile_dtl/3, [{check_last_mod, false}]).


@@ -102,18 +104,18 @@ default(doc_root) -> "templates";
default(out_dir) -> "ebin";
default(source_ext) -> ".dtl";
default(module_ext) -> "_dtl";
default(custom_tags_dir) -> "".
default(custom_tags_dir) -> "".

compile_dtl(Source, Target, Config) ->
case code:which(erlydtl) of
non_existing ->
?CONSOLE(
"~n===============================================~n"
" You need to install erlydtl to compile DTL templates~n"
" Download the latest tarball release from github~n"
" http://code.google.com/p/erlydtl/~n"
" and install it into your erlang library dir~n"
"===============================================~n~n", []),
<<"~n===============================================~n"
" You need to install erlydtl to compile DTL templates~n"
" Download the latest tarball release from github~n"
" http://code.google.com/p/erlydtl/~n"
" and install it into your erlang library dir~n"
"===============================================~n~n">>, []),
?FAIL;
_ ->
case needs_compile(Source, Target, Config) of

|
|
@ -48,7 +48,8 @@ escriptize(Config, AppFile) ->
|
|||
%% Look for a list of other applications (dependencies) to include
|
||||
%% in the output file. We then use the .app files for each of these
|
||||
%% to pull in all the .beam files.
|
||||
InclBeams = get_app_beams(rebar_config:get_local(Config, escript_incl_apps, []), []),
|
||||
InclBeams = get_app_beams(
|
||||
rebar_config:get_local(Config, escript_incl_apps, []), []),
|
||||
|
||||
%% Construct the archive of everything in ebin/ dir -- put it on the
|
||||
%% top-level of the zip file so that code loading works properly.
|
||||
|
@ -62,11 +63,13 @@ escriptize(Config, AppFile) ->
|
|||
ok ->
|
||||
ok;
|
||||
{error, WriteError} ->
|
||||
?ERROR("Failed to write ~p script: ~p\n", [AppName, WriteError]),
|
||||
?ERROR("Failed to write ~p script: ~p\n",
|
||||
[AppName, WriteError]),
|
||||
?FAIL
|
||||
end;
|
||||
{error, ZipError} ->
|
||||
?ERROR("Failed to construct ~p escript: ~p\n", [AppName, ZipError]),
|
||||
?ERROR("Failed to construct ~p escript: ~p\n",
|
||||
[AppName, ZipError]),
|
||||
?FAIL
|
||||
end,
|
||||
|
||||
|
@ -94,9 +97,11 @@ get_app_beams([], Acc) ->
|
|||
get_app_beams([App | Rest], Acc) ->
|
||||
case code:lib_dir(App, ebin) of
|
||||
{error, bad_name} ->
|
||||
?ABORT("Failed to get ebin/ directory for ~p escript_incl_apps.", [App]);
|
||||
?ABORT("Failed to get ebin/ directory for "
|
||||
"~p escript_incl_apps.", [App]);
|
||||
Path ->
|
||||
Acc2 = [{filename:join([App, ebin, F]), file_contents(filename:join(Path, F))} ||
|
||||
Acc2 = [{filename:join([App, ebin, F]),
|
||||
file_contents(filename:join(Path, F))} ||
|
||||
F <- filelib:wildcard("*", Path)],
|
||||
get_app_beams(Rest, Acc2 ++ Acc)
|
||||
end.
|
||||
|
|
|
@ -36,8 +36,9 @@
|
|||
%% <li>suite="foo"" - runs test/foo_tests.erl</li>
|
||||
%% </ul>
|
||||
%% Additionally, for projects that have separate folders for the core
|
||||
%% implementation, and for the unit tests, then the following <code>rebar.config</code>
|
||||
%% option can be provided: <code>{eunit_compile_opts, [{src_dirs, ["dir"]}]}.</code>.
|
||||
%% implementation, and for the unit tests, then the following
|
||||
%% <code>rebar.config</code> option can be provided:
|
||||
%% <code>{eunit_compile_opts, [{src_dirs, ["dir"]}]}.</code>.
|
||||
%% @copyright 2009, 2010 Dave Smith
|
||||
%% -------------------------------------------------------------------
|
||||
-module(rebar_eunit).
|
||||
|
@ -78,16 +79,17 @@ eunit(Config, AppFile) ->
|
|||
ok = filelib:ensure_dir(eunit_dir() ++ "/foo"),
|
||||
ok = filelib:ensure_dir(ebin_dir() ++ "/foo"),
|
||||
|
||||
%% Setup code path prior to compilation so that parse_transforms and the like
|
||||
%% work properly. Also, be sure to add ebin_dir() to the END of the code path
|
||||
%% so that we don't have to jump through hoops to access the .app file
|
||||
%% Setup code path prior to compilation so that parse_transforms
|
||||
%% and the like work properly. Also, be sure to add ebin_dir()
|
||||
%% to the END of the code path so that we don't have to jump
|
||||
%% through hoops to access the .app file
|
||||
CodePath = code:get_path(),
|
||||
true = code:add_patha(eunit_dir()),
|
||||
true = code:add_pathz(ebin_dir()),
|
||||
|
||||
%% Obtain all the test modules for inclusion in the compile stage.
|
||||
%% Notice: this could also be achieved with the following rebar.config option:
|
||||
%% {eunit_compile_opts, [{src_dirs, ["test"]}]}
|
||||
%% Notice: this could also be achieved with the following
|
||||
%% rebar.config option: {eunit_compile_opts, [{src_dirs, ["test"]}]}
|
||||
TestErls = rebar_utils:find_files("test", ".*\\.erl\$"),
|
||||
|
||||
%% Copy source files to eunit dir for cover in case they are not directly
|
||||
|
@ -99,7 +101,8 @@ eunit(Config, AppFile) ->
|
|||
%% Compile erlang code to ?EUNIT_DIR, using a tweaked config
|
||||
%% with appropriate defines for eunit, and include all the test modules
|
||||
%% as well.
|
||||
rebar_erlc_compiler:doterl_compile(eunit_config(Config), ?EUNIT_DIR, TestErls),
|
||||
rebar_erlc_compiler:doterl_compile(eunit_config(Config),
|
||||
?EUNIT_DIR, TestErls),
|
||||
|
||||
%% Build a list of all the .beams in ?EUNIT_DIR -- use this for cover
|
||||
%% and eunit testing. Normally you can just tell cover and/or eunit to
|
||||
|
@ -111,7 +114,7 @@ eunit(Config, AppFile) ->
|
|||
string:str(N, "_tests.beam") =:= 0],
|
||||
Modules = [rebar_utils:beam_to_mod(?EUNIT_DIR, N) || N <- BeamFiles],
|
||||
SrcModules = [rebar_utils:erl_to_mod(M) || M <- SrcErls],
|
||||
|
||||
|
||||
cover_init(Config, BeamFiles),
|
||||
EunitResult = perform_eunit(Config, Modules),
|
||||
perform_cover(Config, Modules, SrcModules),
|
||||
|
@ -205,7 +208,7 @@ is_quickcheck_avail() ->
|
|||
end.
|
||||
|
||||
perform_cover(Config, BeamFiles, SrcModules) ->
|
||||
perform_cover(rebar_config:get(Config, cover_enabled, false),
|
||||
perform_cover(rebar_config:get(Config, cover_enabled, false),
|
||||
Config, BeamFiles, SrcModules).
|
||||
|
||||
perform_cover(false, _Config, _BeamFiles, _SrcModules) ->
|
||||
|
@ -227,7 +230,8 @@ cover_analyze(Config, Modules, SrcModules) ->
|
|||
|
||||
%% Write coverage details for each file
|
||||
lists:foreach(fun({M, _, _}) ->
|
||||
{ok, _} = cover:analyze_to_file(M, cover_file(M), [html])
|
||||
{ok, _} = cover:analyze_to_file(M, cover_file(M),
|
||||
[html])
|
||||
end, Coverage),
|
||||
|
||||
Index = filename:join([rebar_utils:get_cwd(), ?EUNIT_DIR, "index.html"]),
|
||||
|
@ -260,7 +264,12 @@ cover_init(true, BeamFiles) ->
|
|||
|
||||
%% It's not an error for cover compilation to fail partially,
|
||||
%% but we do want to warn about them
|
||||
_ = [?CONSOLE("Cover compilation warning for ~p: ~p", [Beam, Desc]) || {Beam, {error, Desc}} <- Compiled],
|
||||
PrintWarning =
|
||||
fun(Beam, Desc) ->
|
||||
?CONSOLE("Cover compilation warning for ~p: ~p",
|
||||
[Beam, Desc])
|
||||
end,
|
||||
_ = [PrintWarning(Beam, Desc) || {Beam, {error, Desc}} <- Compiled],
|
||||
ok
|
||||
end;
|
||||
cover_init(Config, BeamFiles) ->
|
||||
|
@ -287,18 +296,19 @@ is_eunitized(Mod) ->
|
|||
|
||||
has_eunit_test_fun(Mod) ->
|
||||
[F || {exports, Funs} <- Mod:module_info(),
|
||||
{F, 0} <- Funs, F =:= test] =/= [].
|
||||
{F, 0} <- Funs, F =:= test] =/= [].
|
||||
|
||||
has_header(Mod, Header) ->
|
||||
Mod1 = case code:which(Mod) of
|
||||
cover_compiled ->
|
||||
Mod1 = case code:which(Mod) of
|
||||
cover_compiled ->
|
||||
{file, File} = cover:is_compiled(Mod),
|
||||
File;
|
||||
non_existing -> Mod;
|
||||
preloaded -> Mod;
|
||||
L -> L
|
||||
end,
|
||||
{ok, {_, [{abstract_code, {_, AC}}]}} = beam_lib:chunks(Mod1, [abstract_code]),
|
||||
{ok, {_, [{abstract_code, {_, AC}}]}} = beam_lib:chunks(Mod1,
|
||||
[abstract_code]),
|
||||
[F || {attribute, 1, file, {F, 1}} <- AC,
|
||||
string:str(F, Header) =/= 0] =/= [].
|
||||
|
||||
|
@ -310,7 +320,7 @@ align_notcovered_count(Module, Covered, NotCovered, true) ->
|
|||
cover_write_index(Coverage, SrcModules) ->
|
||||
{ok, F} = file:open(filename:join([?EUNIT_DIR, "index.html"]), [write]),
|
||||
ok = file:write(F, "<html><head><title>Coverage Summary</title></head>\n"),
|
||||
IsSrcCoverage = fun({Mod,_C,_N}) -> lists:member(Mod, SrcModules) end,
|
||||
IsSrcCoverage = fun({Mod,_C,_N}) -> lists:member(Mod, SrcModules) end,
|
||||
{SrcCoverage, TestCoverage} = lists:partition(IsSrcCoverage, Coverage),
|
||||
cover_write_index_section(F, "Source", SrcCoverage),
|
||||
cover_write_index_section(F, "Test", TestCoverage),
|
||||
|
@ -331,9 +341,13 @@ cover_write_index_section(F, SectionName, Coverage) ->
|
|||
ok = file:write(F, ?FMT("<h3>Total: ~s</h3>\n", [TotalCoverage])),
|
||||
ok = file:write(F, "<table><tr><th>Module</th><th>Coverage %</th></tr>\n"),
|
||||
|
||||
FmtLink =
|
||||
fun(Module, Cov, NotCov) ->
|
||||
?FMT("<tr><td><a href='~s.COVER.html'>~s</a></td><td>~s</td>\n",
|
||||
[Module, Module, percentage(Cov, NotCov)])
|
||||
end,
|
||||
lists:foreach(fun({Module, Cov, NotCov}) ->
|
||||
ok = file:write(F, ?FMT("<tr><td><a href='~s.COVER.html'>~s</a></td><td>~s</td>\n",
|
||||
[Module, Module, percentage(Cov, NotCov)]))
|
||||
ok = file:write(F, FmtLink(Module, Cov, NotCov))
|
||||
end, Coverage),
|
||||
ok = file:write(F, "</table>\n").
|
||||
|
||||
|
@ -345,13 +359,13 @@ cover_print_coverage(Coverage) ->
|
|||
|
||||
%% Determine the longest module name for right-padding
|
||||
Width = lists:foldl(fun({Mod, _, _}, Acc) ->
|
||||
case length(atom_to_list(Mod)) of
|
||||
N when N > Acc ->
|
||||
N;
|
||||
_ ->
|
||||
Acc
|
||||
end
|
||||
end, 0, Coverage) * -1,
|
||||
case length(atom_to_list(Mod)) of
|
||||
N when N > Acc ->
|
||||
N;
|
||||
_ ->
|
||||
Acc
|
||||
end
|
||||
end, 0, Coverage) * -1,
|
||||
|
||||
%% Print the output the console
|
||||
?CONSOLE("~nCode Coverage:~n", []),
|
||||
|
|
|
@@ -132,7 +132,7 @@ xcopy_win32(Source,Dest)->
end.

cp_r_win32({true, SourceDir}, {true, DestDir}) ->
% from directory to directory
%% from directory to directory
SourceBase = filename:basename(SourceDir),
ok = case file:make_dir(filename:join(DestDir, SourceBase)) of
{error, eexist} -> ok;

@@ -140,10 +140,10 @@ cp_r_win32({true, SourceDir}, {true, DestDir}) ->
end,
ok = xcopy_win32(SourceDir, filename:join(DestDir, SourceBase));
cp_r_win32({false, Source} = S,{true, DestDir}) ->
% from file to directory
%% from file to directory
cp_r_win32(S, {false, filename:join(DestDir, filename:basename(Source))});
cp_r_win32({false, Source},{false, Dest}) ->
% from file to file
%% from file to file
{ok,_} = file:copy(Source, Dest),
ok;
cp_r_win32(Source,Dest) ->

|
|
@@ -49,12 +49,13 @@ compile(Config, _AppFile) ->
compile_lfe(Source, _Target, Config) ->
case code:which(lfe_comp) of
non_existing ->
?CONSOLE("~n===============================================~n" ++
" You need to install LFE to compile LFE source~n" ++
"Download the latest tarball release from github~n" ++
" http://github.com/rvirding/lfe/downloads~n" ++
" and install it into your erlang library dir~n" ++
"===============================================~n~n", []),
?CONSOLE(
<<"~n===============================================~n"
" You need to install LFE to compile LFE source files~n"
"Download the latest tarball release from github~n"
" https://github.com/rvirding/lfe/downloads~n"
" and install it into your erlang library dir~n"
"===============================================~n~n">>, []),
?FAIL;
_ ->
Opts = [{i, "include"}, {outdir, "ebin"}, report, return] ++

|
|
@@ -81,6 +81,3 @@ log_prefix(debug) -> "DEBUG: ";
log_prefix(info) -> "INFO: ";
log_prefix(warn) -> "WARN: ";
log_prefix(error) -> "ERROR: ".

|
|
@ -27,7 +27,7 @@
|
|||
|
||||
%% The rebar_neotoma module is a plugin for rebar that compiles
|
||||
%% neotoma peg files. By default, it compiles all src/*.peg to src/*.erl
|
||||
%%
|
||||
%%
|
||||
%% Configuration options should be placed in rebar.config under
|
||||
%% neotoma_opts. Available options include:
|
||||
%%
|
||||
|
@ -52,9 +52,10 @@ compile(Config, _AppFile) ->
|
|||
NeoOpts = neotoma_opts(Config),
|
||||
rebar_base_compiler:run(Config, [],
|
||||
option(doc_root, NeoOpts), ".peg",
|
||||
option(out_dir, NeoOpts), option(module_ext, NeoOpts) ++ ".beam",
|
||||
option(out_dir, NeoOpts),
|
||||
option(module_ext, NeoOpts) ++ ".beam",
|
||||
fun compile_neo/3, [{check_last_mod,false}]).
|
||||
|
||||
|
||||
%% ============================================================================
|
||||
%% Public API
|
||||
%% ============================================================================
|
||||
|
@ -71,47 +72,48 @@ default(module_ext) -> "";
|
|||
default(source_ext) -> ".peg".
|
||||
|
||||
compile_neo(Source, Target, Config) ->
|
||||
case code:which(neotoma) of
|
||||
non_existing ->
|
||||
?CONSOLE(
|
||||
"~n===============================================~n"
|
||||
" You need to install neotoma to compile PEG grammars~n"
|
||||
" Download the latest tarball release from github~n"
|
||||
" http://github.com/seancribbs/neotoma~n"
|
||||
" and install it into your erlang library dir~n"
|
||||
"===============================================~n~n", []),
|
||||
?FAIL;
|
||||
_ ->
|
||||
case needs_compile(Source, Target, Config) of
|
||||
true ->
|
||||
do_compile(Source, Target, Config);
|
||||
false ->
|
||||
skipped
|
||||
end
|
||||
end.
|
||||
|
||||
case code:which(neotoma) of
|
||||
non_existing ->
|
||||
?CONSOLE(
|
||||
<<"~n===============================================~n"
|
||||
" You need to install neotoma to compile PEG grammars~n"
|
||||
" Download the latest tarball release from github~n"
|
||||
" https://github.com/seancribbs/neotoma~n"
|
||||
" and install it into your erlang library dir~n"
|
||||
"===============================================~n~n">>, []),
|
||||
?FAIL;
|
||||
_ ->
|
||||
case needs_compile(Source, Target, Config) of
|
||||
true ->
|
||||
do_compile(Source, Target, Config);
|
||||
false ->
|
||||
skipped
|
||||
end
|
||||
end.
|
||||
|
||||
do_compile(Source, _Target, Config) ->
|
||||
%% TODO: Check last mod on target and referenced DTLs here..
|
||||
NeoOpts = neotoma_opts(Config),
|
||||
%% ensure that doc_root and out_dir are defined,
|
||||
%% using defaults if necessary
|
||||
Opts = [{output, option(out_dir, NeoOpts)},
|
||||
{module, list_to_atom(filename:basename(Source, ".peg") ++ option(module_ext, NeoOpts))}],
|
||||
{module, list_to_atom(filename:basename(Source, ".peg")
|
||||
++ option(module_ext, NeoOpts))}],
|
||||
case neotoma:file(Source, Opts ++ NeoOpts) of
|
||||
ok ->
|
||||
ok ->
|
||||
ok;
|
||||
Reason ->
|
||||
?CONSOLE("Compiling peg ~s failed:~n ~p~n",
|
||||
[Source, Reason]),
|
||||
?FAIL
|
||||
end.
|
||||
|
||||
|
||||
needs_compile(Source, Target, Config) ->
|
||||
LM = filelib:last_modified(Target),
|
||||
LM < filelib:last_modified(Source) orelse
|
||||
lists:any(fun(D) -> LM < filelib:last_modified(D) end,
|
||||
referenced_pegs(Source, Config)).
|
||||
|
||||
|
||||
referenced_pegs(Source, Config) ->
|
||||
Set = referenced_pegs1([Source], Config,
|
||||
sets:add_element(Source, sets:new())),
|
||||
|
|
|
@ -40,11 +40,11 @@ compile(Config, File) ->
|
|||
%% written out as a ebin/*.app file. That resulting file will then
|
||||
%% be validated as usual.
|
||||
AppFile = case rebar_app_utils:is_app_src(File) of
|
||||
true ->
|
||||
preprocess(Config, File);
|
||||
false ->
|
||||
File
|
||||
end,
|
||||
true ->
|
||||
preprocess(Config, File);
|
||||
false ->
|
||||
File
|
||||
end,
|
||||
|
||||
%% Load the app file and validate it.
|
||||
case rebar_app_utils:load_app_file(AppFile) of
|
||||
|
@ -52,11 +52,12 @@ compile(Config, File) ->
|
|||
validate_name(AppName, AppFile),
|
||||
|
||||
%% In general, the list of modules is an important thing to validate
|
||||
%% for compliance with OTP guidelines and upgrade procedures. However,
|
||||
%% some people prefer not to validate this list.
|
||||
%% for compliance with OTP guidelines and upgrade procedures.
|
||||
%% However, some people prefer not to validate this list.
|
||||
case rebar_config:get_local(Config, validate_app_modules, true) of
|
||||
true ->
|
||||
validate_modules(AppName, proplists:get_value(modules, AppData));
|
||||
validate_modules(AppName,
|
||||
proplists:get_value(modules, AppData));
|
||||
false ->
|
||||
ok
|
||||
end;
|
||||
|
@ -102,13 +103,15 @@ preprocess(Config, AppSrcFile) ->
|
|||
AppFile = rebar_app_utils:app_src_to_app(AppSrcFile),
|
||||
ok = file:write_file(AppFile, Spec),
|
||||
|
||||
%% Make certain that the ebin/ directory is available on the code path
|
||||
%% Make certain that the ebin/ directory is available
|
||||
%% on the code path
|
||||
true = code:add_path(filename:absname(filename:dirname(AppFile))),
|
||||
|
||||
AppFile;
|
||||
|
||||
{error, Reason} ->
|
||||
?ABORT("Failed to read ~s for preprocessing: ~p\n", [AppSrcFile, Reason])
|
||||
?ABORT("Failed to read ~s for preprocessing: ~p\n",
|
||||
[AppSrcFile, Reason])
|
||||
end.
|
||||
|
||||
load_app_vars(Config) ->
|
||||
|
@ -129,23 +132,25 @@ apply_app_vars([{Key, Value} | Rest], AppData) ->
|
|||
apply_app_vars(Rest, AppData2).
|
||||
|
||||
validate_name(AppName, File) ->
|
||||
%% Convert the .app file name to an atom -- check it against the identifier within the file
|
||||
%% Convert the .app file name to an atom -- check it against the
|
||||
%% identifier within the file
|
||||
ExpApp = list_to_atom(filename:basename(File, ".app")),
|
||||
case ExpApp == AppName of
|
||||
true ->
|
||||
ok;
|
||||
false ->
|
||||
?ERROR("Invalid ~s: name of application (~p) must match filename.\n",
|
||||
[File, AppName]),
|
||||
?ERROR("Invalid ~s: name of application (~p) "
|
||||
"must match filename.\n", [File, AppName]),
|
||||
?FAIL
|
||||
end.
|
||||
|
||||
validate_modules(AppName, undefined) ->
|
||||
?ERROR("Missing modules declaration in~p.app:\n", [AppName]),
|
||||
?FAIL;
|
||||
?ERROR("Missing modules declaration in~p.app:\n", [AppName]),
|
||||
?FAIL;
|
||||
|
||||
validate_modules(AppName, Mods) ->
|
||||
%% Construct two sets -- one for the actual .beam files in ebin/ and one for the modules
|
||||
%% Construct two sets -- one for the actual .beam files in ebin/
|
||||
%% and one for the modules
|
||||
%% listed in the .app file
|
||||
EbinSet = ordsets:from_list(ebin_modules()),
|
||||
ModSet = ordsets:from_list(Mods),
|
||||
|
@ -155,9 +160,10 @@ validate_modules(AppName, Mods) ->
|
|||
[] ->
|
||||
ok;
|
||||
MissingBeams ->
|
||||
Msg1 = lists:flatten([io_lib:format("\t* ~p\n", [M]) || M <- MissingBeams]),
|
||||
?ERROR("One or more modules listed in ~p.app are not present in ebin/*.beam:\n~s",
|
||||
[AppName, Msg1]),
|
||||
Msg1 = lists:flatten([io_lib:format("\t* ~p\n", [M]) ||
|
||||
M <- MissingBeams]),
|
||||
?ERROR("One or more modules listed in ~p.app are not "
|
||||
"present in ebin/*.beam:\n~s", [AppName, Msg1]),
|
||||
?FAIL
|
||||
end,
|
||||
|
||||
|
@ -166,11 +172,13 @@ validate_modules(AppName, Mods) ->
|
|||
[] ->
|
||||
ok;
|
||||
MissingMods ->
|
||||
Msg2 = lists:flatten([io_lib:format("\t* ~p\n", [M]) || M <- MissingMods]),
|
||||
?ERROR("One or more .beam files exist that are not listed in ~p.app:\n~s",
|
||||
[AppName, Msg2]),
|
||||
Msg2 = lists:flatten([io_lib:format("\t* ~p\n", [M]) ||
|
||||
M <- MissingMods]),
|
||||
?ERROR("One or more .beam files exist that are not "
|
||||
"listed in ~p.app:\n~s", [AppName, Msg2]),
|
||||
?FAIL
|
||||
end.
|
||||
|
||||
ebin_modules() ->
|
||||
lists:sort([rebar_utils:beam_to_mod("ebin", N) || N <- rebar_utils:beams("ebin")]).
|
||||
lists:sort([rebar_utils:beam_to_mod("ebin", N) ||
|
||||
N <- rebar_utils:beams("ebin")]).
|
||||
|
|
|
@ -43,11 +43,14 @@
|
|||
%% {Regex, String} wherein Regex is a regular expression
|
||||
%% that is checked against the system architecture.
|
||||
%%
|
||||
%% * so_specs - Erlang list of tuples of the form {"priv/so_name.so", ["c_src/object_file_name.o"]} useful for
|
||||
%% building multiple *.so files.
|
||||
%% * so_specs - Erlang list of tuples of the form
|
||||
%% {"priv/so_name.so", ["c_src/object_file_name.o"]}
|
||||
%% useful for building multiple *.so files.
|
||||
%%
|
||||
%% * port_envs - Erlang list of key/value pairs which will control the environment when
|
||||
%% running the compiler and linker. By default, the following variables
|
||||
%% * port_envs - Erlang list of key/value pairs which will control
|
||||
%% the environment when running the compiler and linker.
|
||||
%%
|
||||
%% By default, the following variables
|
||||
%% are defined:
|
||||
%% CC - C compiler
|
||||
%% CXX - C++ compiler
|
||||
|
@ -59,67 +62,85 @@
|
|||
%% DRV_CFLAGS - flags that will be used for compiling the driver
|
||||
%% DRV_LDFLAGS - flags that will be used for linking the driver
|
||||
%%
|
||||
%% Note that if you wish to extend (vs. replace) these variables, you MUST
|
||||
%% include a shell-style reference in your definition. E.g. to extend CFLAGS,
|
||||
%% do something like:
|
||||
%% Note that if you wish to extend (vs. replace) these variables,
|
||||
%% you MUST include a shell-style reference in your definition.
|
||||
%% e.g. to extend CFLAGS, do something like:
|
||||
%%
|
||||
%% {port_envs, [{"CFLAGS", "$CFLAGS -MyOtherOptions"}]}
|
||||
%%
|
||||
%% It is also possible to specify platform specific options by specifying a triplet
|
||||
%% where the first string is a regex that is checked against erlang's system architecture
|
||||
%% string. E.g. to specify a CFLAG that only applies to x86_64 on linux do:
|
||||
%% It is also possible to specify platform specific options
|
||||
%% by specifying a tripletwhere the first string is a regex
|
||||
%% that is checked against erlang's system architecture string.
|
||||
%% e.g. to specify a CFLAG that only applies to x86_64 on linux
|
||||
%% do:
|
||||
%%
|
||||
%% {port_envs, [{"x86_64.*-linux", "CFLAGS", "$CFLAGS -X86Options"}]}
|
||||
%% {port_envs, [{"x86_64.*-linux", "CFLAGS",
|
||||
%% "$CFLAGS -X86Options"}]}
|
||||
%%
|
||||
%% * port_pre_script - Tuple which specifies a pre-compilation script to run, and a filename that
|
||||
%% exists as a result of the script running.
|
||||
%% * port_pre_script - Tuple which specifies a pre-compilation script to run,
|
||||
%% and a filename that exists as a result of the script
|
||||
%% running.
|
||||
%%
|
||||
%% * port_cleanup_script - String that specifies a script to run during cleanup. Use this to remove
|
||||
%% files/directories created by port_pre_script.
|
||||
%% * port_cleanup_script - String that specifies a script to run during cleanup.
|
||||
%% Use this to remove files/directories created by
|
||||
%% port_pre_script.
|
||||
%%
|
||||
|
||||
compile(Config, AppFile) ->
|
||||
%% Compose list of sources from config file -- defaults to c_src/*.c
|
||||
Sources = expand_sources(rebar_config:get_list(Config, port_sources, ["c_src/*.c"]), []),
|
||||
Sources = expand_sources(rebar_config:get_list(Config, port_sources,
|
||||
["c_src/*.c"]), []),
|
||||
case Sources of
|
||||
[] ->
|
||||
ok;
|
||||
_ ->
|
||||
%% Extract environment values from the config (if specified) and merge with the
|
||||
%% default for this operating system. This enables max flexibility for users.
|
||||
%% Extract environment values from the config (if specified) and
|
||||
%% merge with the default for this operating system. This enables
|
||||
%% max flexibility for users.
|
||||
DefaultEnvs = filter_envs(default_env(), []),
|
||||
OverrideEnvs = filter_envs(rebar_config:get_list(Config, port_envs, []), []),
|
||||
Env = expand_vars_loop(merge_each_var(DefaultEnvs ++ OverrideEnvs ++ os_env(), [])),
|
||||
PortEnvs = rebar_config:get_list(Config, port_envs, []),
|
||||
OverrideEnvs = filter_envs(PortEnvs, []),
|
||||
RawEnv = DefaultEnvs ++ OverrideEnvs ++ os_env(),
|
||||
Env = expand_vars_loop(merge_each_var(RawEnv, [])),
|
||||
|
||||
%% One or more files are available for building. Run the pre-compile hook, if
|
||||
%% necessary.
|
||||
%% One or more files are available for building.
|
||||
%% Run the pre-compile hook, if necessary.
|
||||
ok = run_precompile_hook(Config, Env),
|
||||
|
||||
%% Compile each of the sources
|
||||
{NewBins, ExistingBins} = compile_each(Sources, Config, Env, [], []),
|
||||
{NewBins, ExistingBins} = compile_each(Sources, Config, Env,
|
||||
[], []),
|
||||
|
||||
%% Construct the driver name and make sure priv/ exists
|
||||
SoSpecs = so_specs(Config, AppFile, NewBins ++ ExistingBins),
|
||||
?INFO("Using specs ~p\n", [SoSpecs]),
|
||||
lists:foreach(fun({SoName,_}) -> ok = filelib:ensure_dir(SoName) end, SoSpecs),
|
||||
lists:foreach(fun({SoName,_}) ->
|
||||
ok = filelib:ensure_dir(SoName)
|
||||
end, SoSpecs),
|
||||
|
||||
%% Only relink if necessary, given the SoName and list of new binaries
|
||||
lists:foreach(fun({SoName,Bins}) ->
|
||||
case needs_link(SoName, sets:to_list(sets:intersection([sets:from_list(Bins),sets:from_list(NewBins)]))) of
|
||||
true ->
|
||||
rebar_utils:sh(?FMT("$CC ~s $LDFLAGS $DRV_LDFLAGS -o ~s",
|
||||
[string:join(Bins, " "), SoName]),
|
||||
[{env, Env}]);
|
||||
false ->
|
||||
?INFO("Skipping relink of ~s\n", [SoName]),
|
||||
ok
|
||||
end
|
||||
%% Only relink if necessary, given the SoName
|
||||
%% and list of new binaries
|
||||
lists:foreach(
|
||||
fun({SoName,Bins}) ->
|
||||
AllBins = [sets:from_list(Bins), sets:from_list(NewBins)],
|
||||
Intersection = sets:intersection(AllBins),
|
||||
case needs_link(SoName, sets:to_list(Intersection)) of
|
||||
true ->
|
||||
rebar_utils:sh(
|
||||
?FMT("$CC ~s $LDFLAGS $DRV_LDFLAGS -o ~s",
|
||||
[string:join(Bins, " "), SoName]),
|
||||
[{env, Env}]);
|
||||
false ->
|
||||
?INFO("Skipping relink of ~s\n", [SoName]),
|
||||
ok
|
||||
end
|
||||
end, SoSpecs)
|
||||
end.
|
||||
|
||||
clean(Config, AppFile) ->
|
||||
%% Build a list of sources so as to derive all the bins we generated
|
||||
Sources = expand_sources(rebar_config:get_list(Config, port_sources, ["c_src/*.c"]), []),
|
||||
Sources = expand_sources(rebar_config:get_list(Config, port_sources,
|
||||
["c_src/*.c"]), []),
|
||||
rebar_file_utils:delete_each([source_to_bin(S) || S <- Sources]),
|
||||
|
||||
%% Delete the .so file
|
||||
|
@ -151,7 +172,7 @@ expand_sources([{ArchRegex, Spec} | Rest], Acc) ->
|
|||
expand_sources([Spec | Rest], Acc) ->
|
||||
Acc2 = filelib:wildcard(Spec) ++ Acc,
|
||||
expand_sources(Rest, Acc2).
|
||||
|
||||
|
||||
expand_objects(Sources) ->
|
||||
[filename:join([filename:dirname(F), filename:basename(F) ++ ".o"])
|
||||
|| F <- Sources].
|
||||
|
@@ -167,7 +188,8 @@ run_precompile_hook(Config, Env) ->
             {ok, _} = rebar_utils:sh(Script, [{env, Env}]),
             ok;
         true ->
             ?INFO("~s exists; not running ~s\n", [BypassFileName, Script])
             ?INFO("~s exists; not running ~s\n",
                   [BypassFileName, Script])
     end
 end.
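The pre-compile hook reads a script and a bypass file name from the config. As a loosely hedged sketch only, assuming the option keeps its usual {Script, BypassFileName} shape under a key like port_pre_script (both the key and paths here are assumptions):

    %% Hypothetical rebar.config entry
    {port_pre_script, {"c_src/build_deps.sh", "c_src/.deps_built"}}.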
@@ -206,10 +228,9 @@ compile_each([Source | Rest], Config, Env, NewBins, ExistingBins) ->
             compile_each(Rest, Config, Env, NewBins, [Bin | ExistingBins])
     end.


needs_compile(Source, Bin) ->
    %% TODO: Generate depends using gcc -MM so we can also check for include changes
    %% TODO: Generate depends using gcc -MM so we can also
    %% check for include changes
    filelib:last_modified(Bin) < filelib:last_modified(Source).

needs_link(SoName, []) ->
@@ -249,8 +270,8 @@ merge_each_var([], Vars) ->
 merge_each_var([{Key, Value} | Rest], Vars) ->
     case orddict:find(Key, Vars) of
         error ->
             %% Nothing yet defined for this key/value. Expand any self-references
             %% as blank.
             %% Nothing yet defined for this key/value.
             %% Expand any self-references as blank.
             Evalue = expand_env_variable(Value, Key, "");
         {ok, Value0} ->
             %% Use previous definition in expansion
@@ -283,16 +304,17 @@ expand_vars_loop(Vars0, Count) ->
 %% Expand all OTHER references to a given K/V pair
 %%
 expand_vars(Key, Value, Vars) ->
     lists:foldl(fun({AKey, AValue}, Acc) ->
                         case AKey of
                             Key ->
                                 NewValue = AValue;
                             _ ->
                                 NewValue = expand_env_variable(AValue, Key, Value)
                         end,
                         [{AKey, NewValue} | Acc]
                 end,
                 [], Vars).
     lists:foldl(
       fun({AKey, AValue}, Acc) ->
               case AKey of
                   Key ->
                       NewValue = AValue;
                   _ ->
                       NewValue = expand_env_variable(AValue, Key, Value)
               end,
               [{AKey, NewValue} | Acc]
       end,
       [], Vars).


 %%
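expand_vars/3 above leans on expand_env_variable/3, which is not shown in this hunk. A minimal sketch of what such a substitution helper could look like, assuming it only has to replace literal $Key occurrences and ignoring replacement-escape corner cases, is:

    %% Sketch only -- the real helper in rebar_port_compiler may differ.
    expand_env_variable(InStr, VarName, VarValue) ->
        %% Replace every "$VarName" in InStr with VarValue.
        re:replace(InStr, "\\$" ++ VarName, VarValue,
                   [global, {return, list}]).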
@@ -325,7 +347,8 @@ erts_dir() ->
     lists:concat([code:root_dir(), "/erts-", erlang:system_info(version)]).

 os_env() ->
     Os = [list_to_tuple(re:split(S, "=", [{return, list}, {parts, 2}])) || S <- os:getenv()],
     Os = [list_to_tuple(re:split(S, "=", [{return, list}, {parts, 2}])) ||
              S <- os:getenv()],
     lists:keydelete([],1,Os). %% Remove Windows current disk and path

 default_env() ->
@@ -339,7 +362,8 @@ default_env() ->
                    " -lerl_interface -lei"])},
      {"DRV_CFLAGS", "-g -Wall -fPIC $ERL_CFLAGS"},
      {"DRV_LDFLAGS", "-shared $ERL_LDFLAGS"},
      {"darwin", "DRV_LDFLAGS", "-bundle -flat_namespace -undefined suppress $ERL_LDFLAGS"},
      {"darwin", "DRV_LDFLAGS",
       "-bundle -flat_namespace -undefined suppress $ERL_LDFLAGS"},
      {"ERLANG_ARCH", integer_to_list(8 * erlang:system_info(wordsize))},
      {"ERLANG_TARGET", rebar_utils:get_arch()},
@@ -383,13 +407,13 @@ switch_so_to_dll(Orig = {Name, Spec}) ->
 make_so_specs(Config, AppFile, Bins) ->
     case rebar_config:get(Config, so_specs, undefined) of
         undefined ->
             %% New form of so_specs is not provided. See if the old form of {so_name} is available
             %% instead
             %% New form of so_specs is not provided. See if the old form
             %% of {so_name} is available instead
             Dir = "priv",
             SoName = case rebar_config:get(Config, so_name, undefined) of
                          undefined ->
                              %% Ok, neither old nor new form is available. Use the app name and
                              %% generate a sensible default.
                              %% Ok, neither old nor new form is available. Use
                              %% the app name and generate a sensible default.
                              AppName = rebar_app_utils:app_name(AppFile),
                              filename:join(Dir,
                                            lists:concat([AppName, "_drv.so"]));
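For the "old form" versus "new form" distinction handled above, the two config shapes would look roughly like the following; the driver and object names are invented for the example:

    %% Old form: a single shared object under priv/
    {so_name, "mydrv.so"}.
    %% New form: explicit {SoName, Objects} specs
    {so_specs, [{"priv/mydrv.so", ["c_src/mydrv.o"]}]}.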
@@ -40,8 +40,8 @@ compile(_Config, _AppFile) ->
         [] ->
             ok;
         FoundFiles ->
             %% Check for protobuffs library -- if it's not present, fail since we have
             %% .proto files that need building
             %% Check for protobuffs library -- if it's not present, fail
             %% since we have .proto files that need building
             case protobuffs_is_present() of
                 true ->
                     %% Build a list of output files - { Proto, Beam, Hrl }
@@ -51,7 +51,8 @@ compile(_Config, _AppFile) ->
                     %% Compile each proto file
                     compile_each(Targets);
                 false ->
                     ?ERROR("Protobuffs library not present in code path!\n", []),
                     ?ERROR("Protobuffs library not present in code path!\n",
                            []),
                     ?FAIL
             end
     end.
@@ -60,7 +61,9 @@ compile(_Config, _AppFile) ->
 clean(_Config, _AppFile) ->
     %% Get a list of generated .beam and .hrl files and then delete them
     Protos = filelib:wildcard("src/*.proto"),
     Targets = [fq_beam_file(F) || F <- Protos] ++ [fq_hrl_file(F) || F <- Protos],
     BeamFiles = [fq_beam_file(F) || F <- Protos],
     HrlFiles = [fq_hrl_file(F) || F <- Protos],
     Targets = BeamFiles ++ HrlFiles,
     case Targets of
         [] ->
             ok;
@@ -100,16 +103,18 @@ compile_each([{Proto, Beam, Hrl} | Rest]) ->
     ?CONSOLE("Compiling ~s\n", [Proto]),
     case protobuffs_compile:scan_file(Proto) of
         ok ->
             %% Compilation worked, but we need to move the .beam and .hrl file
             %% into the ebin/ and include/ directories respectively
             %% TODO: Protobuffs really needs to be better about this...sigh.
             %% Compilation worked, but we need to move the
             %% .beam and .hrl file into the ebin/ and include/
             %% directories respectively
             %% TODO: Protobuffs really needs to be better about this
             ok = filelib:ensure_dir(filename:join("ebin","dummy")),
             ok = rebar_file_utils:mv(Beam, "ebin"),
             ok = filelib:ensure_dir(filename:join("include", Hrl)),
             ok = rebar_file_utils:mv(Hrl, "include"),
             ok;
         Other ->
             ?ERROR("Protobuff compile of ~s failed: ~p\n", [Proto, Other]),
             ?ERROR("Protobuff compile of ~s failed: ~p\n",
                    [Proto, Other]),
             ?FAIL
     end;
         false ->
@@ -78,13 +78,14 @@ clean(_Config, ReltoolFile) ->
 check_vsn() ->
     case code:lib_dir(reltool) of
         {error, bad_name} ->
             ?ABORT("Reltool support requires the reltool application to be installed!", []);
             ?ABORT("Reltool support requires the reltool application "
                    "to be installed!", []);
         Path ->
             ReltoolVsn = filename:basename(Path),
             case ReltoolVsn < "reltool-0.5.2" of
                 true ->
                     ?ABORT("Reltool support requires at least reltool-0.5.2; this VM is using ~s\n",
                            [ReltoolVsn]);
                     ?ABORT("Reltool support requires at least reltool-0.5.2; "
                            "this VM is using ~s\n", [ReltoolVsn]);
                 false ->
                     ok
             end
@@ -98,12 +99,13 @@ load_config(ReltoolFile) ->
         {ok, Terms} ->
             Terms;
         Other ->
             ?ABORT("Failed to load expected config from ~s: ~p\n", [ReltoolFile, Other])
             ?ABORT("Failed to load expected config from ~s: ~p\n",
                    [ReltoolFile, Other])
     end.

 %%
 %% Look for the {sys, [...]} tuple in the reltool.config file. Without this present, we
 %% can't run reltool.
 %% Look for the {sys, [...]} tuple in the reltool.config file.
 %% Without this present, we can't run reltool.
 %%
 sys_tuple(ReltoolConfig) ->
     case lists:keyfind(sys, 1, ReltoolConfig) of
@@ -160,15 +162,17 @@ validate_rel_apps(ReltoolServer, {sys, ReltoolConfig}) ->
         false ->
             ok;
         {rel, _Name, _Vsn, Apps} ->
             %% Identify all the apps that do NOT exist, based on what's available
             %% from the reltool server
             Missing = lists:sort([App || App <- Apps,
                                          app_exists(App, ReltoolServer) == false]),
             %% Identify all the apps that do NOT exist, based on
             %% what's available from the reltool server
             Missing = lists:sort(
                         [App || App <- Apps,
                                 app_exists(App, ReltoolServer) == false]),
             case Missing of
                 [] ->
                     ok;
                 _ ->
                     ?ABORT("Apps in {rel, ...} section not found by reltool: ~p\n", [Missing])
                     ?ABORT("Apps in {rel, ...} section not found by "
                            "reltool: ~p\n", [Missing])
             end;
         Rel ->
             %% Invalid release format!
@@ -201,10 +205,12 @@ run_reltool(Server, _Config, ReltoolConfig) ->
         ok ->
             ok;
         {error, Reason} ->
             ?ABORT("Failed to generate target from spec: ~p\n", [Reason])
             ?ABORT("Failed to generate target from spec: ~p\n",
                    [Reason])
     end,

     %% Initialize overlay vars with some basics (that can get overwritten)
     %% Initialize overlay vars with some basics
     %% (that can get overwritten)
     OverlayVars0 = [{erts_vsn, "erts-" ++ erlang:system_info(version)}],

     %% Load up any variables specified by overlay_vars
@@ -216,7 +222,8 @@ run_reltool(Server, _Config, ReltoolConfig) ->
                     {ok, Terms} ->
                         dict:from_list(OverlayVars0 ++ Terms);
                     {error, Reason2} ->
                         ?ABORT("Unable to load overlay_vars from ~s: ~p\n",
                         ?ABORT("Unable to load overlay_vars "
                                "from ~s: ~p\n",
                                [File, Reason2])
                 end
         end,
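The overlay_vars file consumed above is consulted as plain Erlang terms that get merged over the built-in defaults such as erts_vsn. A hypothetical vars file (names and values invented for the sketch) might contain:

    %% vars.config -- read with file:consult/1
    {node_name, "mynode@127.0.0.1"}.
    {app_version, "0.1.0"}.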
@@ -249,7 +256,8 @@ mk_target_dir(TargetDir) ->
                     rebar_file_utils:rm_rf(TargetDir),
                     ok = file:make_dir(TargetDir);
                 _ ->
                     ?ERROR("Release target directory ~p already exists!\n", [TargetDir]),
                     ?ERROR("Release target directory ~p already exists!\n",
                            [TargetDir]),
                     ?FAIL
             end
     end.
@@ -310,12 +318,14 @@ execute_overlay([{create, Out, Contents} | Rest], Vars, BaseDir, TargetDir) ->
     end;
 execute_overlay([{replace, Out, Regex, Replacement} | Rest],
                 Vars, BaseDir, TargetDir) ->
     execute_overlay([{replace, Out, Regex, Replacement, []} | Rest], Vars, BaseDir, TargetDir);
     execute_overlay([{replace, Out, Regex, Replacement, []} | Rest],
                     Vars, BaseDir, TargetDir);
 execute_overlay([{replace, Out, Regex, Replacement, Opts} | Rest],
                 Vars, BaseDir, TargetDir) ->
     Filename = render(filename:join(TargetDir, Out), Vars),
     {ok, OrigData} = file:read_file(Filename),
     Data = re:replace(OrigData, Regex, Replacement, [global, {return, binary}] ++ Opts),
     Data = re:replace(OrigData, Regex, Replacement,
                       [global, {return, binary}] ++ Opts),
     case file:write_file(Filename, Data) of
         ok ->
             ?DEBUG("Edited ~s: s/~s/~s/\n", [Filename, Regex, Replacement]),
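The {replace, Out, Regex, Replacement} clause above is driven by overlay instructions in the reltool config. A sketch of such an overlay list, with file names and the replacement string invented for illustration, could look like:

    {overlay, [
        {mkdir, "log"},
        {create, "etc/motd", "Welcome\n"},
        %% handled by the execute_overlay clause shown above
        {replace, "etc/vm.args", "%NODE%", "mynode"}
    ]}.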
@@ -64,6 +64,3 @@ check_versions(Config) ->
             ?ABORT("OTP release ~s does not match required regex ~s\n",
                    [erlang:system_info(otp_release), OtpRegex])
     end.
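The regex this check compares against presumably comes from a config entry along the lines of the following; the exact version pattern is an assumption for the example:

    %% Assumed rebar.config entry feeding the OTP release check above
    {require_otp_vsn, "R13B04|R14"}.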
@@ -37,6 +37,6 @@
 preprocess(Config, _) ->
     %% Get the list of subdirs specified in the config (if any).
     Cwd = rebar_utils:get_cwd(),
     Subdirs = [filename:join(Cwd, Dir) || Dir <- rebar_config:get_local(Config, sub_dirs, [])],
     Subdirs = [filename:join(Cwd, Dir) ||
                   Dir <- rebar_config:get_local(Config, sub_dirs, [])],
     {ok, Subdirs}.
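For reference, the sub_dirs option read above is a plain list of relative paths in rebar.config; the directory names here are only examples:

    {sub_dirs, ["apps/myapp", "rel"]}.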
@@ -99,13 +99,13 @@ create(_Config, _) ->
                     ok
             end;
         false ->
             ?WARN("No variables section found in template ~p; using empty context.\n",
                   [TemplateId]),
             ?WARN("No variables section found in template ~p; "
                   "using empty context.\n", [TemplateId]),
             Context0 = dict:new()
     end,

     %% For each variable, see if it's defined in global vars -- if it is, prefer that
     %% value over the defaults
     %% For each variable, see if it's defined in global vars -- if it is,
     %% prefer that value over the defaults
     Context1 = update_vars(dict:fetch_keys(Context0), Context0),
     ?DEBUG("Template ~p context: ~p\n", [TemplateId, dict:to_list(Context1)]),
@@ -113,10 +113,11 @@ create(_Config, _) ->
     %% definition
     Context = resolve_recursive_vars(dict:to_list(Context1), Context1),

     ?DEBUG("Resolved Template ~p context: ~p\n", [TemplateId, dict:to_list(Context1)]),
     ?DEBUG("Resolved Template ~p context: ~p\n",
            [TemplateId, dict:to_list(Context1)]),

     %% Now, use our context to process the template definition -- this permits us to
     %% use variables within the definition for filenames.
     %% Now, use our context to process the template definition -- this
     %% permits us to use variables within the definition for filenames.
     FinalTemplate = consult(render(load_file(Type, Template), Context)),
     ?DEBUG("Final template def ~p: ~p\n", [TemplateId, FinalTemplate]),
@@ -136,10 +137,10 @@ create(_Config, _) ->
 %%
 cache_escript_files() ->
     {ok, Files} = rebar_utils:escript_foldl(
                       fun(Name, _, GetBin, Acc) ->
                               [{Name, GetBin()} | Acc]
                       end,
                       [], rebar_config:get_global(escript, undefined)),
                     fun(Name, _, GetBin, Acc) ->
                             [{Name, GetBin()} | Acc]
                     end,
                     [], rebar_config:get_global(escript, undefined)),
     erlang:put(escript_files, Files).

@@ -158,7 +159,8 @@ find_escript_templates() ->
 find_disk_templates() ->
     OtherTemplates = find_other_templates(),
     HomeFiles = rebar_utils:find_files(filename:join(os:getenv("HOME"),
                                                      ".rebar/templates"), ?TEMPLATE_RE),
                                                      ".rebar/templates"),
                                        ?TEMPLATE_RE),
     LocalFiles = rebar_utils:find_files(".", ?TEMPLATE_RE),
     [{file, F} || F <- OtherTemplates ++ HomeFiles ++ LocalFiles].
@@ -289,18 +291,24 @@ write_file(Output, Data, Force) ->
 %%
 %% Execute each instruction in a template definition file.
 %%
execute_template([], _TemplateType, _TemplateName, _Context, _Force, ExistingFiles) ->
execute_template([], _TemplateType, _TemplateName, _Context,
                 _Force, ExistingFiles) ->
    case ExistingFiles of
        [] ->
            ok;
        _ ->
            Msg = lists:flatten([io_lib:format("\t* ~p~n", [F]) || F <- lists:reverse(ExistingFiles)]),
            Help = "To force overwriting, specify force=1 on the command line.\n",
            ?ERROR("One or more files already exist on disk and were not generated:~n~s~s", [Msg , Help])
            Msg = lists:flatten([io_lib:format("\t* ~p~n", [F]) ||
                                    F <- lists:reverse(ExistingFiles)]),
            Help =
                "To force overwriting, specify force=1 on the command line.\n",
            ?ERROR("One or more files already exist on disk and "
                   "were not generated:~n~s~s", [Msg , Help])
    end;
execute_template([{template, Input, Output} | Rest], TemplateType, TemplateName, Context, Force, ExistingFiles) ->
execute_template([{template, Input, Output} | Rest], TemplateType,
                 TemplateName, Context, Force, ExistingFiles) ->
    InputName = filename:join(filename:dirname(TemplateName), Input),
    case write_file(Output, render(load_file(TemplateType, InputName), Context), Force) of
    case write_file(Output, render(load_file(TemplateType, InputName), Context),
                    Force) of
        ok ->
            execute_template(Rest, TemplateType, TemplateName, Context,
                             Force, ExistingFiles);
@@ -308,34 +316,43 @@ execute_template([{template, Input, Output} | Rest], TemplateType, TemplateName,
            execute_template(Rest, TemplateType, TemplateName, Context,
                             Force, [Output|ExistingFiles])
    end;
execute_template([{file, Input, Output} | Rest], TemplateType, TemplateName, Context, Force, ExistingFiles) ->
execute_template([{file, Input, Output} | Rest], TemplateType, TemplateName,
                 Context, Force, ExistingFiles) ->
    InputName = filename:join(filename:dirname(TemplateName), Input),
    case write_file(Output, load_file(TemplateType, InputName), Force) of
        ok ->
            execute_template(Rest, TemplateType, TemplateName, Context,
                             Force, ExistingFiles);
            execute_template(Rest, TemplateType, TemplateName,
                             Context, Force, ExistingFiles);
        {error, exists} ->
            execute_template(Rest, TemplateType, TemplateName, Context,
                             Force, [Output|ExistingFiles])
            execute_template(Rest, TemplateType, TemplateName,
                             Context, Force, [Output|ExistingFiles])
    end;
execute_template([{dir, Name} | Rest], TemplateType, TemplateName, Context, Force, ExistingFiles) ->
execute_template([{dir, Name} | Rest], TemplateType, TemplateName, Context,
                 Force, ExistingFiles) ->
    case filelib:ensure_dir(filename:join(Name, "dummy")) of
        ok ->
            execute_template(Rest, TemplateType, TemplateName, Context, Force, ExistingFiles);
            execute_template(Rest, TemplateType, TemplateName,
                             Context, Force, ExistingFiles);
        {error, Reason} ->
            ?ABORT("Failed while processing template instruction {dir, ~s}: ~p\n",
                   [Name, Reason])
            ?ABORT("Failed while processing template instruction "
                   "{dir, ~s}: ~p\n", [Name, Reason])
    end;
execute_template([{chmod, Mod, File} | Rest], TemplateType, TemplateName, Context, Force, ExistingFiles) when is_integer(Mod) ->
execute_template([{chmod, Mod, File} | Rest], TemplateType, TemplateName,
                 Context, Force, ExistingFiles) when is_integer(Mod) ->
    case file:change_mode(File, Mod) of
        ok ->
            execute_template(Rest, TemplateType, TemplateName, Context, Force, ExistingFiles);
            execute_template(Rest, TemplateType, TemplateName,
                             Context, Force, ExistingFiles);
        {error, Reason} ->
            ?ABORT("Failed while processing template instruction {cmod, ~b, ~s}: ~p~n",
                   [Mod, File, Reason])
            ?ABORT("Failed while processing template instruction "
                   "{cmod, ~b, ~s}: ~p~n", [Mod, File, Reason])
    end;
execute_template([{variables, _} | Rest], TemplateType, TemplateName, Context, Force, ExistingFiles) ->
    execute_template(Rest, TemplateType, TemplateName, Context, Force, ExistingFiles);
execute_template([Other | Rest], TemplateType, TemplateName, Context, Force, ExistingFiles) ->
execute_template([{variables, _} | Rest], TemplateType, TemplateName, Context,
                 Force, ExistingFiles) ->
    execute_template(Rest, TemplateType, TemplateName,
                     Context, Force, ExistingFiles);
execute_template([Other | Rest], TemplateType, TemplateName, Context,
                 Force, ExistingFiles) ->
    ?WARN("Skipping unknown template instruction: ~p\n", [Other]),
    execute_template(Rest, TemplateType, TemplateName, Context, Force, ExistingFiles).
    execute_template(Rest, TemplateType, TemplateName, Context,
                     Force, ExistingFiles).
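A template definition that exercises each instruction clause handled above might look like the sketch below; the file names, mode, and mustache-style placeholder are assumptions for illustration rather than part of this commit:

    {variables, [{appid, "myapp"}]}.
    {dir, "src"}.
    {template, "app.erl", "src/{{appid}}.erl"}.
    {file, "LICENSE", "LICENSE"}.
    {chmod, 8#755, "start.sh"}.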
@@ -159,10 +159,12 @@ run_systools(NewVer, Name) ->
 boot_files(Ver, Name) ->
     ok = file:make_dir(filename:join([".", "releases"])),
     ok = file:make_dir(filename:join([".", "releases", Ver])),
     ok = file:make_symlink(filename:join(["start.boot"]),
                            filename:join([".", "releases", Ver, Name ++ ".boot"])),
     {ok, _} = file:copy(filename:join([".", Name, "releases", Ver, "start_clean.boot"]),
                         filename:join([".", "releases", Ver, "start_clean.boot"])).
     ok = file:make_symlink(
            filename:join(["start.boot"]),
            filename:join([".", "releases", Ver, Name ++ ".boot"])),
     {ok, _} = file:copy(
                 filename:join([".", Name, "releases", Ver, "start_clean.boot"]),
                 filename:join([".", "releases", Ver, "start_clean.boot"])).

 make_tar(NameVer) ->
     Filename = NameVer ++ ".tar.gz",
@@ -113,7 +113,7 @@ find_files(Dir, Regex) ->
 now_str() ->
     {{Year, Month, Day}, {Hour, Minute, Second}} = calendar:local_time(),
     lists:flatten(io_lib:format("~4b/~2..0b/~2..0b ~2..0b:~2..0b:~2..0b",
        [Year, Month, Day, Hour, Minute, Second])).
                                 [Year, Month, Day, Hour, Minute, Second])).

 %% TODO: filelib:ensure_dir/1 corrected in R13B04. Remove when we drop
 %% support for OTP releases older than R13B04.
@@ -44,8 +44,11 @@ xref(Config, _) ->
     %% Spin up xref
     {ok, _} = xref:start(xref),
     ok = xref:set_library_path(xref, code_path()),
     xref:set_default(xref, [{warnings, rebar_config:get(Config, xref_warnings, false)},
     xref:set_default(xref, [{warnings,
                              rebar_config:get(Config, xref_warnings, false)},
                             {verbose, rebar_config:is_verbose()}]),

     {ok, _} = xref:add_directory(xref, "ebin"),

     %% Save the code path prior to doing anything
@@ -53,8 +56,9 @@ xref(Config, _) ->
     true = code:add_path(filename:join(rebar_utils:get_cwd(), "ebin")),

     %% Get list of xref checks we want to run
     XrefChecks = rebar_config:get(Config, xref_checks, [exports_not_used,
                                                         undefined_function_calls]),
     XrefChecks = rebar_config:get(Config, xref_checks,
                                   [exports_not_used,
                                    undefined_function_calls]),

     %% Look for exports that are unused by anything
     case lists:member(exports_not_used, XrefChecks) of
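The two options read above can be set explicitly in rebar.config; the values shown simply restate the defaults used by the code:

    {xref_warnings, false}.
    {xref_checks, [exports_not_used, undefined_function_calls]}.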
@@ -91,12 +95,15 @@ check_exports_not_used(_Config) ->

 check_undefined_function_calls(_Config) ->
     {ok, UndefinedCalls0} = xref:analyze(xref, undefined_function_calls),
     UndefinedCalls = [{find_mfa_source(Caller), format_fa(Caller), format_mfa(Target)} ||
                          {Caller, Target} <- UndefinedCalls0],
     lists:foreach(fun({{Source, Line}, FunStr, Target}) ->
                           ?CONSOLE("~s:~w: Warning ~s calls undefined function ~s\n",
                                    [Source, Line, FunStr, Target])
                   end, UndefinedCalls),
     UndefinedCalls =
         [{find_mfa_source(Caller), format_fa(Caller), format_mfa(Target)} ||
             {Caller, Target} <- UndefinedCalls0],

     lists:foreach(
       fun({{Source, Line}, FunStr, Target}) ->
               ?CONSOLE("~s:~w: Warning ~s calls undefined function ~s\n",
                        [Source, Line, FunStr, Target])
       end, UndefinedCalls),
     ok.
@@ -112,15 +119,18 @@ filter_away_ignored(UnusedExports) ->
     %% -ignore_xref([{F, A}, ...]).

     %% Setup a filter function that builds a list of behaviour callbacks and/or
     %% any functions marked to ignore. We then use this list to mask any functions
     %% marked as unused exports by xref
     %% any functions marked to ignore. We then use this list to mask any
     %% functions marked as unused exports by xref
     F = fun(Mod) ->
                 Attrs = kf(attributes, Mod:module_info()),
                 Ignore = kf(ignore_xref, Attrs),
                 Callbacks = [B:behaviour_info(callbacks) || B <- kf(behaviour, Attrs)],
                 Callbacks =
                     [B:behaviour_info(callbacks) || B <- kf(behaviour, Attrs)],
                 [{Mod, F, A} || {F, A} <- Ignore ++ lists:flatten(Callbacks)]
         end,
     AttrIgnore = lists:flatten(lists:map(F, lists:usort([M || {M, _, _} <- UnusedExports]))),
     AttrIgnore =
         lists:flatten(
           lists:map(F, lists:usort([M || {M, _, _} <- UnusedExports]))),
     [X || X <- UnusedExports, not lists:member(X, AttrIgnore)].
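A module that deliberately exports functions xref would otherwise flag can opt out with the attribute the filter above reads; the function names here are examples only:

    -ignore_xref([{start, 0}, {stop, 0}]).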
@@ -136,7 +146,8 @@ display_mfas([], _Message) ->
     ok;
 display_mfas([{_Mod, Fun, Args} = MFA | Rest], Message) ->
     {Source, Line} = find_mfa_source(MFA),
     ?CONSOLE("~s:~w: Warning: function ~s/~w ~s\n", [Source, Line, Fun, Args, Message]),
     ?CONSOLE("~s:~w: Warning: function ~s/~w ~s\n",
              [Source, Line, Fun, Args, Message]),
     display_mfas(Rest, Message).

 format_mfa({M, F, A}) ->
@@ -164,8 +175,8 @@ safe_element(N, Tuple) ->
 %%
 find_mfa_source({M, F, A}) ->
     {M, Bin, _} = code:get_object_code(M),
     {ok, {M, [{abstract_code, AbstractCode}]}} = beam_lib:chunks(Bin, [abstract_code]),
     {raw_abstract_v1, Code} = AbstractCode,
     AbstractCode = beam_lib:chunks(Bin, [abstract_code]),
     {ok, {M, [{abstract_code, {raw_abstract_v1, Code}}]}} = AbstractCode,
     %% Extract the original source filename from the abstract code
     [{attribute, 1, file, {Source, _}} | _] = Code,
     %% Extract the line number for a given function def