Diffstat (limited to 'build2/cc')
-rw-r--r--  build2/cc/common.cxx | 28
-rw-r--r--  build2/cc/common.hxx | 8
-rw-r--r--  build2/cc/compile-rule.cxx (renamed from build2/cc/compile.cxx) | 246
-rw-r--r--  build2/cc/compile-rule.hxx (renamed from build2/cc/compile.hxx) | 27
-rw-r--r--  build2/cc/install-rule.cxx (renamed from build2/cc/install.cxx) | 91
-rw-r--r--  build2/cc/install-rule.hxx | 77
-rw-r--r--  build2/cc/install.hxx | 67
-rw-r--r--  build2/cc/link-rule.cxx (renamed from build2/cc/link.cxx) | 244
-rw-r--r--  build2/cc/link-rule.hxx (renamed from build2/cc/link.hxx) | 59
-rw-r--r--  build2/cc/module.cxx | 33
-rw-r--r--  build2/cc/module.hxx | 22
-rw-r--r--  build2/cc/pkgconfig.cxx | 37
-rw-r--r--  build2/cc/windows-manifest.cxx | 6
-rw-r--r--  build2/cc/windows-rpath.cxx | 42
14 files changed, 527 insertions, 460 deletions
diff --git a/build2/cc/common.cxx b/build2/cc/common.cxx
index 3daada1..e4dbfe8 100644
--- a/build2/cc/common.cxx
+++ b/build2/cc/common.cxx
@@ -46,7 +46,7 @@ namespace build2
//
void common::
process_libraries (
- action act,
+ action a,
const scope& top_bs,
linfo top_li,
const dir_paths& top_sysd,
@@ -222,23 +222,23 @@ namespace build2
//
if (impl && !c_e_libs.defined () && !x_e_libs.defined ())
{
- for (auto pt: l.prerequisite_targets)
+ for (const prerequisite_target& pt: l.prerequisite_targets[a])
{
if (pt == nullptr)
continue;
- bool a;
+ bool la;
const file* f;
- if ((a = (f = pt->is_a<liba> ())) ||
- (a = (f = pt->is_a<libux> ())) ||
- ( f = pt->is_a<libs> ()))
+ if ((la = (f = pt->is_a<liba> ())) ||
+ (la = (f = pt->is_a<libux> ())) ||
+ ( f = pt->is_a<libs> ()))
{
if (sysd == nullptr) find_sysd ();
if (!li) find_linfo ();
- process_libraries (act, bs, *li, *sysd,
- *f, a, pt.data,
+ process_libraries (a, bs, *li, *sysd,
+ *f, la, pt.data,
proc_impl, proc_lib, proc_opt, true);
}
}
@@ -275,7 +275,7 @@ namespace build2
&proc_impl, &proc_lib, &proc_opt,
&sysd, &usrd,
&find_sysd, &find_linfo, &sys_simple,
- &bs, act, &li, this] (const lookup& lu)
+ &bs, a, &li, this] (const lookup& lu)
{
const vector<name>* ns (cast_null<vector<name>> (lu));
if (ns == nullptr || ns->empty ())
@@ -300,7 +300,7 @@ namespace build2
if (sysd == nullptr) find_sysd ();
if (!li) find_linfo ();
- const file& t (resolve_library (act, bs, n, *li, *sysd, usrd));
+ const file& t (resolve_library (a, bs, n, *li, *sysd, usrd));
if (proc_lib)
{
@@ -324,7 +324,7 @@ namespace build2
// @@ Where can we get the link flags? Should we try to find them
// in the library's prerequisites? What about installed stuff?
//
- process_libraries (act, bs, *li, *sysd,
+ process_libraries (a, bs, *li, *sysd,
t, t.is_a<liba> () || t.is_a<libux> (), 0,
proc_impl, proc_lib, proc_opt, true);
}
@@ -402,7 +402,7 @@ namespace build2
// that's the only way to guarantee it will be up-to-date.
//
const file& common::
- resolve_library (action act,
+ resolve_library (action a,
const scope& s,
name n,
linfo li,
@@ -439,7 +439,7 @@ namespace build2
//
dir_path out;
prerequisite_key pk {n.proj, {tt, &n.dir, &out, &n.value, ext}, &s};
- xt = search_library_existing (act, sysd, usrd, pk);
+ xt = search_library_existing (a, sysd, usrd, pk);
if (xt == nullptr)
{
@@ -454,7 +454,7 @@ namespace build2
// If this is lib{}/libu{}, pick appropriate member.
//
if (const libx* l = xt->is_a<libx> ())
- xt = &link_member (*l, act, li); // Pick lib*{e,a,s}{}.
+ xt = &link_member (*l, a, li); // Pick lib*{e,a,s}{}.
return xt->as<file> ();
}
diff --git a/build2/cc/common.hxx b/build2/cc/common.hxx
index 5ed7173..5952df6 100644
--- a/build2/cc/common.hxx
+++ b/build2/cc/common.hxx
@@ -225,7 +225,7 @@ namespace build2
bool = false) const;
const target*
- search_library (action act,
+ search_library (action a,
const dir_paths& sysd,
optional<dir_paths>& usrd,
const prerequisite& p) const
@@ -234,7 +234,7 @@ namespace build2
if (r == nullptr)
{
- if ((r = search_library (act, sysd, usrd, p.key ())) != nullptr)
+ if ((r = search_library (a, sysd, usrd, p.key ())) != nullptr)
{
const target* e (nullptr);
if (!p.target.compare_exchange_strong (
@@ -274,12 +274,12 @@ namespace build2
bool existing = false) const;
const target*
- search_library_existing (action act,
+ search_library_existing (action a,
const dir_paths& sysd,
optional<dir_paths>& usrd,
const prerequisite_key& pk) const
{
- return search_library (act, sysd, usrd, pk, true);
+ return search_library (a, sysd, usrd, pk, true);
}
dir_paths
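A standalone sketch of the publish-or-adopt caching pattern behind the search_library() wrapper above: the result of the potentially expensive lookup is stored into an atomic slot, and if another thread got there first, its result is adopted instead. The target type and the search function are stand-ins; the real code's memory-order arguments and key-based lookup are omitted.

#include <atomic>
#include <cassert>

struct target {};

static const target*
expensive_search ()          // Stand-in for the real library search.
{
  static target t;
  return &t;
}

static const target*
search_cached (std::atomic<const target*>& slot)
{
  const target* r (slot.load ());

  if (r == nullptr && (r = expensive_search ()) != nullptr)
  {
    const target* e (nullptr);
    if (!slot.compare_exchange_strong (e, r))
      r = e; // Lost the race: use the value someone else cached.
  }

  return r;
}

int main ()
{
  std::atomic<const target*> slot (nullptr);

  const target* a (search_cached (slot));
  const target* b (search_cached (slot));

  assert (a == b); // Second call returns the cached result.
}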
diff --git a/build2/cc/compile.cxx b/build2/cc/compile-rule.cxx
index 94b3478..df84547 100644
--- a/build2/cc/compile.cxx
+++ b/build2/cc/compile-rule.cxx
@@ -1,8 +1,8 @@
-// file : build2/cc/compile.cxx -*- C++ -*-
+// file : build2/cc/compile-rule.cxx -*- C++ -*-
// copyright : Copyright (c) 2014-2017 Code Synthesis Ltd
// license : MIT; see accompanying LICENSE file
-#include <build2/cc/compile.hxx>
+#include <build2/cc/compile-rule.hxx>
#include <cstdlib> // exit()
#include <cstring> // strlen()
@@ -124,7 +124,7 @@ namespace build2
throw invalid_argument ("invalid preprocessed value '" + s + "'");
}
- struct compile::match_data
+ struct compile_rule::match_data
{
explicit
match_data (translation_type t, const prerequisite_member& s)
@@ -141,16 +141,16 @@ namespace build2
module_positions mods = {0, 0, 0};
};
- compile::
- compile (data&& d)
+ compile_rule::
+ compile_rule (data&& d)
: common (move (d)),
rule_id (string (x) += ".compile 4")
{
- static_assert (sizeof (compile::match_data) <= target::data_size,
+ static_assert (sizeof (match_data) <= target::data_size,
"insufficient space");
}
- const char* compile::
+ const char* compile_rule::
langopt (const match_data& md) const
{
bool m (md.type == translation_type::module_iface);
@@ -204,7 +204,7 @@ namespace build2
return nullptr;
}
- inline void compile::
+ inline void compile_rule::
append_symexport_options (cstrings& args, const target& t) const
{
// With VC if a BMI is compiled with dllexport, then when such BMI is
@@ -216,10 +216,10 @@ namespace build2
: "-D__symexport=");
}
- match_result compile::
- match (action act, target& t, const string&) const
+ bool compile_rule::
+ match (action a, target& t, const string&) const
{
- tracer trace (x, "compile::match");
+ tracer trace (x, "compile_rule::match");
bool mod (t.is_a<bmie> () || t.is_a<bmia> () || t.is_a<bmis> ());
@@ -235,7 +235,7 @@ namespace build2
// file specified for a member overrides the one specified for the
// group. Also "see through" groups.
//
- for (prerequisite_member p: reverse_group_prerequisite_members (act, t))
+ for (prerequisite_member p: reverse_group_prerequisite_members (a, t))
{
if (p.is_a (mod ? *x_mod : x_src))
{
@@ -257,11 +257,11 @@ namespace build2
// Append or hash library options from a pair of *.export.* variables
// (first one is cc.export.*) recursively, prerequisite libraries first.
//
- void compile::
+ void compile_rule::
append_lib_options (const scope& bs,
cstrings& args,
+ action a,
const target& t,
- action act,
linfo li) const
{
// See through utility libraries.
@@ -290,33 +290,33 @@ namespace build2
const function<bool (const file&, bool)> impf (imp);
const function<void (const file&, const string&, bool, bool)> optf (opt);
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
// Should be already searched and matched for libraries.
//
if (const target* pt = p.load ())
{
if (const libx* l = pt->is_a<libx> ())
- pt = &link_member (*l, act, li);
+ pt = &link_member (*l, a, li);
- bool a;
- if (!((a = pt->is_a<liba> ()) ||
- (a = pt->is_a<libux> ()) ||
+ bool la;
+ if (!((la = pt->is_a<liba> ()) ||
+ (la = pt->is_a<libux> ()) ||
pt->is_a<libs> ()))
continue;
- process_libraries (act, bs, li, sys_lib_dirs,
- pt->as<file> (), a, 0, // Hack: lflags unused.
+ process_libraries (a, bs, li, sys_lib_dirs,
+ pt->as<file> (), la, 0, // Hack: lflags unused.
impf, nullptr, optf);
}
}
}
- void compile::
+ void compile_rule::
hash_lib_options (const scope& bs,
sha256& cs,
+ action a,
const target& t,
- action act,
linfo li) const
{
auto imp = [] (const file& l, bool la) {return la && l.is_a<libux> ();};
@@ -340,21 +340,21 @@ namespace build2
const function<bool (const file&, bool)> impf (imp);
const function<void (const file&, const string&, bool, bool)> optf (opt);
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
if (const target* pt = p.load ())
{
if (const libx* l = pt->is_a<libx> ())
- pt = &link_member (*l, act, li);
+ pt = &link_member (*l, a, li);
- bool a;
- if (!((a = pt->is_a<liba> ()) ||
- (a = pt->is_a<libux> ()) ||
+ bool la;
+ if (!((la = pt->is_a<liba> ()) ||
+ (la = pt->is_a<libux> ()) ||
pt->is_a<libs> ()))
continue;
- process_libraries (act, bs, li, sys_lib_dirs,
- pt->as<file> (), a, 0, // Hack: lflags unused.
+ process_libraries (a, bs, li, sys_lib_dirs,
+ pt->as<file> (), la, 0, // Hack: lflags unused.
impf, nullptr, optf);
}
}
@@ -363,11 +363,11 @@ namespace build2
// Append library prefixes based on the *.export.poptions variables
// recursively, prerequisite libraries first.
//
- void compile::
+ void compile_rule::
append_lib_prefixes (const scope& bs,
prefix_map& m,
+ action a,
target& t,
- action act,
linfo li) const
{
auto imp = [] (const file& l, bool la) {return la && l.is_a<libux> ();};
@@ -391,21 +391,21 @@ namespace build2
const function<bool (const file&, bool)> impf (imp);
const function<void (const file&, const string&, bool, bool)> optf (opt);
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
if (const target* pt = p.load ())
{
if (const libx* l = pt->is_a<libx> ())
- pt = &link_member (*l, act, li);
+ pt = &link_member (*l, a, li);
- bool a;
- if (!((a = pt->is_a<liba> ()) ||
- (a = pt->is_a<libux> ()) ||
+ bool la;
+ if (!((la = pt->is_a<liba> ()) ||
+ (la = pt->is_a<libux> ()) ||
pt->is_a<libs> ()))
continue;
- process_libraries (act, bs, li, sys_lib_dirs,
- pt->as<file> (), a, 0, // Hack: lflags unused.
+ process_libraries (a, bs, li, sys_lib_dirs,
+ pt->as<file> (), la, 0, // Hack: lflags unused.
impf, nullptr, optf);
}
}
@@ -427,14 +427,14 @@ namespace build2
// file is known to be up to date. So we do the update "smartly".
//
static bool
- update (tracer& trace, action act, const target& t, timestamp ts)
+ update (tracer& trace, action a, const target& t, timestamp ts)
{
const path_target* pt (t.is_a<path_target> ());
if (pt == nullptr)
ts = timestamp_unknown;
- target_state os (t.matched_state (act));
+ target_state os (t.matched_state (a));
if (os == target_state::unchanged)
{
@@ -444,7 +444,7 @@ namespace build2
{
// We expect the timestamp to be known (i.e., existing file).
//
- timestamp mt (pt->mtime ()); // @@ MT perf: know target state.
+ timestamp mt (pt->mtime ());
assert (mt != timestamp_unknown);
return mt > ts;
}
@@ -460,7 +460,7 @@ namespace build2
// any generated header.
//
phase_switch ps (run_phase::execute);
- target_state ns (execute_direct (act, t));
+ target_state ns (execute_direct (a, t));
if (ns != os && ns != target_state::unchanged)
{
@@ -474,10 +474,10 @@ namespace build2
}
}
- recipe compile::
- apply (action act, target& xt) const
+ recipe compile_rule::
+ apply (action a, target& xt) const
{
- tracer trace (x, "compile::apply");
+ tracer trace (x, "compile_rule::apply");
file& t (xt.as<file> ()); // Either obj*{} or bmi*{}.
@@ -569,7 +569,7 @@ namespace build2
// (e.g., foo.mxx and foo.cxx) which means obj*{} targets could
// collide. So we add the module extension to the target name.
//
- target_lock obj (add_adhoc_member (act, t, tt.obj, e.c_str ()));
+ target_lock obj (add_adhoc_member (a, t, tt.obj, e.c_str ()));
obj.target->as<file> ().derive_path (o);
match_recipe (obj, group_recipe); // Set recipe and unlock.
}
@@ -579,7 +579,7 @@ namespace build2
// Inject dependency on the output directory.
//
- const fsdir* dir (inject_fsdir (act, t));
+ const fsdir* dir (inject_fsdir (a, t));
// Match all the existing prerequisites. The injection code takes care
// of the ones it is adding.
@@ -587,16 +587,16 @@ namespace build2
// When cleaning, ignore prerequisites that are not in the same or a
// subdirectory of our project root.
//
- auto& pts (t.prerequisite_targets);
+ auto& pts (t.prerequisite_targets[a]);
optional<dir_paths> usr_lib_dirs; // Extract lazily.
// Start asynchronous matching of prerequisites. Wait with unlocked
// phase to allow phase switching.
//
- wait_guard wg (target::count_busy (), t.task_count, true);
+ wait_guard wg (target::count_busy (), t[a].task_count, true);
size_t start (pts.size ()); // Index of the first to be added.
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
const target* pt (nullptr);
@@ -609,7 +609,7 @@ namespace build2
p.is_a<libs> () ||
p.is_a<libux> ())
{
- if (act.operation () == update_id)
+ if (a.operation () == update_id)
{
// Handle (phase two) imported libraries. We know that for such
// libraries we don't need to do match() in order to get options
@@ -617,7 +617,7 @@ namespace build2
//
if (p.proj ())
{
- if (search_library (act,
+ if (search_library (a,
sys_lib_dirs,
usr_lib_dirs,
p.prerequisite) != nullptr)
@@ -627,7 +627,7 @@ namespace build2
pt = &p.search (t);
if (const libx* l = pt->is_a<libx> ())
- pt = &link_member (*l, act, li);
+ pt = &link_member (*l, a, li);
}
else
continue;
@@ -644,11 +644,11 @@ namespace build2
{
pt = &p.search (t);
- if (act.operation () == clean_id && !pt->dir.sub (rs.out_path ()))
+ if (a.operation () == clean_id && !pt->dir.sub (rs.out_path ()))
continue;
}
- match_async (act, *pt, target::count_busy (), t.task_count);
+ match_async (a, *pt, target::count_busy (), t[a].task_count);
pts.push_back (pt);
}
@@ -668,7 +668,7 @@ namespace build2
// an obj?{} target directory.
//
if (build2::match (
- act,
+ a,
*pt,
pt->is_a<liba> () || pt->is_a<libs> () || pt->is_a<libux> ()
? unmatch::safe
@@ -680,7 +680,7 @@ namespace build2
// since chances are we will have to update some of our prerequisites in
// the process (auto-generated source code).
//
- if (act == perform_update_id)
+ if (a == perform_update_id)
{
// The cached prerequisite target should be the same as what is in
// t.prerequisite_targets since we used standard search() and match()
@@ -722,7 +722,7 @@ namespace build2
// this can very well be happening in parallel. But that's not a
// problem since fsdir{}'s update is idempotent.
//
- fsdir_rule::perform_update_direct (act, t);
+ fsdir_rule::perform_update_direct (a, t);
}
// Note: the leading '@' is reserved for the module map prefix (see
@@ -764,7 +764,7 @@ namespace build2
// Hash *.export.poptions from prerequisite libraries.
//
- hash_lib_options (bs, cs, t, act, li);
+ hash_lib_options (bs, cs, a, t, li);
// Extra system header dirs (last).
//
@@ -821,14 +821,14 @@ namespace build2
if (pt == nullptr || pt == dir)
continue;
- u = update (trace, act, *pt, u ? timestamp_unknown : mt) || u;
+ u = update (trace, a, *pt, u ? timestamp_unknown : mt) || u;
}
// Check if the source is already preprocessed to a certain degree.
// This determines which of the following steps we perform and on
// what source (original or preprocessed).
//
- // Note: must be set of the src target.
+ // Note: must be set on the src target.
//
if (const string* v = cast_null<string> (src[x_preprocessed]))
try
@@ -846,7 +846,7 @@ namespace build2
//
pair<auto_rmfile, bool> psrc (auto_rmfile (), false);
if (md.pp < preprocessed::includes)
- psrc = extract_headers (act, bs, t, li, src, md, dd, u, mt);
+ psrc = extract_headers (a, bs, t, li, src, md, dd, u, mt);
// Next we "obtain" the translation unit information. What exactly
// "obtain" entails is tricky: If things changed, then we re-parse the
@@ -869,7 +869,7 @@ namespace build2
{
if (u)
{
- auto p (parse_unit (act, t, li, src, psrc.first, md));
+ auto p (parse_unit (a, t, li, src, psrc.first, md));
if (cs != p.second)
{
@@ -948,7 +948,7 @@ namespace build2
// NOTE: assumes that no further targets will be added into
// t.prerequisite_targets!
//
- extract_modules (act, bs, t, li, tt, src, md, move (tu.mod), dd, u);
+ extract_modules (a, bs, t, li, tt, src, md, move (tu.mod), dd, u);
}
// If anything got updated, then we didn't rely on the cache. However,
@@ -1002,7 +1002,7 @@ namespace build2
md.mt = u ? timestamp_nonexistent : dd.mtime ();
}
- switch (act)
+ switch (a)
{
case perform_update_id: return [this] (action a, const target& t)
{
@@ -1018,7 +1018,7 @@ namespace build2
// Reverse-lookup target type from extension.
//
- const target_type* compile::
+ const target_type* compile_rule::
map_extension (const scope& s, const string& n, const string& e) const
{
// We will just have to try all of the possible ones, in the "most
@@ -1047,10 +1047,10 @@ namespace build2
return nullptr;
}
- void compile::
+ void compile_rule::
append_prefixes (prefix_map& m, const target& t, const variable& var) const
{
- tracer trace (x, "compile::append_prefixes");
+ tracer trace (x, "compile_rule::append_prefixes");
// If this target does not belong to any project (e.g, an "imported as
// installed" library), then it can't possibly generate any headers for
@@ -1187,10 +1187,10 @@ namespace build2
}
}
- auto compile::
+ auto compile_rule::
build_prefix_map (const scope& bs,
+ action a,
target& t,
- action act,
linfo li) const -> prefix_map
{
prefix_map m;
@@ -1202,7 +1202,7 @@ namespace build2
// Then process the include directories from prerequisite libraries.
//
- append_lib_prefixes (bs, m, t, act, li);
+ append_lib_prefixes (bs, m, a, t, li);
return m;
}
@@ -1405,8 +1405,8 @@ namespace build2
// file as well as an indication if it is usable for compilation (see
// below for details).
//
- pair<auto_rmfile, bool> compile::
- extract_headers (action act,
+ pair<auto_rmfile, bool> compile_rule::
+ extract_headers (action a,
const scope& bs,
file& t,
linfo li,
@@ -1416,7 +1416,7 @@ namespace build2
bool& updating,
timestamp mt) const
{
- tracer trace (x, "compile::extract_headers");
+ tracer trace (x, "compile_rule::extract_headers");
l5 ([&]{trace << "target: " << t;});
@@ -1628,7 +1628,7 @@ namespace build2
// Return NULL if the dependency information goes to stdout and a
// pointer to the temporary file path otherwise.
//
- auto init_args = [&t, act, li,
+ auto init_args = [&t, a, li,
&src, &md, &psrc, &sense_diag,
&rs, &bs,
pp, &env, &args, &args_gen, &args_i, &out, &drm,
@@ -1677,7 +1677,7 @@ namespace build2
// Add *.export.poptions from prerequisite libraries.
//
- append_lib_options (bs, args, t, act, li);
+ append_lib_options (bs, args, a, t, li);
append_options (args, t, c_poptions);
append_options (args, t, x_poptions);
@@ -2055,7 +2055,7 @@ namespace build2
// extraction process should be restarted.
//
auto add = [&trace, &pfx_map, &so_map,
- act, &t, li,
+ a, &t, li,
&dd, &updating, &skip_count,
&bs, this]
(path f, bool cache, timestamp mt) -> bool
@@ -2185,7 +2185,7 @@ namespace build2
l4 ([&]{trace << "non-existent header '" << f << "'";});
if (!pfx_map)
- pfx_map = build_prefix_map (bs, t, act, li);
+ pfx_map = build_prefix_map (bs, a, t, li);
// First try the whole file. Then just the directory.
//
@@ -2300,8 +2300,8 @@ namespace build2
// will lead to the match failure which we translate to a restart.
//
if (!cache)
- build2::match (act, *pt);
- else if (!build2::try_match (act, *pt).first)
+ build2::match (a, *pt);
+ else if (!build2::try_match (a, *pt).first)
{
dd.write (); // Invalidate this line.
updating = true;
@@ -2310,7 +2310,7 @@ namespace build2
// Update.
//
- bool restart (update (trace, act, *pt, mt));
+ bool restart (update (trace, a, *pt, mt));
// Verify/add it to the dependency database. We do it after update in
// order not to add bogus files (non-existent and without a way to
@@ -2321,7 +2321,7 @@ namespace build2
// Add to our prerequisite target list.
//
- t.prerequisite_targets.push_back (pt);
+ t.prerequisite_targets[a].push_back (pt);
skip_count++;
updating = updating || restart;
@@ -2796,15 +2796,15 @@ namespace build2
return make_pair (move (psrc), puse);
}
- pair<translation_unit, string> compile::
- parse_unit (action act,
+ pair<translation_unit, string> compile_rule::
+ parse_unit (action a,
file& t,
linfo lo,
const file& src,
auto_rmfile& psrc,
const match_data& md) const
{
- tracer trace (x, "compile::parse_unit");
+ tracer trace (x, "compile_rule::parse_unit");
// If things go wrong give the user a bit extra context.
//
@@ -2844,7 +2844,7 @@ namespace build2
//
args.push_back (cpath.recall_string ());
- append_lib_options (t.base_scope (), args, t, act, lo);
+ append_lib_options (t.base_scope (), args, a, t, lo);
append_options (args, t, c_poptions);
append_options (args, t, x_poptions);
@@ -3071,8 +3071,8 @@ namespace build2
// Extract and inject module dependencies.
//
- void compile::
- extract_modules (action act,
+ void compile_rule::
+ extract_modules (action a,
const scope& bs,
file& t,
linfo li,
@@ -3083,7 +3083,7 @@ namespace build2
depdb& dd,
bool& updating) const
{
- tracer trace (x, "compile::extract_modules");
+ tracer trace (x, "compile_rule::extract_modules");
l5 ([&]{trace << "target: " << t;});
// If things go wrong, give the user a bit extra context.
@@ -3131,7 +3131,7 @@ namespace build2
sha256 cs;
if (!mi.imports.empty ())
- md.mods = search_modules (act, bs, t, li, tt.bmi, src, mi.imports, cs);
+ md.mods = search_modules (a, bs, t, li, tt.bmi, src, mi.imports, cs);
if (dd.expect (cs.string ()) != nullptr)
updating = true;
@@ -3201,8 +3201,8 @@ namespace build2
// Resolve imported modules to bmi*{} targets.
//
- module_positions compile::
- search_modules (action act,
+ module_positions compile_rule::
+ search_modules (action a,
const scope& bs,
file& t,
linfo li,
@@ -3211,7 +3211,7 @@ namespace build2
module_imports& imports,
sha256& cs) const
{
- tracer trace (x, "compile::search_modules");
+ tracer trace (x, "compile_rule::search_modules");
// So we have a list of imports and a list of "potential" module
// prerequisites. They are potential in the sense that they may or may
@@ -3317,7 +3317,7 @@ namespace build2
return m.size () - mi;
};
- auto& pts (t.prerequisite_targets);
+ auto& pts (t.prerequisite_targets[a]);
size_t start (pts.size ()); // Index of the first to be added.
// We have two parallel vectors: module names/scores in imports and
@@ -3476,7 +3476,7 @@ namespace build2
return r;
};
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
const target* pt (p.load ()); // Should be cached for libraries.
@@ -3485,7 +3485,7 @@ namespace build2
const target* lt (nullptr);
if (const libx* l = pt->is_a<libx> ())
- lt = &link_member (*l, act, li);
+ lt = &link_member (*l, a, li);
else if (pt->is_a<liba> () || pt->is_a<libs> () || pt->is_a<libux> ())
lt = pt;
@@ -3493,7 +3493,7 @@ namespace build2
//
if (lt != nullptr)
{
- for (const target* bt: lt->prerequisite_targets)
+ for (const target* bt: lt->prerequisite_targets[a])
{
if (bt == nullptr)
continue;
@@ -3528,7 +3528,7 @@ namespace build2
continue;
if (const target** p = check_exact (*n))
- *p = &make_module_sidebuild (act, bs, *lt, *bt, *n);
+ *p = &make_module_sidebuild (a, bs, *lt, *bt, *n);
}
else
continue;
@@ -3563,7 +3563,7 @@ namespace build2
// Find the mxx{} prerequisite and extract its "file name" for the
// fuzzy match unless the user specified the module name explicitly.
//
- for (prerequisite_member p: group_prerequisite_members (act, *pt))
+ for (prerequisite_member p: group_prerequisite_members (a, *pt))
{
if (p.is_a (*x_mod))
{
@@ -3642,7 +3642,7 @@ namespace build2
// Match in parallel and wait for completion.
//
- match_members (act, t, pts, start);
+ match_members (a, t, pts, start);
// Post-process the list of our (direct) imports. While at it, calculate
// the checksum of all (direct and indirect) bmi{} paths.
@@ -3675,7 +3675,7 @@ namespace build2
if (in != mn)
{
- for (prerequisite_member p: group_prerequisite_members (act, *bt))
+ for (prerequisite_member p: group_prerequisite_members (a, *bt))
{
if (p.is_a (*x_mod)) // Got to be there.
{
@@ -3702,9 +3702,10 @@ namespace build2
// Hard to say whether we should reserve or not. We will probably
// get quite a bit of duplications.
//
- for (size_t m (bt->prerequisite_targets.size ()); j != m; ++j)
+ auto& bpts (bt->prerequisite_targets[a]);
+ for (size_t m (bpts.size ()); j != m; ++j)
{
- const target* et (bt->prerequisite_targets[j]);
+ const target* et (bpts[j]);
if (et == nullptr)
continue; // Unresolved (std.*).
@@ -3745,14 +3746,14 @@ namespace build2
// Synthesize a dependency for building a module binary interface on
// the side.
//
- const target& compile::
- make_module_sidebuild (action act,
+ const target& compile_rule::
+ make_module_sidebuild (action a,
const scope& bs,
const target& lt,
const target& mt,
const string& mn) const
{
- tracer trace (x, "compile::make_module_sidebuild");
+ tracer trace (x, "compile_rule::make_module_sidebuild");
// First figure out where we are going to build. We want to avoid
// multiple sidebuilds so the outermost scope that has loaded the
@@ -3891,7 +3892,7 @@ namespace build2
// synthesizing dependencies for bmi{}'s.
//
ps.push_back (prerequisite (lt));
- for (prerequisite_member p: group_prerequisite_members (act, lt))
+ for (prerequisite_member p: group_prerequisite_members (a, lt))
{
// @@ TODO: will probably need revision if using sidebuild for
// non-installed libraries (e.g., direct BMI dependencies
@@ -3927,10 +3928,11 @@ namespace build2
void
msvc_filter_cl (ifdstream&, const path& src);
- void compile::
+ void compile_rule::
append_modules (environment& env,
cstrings& args,
strings& stor,
+ action a,
const file& t,
const match_data& md) const
{
@@ -3939,6 +3941,8 @@ namespace build2
dir_path stdifc; // See the VC case below.
+ auto& pts (t.prerequisite_targets[a]);
+
#if 0
switch (cid)
{
@@ -3959,7 +3963,7 @@ namespace build2
//
if (md.type == translation_type::module_impl)
{
- const file& f (t.prerequisite_targets[ms.start]->as<file> ());
+ const file& f (pts[ms.start]->as<file> ());
string s (relative (f.path ()).string ());
s.insert (0, "-fmodule-file=");
stor.push_back (move (s));
@@ -3974,11 +3978,11 @@ namespace build2
}
case compiler_id::msvc:
{
- for (size_t i (ms.start), n (t.prerequisite_targets.size ());
+ for (size_t i (ms.start), n (pts.size ());
i != n;
++i)
{
- const target* pt (t.prerequisite_targets[i]);
+ const target* pt (pts[i]);
if (pt == nullptr)
continue;
@@ -4021,7 +4025,7 @@ namespace build2
assert (false);
}
#else
- size_t n (t.prerequisite_targets.size ());
+ size_t n (pts.size ());
// Clang embeds module file references so we only need to specify
// our direct imports.
@@ -4040,7 +4044,7 @@ namespace build2
for (size_t i (ms.start); i != n; ++i)
{
- const target* pt (t.prerequisite_targets[i]);
+ const target* pt (pts[i]);
if (pt == nullptr)
continue;
@@ -4130,8 +4134,8 @@ namespace build2
env.push_back ("IFCPATH");
}
- target_state compile::
- perform_update (action act, const target& xt) const
+ target_state compile_rule::
+ perform_update (action a, const target& xt) const
{
const file& t (xt.as<file> ());
const path& tp (t.path ());
@@ -4146,7 +4150,7 @@ namespace build2
auto pr (
execute_prerequisites<file> (
(mod ? *x_mod : x_src),
- act, t,
+ a, t,
md.mt,
[s = md.mods.start] (const target&, size_t i)
{
@@ -4203,7 +4207,7 @@ namespace build2
// Add *.export.poptions from prerequisite libraries.
//
- append_lib_options (bs, args, t, act, li);
+ append_lib_options (bs, args, a, t, li);
// Extra system header dirs (last).
//
@@ -4270,7 +4274,7 @@ namespace build2
args.push_back ("/MD");
if (md.mods.start != 0)
- append_modules (env, args, mods, t, md);
+ append_modules (env, args, mods, a, t, md);
// The presence of /Zi or /ZI causes the compiler to write debug info
// to the .pdb file. By default it is a shared file called vcNN.pdb
@@ -4335,7 +4339,7 @@ namespace build2
}
if (md.mods.start != 0)
- append_modules (env, args, mods, t, md);
+ append_modules (env, args, mods, a, t, md);
// Note: the order of the following options is relied upon below.
//
@@ -4604,7 +4608,7 @@ namespace build2
return target_state::changed;
}
- target_state compile::
+ target_state compile_rule::
perform_clean (action a, const target& xt) const
{
const file& t (xt.as<file> ());
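A change that recurs throughout this file is that the prerequisite list and the task count are now selected by the current action (t.prerequisite_targets[a], t[a].task_count) rather than being single shared members. A toy standalone model of that per-action indexing; the action and target types here are simplified stand-ins, not build2's:

#include <array>
#include <cstddef>
#include <iostream>
#include <vector>

using action = std::size_t;  // e.g. 0 = perform_update, 1 = perform_clean

struct target
{
  // One prerequisite list per action instead of a single shared member.
  std::array<std::vector<const target*>, 2> prerequisite_targets;
};

int main ()
{
  target obj, src;
  const action update (0), clean (1);

  obj.prerequisite_targets[update].push_back (&src);

  std::cout << obj.prerequisite_targets[update].size () << ' '   // 1
            << obj.prerequisite_targets[clean].size ()  << '\n'; // 0
}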
diff --git a/build2/cc/compile.hxx b/build2/cc/compile-rule.hxx
index 2878e3d..6bf63bf 100644
--- a/build2/cc/compile.hxx
+++ b/build2/cc/compile-rule.hxx
@@ -1,9 +1,9 @@
-// file : build2/cc/compile.hxx -*- C++ -*-
+// file : build2/cc/compile-rule.hxx -*- C++ -*-
// copyright : Copyright (c) 2014-2017 Code Synthesis Ltd
// license : MIT; see accompanying LICENSE file
-#ifndef BUILD2_CC_COMPILE_HXX
-#define BUILD2_CC_COMPILE_HXX
+#ifndef BUILD2_CC_COMPILE_RULE_HXX
+#define BUILD2_CC_COMPILE_RULE_HXX
#include <libbutl/path-map.mxx>
@@ -37,12 +37,12 @@ namespace build2
size_t copied; // First copied-over bmi*{}, 0 if none.
};
- class compile: public rule, virtual common
+ class compile_rule: public rule, virtual common
{
public:
- compile (data&&);
+ compile_rule (data&&);
- virtual match_result
+ virtual bool
match (action, target&, const string&) const override;
virtual recipe
@@ -61,14 +61,16 @@ namespace build2
void
append_lib_options (const scope&,
cstrings&,
+ action,
const target&,
- action, linfo) const;
+ linfo) const;
void
hash_lib_options (const scope&,
sha256&,
+ action,
const target&,
- action, linfo) const;
+ linfo) const;
// Mapping of include prefixes (e.g., foo in <foo/bar>) for auto-
// generated headers to directories where they will be generated.
@@ -97,11 +99,12 @@ namespace build2
void
append_lib_prefixes (const scope&,
prefix_map&,
+ action,
target&,
- action, linfo) const;
+ linfo) const;
prefix_map
- build_prefix_map (const scope&, target&, action, linfo) const;
+ build_prefix_map (const scope&, action, target&, linfo) const;
// Reverse-lookup target type from extension.
//
@@ -134,7 +137,7 @@ namespace build2
void
append_modules (environment&, cstrings&, strings&,
- const file&, const match_data&) const;
+ action, const file&, const match_data&) const;
// Language selection option (for VC) or the value for the -x option.
//
@@ -150,4 +153,4 @@ namespace build2
}
}
-#endif // BUILD2_CC_COMPILE_HXX
+#endif // BUILD2_CC_COMPILE_RULE_HXX
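The prefix_map mentioned above maps include prefixes (the foo in <foo/bar>) to the directories where the corresponding auto-generated headers will be produced. A small standalone illustration of the idea; the use of std::map and the longest-prefix-first probing are assumptions for the sketch, not build2's actual path-map:

#include <iostream>
#include <map>
#include <string>

// Hypothetical prefix map: include prefix -> directory where the
// auto-generated headers for that prefix will be written.
using prefix_map = std::map<std::string, std::string>;

// Resolve a non-existent header by trying progressively shorter prefixes.
static std::string
resolve (const prefix_map& m, const std::string& header)
{
  for (std::string p (header);; )
  {
    std::string::size_type s (p.rfind ('/'));
    if (s == std::string::npos)
      break;

    p.resize (s); // Drop the last path component.

    auto i (m.find (p));
    if (i != m.end ())
      return i->second + '/' + header;
  }

  return ""; // Unknown prefix.
}

int main ()
{
  prefix_map m {{"foo", "out/gen"}};

  std::cout << resolve (m, "foo/bar.hxx") << '\n'; // out/gen/foo/bar.hxx
}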
diff --git a/build2/cc/install.cxx b/build2/cc/install-rule.cxx
index fcaf626..4e232ff 100644
--- a/build2/cc/install.cxx
+++ b/build2/cc/install-rule.cxx
@@ -1,15 +1,15 @@
-// file : build2/cc/install.cxx -*- C++ -*-
+// file : build2/cc/install-rule.cxx -*- C++ -*-
// copyright : Copyright (c) 2014-2017 Code Synthesis Ltd
// license : MIT; see accompanying LICENSE file
-#include <build2/cc/install.hxx>
+#include <build2/cc/install-rule.hxx>
#include <build2/algorithm.hxx>
#include <build2/bin/target.hxx>
-#include <build2/cc/link.hxx> // match()
#include <build2/cc/utility.hxx>
+#include <build2/cc/link-rule.hxx> // match()
using namespace std;
@@ -19,16 +19,16 @@ namespace build2
{
using namespace bin;
- // file_install
+ // install_rule
//
- file_install::
- file_install (data&& d, const link& l): common (move (d)), link_ (l) {}
+ install_rule::
+ install_rule (data&& d, const link_rule& l)
+ : common (move (d)), link_ (l) {}
- const target* file_install::
+ const target* install_rule::
filter (action a, const target& t, prerequisite_member p) const
{
- // NOTE: see also alias_install::filter() below if changing anything
- // here.
+ // NOTE: see libux_install_rule::filter() if changing anything here.
otype ot (link_type (t).type);
@@ -72,7 +72,7 @@ namespace build2
const target* pt (&p.search (t));
// If this is the lib{}/libu{} group, pick a member which we would
- // link. For libu{} we want to the "see through" logic.
+ // link. For libu{} we want the "see through" logic.
//
if (const libx* l = pt->is_a<libx> ())
pt = &link_member (*l, a, link_info (t.base_scope (), ot));
@@ -90,7 +90,7 @@ namespace build2
return file_rule::filter (a, t, p);
}
- match_result file_install::
+ bool install_rule::
match (action a, target& t, const string& hint) const
{
// @@ How do we split the hint between the two?
@@ -99,20 +99,38 @@ namespace build2
// We only want to handle installation if we are also the ones building
// this target. So first run link's match().
//
- match_result r (link_.match (a, t, hint));
- return r ? file_rule::match (a, t, "") : r;
+ return link_.match (a, t, hint) && file_rule::match (a, t, "");
}
- recipe file_install::
+ recipe install_rule::
apply (action a, target& t) const
{
recipe r (file_rule::apply (a, t));
- // Derive shared library paths and cache them in the target's aux
- // storage if we are (un)installing (used in *_extra() functions below).
- //
- if (a.operation () == install_id || a.operation () == uninstall_id)
+ if (a.operation () == update_id)
+ {
+ // Signal to the link rule that this is update for install. And if the
+ // update has already been executed, verify it was done for install.
+ //
+ auto& md (t.data<link_rule::match_data> ());
+
+ if (md.for_install)
+ {
+ if (!*md.for_install)
+ fail << "target " << t << " already updated but not for install";
+ }
+ else
+ md.for_install = true;
+ }
+ else // install or uninstall
{
+ // Derive shared library paths and cache them in the target's aux
+ // storage if we are un/installing (used in *_extra() functions
+ // below).
+ //
+ static_assert (sizeof (link_rule::libs_paths) <= target::data_size,
+ "insufficient space");
+
file* f;
if ((f = t.is_a<libs> ()) != nullptr && tclass != "windows")
{
@@ -128,34 +146,39 @@ namespace build2
return r;
}
- void file_install::
+ bool install_rule::
install_extra (const file& t, const install_dir& id) const
{
+ bool r (false);
+
if (t.is_a<libs> () && tclass != "windows")
{
// Here we may have a bunch of symlinks that we need to install.
//
const scope& rs (t.root_scope ());
- auto& lp (t.data<link::libs_paths> ());
+ auto& lp (t.data<link_rule::libs_paths> ());
auto ln = [&rs, &id] (const path& f, const path& l)
{
install_l (rs, id, f.leaf (), l.leaf (), false);
+ return true;
};
const path& lk (lp.link);
const path& so (lp.soname);
const path& in (lp.interm);
- const path* f (&lp.real);
+ const path* f (lp.real);
- if (!in.empty ()) {ln (*f, in); f = &in;}
- if (!so.empty ()) {ln (*f, so); f = &so;}
- if (!lk.empty ()) {ln (*f, lk);}
+ if (!in.empty ()) {r = ln (*f, in) || r; f = &in;}
+ if (!so.empty ()) {r = ln (*f, so) || r; f = &so;}
+ if (!lk.empty ()) {r = ln (*f, lk) || r; }
}
+
+ return r;
}
- bool file_install::
+ bool install_rule::
uninstall_extra (const file& t, const install_dir& id) const
{
bool r (false);
@@ -165,7 +188,7 @@ namespace build2
// Here we may have a bunch of symlinks that we need to uninstall.
//
const scope& rs (t.root_scope ());
- auto& lp (t.data<link::libs_paths> ());
+ auto& lp (t.data<link_rule::libs_paths> ());
auto rm = [&rs, &id] (const path& l)
{
@@ -184,15 +207,16 @@ namespace build2
return r;
}
- // alias_install
+ // libux_install_rule
//
- alias_install::
- alias_install (data&& d, const link& l): common (move (d)), link_ (l) {}
+ libux_install_rule::
+ libux_install_rule (data&& d, const link_rule& l)
+ : common (move (d)), link_ (l) {}
- const target* alias_install::
+ const target* libux_install_rule::
filter (action a, const target& t, prerequisite_member p) const
{
- // The "see through" semantics that should be parallel to file_install
+ // The "see through" semantics that should be parallel to install_rule
// above. In particular, here we use libue/libua/libus{} as proxies for
// exe/liba/libs{} there.
@@ -233,14 +257,13 @@ namespace build2
return alias_rule::filter (a, t, p);
}
- match_result alias_install::
+ bool libux_install_rule::
match (action a, target& t, const string& hint) const
{
// We only want to handle installation if we are also the ones building
// this target. So first run link's match().
//
- match_result r (link_.match (a, t, hint));
- return r ? alias_rule::match (a, t, "") : r;
+ return link_.match (a, t, hint) && alias_rule::match (a, t, "");
}
}
}
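The new apply() logic above implements a small handshake with the link rule: an update-for-install request is recorded in the link rule's match data as an optional<bool>, and a target that was already updated as a plain update is reported as an error. A self-contained sketch of just that handshake, with a minimal stand-in for the real match_data:

#include <iostream>
#include <optional>
#include <stdexcept>

struct match_data
{
  std::optional<bool> for_install; // Unset until someone decides.
};

// Mirrors the check in install_rule::apply() above.
static void
signal_update_for_install (match_data& md)
{
  if (md.for_install)
  {
    if (!*md.for_install)
      throw std::runtime_error ("already updated but not for install");
  }
  else
    md.for_install = true;
}

// Mirrors link_rule::perform_update(): default to a plain update.
static bool
perform_update (match_data& md)
{
  if (!md.for_install)
    md.for_install = false;

  return *md.for_install;
}

int main ()
{
  match_data plain, inst;

  signal_update_for_install (inst);

  std::cout << perform_update (plain) << ' '   // 0 (plain update)
            << perform_update (inst)  << '\n'; // 1 (update for install)
}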
diff --git a/build2/cc/install-rule.hxx b/build2/cc/install-rule.hxx
new file mode 100644
index 0000000..ac2f93a
--- /dev/null
+++ b/build2/cc/install-rule.hxx
@@ -0,0 +1,77 @@
+// file : build2/cc/install-rule.hxx -*- C++ -*-
+// copyright : Copyright (c) 2014-2017 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#ifndef BUILD2_CC_INSTALL_RULE_HXX
+#define BUILD2_CC_INSTALL_RULE_HXX
+
+#include <build2/types.hxx>
+#include <build2/utility.hxx>
+
+#include <build2/install/rule.hxx>
+
+#include <build2/cc/types.hxx>
+#include <build2/cc/common.hxx>
+
+namespace build2
+{
+ namespace cc
+ {
+ class link_rule;
+
+ // Installation rule for exe{} and lib*{}. Here we do:
+ //
+ // 1. Signal to the link rule that this is update for install.
+ //
+ // 2. Additional filtering of prerequisites (e.g., headers of an exe{}).
+ //
+ // 3. Extra un/installation (e.g., libs{} symlinks).
+ //
+ class install_rule: public install::file_rule, virtual common
+ {
+ public:
+ install_rule (data&&, const link_rule&);
+
+ virtual const target*
+ filter (action, const target&, prerequisite_member) const override;
+
+ virtual bool
+ match (action, target&, const string&) const override;
+
+ virtual recipe
+ apply (action, target&) const override;
+
+ virtual bool
+ install_extra (const file&, const install_dir&) const override;
+
+ virtual bool
+ uninstall_extra (const file&, const install_dir&) const override;
+
+ private:
+ const link_rule& link_;
+ };
+
+ // Installation rule for libu*{}.
+ //
+ // While libu*{} themselves are not installable, we need to see through
+ // them in case they depend on stuff that we need to install (e.g.,
+ // headers). Note that we use the alias_rule as a base.
+ //
+ class libux_install_rule: public install::alias_rule, virtual common
+ {
+ public:
+ libux_install_rule (data&&, const link_rule&);
+
+ virtual const target*
+ filter (action, const target&, prerequisite_member) const override;
+
+ virtual bool
+ match (action, target&, const string&) const override;
+
+ private:
+ const link_rule& link_;
+ };
+ }
+}
+
+#endif // BUILD2_CC_INSTALL_RULE_HXX
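install_extra()/uninstall_extra() now return whether they actually did anything, and for libs{} the extra work is the chain of version symlinks, each pointing at the next more specific name. A standalone sketch of that OR-accumulation; it prints the link commands instead of calling the real install_l(), and the library names are illustrative:

#include <iostream>
#include <string>

static bool
install_link (const std::string& target, const std::string& link)
{
  std::cout << "ln -s " << target << ' ' << link << '\n';
  return true; // Something was installed.
}

// Mirrors the accumulation in install_rule::install_extra() above.
static bool
install_extra (const std::string& real,
               const std::string& interm,
               const std::string& soname,
               const std::string& link)
{
  bool r (false);
  const std::string* f (&real);

  if (!interm.empty ()) {r = install_link (*f, interm) || r; f = &interm;}
  if (!soname.empty ()) {r = install_link (*f, soname) || r; f = &soname;}
  if (!link.empty ())   {r = install_link (*f, link)   || r;            }

  return r;
}

int main ()
{
  // libfoo.so -> libfoo.so.1 -> libfoo.so.1.2 -> libfoo.so.1.2.3
  return install_extra ("libfoo.so.1.2.3",
                        "libfoo.so.1.2",
                        "libfoo.so.1",
                        "libfoo.so") ? 0 : 1;
}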
diff --git a/build2/cc/install.hxx b/build2/cc/install.hxx
deleted file mode 100644
index 28a0a94..0000000
--- a/build2/cc/install.hxx
+++ /dev/null
@@ -1,67 +0,0 @@
-// file : build2/cc/install.hxx -*- C++ -*-
-// copyright : Copyright (c) 2014-2017 Code Synthesis Ltd
-// license : MIT; see accompanying LICENSE file
-
-#ifndef BUILD2_CC_INSTALL_HXX
-#define BUILD2_CC_INSTALL_HXX
-
-#include <build2/types.hxx>
-#include <build2/utility.hxx>
-
-#include <build2/install/rule.hxx>
-
-#include <build2/cc/types.hxx>
-#include <build2/cc/common.hxx>
-
-namespace build2
-{
- namespace cc
- {
- class link;
-
- // Installation rule for exe{}, lib*{}, etc.
- //
- class file_install: public install::file_rule, virtual common
- {
- public:
- file_install (data&&, const link&);
-
- virtual const target*
- filter (action, const target&, prerequisite_member) const override;
-
- virtual match_result
- match (action, target&, const string&) const override;
-
- virtual recipe
- apply (action, target&) const override;
-
- virtual void
- install_extra (const file&, const install_dir&) const override;
-
- virtual bool
- uninstall_extra (const file&, const install_dir&) const override;
-
- private:
- const link& link_;
- };
-
- // Installation rule for libux{}.
- //
- class alias_install: public install::alias_rule, virtual common
- {
- public:
- alias_install (data&&, const link&);
-
- virtual const target*
- filter (action, const target&, prerequisite_member) const override;
-
- virtual match_result
- match (action, target&, const string&) const override;
-
- private:
- const link& link_;
- };
- }
-}
-
-#endif // BUILD2_CC_INSTALL_HXX
diff --git a/build2/cc/link.cxx b/build2/cc/link-rule.cxx
index f69d549..d06a835 100644
--- a/build2/cc/link.cxx
+++ b/build2/cc/link-rule.cxx
@@ -1,8 +1,8 @@
-// file : build2/cc/link.cxx -*- C++ -*-
+// file : build2/cc/link-rule.cxx -*- C++ -*-
// copyright : Copyright (c) 2014-2017 Code Synthesis Ltd
// license : MIT; see accompanying LICENSE file
-#include <build2/cc/link.hxx>
+#include <build2/cc/link-rule.hxx>
#include <map>
#include <cstdlib> // exit()
@@ -32,29 +32,21 @@ namespace build2
{
using namespace bin;
- link::
- link (data&& d)
+ link_rule::
+ link_rule (data&& d)
: common (move (d)),
rule_id (string (x) += ".link 1")
{
+ static_assert (sizeof (match_data) <= target::data_size,
+ "insufficient space");
}
- match_result link::
- match (action act, target& t, const string& hint) const
+ bool link_rule::
+ match (action a, target& t, const string& hint) const
{
- tracer trace (x, "link::match");
+ tracer trace (x, "link_rule::match");
- // @@ TODO:
- //
- // - if path already assigned, verify extension?
- //
- // @@ Q:
- //
- // - if there is no .o, are we going to check if the one derived
- // from target exist or can be built? A: No.
- // What if there is a library. Probably ok if static, not if shared,
- // (i.e., a utility library).
- //
+ // NOTE: may be called multiple times (see install rules).
ltype lt (link_type (t));
otype ot (lt.type);
@@ -77,7 +69,7 @@ namespace build2
//
bool seen_x (false), seen_c (false), seen_obj (false), seen_lib (false);
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
if (p.is_a (x_src) || (x_mod != nullptr && p.is_a (*x_mod)))
{
@@ -141,7 +133,7 @@ namespace build2
return true;
}
- auto link::
+ auto link_rule::
derive_libs_paths (file& ls, const char* pfx, const char* sfx) const
-> libs_paths
{
@@ -286,19 +278,21 @@ namespace build2
const path& re (ls.derive_path (move (b)));
- return libs_paths {move (lk), move (so), move (in), re, move (cp)};
+ return libs_paths {move (lk), move (so), move (in), &re, move (cp)};
}
- recipe link::
- apply (action act, target& xt) const
+ recipe link_rule::
+ apply (action a, target& xt) const
{
- static_assert (sizeof (link::libs_paths) <= target::data_size,
- "insufficient space");
-
- tracer trace (x, "link::apply");
+ tracer trace (x, "link_rule::apply");
file& t (xt.as<file> ());
+ // Note that for_install is signalled by install_rule and therefore
+ // can only be relied upon during execute.
+ //
+ match_data& md (t.data (match_data ()));
+
const scope& bs (t.base_scope ());
const scope& rs (*bs.root_scope ());
@@ -417,9 +411,9 @@ namespace build2
// the DLL and we add libi{} import library as its member.
//
if (tclass == "windows")
- libi = add_adhoc_member<bin::libi> (act, t);
+ libi = add_adhoc_member<bin::libi> (a, t);
- t.data (derive_libs_paths (t, p, s)); // Cache in target.
+ md.libs_data = derive_libs_paths (t, p, s);
if (libi)
match_recipe (libi, group_recipe); // Set recipe and unlock.
@@ -439,7 +433,7 @@ namespace build2
// Note: add after the import library if any.
//
target_lock pdb (
- add_adhoc_member (act, t, *bs.find_target_type ("pdb")));
+ add_adhoc_member (a, t, *bs.find_target_type ("pdb")));
// We call it foo.{exe,dll}.pdb rather than just foo.pdb because
// we can have both foo.exe and foo.dll in the same directory.
@@ -453,16 +447,16 @@ namespace build2
//
// Note that we do it here regardless of whether we are installing
// or not for two reasons. Firstly, it is not easy to detect this
- // situation in apply() since the action may (and is) overridden to
- // unconditional install. Secondly, always having the member takes
- // care of cleanup automagically. The actual generation happens in
- // the install rule.
+ // situation in apply() since the for_install hasn't yet been
+ // communicated by install_rule. Secondly, always having the member
+ // takes care of cleanup automagically. The actual generation
+ // happens in perform_update() below.
//
if (ot != otype::e)
{
target_lock pc (
add_adhoc_member (
- act, t,
+ a, t,
ot == otype::a ? pca::static_type : pcs::static_type));
// Note that here we always use the lib name prefix, even on
@@ -482,7 +476,7 @@ namespace build2
// Inject dependency on the output directory.
//
- inject_fsdir (act, t);
+ inject_fsdir (a, t);
// Process prerequisites, pass 1: search and match prerequisite
// libraries, search obj/bmi{} targets, and search targets we do rule
@@ -507,23 +501,24 @@ namespace build2
optional<dir_paths> usr_lib_dirs; // Extract lazily.
compile_target_types tt (compile_types (ot));
- auto skip = [&act, &rs] (const target*& pt)
+ auto skip = [&a, &rs] (const target*& pt)
{
- if (act.operation () == clean_id && !pt->dir.sub (rs.out_path ()))
+ if (a.operation () == clean_id && !pt->dir.sub (rs.out_path ()))
pt = nullptr;
return pt == nullptr;
};
- size_t start (t.prerequisite_targets.size ());
+ auto& pts (t.prerequisite_targets[a]);
+ size_t start (pts.size ());
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
// We pre-allocate a NULL slot for each (potential; see clean)
// prerequisite target.
//
- t.prerequisite_targets.push_back (nullptr);
- const target*& pt (t.prerequisite_targets.back ());
+ pts.push_back (nullptr);
+ const target*& pt (pts.back ());
uint8_t m (0); // Mark: lib (0), src (1), mod (2), obj/bmi (3).
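The mark()/unmark() calls used by this pass-based matching (see the hunks that follow) tag prerequisite pointers with a small per-pass code. The diff does not show how they are implemented; below is a standalone sketch of one conventional way to do such marking, by storing the tag in the low bits of a suitably aligned pointer:

#include <cassert>
#include <cstdint>

struct target { int dummy; }; // Aligned to at least 4, so two low bits are free.

inline void
mark (const target*& p, std::uint8_t m)
{
  p = reinterpret_cast<const target*> (
    reinterpret_cast<std::uintptr_t> (p) | (m & 0x3));
}

inline std::uint8_t
unmark (const target*& p)
{
  std::uintptr_t v (reinterpret_cast<std::uintptr_t> (p));
  p = reinterpret_cast<const target*> (v & ~std::uintptr_t (0x3));
  return static_cast<std::uint8_t> (v & 0x3);
}

int main ()
{
  target t;
  const target* p (&t);

  mark (p, 2);             // Tag as, say, "module".
  assert (unmark (p) == 2);
  assert (p == &t);        // Pointer restored.
}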
@@ -603,7 +598,7 @@ namespace build2
//
if (p.proj ())
pt = search_library (
- act, sys_lib_dirs, usr_lib_dirs, p.prerequisite);
+ a, sys_lib_dirs, usr_lib_dirs, p.prerequisite);
// The rest is the same basic logic as in search_and_match().
//
@@ -617,7 +612,7 @@ namespace build2
// member.
//
if (const libx* l = pt->is_a<libx> ())
- pt = &link_member (*l, act, li);
+ pt = &link_member (*l, a, li);
}
else
{
@@ -639,7 +634,7 @@ namespace build2
// Match lib{} (the only unmarked) in parallel and wait for completion.
//
- match_members (act, t, t.prerequisite_targets, start);
+ match_members (a, t, pts, start);
// Process prerequisites, pass 2: finish rule chaining but don't start
// matching anything yet since that may trigger recursive matching of
@@ -648,11 +643,11 @@ namespace build2
// Parallel prerequisite_targets loop.
//
- size_t i (start), n (t.prerequisite_targets.size ());
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ size_t i (start), n (pts.size ());
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
- const target*& pt (t.prerequisite_targets[i].target);
- uintptr_t& pd (t.prerequisite_targets[i++].data);
+ const target*& pt (pts[i].target);
+ uintptr_t& pd (pts[i++].data);
if (pt == nullptr)
continue;
@@ -710,9 +705,9 @@ namespace build2
// Note: have similar logic in make_module_sidebuild().
//
size_t j (start);
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
- const target* pt (t.prerequisite_targets[j++]);
+ const target* pt (pts[j++]);
if (p.is_a<libx> () ||
p.is_a<liba> () || p.is_a<libs> () || p.is_a<libux> () ||
@@ -760,7 +755,7 @@ namespace build2
: (group ? obj::static_type : tt.obj));
bool src (false);
- for (prerequisite_member p1: group_prerequisite_members (act, *pt))
+ for (prerequisite_member p1: group_prerequisite_members (a, *pt))
{
// Most of the time we will have just a single source so fast-
// path that case.
@@ -843,18 +838,18 @@ namespace build2
// Wait with unlocked phase to allow phase switching.
//
- wait_guard wg (target::count_busy (), t.task_count, true);
+ wait_guard wg (target::count_busy (), t[a].task_count, true);
for (i = start; i != n; ++i)
{
- const target*& pt (t.prerequisite_targets[i]);
+ const target*& pt (pts[i]);
if (pt == nullptr)
continue;
if (uint8_t m = unmark (pt))
{
- match_async (act, *pt, target::count_busy (), t.task_count);
+ match_async (a, *pt, target::count_busy (), t[a].task_count);
mark (pt, m);
}
}
@@ -865,9 +860,9 @@ namespace build2
// that we may have bailed out early (thus the parallel i/n for-loop).
//
i = start;
- for (prerequisite_member p: group_prerequisite_members (act, t))
+ for (prerequisite_member p: group_prerequisite_members (a, t))
{
- const target*& pt (t.prerequisite_targets[i++]);
+ const target*& pt (pts[i++]);
// Skipped or not marked for completion.
//
@@ -875,7 +870,7 @@ namespace build2
if (pt == nullptr || (m = unmark (pt)) == 0)
continue;
- build2::match (act, *pt);
+ build2::match (a, *pt);
// Nothing else to do if not marked for verification.
//
@@ -887,7 +882,7 @@ namespace build2
//
bool mod (x_mod != nullptr && p.is_a (*x_mod));
- for (prerequisite_member p1: group_prerequisite_members (act, *pt))
+ for (prerequisite_member p1: group_prerequisite_members (a, *pt))
{
if (p1.is_a (mod ? *x_mod : x_src) || p1.is_a<c> ())
{
@@ -915,7 +910,7 @@ namespace build2
}
}
- switch (act)
+ switch (a)
{
case perform_update_id: return [this] (action a, const target& t)
{
@@ -929,10 +924,10 @@ namespace build2
}
}
- void link::
+ void link_rule::
append_libraries (strings& args,
const file& l, bool la, lflags lf,
- const scope& bs, action act, linfo li) const
+ const scope& bs, action a, linfo li) const
{
// Note: lack of the "small function object" optimization will really
// kill us here since we are called in a loop.
@@ -996,14 +991,14 @@ namespace build2
};
process_libraries (
- act, bs, li, sys_lib_dirs, l, la, lf, imp, lib, opt, true);
+ a, bs, li, sys_lib_dirs, l, la, lf, imp, lib, opt, true);
}
- void link::
+ void link_rule::
hash_libraries (sha256& cs,
bool& update, timestamp mt,
const file& l, bool la, lflags lf,
- const scope& bs, action act, linfo li) const
+ const scope& bs, action a, linfo li) const
{
auto imp = [] (const file&, bool la) {return la;};
@@ -1053,14 +1048,14 @@ namespace build2
};
process_libraries (
- act, bs, li, sys_lib_dirs, l, la, lf, imp, lib, opt, true);
+ a, bs, li, sys_lib_dirs, l, la, lf, imp, lib, opt, true);
}
- void link::
+ void link_rule::
rpath_libraries (strings& args,
const target& t,
const scope& bs,
- action act,
+ action a,
linfo li,
bool for_install) const
{
@@ -1158,16 +1153,16 @@ namespace build2
const function<bool (const file&, bool)> impf (imp);
const function<void (const file*, const string&, lflags, bool)> libf (lib);
- for (auto pt: t.prerequisite_targets)
+ for (const prerequisite_target& pt: t.prerequisite_targets[a])
{
- bool a;
+ bool la;
const file* f;
- if ((a = (f = pt->is_a<liba> ())) ||
- (a = (f = pt->is_a<libux> ())) ||
- ( f = pt->is_a<libs> ()))
+ if ((la = (f = pt->is_a<liba> ())) ||
+ (la = (f = pt->is_a<libux> ())) ||
+ ( f = pt->is_a<libs> ()))
{
- if (!for_install && !a)
+ if (!for_install && !la)
{
// Top-level sharen library dependency. It is either matched or
// imported so should be a cc library.
@@ -1177,8 +1172,8 @@ namespace build2
"-Wl,-rpath," + f->path ().directory ().string ());
}
- process_libraries (act, bs, li, sys_lib_dirs,
- *f, a, pt.data,
+ process_libraries (a, bs, li, sys_lib_dirs,
+ *f, la, pt.data,
impf, libf, nullptr);
}
}
@@ -1194,17 +1189,24 @@ namespace build2
const char*
msvc_machine (const string& cpu); // msvc.cxx
- target_state link::
- perform_update (action act, const target& xt) const
+ target_state link_rule::
+ perform_update (action a, const target& xt) const
{
- tracer trace (x, "link::perform_update");
-
- auto oop (act.outer_operation ());
- bool for_install (oop == install_id || oop == uninstall_id);
+ tracer trace (x, "link_rule::perform_update");
const file& t (xt.as<file> ());
const path& tp (t.path ());
+ match_data& md (t.data<match_data> ());
+
+ // Unless the outer install rule signalled that this is update for
+ // install, signal back that we've performed plain update.
+ //
+ if (!md.for_install)
+ md.for_install = false;
+
+ bool for_install (*md.for_install);
+
const scope& bs (t.base_scope ());
const scope& rs (*bs.root_scope ());
@@ -1217,7 +1219,7 @@ namespace build2
//
bool update (false);
timestamp mt (t.load_mtime ());
- target_state ts (straight_execute_prerequisites (act, t));
+ target_state ts (straight_execute_prerequisites (a, t));
// If targeting Windows, take care of the manifest.
//
@@ -1231,7 +1233,7 @@ namespace build2
// it if we are updating for install.
//
if (!for_install)
- rpath_timestamp = windows_rpath_timestamp (t, bs, act, li);
+ rpath_timestamp = windows_rpath_timestamp (t, bs, a, li);
pair<path, bool> p (
windows_manifest (t,
@@ -1450,7 +1452,7 @@ namespace build2
//
if (lt.shared_library ())
{
- const libs_paths& paths (t.data<libs_paths> ());
+ const libs_paths& paths (md.libs_data);
const string& leaf (paths.effect_soname ().leaf ().string ());
if (tclass == "macos")
@@ -1486,7 +1488,7 @@ namespace build2
// rpath of the imported libraries (i.e., we assume they are also
// installed). But we add -rpath-link for some platforms.
//
- rpath_libraries (sargs, t, bs, act, li, for_install);
+ rpath_libraries (sargs, t, bs, a, li, for_install);
if (auto l = t["bin.rpath"])
for (const dir_path& p: cast<dir_paths> (l))
@@ -1519,7 +1521,7 @@ namespace build2
{
sha256 cs;
- for (auto p: t.prerequisite_targets)
+ for (const prerequisite_target& p: t.prerequisite_targets[a])
{
const target* pt (p.target);
@@ -1532,15 +1534,15 @@ namespace build2
}
const file* f;
- bool a (false), s (false);
+ bool la (false), ls (false);
if ((f = pt->is_a<obje> ()) ||
(f = pt->is_a<obja> ()) ||
(f = pt->is_a<objs> ()) ||
(!lt.static_library () && // @@ UTL: TODO libua to liba link.
- ((a = (f = pt->is_a<liba> ())) ||
- (a = (f = pt->is_a<libux> ())) ||
- (s = (f = pt->is_a<libs> ())))))
+ ((la = (f = pt->is_a<liba> ())) ||
+ (la = (f = pt->is_a<libux> ())) ||
+ (ls = (f = pt->is_a<libs> ())))))
{
// Link all the dependent interface libraries (shared) or interface
// and implementation (static), recursively.
@@ -1551,9 +1553,9 @@ namespace build2
// reason to re-archive the utility but those who link the utility
// have to "see through" the changes in the shared library.
//
- if (a || s)
+ if (la || ls)
{
- hash_libraries (cs, update, mt, *f, a, p.data, bs, act, li);
+ hash_libraries (cs, update, mt, *f, la, p.data, bs, a, li);
f = nullptr; // Timestamp checked by hash_libraries().
}
else
@@ -1603,22 +1605,16 @@ namespace build2
//
// Also, if you are wondering why don't we just always produce this .pc,
// install or no install, the reason is unless and until we are updating
- // for install, we have no idea where to things will be installed.
+ // for install, we have no idea where-to things will be installed.
//
if (for_install)
{
- bool a;
+ bool la;
const file* f;
- if ((a = (f = t.is_a<liba> ())) ||
- ( f = t.is_a<libs> ()))
- {
- // @@ Hack: this should really be in install:update_extra() where we
- // (should) what we are installing and what not.
- //
- if (rs["install.root"])
- pkgconfig_save (act, *f, a);
- }
+ if ((la = (f = t.is_a<liba> ())) ||
+ ( f = t.is_a<libs> ()))
+ pkgconfig_save (a, *f, la);
}
// If nothing changed, then we are done.
@@ -1810,7 +1806,7 @@ namespace build2
// The same logic as during hashing above.
//
- for (auto p: t.prerequisite_targets)
+ for (const prerequisite_target& p: t.prerequisite_targets[a])
{
const target* pt (p.target);
@@ -1821,21 +1817,21 @@ namespace build2
}
const file* f;
- bool a (false), s (false);
+ bool la (false), ls (false);
if ((f = pt->is_a<obje> ()) ||
(f = pt->is_a<obja> ()) ||
(f = pt->is_a<objs> ()) ||
(!lt.static_library () && // @@ UTL: TODO libua to liba link.
- ((a = (f = pt->is_a<liba> ())) ||
- (a = (f = pt->is_a<libux> ())) ||
- (s = (f = pt->is_a<libs> ())))))
+ ((la = (f = pt->is_a<liba> ())) ||
+ (la = (f = pt->is_a<libux> ())) ||
+ (ls = (f = pt->is_a<libs> ())))))
{
// Link all the dependent interface libraries (shared) or interface
// and implementation (static), recursively.
//
- if (a || s)
- append_libraries (sargs, *f, a, p.data, bs, act, li);
+ if (la || ls)
+ append_libraries (sargs, *f, la, p.data, bs, a, li);
else
sargs.push_back (relative (f->path ()).string ()); // string()&&
}
@@ -1864,7 +1860,7 @@ namespace build2
//
if (lt.shared_library ())
{
- const libs_paths& paths (t.data<libs_paths> ());
+ const libs_paths& paths (md.libs_data);
const path& p (paths.clean);
if (!p.empty ())
@@ -1886,7 +1882,7 @@ namespace build2
return s.empty () || m.string ().compare (0, s.size (), s) != 0;
};
- if (test (paths.real) &&
+ if (test (*paths.real) &&
test (paths.interm) &&
test (paths.soname) &&
test (paths.link))
@@ -2004,7 +2000,7 @@ namespace build2
// install).
//
if (lt.executable () && !for_install)
- windows_rpath_assembly (t, bs, act, li,
+ windows_rpath_assembly (t, bs, a, li,
cast<string> (rs[x_target_cpu]),
rpath_timestamp,
scratch);
@@ -2031,13 +2027,13 @@ namespace build2
}
};
- const libs_paths& paths (t.data<libs_paths> ());
+ const libs_paths& paths (md.libs_data);
const path& lk (paths.link);
const path& so (paths.soname);
const path& in (paths.interm);
- const path* f (&paths.real);
+ const path* f (paths.real);
if (!in.empty ()) {ln (f->leaf (), in); f = &in;}
if (!so.empty ()) {ln (f->leaf (), so); f = &so;}
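
[Editor's illustration] The ln() chain above creates each shared library alias as a relative symlink to the leaf of the previously created, more specific name. A standalone sketch of the same pattern using std::filesystem (make_links() is a hypothetical helper, not build2 API):

#include <filesystem>

namespace fs = std::filesystem;

// Given the real file (e.g., libfoo.so.1.2.3) and the optional
// intermediate, soname, and link names in the same directory, create:
//
//   libfoo.so.1.2 -> libfoo.so.1.2.3
//   libfoo.so.1   -> libfoo.so.1.2
//   libfoo.so     -> libfoo.so.1
//
void
make_links (const fs::path& real,
            const fs::path& interm,
            const fs::path& soname,
            const fs::path& link)
{
  const fs::path* f (&real);

  auto ln = [&f] (const fs::path& to)
  {
    fs::remove (to);                          // Replace if it exists.
    fs::create_symlink (f->filename (), to);  // Relative target (leaf).
    f = &to;
  };

  if (!interm.empty ()) ln (interm);
  if (!soname.empty ()) ln (soname);
  if (!link.empty ())   ln (link);
}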
@@ -2054,8 +2050,8 @@ namespace build2
return target_state::changed;
}
- target_state link::
- perform_clean (action act, const target& xt) const
+ target_state link_rule::
+ perform_clean (action a, const target& xt) const
{
const file& t (xt.as<file> ());
ltype lt (link_type (t));
@@ -2066,13 +2062,13 @@ namespace build2
{
if (tsys == "mingw32")
return clean_extra (
- act, t, {".d", ".dlls/", ".manifest.o", ".manifest"});
+ a, t, {".d", ".dlls/", ".manifest.o", ".manifest"});
else
// Assuming it's VC or alike. Clean up .ilk in case the user
// enabled incremental linking (note that .ilk replaces .exe).
//
return clean_extra (
- act, t, {".d", ".dlls/", ".manifest", "-.ilk"});
+ a, t, {".d", ".dlls/", ".manifest", "-.ilk"});
}
}
else if (lt.shared_library ())
@@ -2085,16 +2081,16 @@ namespace build2
// versioning their bases may not be the same.
//
if (tsys != "mingw32")
- return clean_extra (act, t, {{".d", "-.ilk"}, {"-.exp"}});
+ return clean_extra (a, t, {{".d", "-.ilk"}, {"-.exp"}});
}
else
{
// Here we can have a bunch of symlinks that we need to remove. If
// the paths are empty, then they will be ignored.
//
- const libs_paths& paths (t.data<libs_paths> ());
+ const libs_paths& paths (t.data<match_data> ().libs_data);
- return clean_extra (act, t, {".d",
+ return clean_extra (a, t, {".d",
paths.link.string ().c_str (),
paths.soname.string ().c_str (),
paths.interm.string ().c_str ()});
@@ -2102,7 +2098,7 @@ namespace build2
}
// For static library it's just the defaults.
- return clean_extra (act, t, {".d"});
+ return clean_extra (a, t, {".d"});
}
}
}
diff --git a/build2/cc/link.hxx b/build2/cc/link-rule.hxx
index c26102d..ba40410 100644
--- a/build2/cc/link.hxx
+++ b/build2/cc/link-rule.hxx
@@ -1,9 +1,9 @@
-// file : build2/cc/link.hxx -*- C++ -*-
+// file : build2/cc/link-rule.hxx -*- C++ -*-
// copyright : Copyright (c) 2014-2017 Code Synthesis Ltd
// license : MIT; see accompanying LICENSE file
-#ifndef BUILD2_CC_LINK_HXX
-#define BUILD2_CC_LINK_HXX
+#ifndef BUILD2_CC_LINK_RULE_HXX
+#define BUILD2_CC_LINK_RULE_HXX
#include <set>
@@ -19,12 +19,12 @@ namespace build2
{
namespace cc
{
- class link: public rule, virtual common
+ class link_rule: public rule, virtual common
{
public:
- link (data&&);
+ link_rule (data&&);
- virtual match_result
+ virtual bool
match (action, target&, const string&) const override;
virtual recipe
@@ -37,8 +37,8 @@ namespace build2
perform_clean (action, const target&) const;
private:
- friend class file_install;
- friend class alias_install;
+ friend class install_rule;
+ friend class libux_install_rule;
// Shared library paths.
//
@@ -51,27 +51,56 @@ namespace build2
// The libs{} path is always the real path. On Windows the link path
// is the import library.
//
- const path link; // What we link: libfoo.so
- const path soname; // SONAME: libfoo-1.so, libfoo.so.1
- const path interm; // Intermediate: libfoo.so.1.2
- const path& real; // Real: libfoo.so.1.2.3
+ path link; // What we link: libfoo.so
+ path soname; // SONAME: libfoo-1.so, libfoo.so.1
+ path interm; // Intermediate: libfoo.so.1.2
+ const path* real; // Real: libfoo.so.1.2.3
inline const path&
effect_link () const {return link.empty () ? effect_soname () : link;}
inline const path&
- effect_soname () const {return soname.empty () ? real : soname;}
+ effect_soname () const {return soname.empty () ? *real : soname;}
// Cleanup pattern used to remove previous versions. If empty, no
// cleanup is performed. The above (current) names are automatically
// filtered out.
//
- const path clean;
+ path clean;
};
libs_paths
derive_libs_paths (file&, const char*, const char*) const;
+ struct match_data
+ {
+ // The "for install" condition is signalled to us by install_rule when
+ // it is matched for the update operation. It also verifies that if we
+ // have already been executed, then it was for install.
+ //
+ // This has an interesting implication: it means that this rule cannot
+ // be used to update targets during match. Specifically, we cannot be
+ // executed for group resolution purposes (not a problem) nor as part
+ // of the generated source update. The latter case can be a problem:
+ // imagine a code generator that itself may need to be updated before
+ // it can be used to re-generate some out-of-date source code. As an
+ // aside, note that even if we were somehow able to communicate the
+ // "for install" in this case, the result of such an update may not
+ // actually be "usable" (e.g., not runnable because of the missing
+ // rpaths). There is another prominent case where the result may not
+ // be usable: cross-compilation.
+ //
+ // So the current (admittedly fuzzy) thinking is that a project shall
+ // not try to use its own build for update since it may not be usable
+ // (because of cross-compilations, being "for install", etc). Instead,
+ // it should rely on another, "usable" build of itself (this, BTW, is
+ // related to bpkg's build-time vs run-time dependencies).
+ //
+ optional<bool> for_install;
+
+ libs_paths libs_data;
+ };
+
// Library handling.
//
void
@@ -134,4 +163,4 @@ namespace build2
}
}
-#endif // BUILD2_CC_LINK_HXX
+#endif // BUILD2_CC_LINK_RULE_HXX
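
[Editor's illustration] To make the libs_paths naming scheme above concrete, here is a hedged, standalone sketch of the names for a hypothetical libfoo version 1.2.3, including the effect_soname()-style fallback. The real derive_libs_paths() is platform- and version-policy-dependent; demo_paths is an illustrative stand-in, not the actual struct.

#include <cassert>
#include <string>

struct demo_paths
{
  std::string link;    // "libfoo.so"       (what -lfoo resolves to)
  std::string soname;  // "libfoo.so.1"     (embedded SONAME)
  std::string interm;  // "libfoo.so.1.2"   (intermediate version)
  std::string real;    // "libfoo.so.1.2.3" (the actual file)

  // If a more general name is not used, fall back to the next more
  // specific one, ending at the real path.
  const std::string&
  effect_soname () const {return soname.empty () ? real : soname;}

  const std::string&
  effect_link () const {return link.empty () ? effect_soname () : link;}
};

int
main ()
{
  demo_paths p {"libfoo.so", "libfoo.so.1", "libfoo.so.1.2", "libfoo.so.1.2.3"};
  assert (p.effect_link () == "libfoo.so");

  demo_paths q {"", "", "", "libfoo-1.2.3.dll"}; // No aliases, e.g., Windows.
  assert (q.effect_link () == "libfoo-1.2.3.dll");
}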
diff --git a/build2/cc/module.cxx b/build2/cc/module.cxx
index c56bca9..ae64220 100644
--- a/build2/cc/module.cxx
+++ b/build2/cc/module.cxx
@@ -499,8 +499,8 @@ namespace build2
// We register for configure so that we detect unresolved imports
// during configuration rather that later, e.g., during update.
//
- const compile& cr (*this);
- const link& lr (*this);
+ const compile_rule& cr (*this);
+ const link_rule& lr (*this);
r.insert<obje> (perform_update_id, x_compile, cr);
r.insert<obje> (perform_clean_id, x_compile, cr);
@@ -559,26 +559,27 @@ namespace build2
//
if (install_loaded)
{
- const file_install& fr (*this);
- const alias_install& ar (*this);
+ const install_rule& ir (*this);
- r.insert<exe> (perform_install_id, x_install, fr);
- r.insert<exe> (perform_uninstall_id, x_uninstall, fr);
+ r.insert<exe> (perform_install_id, x_install, ir);
+ r.insert<exe> (perform_uninstall_id, x_uninstall, ir);
- r.insert<liba> (perform_install_id, x_install, fr);
- r.insert<liba> (perform_uninstall_id, x_uninstall, fr);
+ r.insert<liba> (perform_install_id, x_install, ir);
+ r.insert<liba> (perform_uninstall_id, x_uninstall, ir);
- r.insert<libs> (perform_install_id, x_install, fr);
- r.insert<libs> (perform_uninstall_id, x_uninstall, fr);
+ r.insert<libs> (perform_install_id, x_install, ir);
+ r.insert<libs> (perform_uninstall_id, x_uninstall, ir);
- r.insert<libue> (perform_install_id, x_install, ar);
- r.insert<libue> (perform_uninstall_id, x_uninstall, ar);
+ const libux_install_rule& lr (*this);
- r.insert<libua> (perform_install_id, x_install, ar);
- r.insert<libua> (perform_uninstall_id, x_uninstall, ar);
+ r.insert<libue> (perform_install_id, x_install, lr);
+ r.insert<libue> (perform_uninstall_id, x_uninstall, lr);
- r.insert<libus> (perform_install_id, x_install, ar);
- r.insert<libus> (perform_uninstall_id, x_uninstall, ar);
+ r.insert<libua> (perform_install_id, x_install, lr);
+ r.insert<libua> (perform_uninstall_id, x_uninstall, lr);
+
+ r.insert<libus> (perform_install_id, x_install, lr);
+ r.insert<libus> (perform_uninstall_id, x_uninstall, lr);
}
}
}
diff --git a/build2/cc/module.hxx b/build2/cc/module.hxx
index de61611..58aa184 100644
--- a/build2/cc/module.hxx
+++ b/build2/cc/module.hxx
@@ -13,9 +13,9 @@
#include <build2/cc/common.hxx>
-#include <build2/cc/compile.hxx>
-#include <build2/cc/link.hxx>
-#include <build2/cc/install.hxx>
+#include <build2/cc/compile-rule.hxx>
+#include <build2/cc/link-rule.hxx>
+#include <build2/cc/install-rule.hxx>
namespace build2
{
@@ -76,19 +76,19 @@ namespace build2
};
class module: public module_base, public virtual common,
- link,
- compile,
- file_install,
- alias_install
+ link_rule,
+ compile_rule,
+ install_rule,
+ libux_install_rule
{
public:
explicit
module (data&& d)
: common (move (d)),
- link (move (d)),
- compile (move (d)),
- file_install (move (d), *this),
- alias_install (move (d), *this) {}
+ link_rule (move (d)),
+ compile_rule (move (d)),
+ install_rule (move (d), *this),
+ libux_install_rule (move (d), *this) {}
void
init (scope&, const location&, const variable_map&);
diff --git a/build2/cc/pkgconfig.cxx b/build2/cc/pkgconfig.cxx
index 0ffd135..697a60e 100644
--- a/build2/cc/pkgconfig.cxx
+++ b/build2/cc/pkgconfig.cxx
@@ -26,8 +26,8 @@
#include <build2/cc/utility.hxx>
#include <build2/cc/common.hxx>
-#include <build2/cc/compile.hxx>
-#include <build2/cc/link.hxx>
+#include <build2/cc/compile-rule.hxx>
+#include <build2/cc/link-rule.hxx>
using namespace std;
using namespace butl;
@@ -451,7 +451,7 @@ namespace build2
//
#ifndef BUILD2_BOOTSTRAP
bool common::
- pkgconfig_load (action act,
+ pkgconfig_load (action a,
const scope& s,
lib& lt,
liba* at,
@@ -592,12 +592,13 @@ namespace build2
// Extract --cflags and set them as lib?{}:export.poptions. Note that we
// still pass --static in case this is pkgconf which has Cflags.private.
//
- auto parse_cflags = [&trace, this] (target& t, const pkgconf& pc, bool a)
+ auto parse_cflags =
+ [&trace, this] (target& t, const pkgconf& pc, bool la)
{
strings pops;
bool arg (false);
- for (auto& o: pc.cflags (a))
+ for (auto& o: pc.cflags (la))
{
if (arg)
{
@@ -646,8 +647,8 @@ namespace build2
// Parse --libs into loptions/libs (interface and implementation). If
// ps is not NULL, add each resolved library target as a prerequisite.
//
- auto parse_libs = [act, &s, top_sysd, this]
- (target& t, const pkgconf& pc, bool a, prerequisites* ps)
+ auto parse_libs = [a, &s, top_sysd, this]
+ (target& t, const pkgconf& pc, bool la, prerequisites* ps)
{
strings lops;
vector<name> libs;
@@ -664,7 +665,7 @@ namespace build2
// library names (without -l) after seeing an unknown option.
//
bool arg (false), first (true), known (true), have_L;
- for (auto& o: pc.libs (a))
+ for (auto& o: pc.libs (la))
{
if (arg)
{
@@ -726,10 +727,10 @@ namespace build2
// Space-separated list of escaped library flags.
//
- auto lflags = [&pc, a] () -> string
+ auto lflags = [&pc, la] () -> string
{
string r;
- for (const auto& o: pc.libs (a))
+ for (const auto& o: pc.libs (la))
{
if (!r.empty ())
r += ' ';
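
[Editor's illustration] For readers unfamiliar with what parse_libs extracts, here is a rough, standalone approximation: tokens from `pkg-config --libs` are split into -L search directories, -l library names, and other link options. split_libs() is a hypothetical name; the real lambda additionally handles options that consume a separate argument, pkgconf quirks, and stops treating unknown tokens as options once library names start.

#include <string>
#include <vector>

struct libs_result
{
  std::vector<std::string> dirs; // From -L<dir>.
  std::vector<std::string> libs; // From -l<name>.
  std::vector<std::string> lops; // Everything else (loptions).
};

// Classify already-tokenized `pkg-config --libs` output.
libs_result
split_libs (const std::vector<std::string>& tokens)
{
  libs_result r;

  for (const std::string& o: tokens)
  {
    if (o.compare (0, 2, "-L") == 0 && o.size () > 2)
      r.dirs.push_back (o.substr (2));
    else if (o.compare (0, 2, "-l") == 0 && o.size () > 2)
      r.libs.push_back (o.substr (2));
    else
      r.lops.push_back (o);
  }

  return r;
}

// Usage: split_libs ({"-L/usr/lib", "-lfoo", "-pthread"}) yields one
// search directory, one library name, and one passed-through option.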
@@ -831,7 +832,7 @@ namespace build2
prerequisite_key pk {
nullopt, {&lib::static_type, &out, &out, &name, nullopt}, &s};
- if (const target* lt = search_library (act, top_sysd, usrd, pk))
+ if (const target* lt = search_library (a, top_sysd, usrd, pk))
{
// We used to pick a member but that doesn't seem right since the
// same target could be used with different link orders.
@@ -1112,8 +1113,8 @@ namespace build2
#endif
- void link::
- pkgconfig_save (action act, const file& l, bool la) const
+ void link_rule::
+ pkgconfig_save (action a, const file& l, bool la) const
{
tracer trace (x, "pkgconfig_save");
@@ -1258,7 +1259,7 @@ namespace build2
os << " -L" << escape (ld.string ());
// Now process ourselves as if we were being linked to something (so
- // pretty similar to link::append_libraries()).
+ // pretty similar to link_rule::append_libraries()).
//
bool priv (false);
auto imp = [&priv] (const file&, bool la) {return priv && la;};
@@ -1307,7 +1308,7 @@ namespace build2
//
linfo li {otype::e, la ? lorder::a_s : lorder::s_a};
- process_libraries (act, bs, li, sys_lib_dirs,
+ process_libraries (a, bs, li, sys_lib_dirs,
l, la, 0, // Link flags.
imp, lib, opt, true);
os << endl;
@@ -1317,7 +1318,7 @@ namespace build2
os << "Libs.private:";
priv = true;
- process_libraries (act, bs, li, sys_lib_dirs,
+ process_libraries (a, bs, li, sys_lib_dirs,
l, la, 0, // Link flags.
imp, lib, opt, false);
os << endl;
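
[Editor's illustration] The Libs vs Libs.private split produced by the two process_libraries() calls above can be sketched in isolation as follows: interface libraries go into Libs: (needed by dynamic consumers), implementation-only dependencies into Libs.private: (added by static consumers). write_pc_libs() is a hypothetical helper; the real code derives the entries by walking the library graph with the imp/lib/opt callbacks shown earlier.

#include <iostream>
#include <string>
#include <vector>

void
write_pc_libs (std::ostream& os,
               const std::string& libdir,
               const std::vector<std::string>& iface,
               const std::vector<std::string>& impl)
{
  os << "Libs: -L" << libdir;
  for (const std::string& l: iface)
    os << " -l" << l;
  os << '\n';

  if (!impl.empty ())
  {
    os << "Libs.private:";
    for (const std::string& l: impl)
      os << " -l" << l;
    os << '\n';
  }
}

int
main ()
{
  // For a hypothetical libfoo that uses libz internally only:
  write_pc_libs (std::cout, "/usr/local/lib", {"foo"}, {"z"});
}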
@@ -1339,7 +1340,7 @@ namespace build2
};
vector<module> modules;
- for (const target* pt: l.prerequisite_targets)
+ for (const target* pt: l.prerequisite_targets[a])
{
// @@ UTL: we need to (recursively) see through libux{} (and
// also in search_modules()).
@@ -1354,7 +1355,7 @@ namespace build2
// the first mxx{} target that we see.
//
const target* mt (nullptr);
- for (const target* t: pt->prerequisite_targets)
+ for (const target* t: pt->prerequisite_targets[a])
{
if ((mt = t->is_a (*x_mod)))
break;
diff --git a/build2/cc/windows-manifest.cxx b/build2/cc/windows-manifest.cxx
index 4393fbf..ae33f66 100644
--- a/build2/cc/windows-manifest.cxx
+++ b/build2/cc/windows-manifest.cxx
@@ -9,7 +9,7 @@
#include <build2/filesystem.hxx>
#include <build2/diagnostics.hxx>
-#include <build2/cc/link.hxx>
+#include <build2/cc/link-rule.hxx>
using namespace std;
using namespace butl;
@@ -39,10 +39,10 @@ namespace build2
// file corresponding to the exe{} target. Return the manifest file path
// as well as whether it was changed.
//
- pair<path, bool> link::
+ pair<path, bool> link_rule::
windows_manifest (const file& t, bool rpath_assembly) const
{
- tracer trace (x, "link::windows_manifest");
+ tracer trace (x, "link_rule::windows_manifest");
const scope& rs (t.root_scope ());
diff --git a/build2/cc/windows-rpath.cxx b/build2/cc/windows-rpath.cxx
index b28ce42..8854542 100644
--- a/build2/cc/windows-rpath.cxx
+++ b/build2/cc/windows-rpath.cxx
@@ -13,7 +13,7 @@
#include <build2/bin/target.hxx>
-#include <build2/cc/link.hxx>
+#include <build2/cc/link-rule.hxx>
using namespace std;
using namespace butl;
@@ -46,10 +46,10 @@ namespace build2
// Return the greatest (newest) timestamp of all the DLLs that we will be
// adding to the assembly or timestamp_nonexistent if there aren't any.
//
- timestamp link::
+ timestamp link_rule::
windows_rpath_timestamp (const file& t,
const scope& bs,
- action act,
+ action a,
linfo li) const
{
timestamp r (timestamp_nonexistent);
@@ -103,19 +103,19 @@ namespace build2
r = t;
};
- for (auto pt: t.prerequisite_targets)
+ for (const prerequisite_target& pt: t.prerequisite_targets[a])
{
if (pt == nullptr)
continue;
- bool a;
+ bool la;
const file* f;
- if ((a = (f = pt->is_a<liba> ())) ||
- (a = (f = pt->is_a<libux> ())) || // See through.
+ if ((la = (f = pt->is_a<liba> ())) ||
+ (la = (f = pt->is_a<libux> ())) || // See through.
( f = pt->is_a<libs> ()))
- process_libraries (act, bs, li, sys_lib_dirs,
- *f, a, pt.data,
+ process_libraries (a, bs, li, sys_lib_dirs,
+ *f, la, pt.data,
imp, lib, nullptr, true);
}
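
[Editor's illustration] The timestamp logic described above, later used to avoid regenerating the assembly, amounts to taking the newest DLL modification time and comparing it against the manifest. A standalone, std::filesystem-based sketch; newest_dll_time() and manifest_up_to_date() are illustrative names, not build2 API, and file_time_type::min() stands in for timestamp_nonexistent.

#include <filesystem>
#include <vector>

namespace fs = std::filesystem;

// Greatest (newest) modification time of the given DLLs, or
// file_time_type::min() if there are none (or none exist).
fs::file_time_type
newest_dll_time (const std::vector<fs::path>& dlls)
{
  fs::file_time_type r (fs::file_time_type::min ());

  for (const fs::path& d: dlls)
  {
    std::error_code ec;
    auto t (fs::last_write_time (d, ec));
    if (!ec && t > r)
      r = t;
  }

  return r;
}

// The assembly manifest only needs regenerating if some DLL is newer
// than the manifest itself (or the manifest does not exist yet).
bool
manifest_up_to_date (const fs::path& manifest,
                     const std::vector<fs::path>& dlls)
{
  std::error_code ec;
  auto mt (fs::last_write_time (manifest, ec));
  return !ec && mt > newest_dll_time (dlls);
}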
@@ -125,10 +125,10 @@ namespace build2
// Like *_timestamp() but actually collect the DLLs (and weed out the
// duplicates).
//
- auto link::
+ auto link_rule::
windows_rpath_dlls (const file& t,
const scope& bs,
- action act,
+ action a,
linfo li) const -> windows_dlls
{
windows_dlls r;
@@ -193,19 +193,19 @@ namespace build2
}
};
- for (auto pt: t.prerequisite_targets)
+ for (const prerequisite_target& pt: t.prerequisite_targets[a])
{
if (pt == nullptr)
continue;
- bool a;
+ bool la;
const file* f;
- if ((a = (f = pt->is_a<liba> ())) ||
- (a = (f = pt->is_a<libux> ())) || // See through.
- ( f = pt->is_a<libs> ()))
- process_libraries (act, bs, li, sys_lib_dirs,
- *f, a, pt.data,
+ if ((la = (f = pt->is_a<liba> ())) ||
+ (la = (f = pt->is_a<libux> ())) || // See through.
+ ( f = pt->is_a<libs> ()))
+ process_libraries (a, bs, li, sys_lib_dirs,
+ *f, la, pt.data,
imp, lib, nullptr, true);
}
@@ -223,10 +223,10 @@ namespace build2
// unnecessary work by comparing the DLLs timestamp against the assembly
// manifest file.
//
- void link::
+ void link_rule::
windows_rpath_assembly (const file& t,
const scope& bs,
- action act,
+ action a,
linfo li,
const string& tcpu,
timestamp ts,
@@ -264,7 +264,7 @@ namespace build2
windows_dlls dlls;
if (!empty)
- dlls = windows_rpath_dlls (t, bs, act, li);
+ dlls = windows_rpath_dlls (t, bs, a, li);
// Clean the assembly directory and make sure it exists. Maybe it would
// have been faster to overwrite the existing manifest rather than