author     Boris Kolpackov <boris@codesynthesis.com>  2017-02-15 03:55:15 +0200
committer  Boris Kolpackov <boris@codesynthesis.com>  2017-03-02 14:03:34 +0200
commit     b37f1aa6398065be806e6605a023189685669885 (patch)
tree       b9b32091e3d70a31852302b24c99ecb62465464a /build2/cc
parent     a64b2ae2099346471ead988d5f2d383d55a9bf89 (diff)
Implement parallel match
Diffstat (limited to 'build2/cc')
-rw-r--r--  build2/cc/common               |  53
-rw-r--r--  build2/cc/common.cxx           | 198
-rw-r--r--  build2/cc/compile              |  17
-rw-r--r--  build2/cc/compile.cxx          | 339
-rw-r--r--  build2/cc/install              |   8
-rw-r--r--  build2/cc/install.cxx          |  27
-rw-r--r--  build2/cc/link                 |  19
-rw-r--r--  build2/cc/link.cxx             | 534
-rw-r--r--  build2/cc/msvc.cxx             |  75
-rw-r--r--  build2/cc/pkgconfig.cxx        |   8
-rw-r--r--  build2/cc/utility              |   8
-rw-r--r--  build2/cc/utility.cxx          |  46
-rw-r--r--  build2/cc/windows-manifest.cxx |   2
-rw-r--r--  build2/cc/windows-rpath.cxx    |  17
14 files changed, 753 insertions, 598 deletions
diff --git a/build2/cc/common b/build2/cc/common
index c631f38..5a459b8 100644
--- a/build2/cc/common
+++ b/build2/cc/common
@@ -185,6 +185,7 @@ namespace build2
public:
void
process_libraries (
+ action,
const scope&,
lorder,
const dir_paths&,
@@ -195,44 +196,70 @@ namespace build2
const function<void (const file&, const string&, bool, bool)>&,
bool = false) const;
- target*
- search_library (const dir_paths& sysd,
+ const target*
+ search_library (action act,
+ const dir_paths& sysd,
optional<dir_paths>& usrd,
- prerequisite& p) const
+ const prerequisite& p) const
{
- if (p.target == nullptr) // First check the cache.
- p.target = search_library (sysd, usrd, p.key ());
-
- return p.target;
+ const target* r (p.target.load (memory_order_consume));
+
+ if (r == nullptr)
+ {
+ if ((r = search_library (act, sysd, usrd, p.key ())) != nullptr)
+ {
+ const target* e (nullptr);
+ if (!p.target.compare_exchange_strong (
+ e, r,
+ memory_order_release,
+ memory_order_consume))
+ assert (e == r);
+ }
+ }
+
+ return r;
}
- private:
+ public:
const file&
- resolve_library (const scope&,
+ resolve_library (action,
+ const scope&,
name,
lorder,
const dir_paths&,
optional<dir_paths>&) const;
+ template <typename T>
+ static ulock
+ insert_library (T*&,
+ const string&,
+ const dir_path&,
+ optional<string>,
+ bool,
+ tracer&);
+
target*
- search_library (const dir_paths&,
+ search_library (action,
+ const dir_paths&,
optional<dir_paths>&,
const prerequisite_key&,
bool existing = false) const;
const target*
- search_library_existing (const dir_paths& sysd,
+ search_library_existing (action act,
+ const dir_paths& sysd,
optional<dir_paths>& usrd,
const prerequisite_key& pk) const
{
- return search_library (sysd, usrd, pk, true);
+ return search_library (act, sysd, usrd, pk, true);
}
dir_paths
extract_library_dirs (const scope&) const;
bool
- pkgconfig_extract (const scope&,
+ pkgconfig_extract (action,
+ const scope&,
bin::lib&,
bin::liba*,
bin::libs*,
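Note on the inline search_library() cache above: the prerequisite's target pointer is now an atomic, and concurrent matches race to fill it using a double-checked publish. Below is a minimal, self-contained sketch of that pattern, assuming hypothetical target_stub and lookup() stand-ins rather than the real build2 types:

  #include <atomic>
  #include <cassert>

  struct target_stub {};                      // Stand-in for the real target type.

  static target_stub found;                   // What the slow path "finds".
  static const target_stub* lookup () { return &found; }

  struct cache_entry
  {
    std::atomic<const target_stub*> cached {nullptr};

    const target_stub*
    search ()
    {
      // Fast path: another thread may have already published the result.
      //
      const target_stub* r (cached.load (std::memory_order_consume));

      if (r == nullptr && (r = lookup ()) != nullptr)
      {
        // Publish the result; if we lost the race, the winner must have
        // stored the same pointer.
        //
        const target_stub* e (nullptr);
        if (!cached.compare_exchange_strong (e, r,
                                             std::memory_order_release,
                                             std::memory_order_consume))
          assert (e == r);
      }

      return r;
    }
  };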
diff --git a/build2/cc/common.cxx b/build2/cc/common.cxx
index 7b8499c..88eb45d 100644
--- a/build2/cc/common.cxx
+++ b/build2/cc/common.cxx
@@ -46,6 +46,7 @@ namespace build2
//
void common::
process_libraries (
+ action act,
const scope& top_bs,
lorder top_lo,
const dir_paths& top_sysd,
@@ -174,13 +175,14 @@ namespace build2
//
if (self && proc_lib)
{
- const string& p (l.path ().string ());
+ const path& p (l.path ());
+ assert (!p.empty ()); // Must be assigned.
bool s (t != nullptr // If cc library (matched or imported).
? cast_false<bool> (l.vars[c_system])
- : sys (top_sysd, p));
+ : sys (top_sysd, p.string ()));
- proc_lib (&l, p, s);
+ proc_lib (&l, p.string (), s);
}
const scope& bs (t == nullptr || cc ? top_bs : l.base_scope ());
@@ -224,7 +226,7 @@ namespace build2
if (sysd == nullptr) find_sysd ();
if (!lo) find_lo ();
- process_libraries (bs, *lo, *sysd,
+ process_libraries (act, bs, *lo, *sysd,
*f, a,
proc_impl, proc_lib, proc_opt, true);
}
@@ -262,7 +264,7 @@ namespace build2
&proc_impl, &proc_lib, &proc_opt,
&sysd, &usrd,
&find_sysd, &find_lo, &sys, &sys_simple,
- &bs, &lo, this] (const lookup& lu)
+ &bs, act, &lo, this] (const lookup& lu)
{
const vector<name>* ns (cast_null<vector<name>> (lu));
if (ns == nullptr || ns->empty ())
@@ -287,7 +289,7 @@ namespace build2
if (sysd == nullptr) find_sysd ();
if (!lo) find_lo ();
- const file& t (resolve_library (bs, n, *lo, *sysd, usrd));
+ const file& t (resolve_library (act, bs, n, *lo, *sysd, usrd));
if (proc_lib)
{
@@ -300,7 +302,7 @@ namespace build2
// on Windows import-installed DLLs may legally have empty
// paths.
//
- if (t.mtime (false) == timestamp_unknown)
+ if (t.mtime () == timestamp_unknown)
fail << "interface dependency " << t << " is out of date" <<
info << "mentioned in *.export.libs of target " << l <<
info << "is it a prerequisite of " << l << "?";
@@ -308,7 +310,7 @@ namespace build2
// Process it recursively.
//
- process_libraries (bs, *lo, *sysd,
+ process_libraries (act, bs, *lo, *sysd,
t, t.is_a<liba> (),
proc_impl, proc_lib, proc_opt, true);
}
@@ -386,7 +388,8 @@ namespace build2
// that's the only way to guarantee it will be up-to-date.
//
const file& common::
- resolve_library (const scope& s,
+ resolve_library (action act,
+ const scope& s,
name n,
lorder lo,
const dir_paths& sysd,
@@ -422,7 +425,7 @@ namespace build2
//
dir_path out;
prerequisite_key pk {n.proj, {tt, &n.dir, &out, &n.value, ext}, &s};
- xt = search_library_existing (sysd, usrd, pk);
+ xt = search_library_existing (act, sysd, usrd, pk);
if (xt == nullptr)
{
@@ -437,17 +440,41 @@ namespace build2
// If this is lib{}, pick appropriate member.
//
if (const lib* l = xt->is_a<lib> ())
- xt = &link_member (*l, lo); // Pick liba{} or libs{}.
+ xt = &link_member (*l, act, lo); // Pick liba{} or libs{}.
return xt->as<file> ();
}
- // Note that pk's scope should not be NULL (even if dir is absolute). If
- // sys is not NULL, then store there an inidication of whether this is a
- // system library.
+ // Insert a target verifying that it already exists if requested. Return
+ // the lock.
+ //
+ template <typename T>
+ ulock common::
+ insert_library (T*& r,
+ const string& name,
+ const dir_path& d,
+ optional<string> ext,
+ bool exist,
+ tracer& trace)
+ {
+ auto p (targets.insert_locked (T::static_type,
+ d,
+ dir_path (),
+ name,
+ move (ext),
+ true, // Implied.
+ trace));
+
+ assert (!exist || !p.second.owns_lock ());
+ r = &p.first.template as<T> ();
+ return move (p.second);
+ }
+
+ // Note that pk's scope should not be NULL (even if dir is absolute).
//
target* common::
- search_library (const dir_paths& sysd,
+ search_library (action act,
+ const dir_paths& sysd,
optional<dir_paths>& usrd,
const prerequisite_key& p,
bool exist) const
@@ -546,31 +573,11 @@ namespace build2
path f; // Reuse the buffer.
const dir_path* pd (nullptr);
- // Insert a target verifying that it already exists if requested.
- //
- auto insert = [&name, exist, &trace] (auto*& r,
- const dir_path& d,
- optional<string> ext)
- {
- using T = typename std::remove_reference<decltype (*r)>::type;
-
- auto p (targets.insert (T::static_type,
- d,
- dir_path (),
- name,
- move (ext),
- true, // Implied.
- trace));
-
- assert (!exist || !p.second);
- r = &p.first.template as<T> ();
- };
-
auto search =[&a, &s,
&an, &ae,
&sn, &se,
&name, ext,
- &p, &f, &insert, exist, this] (const dir_path& d) -> bool
+ &p, &f, exist, &trace, this] (const dir_path& d) -> bool
{
timestamp mt;
@@ -593,17 +600,22 @@ namespace build2
//
if (tclass == "windows")
{
- insert (s, d, nullopt);
+ libi* i (nullptr);
+ insert_library (i, name, d, se, exist, trace);
+
+ ulock l (insert_library (s, name, d, nullopt, exist, trace));
- if (s->member == nullptr)
+ if (!exist)
{
- libi* i;
- insert (i, d, se);
+ if (l.owns_lock ())
+ s->member = i;
+ else
+ assert (s->member == i);
- if (i->path ().empty ())
- i->path (move (f));
+ l.unlock ();
i->mtime (mt);
+ i->path (move (f));
// Presumably there is a DLL somewhere, we just don't know
// where (and its possible we might have to look for one if we
@@ -612,17 +624,15 @@ namespace build2
// but valid timestamp (aka "trust me, it's there").
//
s->mtime (mt);
- s->member = i;
+ s->path (path ());
}
}
else
{
- insert (s, d, se);
-
- if (s->path ().empty ())
- s->path (move (f));
+ insert_library (s, name, d, se, exist, trace);
s->mtime (mt);
+ s->path (move (f));
}
}
else if (!ext && tsys == "mingw32")
@@ -639,12 +649,10 @@ namespace build2
if (mt != timestamp_nonexistent)
{
- insert (s, d, se);
-
- if (s->path ().empty ())
- s->path (move (f));
+ insert_library (s, name, d, se, exist, trace);
s->mtime (mt);
+ s->path (move (f));
}
}
}
@@ -666,12 +674,9 @@ namespace build2
// Note that this target is outside any project which we treat
// as out trees.
//
- insert (a, d, ae);
-
- if (a->path ().empty ())
- a->path (move (f));
-
+ insert_library (a, name, d, ae, exist, trace);
a->mtime (mt);
+ a->path (move (f));
}
}
@@ -727,29 +732,44 @@ namespace build2
if (pd == nullptr)
return nullptr;
- // Enter (or find) the lib{} target group. Note that we must be careful
- // here since its possible we have already imported some of its members.
+ // Enter (or find) the lib{} target group.
//
lib* lt;
- insert (lt, *pd, l ? p.tk.ext : nullopt);
+ insert_library (lt, name, *pd, l ? p.tk.ext : nullopt, exist, trace);
- // It should automatically link-up to the members we have found.
+ // Result.
//
- assert (a == nullptr || lt->a == a);
- assert (s == nullptr || lt->s == s);
+ target* r (l ? lt : (p.is_a<liba> () ? static_cast<target*> (a) : s));
+
+ // Assume the rest is already done if existing.
+ //
+ if (exist)
+ return r;
- // Update the bin.lib variable to indicate what's available. Assume
- // already done if existing.
+ // If we cannot acquire the lock then this means the target has already
+ // been matched (though not clear by whom) and we assume all of this
+ // has already been done.
//
- if (!exist)
+ target_lock ll (lock (act, *lt));
+
+ // Set lib{} group members to indicate what's available. Note that we
+ // must be careful here since it's possible we have already imported some
+ // of its members.
+ //
+ if (ll)
{
- const char* bl (lt->a != nullptr
- ? (lt->s != nullptr ? "both" : "static")
- : "shared");
- lt->assign (var_pool["bin.lib"]) = bl;
+ if (a != nullptr) lt->a = a;
+ if (s != nullptr) lt->s = s;
}
- target* r (l ? lt : (p.is_a<liba> () ? static_cast<target*> (a) : s));
+ target_lock al (a != nullptr ? lock (act, *a) : target_lock ());
+ target_lock sl (s != nullptr ? lock (act, *s) : target_lock ());
+
+ if (!al) a = nullptr;
+ if (!sl) s = nullptr;
+
+ if (a != nullptr) a->group = lt;
+ if (s != nullptr) s->group = lt;
// Mark as a "cc" library (unless already marked) and set the system
// flag.
@@ -769,24 +789,16 @@ namespace build2
return p.second;
};
- // If the library already has cc.type, then assume it was either already
- // imported or was matched by a rule.
- //
- // Assume already done if existing.
+ // If the library already has cc.type, then assume it was either
+ // already imported or was matched by a rule.
//
- if (!exist)
- {
- if (a != nullptr && !mark_cc (*a))
- a = nullptr;
-
- if (s != nullptr && !mark_cc (*s))
- s = nullptr;
- }
+ if (a != nullptr && !mark_cc (*a)) a = nullptr;
+ if (s != nullptr && !mark_cc (*s)) s = nullptr;
// Add the "using static/shared library" macro (used, for example, to
- // handle DLL export). The absence of either of these macros would mean
- // some other build system that cannot distinguish between the two (and
- // no pkg-config information).
+ // handle DLL export). The absence of either of these macros would
+ // mean some other build system that cannot distinguish between the
+ // two (and no pkg-config information).
//
auto add_macro = [this] (target& t, const char* suffix)
{
@@ -832,17 +844,15 @@ namespace build2
}
};
- // Assume already done if existing.
- //
- if (!exist && (a != nullptr || s != nullptr))
+ if (ll && (a != nullptr || s != nullptr))
{
- // Try to extract library information from pkg-config. We only add the
- // default macro if we could not extract more precise information. The
- // idea is that when we auto-generate .pc files, we will copy those
- // macros (or custom ones) from *.export.poptions.
+ // Try to extract library information from pkg-config. We only add
+ // the default macro if we could not extract more precise
+ // information. The idea is that when we auto-generate .pc files, we
+ // will copy those macros (or custom ones) from *.export.poptions.
//
- if (pkgconfig == nullptr ||
- !pkgconfig_extract (*p.scope, *lt, a, s, p.proj, name, *pd, sysd))
+ if (pkgconfig == nullptr || !pkgconfig_extract (
+ act, *p.scope, *lt, a, s, p.proj, name, *pd, sysd))
{
if (a != nullptr) add_macro (*a, "STATIC");
if (s != nullptr) add_macro (*s, "SHARED");
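Note on insert_library() above: it relies on the target set's insert_locked() returning a lock that is owned only when the call actually inserted the target, which is what assert (!exist || !p.second.owns_lock ()) checks. A rough sketch of this find-or-insert-locked idiom, assuming a hypothetical mutex-protected map (lib_set, lib_entry) rather than the real build2 target set:

  #include <map>
  #include <mutex>
  #include <string>
  #include <utility>

  struct lib_entry { std::string path; };     // Hypothetical target stand-in.

  class lib_set
  {
    std::mutex m_;
    std::map<std::string, lib_entry> map_;

  public:
    // Find or insert an entry; the returned lock is owned only if this call
    // performed the insertion, signaling the caller to initialize the entry.
    //
    std::pair<lib_entry&, std::unique_lock<std::mutex>>
    insert_locked (const std::string& name)
    {
      std::unique_lock<std::mutex> l (m_);

      auto r (map_.emplace (name, lib_entry {}));

      if (!r.second)
        l.unlock ();                          // Already present.

      return {r.first->second, std::move (l)};
    }
  };

The caller keeps the lock only while filling in a freshly inserted entry, mirroring how the search() lambda above assigns the member, path, and mtime before unlocking.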
diff --git a/build2/cc/compile b/build2/cc/compile
index 63ce286..2986b7d 100644
--- a/build2/cc/compile
+++ b/build2/cc/compile
@@ -27,10 +27,10 @@ namespace build2
compile (data&&);
virtual match_result
- match (slock&, action, target&, const string& hint) const override;
+ match (action, target&, const string&) const override;
virtual recipe
- apply (slock&, action, target&) const override;
+ apply (action, target&) const override;
target_state
perform_update (action, const target&) const;
@@ -43,13 +43,13 @@ namespace build2
append_lib_options (const scope&,
cstrings&,
const target&,
- lorder) const;
+ action, lorder) const;
void
hash_lib_options (const scope&,
sha256&,
const target&,
- lorder) const;
+ action, lorder) const;
// Mapping of include prefixes (e.g., foo in <foo/bar>) for auto-
// generated headers to directories where they will be generated.
@@ -67,10 +67,13 @@ namespace build2
append_prefixes (prefix_map&, const target&, const variable&) const;
void
- append_lib_prefixes (const scope&, prefix_map&, target&, lorder) const;
+ append_lib_prefixes (const scope&,
+ prefix_map&,
+ target&,
+ action, lorder) const;
prefix_map
- build_prefix_map (const scope&, target&, lorder) const;
+ build_prefix_map (const scope&, target&, action, lorder) const;
// Reverse-lookup target type from extension.
//
@@ -80,7 +83,7 @@ namespace build2
// Header dependency injection.
//
void
- inject (slock&, action, target&, lorder, file&, depdb&) const;
+ inject (action, target&, lorder, const file&, depdb&) const;
private:
const string rule_id;
diff --git a/build2/cc/compile.cxx b/build2/cc/compile.cxx
index c04f0a9..f202ba1 100644
--- a/build2/cc/compile.cxx
+++ b/build2/cc/compile.cxx
@@ -38,13 +38,14 @@ namespace build2
struct match_data
{
prerequisite_member src;
+ timestamp dd_mtime; // depdb mtime, timestamp_nonexistent if outdated.
};
static_assert (sizeof (match_data) <= target::data_size,
"insufficient space");
match_result compile::
- match (slock& ml, action a, target& t, const string&) const
+ match (action act, target& t, const string&) const
{
tracer trace (x, "compile::match");
@@ -54,16 +55,23 @@ namespace build2
// - if path already assigned, verify extension?
//
+ // Link-up to our group (this is the obj{} target group protocol which
+ // means this can be done whether we match or not).
+ //
+ if (t.group == nullptr)
+ t.group = targets.find<obj> (t.dir, t.out, t.name);
+
// See if we have a source file. Iterate in reverse so that a source
// file specified for an obj*{} member overrides the one specified for
// the group. Also "see through" groups.
//
- for (prerequisite_member p:
- reverse_group_prerequisite_members (ml, a, t))
+ for (prerequisite_member p: reverse_group_prerequisite_members (act, t))
{
if (p.is_a (x_src))
{
- t.data (match_data {p}); // Save in the target's auxilary storage.
+ // Save in the target's auxiliary storage.
+ //
+ t.data (match_data {p, timestamp_nonexistent});
return true;
}
}
@@ -79,6 +87,7 @@ namespace build2
append_lib_options (const scope& bs,
cstrings& args,
const target& t,
+ action act,
lorder lo) const
{
auto opt = [&args, this] (
@@ -103,18 +112,20 @@ namespace build2
// Note that here we don't need to see group members (see apply()).
//
- for (const prerequisite& p: const_group_prerequisites (t))
+ for (const prerequisite& p: group_prerequisites (t))
{
- const target* pt (p.target); // Already searched and matched.
+ // Should be already searched and matched.
+ //
+ const target* pt (p.target.load (memory_order_consume));
bool a;
if (const lib* l = pt->is_a<lib> ())
- a = (pt = &link_member (*l, lo))->is_a<liba> ();
+ a = (pt = &link_member (*l, act, lo))->is_a<liba> ();
else if (!(a = pt->is_a<liba> ()) && !pt->is_a<libs> ())
continue;
- process_libraries (bs, lo, sys_lib_dirs,
+ process_libraries (act, bs, lo, sys_lib_dirs,
pt->as<file> (), a,
nullptr, nullptr, optf);
}
@@ -124,6 +135,7 @@ namespace build2
hash_lib_options (const scope& bs,
sha256& cs,
const target& t,
+ action act,
lorder lo) const
{
auto opt = [&cs, this] (
@@ -143,18 +155,20 @@ namespace build2
//
const function<void (const file&, const string&, bool, bool)> optf (opt);
- for (const prerequisite& p: const_group_prerequisites (t))
+ for (const prerequisite& p: group_prerequisites (t))
{
- const target* pt (p.target); // Already searched and matched.
+ // Should be already searched and matched.
+ //
+ const target* pt (p.target.load (memory_order_consume));
bool a;
if (const lib* l = pt->is_a<lib> ())
- a = (pt = &link_member (*l, lo))->is_a<liba> ();
+ a = (pt = &link_member (*l, act, lo))->is_a<liba> ();
else if (!(a = pt->is_a<liba> ()) && !pt->is_a<libs> ())
continue;
- process_libraries (bs, lo, sys_lib_dirs,
+ process_libraries (act, bs, lo, sys_lib_dirs,
pt->as<file> (), a,
nullptr, nullptr, optf);
}
@@ -167,6 +181,7 @@ namespace build2
append_lib_prefixes (const scope& bs,
prefix_map& m,
target& t,
+ action act,
lorder lo) const
{
auto opt = [&m, this] (
@@ -186,30 +201,32 @@ namespace build2
//
const function<void (const file&, const string&, bool, bool)> optf (opt);
- for (prerequisite& p: group_prerequisites (t))
+ for (const prerequisite& p: group_prerequisites (t))
{
- target* pt (p.target); // Already searched and matched.
+ // Should be already searched and matched.
+ //
+ const target* pt (p.target.load (memory_order_consume));
bool a;
- if (lib* l = pt->is_a<lib> ())
- a = (pt = &link_member (*l, lo))->is_a<liba> ();
+ if (const lib* l = pt->is_a<lib> ())
+ a = (pt = &link_member (*l, act, lo))->is_a<liba> ();
else if (!(a = pt->is_a<liba> ()) && !pt->is_a<libs> ())
continue;
- process_libraries (bs, lo, sys_lib_dirs,
+ process_libraries (act, bs, lo, sys_lib_dirs,
pt->as<file> (), a,
nullptr, nullptr, optf);
}
}
recipe compile::
- apply (slock& ml, action a, target& xt) const
+ apply (action act, target& xt) const
{
tracer trace (x, "compile::apply");
file& t (xt.as<file> ());
- const match_data& md (t.data<match_data> ());
+ match_data& md (t.data<match_data> ());
const scope& bs (t.base_scope ());
const scope& rs (*bs.root_scope ());
@@ -219,71 +236,77 @@ namespace build2
// Derive file name from target name.
//
- if (t.path ().empty ())
- {
- const char* e (nullptr);
+ const char* e (nullptr);
- if (tsys == "win32-msvc")
+ if (tsys == "win32-msvc")
+ {
+ switch (ct)
{
- switch (ct)
- {
- case otype::e: e = "exe.obj"; break;
- case otype::a: e = "lib.obj"; break;
- case otype::s: e = "dll.obj"; break;
- }
+ case otype::e: e = "exe.obj"; break;
+ case otype::a: e = "lib.obj"; break;
+ case otype::s: e = "dll.obj"; break;
}
- else if (tsys == "mingw32")
+ }
+ else if (tsys == "mingw32")
+ {
+ switch (ct)
{
- switch (ct)
- {
- case otype::e: e = "exe.o"; break;
- case otype::a: e = "a.o"; break;
- case otype::s: e = "dll.o"; break;
- }
+ case otype::e: e = "exe.o"; break;
+ case otype::a: e = "a.o"; break;
+ case otype::s: e = "dll.o"; break;
}
- else if (tsys == "darwin")
+ }
+ else if (tsys == "darwin")
+ {
+ switch (ct)
{
- switch (ct)
- {
- case otype::e: e = "o"; break;
- case otype::a: e = "a.o"; break;
- case otype::s: e = "dylib.o"; break;
- }
+ case otype::e: e = "o"; break;
+ case otype::a: e = "a.o"; break;
+ case otype::s: e = "dylib.o"; break;
}
- else
+ }
+ else
+ {
+ switch (ct)
{
- switch (ct)
- {
- case otype::e: e = "o"; break;
- case otype::a: e = "a.o"; break;
- case otype::s: e = "so.o"; break;
- }
+ case otype::e: e = "o"; break;
+ case otype::a: e = "a.o"; break;
+ case otype::s: e = "so.o"; break;
}
-
- t.derive_path (e);
}
+ const path& tp (t.derive_path (e));
+
// Inject dependency on the output directory.
//
- fsdir* dir (inject_fsdir (ml, a, t));
+ const fsdir* dir (inject_fsdir (act, t));
- // Search and match all the existing prerequisites. The injection code
- // takes care of the ones it is adding.
+ // Match all the existing prerequisites. The injection code takes care
+ // of the ones it is adding.
//
// When cleaning, ignore prerequisites that are not in the same or a
// subdirectory of our project root.
//
+ auto& pts (t.prerequisite_targets);
optional<dir_paths> usr_lib_dirs; // Extract lazily.
- for (prerequisite_member p: group_prerequisite_members (ml, a, t))
+ // Start asynchronous matching of prerequisites. Wait with unlocked
+ // phase to allow phase switching.
+ //
+ wait_guard wg (target::count_busy (), t.task_count, true);
+
+ size_t start (pts.size ()); // Index of the first to be added.
+ for (prerequisite_member p: group_prerequisite_members (act, t))
{
+ const target* pt (nullptr);
+
// A dependency on a library is there so that we can get its
// *.export.poptions. This is the "library meta-information
// protocol". See also append_lib_options().
//
if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ())
{
- if (a.operation () == update_id)
+ if (act.operation () == update_id)
{
// Handle imported libraries. We know that for such libraries we
// don't need to do match() in order to get options (if any, they
@@ -291,53 +314,68 @@ namespace build2
//
if (p.proj ())
{
- if (search_library (sys_lib_dirs,
+ if (search_library (act,
+ sys_lib_dirs,
usr_lib_dirs,
p.prerequisite) != nullptr)
continue;
}
- target* pt (&p.search ());
+ pt = &p.search ();
- if (lib* l = pt->is_a<lib> ())
- pt = &link_member (*l, lo);
-
- // Making sure it is executed before us will only restrict
- // parallelism. But we do need to match it in order to get its
- // imports resolved and prerequisite_targets populated. So we
- // match it but then unmatch if it is safe. And thanks to the
- // two-pass prerequisite search & match in link::apply() it will
- // be safe unless someone is building an obj?{} target directory.
- //
- if (build2::match (ml, a, *pt))
- unmatch (a, *pt);
- else
- t.prerequisite_targets.push_back (pt);
+ if (const lib* l = pt->is_a<lib> ())
+ pt = &link_member (*l, act, lo);
}
continue;
}
+ else
+ {
+ pt = &p.search ();
- target& pt (p.search ());
+ if (act.operation () == clean_id && !pt->dir.sub (rs.out_path ()))
+ continue;
+ }
- if (a.operation () == clean_id && !pt.dir.sub (rs.out_path ()))
- continue;
+ match_async (act, *pt, target::count_busy (), t.task_count);
+ pts.push_back (pt);
+ }
+
+ wg.wait ();
+
+ // Finish matching all the targets that we have started.
+ //
+ for (size_t i (start), n (pts.size ()); i != n; ++i)
+ {
+ const target*& pt (pts[i]);
+
+ // Making sure a library is updated before us will only restrict
+ // parallelism. But we do need to match it in order to get its imports
+ // resolved and prerequisite_targets populated. So we match it but
+ // then unmatch if it is safe. And thanks to the two-pass prerequisite
+ // match in link::apply() it will be safe unless someone is building
+ // an obj?{} target directly.
+ //
+ if (build2::match (act,
+ *pt,
+ pt->is_a<liba> () || pt->is_a<libs> ()
+ ? unmatch::safe
+ : unmatch::none))
+ pt = nullptr; // Ignore in execute.
- build2::match (ml, a, pt);
- t.prerequisite_targets.push_back (&pt);
}
// Inject additional prerequisites. We only do it when performing update
// since chances are we will have to update some of our prerequisites in
// the process (auto-generated source code).
//
- if (a == perform_update_id)
+ if (act == perform_update_id)
{
// The cached prerequisite target should be the same as what is in
// t.prerequisite_targets since we used standard search() and match()
// above.
//
- file& src (*md.src.search ().is_a<file> ());
+ const file& src (*md.src.search ().is_a<file> ());
// Make sure the output directory exists.
//
@@ -351,9 +389,21 @@ namespace build2
// things.
//
if (dir != nullptr)
- execute_direct (a, *dir);
+ {
+ // We can do it properly by using execute_direct(). But this means
+ // we will be switching to the execute phase with all the associated
+ // overheads. At the same time, in case of update, creation of a
+ // directory is not going to change the external state in any way
+ // that would affect any parallel efforts in building the internal
+ // state. So we are just going to create the directory directly.
+ // Note, however, that we cannot modify the fsdir{} target since
+ // this can very well be happening in parallel. But that's not a
+ // problem since fsdir{}'s update is idempotent.
+ //
+ fsdir_rule::perform_update_direct (act, t);
+ }
- depdb dd (t.path () + ".d");
+ depdb dd (tp + ".d");
// First should come the rule name/version.
//
@@ -379,7 +429,7 @@ namespace build2
// Hash *.export.poptions from prerequisite libraries.
//
- hash_lib_options (bs, cs, t, lo);
+ hash_lib_options (bs, cs, t, act, lo);
// Extra system header dirs (last).
//
@@ -410,15 +460,13 @@ namespace build2
// compiler, options, or source file), or if the database is newer
// than the target (interrupted update) then force the target update.
//
- if (dd.writing () || dd.mtime () > t.mtime ())
- t.mtime (timestamp_nonexistent);
-
- inject (ml, a, t, lo, src, dd);
+ md.dd_mtime = dd.writing () ? timestamp_nonexistent : dd.mtime ();
+ inject (act, t, lo, src, dd);
dd.close ();
}
- switch (a)
+ switch (act)
{
case perform_update_id: return [this] (action a, const target& t)
{
@@ -466,7 +514,7 @@ namespace build2
void compile::
append_prefixes (prefix_map& m, const target& t, const variable& var) const
{
- tracer trace (x, "append_prefixes");
+ tracer trace (x, "compile::append_prefixes");
// If this target does not belong to any project (e.g, an
// "imported as installed" library), then it can't possibly
@@ -558,7 +606,7 @@ namespace build2
auto compile::
build_prefix_map (const scope& bs,
target& t,
- lorder lo) const -> prefix_map
+ action act, lorder lo) const -> prefix_map
{
prefix_map m;
@@ -569,7 +617,7 @@ namespace build2
// Then process the include directories from prerequisite libraries.
//
- append_lib_prefixes (bs, m, t, lo);
+ append_lib_prefixes (bs, m, t, act, lo);
return m;
}
@@ -748,12 +796,7 @@ namespace build2
}
void compile::
- inject (slock& ml,
- action a,
- target& t,
- lorder lo,
- file& src,
- depdb& dd) const
+ inject (action act, target& t, lorder lo, const file& src, depdb& dd) const
{
tracer trace (x, "compile::inject");
@@ -762,12 +805,12 @@ namespace build2
// If things go wrong (and they often do in this area), give the user a
// bit extra context.
//
- auto g (
- make_exception_guard (
- [&src]()
- {
- info << "while extracting header dependencies from " << src;
- }));
+ auto df = make_diag_frame (
+ [&src](const diag_record& dr)
+ {
+ if (verb != 0)
+ dr << info << "while extracting header dependencies from " << src;
+ });
const scope& bs (t.base_scope ());
const scope& rs (*bs.root_scope ());
@@ -777,14 +820,14 @@ namespace build2
const process_path* xc (nullptr);
cstrings args;
- auto init_args = [&ml, &t, lo, &src, &rs, &bs, &xc, &args, this] ()
+ auto init_args = [&t, act, lo, &src, &rs, &bs, &xc, &args, this] ()
{
xc = &cast<process_path> (rs[x_path]);
args.push_back (xc->recall_string ());
// Add *.export.poptions from prerequisite libraries.
//
- append_lib_options (bs, args, t, lo);
+ append_lib_options (bs, args, t, act, lo);
append_options (args, t, c_poptions);
append_options (args, t, x_poptions);
@@ -904,18 +947,35 @@ namespace build2
// (which causes the target state to be automatically set to unchanged)
// if the file is known to be up to date.
//
- auto update = [&trace, a] (path_target& pt, timestamp ts) -> bool
+ auto update = [&trace, act] (const path_target& pt, timestamp ts) -> bool
{
- target_state os (pt.synchronized_state ()); //@@ MT? matched?
+ target_state os (pt.matched_state (act));
- if (os != target_state::unchanged)
+ if (os == target_state::unchanged)
+ {
+ if (ts == timestamp_unknown)
+ return false;
+ else
+ {
+ // We expect the timestamp to be known (i.e., existing file).
+ //
+ timestamp mt (pt.mtime ()); // @@ MT perf: know target state.
+ assert (mt != timestamp_unknown);
+ return mt > ts;
+ }
+ }
+ else
{
- // We only want to restart if our call to execute() actually
- // caused an update. In particular, the target could already
- // have been in target_state::changed because of a dependency
- // extraction run for some other source file.
+ // We only want to restart if our call to execute() actually caused
+ // an update. In particular, the target could already have been in
+ // target_state::changed because of a dependency extraction run for
+ // some other source file.
//
- target_state ns (execute_direct (a, pt)); //@@ MT extenal modification sync.
+ // @@ MT perf: so we are going to switch the phase and execute for
+ // any generated header.
+ //
+ phase_switch ps (run_phase::execute);
+ target_state ns (execute_direct (act, pt));
if (ns != os && ns != target_state::unchanged)
{
@@ -924,9 +984,9 @@ namespace build2
<< "; new state " << ns;});
return true;
}
+ else
+ return ts != timestamp_unknown ? pt.newer (ts) : false;
}
-
- return ts != timestamp_unknown ? pt.newer (ts) : false;
};
// Update and add a header file to the list of prerequisite targets.
@@ -934,12 +994,13 @@ namespace build2
// from the depdb cache or from the compiler run. Return whether the
// extraction process should be restarted.
//
- auto add = [&trace, &ml, &update, &pm, a, &t, lo, &dd, &bs, this]
+ auto add = [&trace, &update, &pm, act, &t, lo, &dd, &bs, this]
(path f, bool cache) -> bool
{
// Find or maybe insert the target.
//
- auto find = [&trace, this] (const path& f, bool insert) -> path_target*
+ auto find = [&trace, this] (
+ const path& f, bool insert) -> const path_target*
{
// Split the name into its directory part, the name part, and
// extension. Here we can assume the name part is a valid filesystem
@@ -997,7 +1058,7 @@ namespace build2
//
// @@ OPT: move d, out, n
//
- target* r;
+ const target* r;
if (insert)
r = &search (*tt, d, out, n, &e, nullptr);
else
@@ -1009,10 +1070,10 @@ namespace build2
r = targets.find (*tt, d, out, n, e, trace);
}
- return static_cast<path_target*> (r);
+ return static_cast<const path_target*> (r);
};
- path_target* pt (nullptr);
+ const path_target* pt (nullptr);
// If it's not absolute then it does not exist.
//
@@ -1029,7 +1090,7 @@ namespace build2
// then we would have failed below.
//
if (pm.empty ())
- pm = build_prefix_map (bs, t, lo);
+ pm = build_prefix_map (bs, t, act, lo);
// First try the whole file. Then just the directory.
//
@@ -1097,16 +1158,13 @@ namespace build2
pt = find (f, true);
}
- // Assign path.
+ // Cache the path.
//
- if (pt->path ().empty ())
- pt->path (move (f));
- else
- assert (pt->path () == f);
+ const path& pp (pt->path (move (f)));
// Match to a rule.
//
- build2::match (ml, a, *pt);
+ build2::match (act, *pt);
// Update.
//
@@ -1121,7 +1179,7 @@ namespace build2
// update).
//
if (!cache)
- dd.expect (pt->path ());
+ dd.expect (pp);
// Add to our prerequisite target list.
//
@@ -1419,9 +1477,11 @@ namespace build2
msvc_filter_cl (ifdstream&, const path& src);
target_state compile::
- perform_update (action a, const target& xt) const
+ perform_update (action act, const target& xt) const
{
const file& t (xt.as<file> ());
+ const path& tp (t.path ());
+ const match_data& md (t.data<match_data> ());
// Update prerequisites and determine if any relevant ones render us
// out-of-date. Note that currently we treat all the prerequisites
@@ -1429,7 +1489,16 @@ namespace build2
//
const file* s;
{
- auto p (execute_prerequisites<file> (x_src, a, t, t.mtime ()));
+ timestamp mt;
+
+ // If the depdb was overwritten or it's newer than the target, then
+ // do unconditional update.
+ //
+ if (md.dd_mtime == timestamp_nonexistent ||
+ md.dd_mtime > (mt = t.load_mtime ()))
+ mt = timestamp_nonexistent;
+
+ auto p (execute_prerequisites<file> (x_src, act, t, mt));
if ((s = p.first) == nullptr)
return p.second;
@@ -1447,7 +1516,7 @@ namespace build2
// Translate paths to relative (to working directory) ones. This
// results in easier to read diagnostics.
//
- path relo (relative (t.path ()));
+ path relo (relative (tp));
path rels (relative (s->path ()));
append_options (args, t, c_poptions);
@@ -1455,7 +1524,7 @@ namespace build2
// Add *.export.poptions from prerequisite libraries.
//
- append_lib_options (bs, args, t, lo);
+ append_lib_options (bs, args, t, act, lo);
// Extra system header dirs (last).
//
@@ -1646,14 +1715,14 @@ namespace build2
}
target_state compile::
- perform_clean (action a, const target& xt) const
+ perform_clean (action act, const target& xt) const
{
const file& t (xt.as<file> ());
if (cid == "msvc")
- return clean_extra (a, t, {".d", ".idb", ".pdb"});
+ return clean_extra (act, t, {".d", ".idb", ".pdb"});
else
- return clean_extra (a, t, {".d"});
+ return clean_extra (act, t, {".d"});
}
}
}
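The reworked compile::apply() above starts matching prerequisites asynchronously (wait_guard plus match_async), waits, and then completes the matches in a second loop, unmatching libraries where that is safe. The sketch below shows only the general start-then-harvest shape using std::async with hypothetical fake_target and match_one stand-ins; it does not model the build2 scheduler, task counts, or unmatch::safe:

  #include <future>
  #include <vector>

  struct fake_target { bool up_to_date = false; };  // Hypothetical target.

  // Hypothetical per-target "match"; in build2 this would resolve a rule and
  // recursively match the target's own prerequisites.
  //
  static bool
  match_one (const fake_target& t)
  {
    return t.up_to_date;
  }

  static std::vector<bool>
  match_all (const std::vector<const fake_target*>& pts)
  {
    // Pass 1: start all matches without blocking on any one of them.
    //
    std::vector<std::future<bool>> fs;
    fs.reserve (pts.size ());

    for (const fake_target* pt: pts)
      fs.push_back (std::async (std::launch::async,
                                [pt] () { return match_one (*pt); }));

    // Pass 2 (the "harvest" loop): wait for and collect every result.
    //
    std::vector<bool> rs;
    rs.reserve (fs.size ());

    for (auto& f: fs)
      rs.push_back (f.get ());

    return rs;
  }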
diff --git a/build2/cc/install b/build2/cc/install
index f676d72..e229e94 100644
--- a/build2/cc/install
+++ b/build2/cc/install
@@ -24,14 +24,14 @@ namespace build2
public:
install (data&&, const link&);
- virtual target*
- filter (slock&, action, target&, prerequisite_member) const override;
+ virtual const target*
+ filter (action, const target&, prerequisite_member) const override;
virtual match_result
- match (slock&, action, target&, const string&) const override;
+ match (action, target&, const string&) const override;
virtual recipe
- apply (slock&, action, target&) const override;
+ apply (action, target&) const override;
virtual void
install_extra (const file&, const install_dir&) const override;
diff --git a/build2/cc/install.cxx b/build2/cc/install.cxx
index f022a92..6195ab7 100644
--- a/build2/cc/install.cxx
+++ b/build2/cc/install.cxx
@@ -22,8 +22,8 @@ namespace build2
install::
install (data&& d, const link& l): common (move (d)), link_ (l) {}
- target* install::
- filter (slock& ml, action a, target& t, prerequisite_member p) const
+ const target* install::
+ filter (action a, const target& t, prerequisite_member p) const
{
if (t.is_a<exe> ())
{
@@ -42,37 +42,38 @@ namespace build2
if ((t.is_a<exe> () || t.is_a<libs> ()) &&
(p.is_a<lib> () || p.is_a<libs> ()))
{
- target* pt (&p.search ());
+ const target* pt (&p.search ());
// If this is the lib{} group, pick a member which we would link.
//
- if (lib* l = pt->is_a<lib> ())
- pt = &link_member (*l, link_order (t.base_scope (), link_type (t)));
+ if (const lib* l = pt->is_a<lib> ())
+ pt = &link_member (
+ *l, a, link_order (t.base_scope (), link_type (t)));
if (pt->is_a<libs> ()) // Can be liba{}.
return pt->in (t.weak_scope ()) ? pt : nullptr;
}
- return file_rule::filter (ml, a, t, p);
+ return file_rule::filter (a, t, p);
}
match_result install::
- match (slock& ml, action a, target& t, const string& hint) const
+ match (action a, target& t, const string& hint) const
{
// @@ How do we split the hint between the two?
//
- // We only want to handle installation if we are also the
- // ones building this target. So first run link's match().
+ // We only want to handle installation if we are also the ones building
+ // this target. So first run link's match().
//
- match_result r (link_.match (ml, a, t, hint));
- return r ? file_rule::match (ml, a, t, "") : r;
+ match_result r (link_.match (a, t, hint));
+ return r ? file_rule::match (a, t, "") : r;
}
recipe install::
- apply (slock& s, action a, target& t) const
+ apply (action a, target& t) const
{
- recipe r (file_rule::apply (s, a, t));
+ recipe r (file_rule::apply (a, t));
// Derive shared library paths and cache them in the target's aux
// storage if we are (un)installing (used in *_extra() functions below).
diff --git a/build2/cc/link b/build2/cc/link
index c787015..cd83516 100644
--- a/build2/cc/link
+++ b/build2/cc/link
@@ -25,10 +25,10 @@ namespace build2
link (data&&);
virtual match_result
- match (slock&, action, target&, const string& hint) const override;
+ match (action, target&, const string&) const override;
virtual recipe
- apply (slock&, action, target&) const override;
+ apply (action, target&) const override;
target_state
perform_update (action, const target&) const;
@@ -70,18 +70,17 @@ namespace build2
void
append_libraries (strings&,
const file&, bool,
- const scope&, lorder) const;
+ const scope&, action, lorder) const;
void
hash_libraries (sha256&,
const file&, bool,
- const scope&, lorder) const;
+ const scope&, action, lorder) const;
void
rpath_libraries (strings&,
const target&,
- const scope&,
- lorder,
+ const scope&, action, lorder,
bool) const;
// Windows rpath emulation (windows-rpath.cxx).
@@ -98,13 +97,15 @@ namespace build2
using windows_dlls = std::set<windows_dll>;
timestamp
- windows_rpath_timestamp (const file&, const scope&, lorder) const;
+ windows_rpath_timestamp (const file&,
+ const scope&,
+ action, lorder) const;
windows_dlls
- windows_rpath_dlls (const file&, const scope&, lorder) const;
+ windows_rpath_dlls (const file&, const scope&, action, lorder) const;
void
- windows_rpath_assembly (const file&, const scope&, lorder,
+ windows_rpath_assembly (const file&, const scope&, action, lorder,
const string&,
timestamp,
bool) const;
diff --git a/build2/cc/link.cxx b/build2/cc/link.cxx
index 682b736..68f3d64 100644
--- a/build2/cc/link.cxx
+++ b/build2/cc/link.cxx
@@ -41,7 +41,7 @@ namespace build2
}
match_result link::
- match (slock& ml, action a, target& t, const string& hint) const
+ match (action act, target& t, const string& hint) const
{
tracer trace (x, "link::match");
@@ -59,12 +59,21 @@ namespace build2
otype lt (link_type (t));
+ // If this is a library, link-up to our group (this is the lib{} target
+ // group protocol which means this can be done whether we match or not).
+ //
+ if (lt == otype::a || lt == otype::s)
+ {
+ if (t.group == nullptr)
+ t.group = targets.find<lib> (t.dir, t.out, t.name);
+ }
+
// Scan prerequisites and see if we can work with what we've got. Note
// that X could be C. We handle this by always checking for X first.
//
bool seen_x (false), seen_c (false), seen_obj (false), seen_lib (false);
- for (prerequisite_member p: group_prerequisite_members (ml, a, t))
+ for (prerequisite_member p: group_prerequisite_members (act, t))
{
if (p.is_a (x_src))
{
@@ -248,7 +257,7 @@ namespace build2
lk = b;
append_ext (lk);
- libi& li (ls.member->as<libi> ());
+ libi& li (ls.member->as<libi> ()); // Note: libi is locked.
lk = li.derive_path (move (lk), tsys == "mingw32" ? "a" : "lib");
}
else if (!v.empty ())
@@ -266,7 +275,7 @@ namespace build2
}
recipe link::
- apply (slock& ml, action a, target& xt) const
+ apply (action act, target& xt) const
{
static_assert (sizeof (link::libs_paths) <= target::data_size,
"insufficient space");
@@ -288,21 +297,31 @@ namespace build2
// Derive file name(s) and add ad hoc group members.
//
- auto add_adhoc = [a, &bs] (target& t, const char* type) -> file&
+
+ // Add if necessary and lock an ad hoc group member.
+ //
+ auto add_adhoc = [act, &bs] (target& t, const char* type) -> target_lock
{
const target_type& tt (*bs.find_target_type (type));
- if (t.member != nullptr) // Might already be there.
- assert (t.member->type () == tt);
+ const target& m (t.member != nullptr // Might already be there.
+ ? *t.member
+ : search (tt, t.dir, t.out, t.name));
+
+ target_lock l (lock (act, m));
+ assert (l.target != nullptr); // Someone messing with adhoc members?
+
+ if (t.member == nullptr)
+ t.member = l.target;
else
- t.member = &search (tt, t.dir, t.out, t.name, nullptr, nullptr);
+ assert (t.member->type () == tt);
- file& r (t.member->as<file> ());
- r.recipe (a, group_recipe);
- return r;
+ return l;
};
{
+ target_lock libi; // Have to hold until after PDB member addition.
+
const char* e (nullptr); // Extension.
const char* p (nullptr); // Prefix.
const char* s (nullptr); // Suffix.
@@ -344,34 +363,41 @@ namespace build2
// DLL and we add libi{} import library as its member.
//
if (tclass == "windows")
- add_adhoc (t, "libi");
+ libi = add_adhoc (t, "libi");
t.data (derive_libs_paths (t)); // Cache in target.
+
+ if (libi)
+ match_recipe (libi, group_recipe); // Set recipe and unlock.
+
break;
}
}
- }
- // PDB
- //
- if (lt != otype::a &&
- cid == "msvc" &&
- (find_option ("/DEBUG", t, c_loptions, true) ||
- find_option ("/DEBUG", t, x_loptions, true)))
- {
- // Add after the import library if any.
+ // PDB
//
- file& pdb (add_adhoc (t.member == nullptr ? t : *t.member, "pdb"));
+ if (lt != otype::a &&
+ cid == "msvc" &&
+ (find_option ("/DEBUG", t, c_loptions, true) ||
+ find_option ("/DEBUG", t, x_loptions, true)))
+ {
+ // Add after the import library if any.
+ //
+ target_lock pdb (
+ add_adhoc (t.member == nullptr ? t : *t.member, "pdb"));
- // We call it foo.{exe,dll}.pdb rather than just foo.pdb because we
- // can have both foo.exe and foo.dll in the same directory.
- //
- pdb.derive_path (t.path (), "pdb");
+ // We call it foo.{exe,dll}.pdb rather than just foo.pdb because we
+ // can have both foo.exe and foo.dll in the same directory.
+ //
+ pdb.target->as<file> ().derive_path (t.path (), "pdb");
+
+ match_recipe (pdb, group_recipe); // Set recipe and unlock.
+ }
}
// Inject dependency on the output directory.
//
- inject_fsdir (ml, a, t);
+ inject_fsdir (act, t);
optional<dir_paths> usr_lib_dirs; // Extract lazily.
@@ -380,22 +406,23 @@ namespace build2
//
// We do it first in order to indicate that we will execute these
// targets before matching any of the obj?{}. This makes it safe for
- // compiler::apply() to unmatch them and therefore not to hinder
+ // compile::apply() to unmatch them and therefore not to hinder
// parallelism.
//
// When cleaning, we ignore prerequisites that are not in the same or a
// subdirectory of our project root.
//
- size_t slot (t.prerequisite_targets.size ()); // Start.
- for (prerequisite_member p: group_prerequisite_members (ml, a, t))
+ size_t start (t.prerequisite_targets.size ());
+
+ for (prerequisite_member p: group_prerequisite_members (act, t))
{
// We pre-allocate a NULL slot for each (potential; see clean)
// prerequisite target.
//
t.prerequisite_targets.push_back (nullptr);
- const target*& cpt (t.prerequisite_targets.back ());
+ const target*& rpt (t.prerequisite_targets.back ());
- target* pt (nullptr);
+ const target* pt (nullptr);
if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ())
{
@@ -405,27 +432,31 @@ namespace build2
// target in the prerequisite.
//
if (p.proj ())
- pt = search_library (sys_lib_dirs, usr_lib_dirs, p.prerequisite);
+ pt = search_library (
+ act, sys_lib_dirs, usr_lib_dirs, p.prerequisite);
// The rest is the same basic logic as in search_and_match().
//
if (pt == nullptr)
pt = &p.search ();
- if (a.operation () == clean_id && !pt->dir.sub (rs.out_path ()))
+ if (act.operation () == clean_id && !pt->dir.sub (rs.out_path ()))
continue; // Skip.
// If this is the lib{} target group, then pick the appropriate
// member.
//
- if (lib* l = pt->is_a<lib> ())
- pt = &link_member (*l, lo);
+ if (const lib* l = pt->is_a<lib> ())
+ pt = &link_member (*l, act, lo);
- build2::match (ml, a, *pt);
- cpt = pt;
+ rpt = pt;
}
}
+ // Match in parallel and wait for completion.
+ //
+ match_members (act, t, t.prerequisite_targets, start);
+
// Process prerequisites, pass 2: search and match obj{} and do rule
// chaining for C and X source files.
//
@@ -433,212 +464,256 @@ namespace build2
lt == otype::a ? obja::static_type :
objs::static_type);
- for (prerequisite_member p: group_prerequisite_members (ml, a, t))
{
- const target*& cpt (t.prerequisite_targets[slot++]);
- target* pt (nullptr);
+ // Wait with unlocked phase to allow phase switching.
+ //
+ wait_guard wg (target::count_busy (), t.task_count, true);
- if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ())
- continue; // Handled on pass 1.
+ size_t i (start); // Parallel prerequisite_targets loop.
- if (!p.is_a (x_src) && !p.is_a<c> ())
+ for (prerequisite_member p: group_prerequisite_members (act, t))
{
- pt = &p.search ();
+ const target*& rpt (t.prerequisite_targets[i++]);
+ const target* pt (nullptr);
- if (a.operation () == clean_id && !pt->dir.sub (rs.out_path ()))
- continue; // Skip.
+ if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ())
+ continue; // Taken care of on pass 1.
- // If this is the obj{} target group, then pick the appropriate
- // member.
- //
- if (obj* o = pt->is_a<obj> ())
+ uint8_t pm (1); // Completion (1) and verification (2) mark.
+
+ if (!p.is_a (x_src) && !p.is_a<c> ())
{
- switch (lt)
- {
- case otype::e: pt = o->e; break;
- case otype::a: pt = o->a; break;
- case otype::s: pt = o->s; break;
- }
+ // If this is the obj{} target group, then pick the appropriate
+ // member.
+ //
+ pt = p.is_a<obj> () ? &search (ott, p.key ()) : &p.search ();
- if (pt == nullptr)
- pt = &search (ott, p.key ());
+ if (act.operation () == clean_id && !pt->dir.sub (rs.out_path ()))
+ continue; // Skip.
+
+ // Fall through.
}
+ else
+ {
+ // The rest is rule chaining.
+ //
+ // Which scope shall we use to resolve the root? Unlikely, but
+ // possible, the prerequisite is from a different project
+ // altogether. So we are going to use the target's project.
+ //
- build2::match (ml, a, *pt);
- cpt = pt;
- continue;
- }
+ // If the source came from the lib{} group, then create the obj{}
+ // group and add the source as a prerequisite of the obj{} group,
+ // not the obj?{} member. This way we only need one prerequisite
+ // for, say, both liba{} and libs{}.
+ //
+ bool group (!p.prerequisite.belongs (t)); // Group's prerequisite.
+ const target_type& tt (group ? obj::static_type : ott);
- // The rest is rule chaining.
- //
+ const prerequisite_key& cp (p.key ()); // C-source (X or C) key.
- // Which scope shall we use to resolve the root? Unlikely, but
- // possible, the prerequisite is from a different project
- // altogether. So we are going to use the target's project.
- //
+ // Come up with the obj*{} target. The source prerequisite
+ // directory can be relative (to the scope) or absolute. If it is
+ // relative, then use it as is. If absolute, then translate it to
+ // the corresponding directory under out_root. While the source
+ // directory is most likely under src_root, it is also possible it
+ // is under out_root (e.g., generated source).
+ //
+ dir_path d;
+ {
+ const dir_path& cpd (*cp.tk.dir);
- // If the source came from the lib{} group, then create the obj{}
- // group and add the source as a prerequisite of the obj{} group,
- // not the obj?{} member. This way we only need one prerequisite
- // for, say, both liba{} and libs{}.
- //
- bool group (!p.prerequisite.belongs (t)); // Group's prerequisite.
+ if (cpd.relative () || cpd.sub (rs.out_path ()))
+ d = cpd;
+ else
+ {
+ if (!cpd.sub (rs.src_path ()))
+ fail << "out of project prerequisite " << cp <<
+ info << "specify corresponding " << tt.name << "{} "
+ << "target explicitly";
- const prerequisite_key& cp (p.key ()); // C-source (X or C) key.
- const target_type& tt (group ? obj::static_type : ott);
+ d = rs.out_path () / cpd.leaf (rs.src_path ());
+ }
+ }
- // Come up with the obj*{} target. The source prerequisite directory
- // can be relative (to the scope) or absolute. If it is relative, then
- // use it as is. If absolute, then translate it to the corresponding
- // directory under out_root. While the source directory is most likely
- // under src_root, it is also possible it is under out_root (e.g.,
- // generated source).
- //
- dir_path d;
- {
- const dir_path& cpd (*cp.tk.dir);
+ // obj*{} is always in the out tree.
+ //
+ const target& ot (
+ search (tt, d, dir_path (), *cp.tk.name, nullptr, cp.scope));
- if (cpd.relative () || cpd.sub (rs.out_path ()))
- d = cpd;
- else
- {
- if (!cpd.sub (rs.src_path ()))
- fail << "out of project prerequisite " << cp <<
- info << "specify corresponding " << tt.name << "{} "
- << "target explicitly";
+ // If we are cleaning, check that this target is in the same or a
+ // subdirectory of our project root.
+ //
+ if (act.operation () == clean_id && !ot.dir.sub (rs.out_path ()))
+ {
+ // If we shouldn't clean obj{}, then it is fair to assume we
+ // shouldn't clean the source either (generated source will be
+ // in the same directory as obj{} and if not, well, go find
+ // yourself another build system ;-)).
+ //
+ continue; // Skip.
+ }
- d = rs.out_path () / cpd.leaf (rs.src_path ());
- }
- }
+ // If we have created the obj{} target group, pick one of its
+ // members; the rest would be primarily concerned with it.
+ //
+ pt = group ? &search (ott, ot.dir, ot.out, ot.name) : &ot;
- // obj*{} is always in the out tree.
- //
- target& ot (
- search (tt, d, dir_path (), *cp.tk.name, nullptr, cp.scope));
+ // If this obj*{} already has prerequisites, then verify they are
+ // "compatible" with what we are doing here. Otherwise, synthesize
+ // the dependency. Note that we may also end up synthesizing with
+ // someone beating us to it. In this case also verify.
+ //
+ bool verify (true);
- // If we are cleaning, check that this target is in the same or
- // a subdirectory of our project root.
- //
- if (a.operation () == clean_id && !ot.dir.sub (rs.out_path ()))
- {
- // If we shouldn't clean obj{}, then it is fair to assume we
- // shouldn't clean the source either (generated source will be in
- // the same directory as obj{} and if not, well, go find yourself
- // another build system ;-)).
- //
- continue; // Skip.
- }
+ if (!pt->has_prerequisites ())
+ {
+ prerequisites ps;
+ ps.push_back (p.as_prerequisite ()); // Source.
- // If we have created the obj{} target group, pick one of its members;
- // the rest would be primarily concerned with it.
- //
- if (group)
- {
- obj& o (ot.as<obj> ());
+ // Add our lib*{} prerequisites (see the export.* machinery for
+ // details).
+ //
+ // Note that we don't resolve lib{} to liba{}/libs{} here
+ // instead leaving it to whoever (e.g., the compile rule) will
+ // be needing *.export.*. One reason for doing it there is that
+ // the object target might be specified explicitly by the user
+ // in which case they will have to specify the set of lib{}
+ // prerequisites and it's much cleaner to do as lib{} rather
+ // than liba{}/libs{}.
+ //
+ // Initially, we were only adding imported libraries, but there
+ // is a problem with this approach: the non-imported library
+ // might depend on the imported one(s) which we will never "see"
+ // unless we start with this library.
+ //
+ for (const prerequisite& p: group_prerequisites (t))
+ {
+ if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ())
+ ps.emplace_back (p);
+ }
- switch (lt)
- {
- case otype::e: pt = o.e; break;
- case otype::a: pt = o.a; break;
- case otype::s: pt = o.s; break;
+ // Note: add to the group, not the member.
+ //
+ verify = !ot.prerequisites (move (ps));
+ }
+
+ if (verify)
+ {
+ // This gets a bit tricky. We need to make sure the source files
+ // are the same which we can only do by comparing the targets to
+ // which they resolve. But we cannot search ot's prerequisites
+ // -- only the rule that matches can. Note, however, that if all
+ // this works out, then our next step is to match the obj*{}
+ // target. If things don't work out, then we fail, in which case
+ // searching and matching speculatively doesn't really hurt. So
+ // we start the async match here and finish this verification in
+ // the "harvest" loop below.
+ //
+ bool src (false);
+ for (prerequisite_member p1:
+ group_prerequisite_members (act, *pt))
+ {
+ // Most of the time we will have just a single source so
+ // fast-path that case.
+ //
+ if (p1.is_a (x_src) || p1.is_a<c> ())
+ {
+ src = true;
+ continue; // Check the rest of the prerequisites.
+ }
+
+ // Ignore some known target types (fsdir, headers, libraries).
+ //
+ if (p1.is_a<fsdir> () ||
+ p1.is_a<lib> () ||
+ p1.is_a<liba> () ||
+ p1.is_a<libs> () ||
+ (p.is_a (x_src) && x_header (p1)) ||
+ (p.is_a<c> () && p1.is_a<h> ()))
+ continue;
+
+ fail << "synthesized dependency for prerequisite " << p
+ << " would be incompatible with existing target " << *pt <<
+ info << "unexpected existing prerequisite type " << p1 <<
+ info << "specify corresponding " << tt.name << "{} "
+ << "dependency explicitly";
+ }
+
+ if (!src)
+ fail << "synthesized dependency for prerequisite " << p
+ << " would be incompatible with existing target " << *pt <<
+ info << "no existing c/" << x_name << " source prerequisite" <<
+ info << "specify corresponding " << tt.name << "{} "
+ << "dependency explicitly";
+
+ pm = 2; // Needs completion and verification.
+ }
}
- if (pt == nullptr)
- pt = &search (ott, o.dir, o.out, o.name, o.ext (), nullptr);
+ match_async (act, *pt, target::count_busy (), t.task_count);
+ rpt = pt;
+ mark (rpt, pm); // Mark for completion/verification.
}
- else
- pt = &ot;
- // If this obj*{} target already exists, then it needs to be
- // "compatible" with what we are doing here.
- //
- // This gets a bit tricky. We need to make sure the source files
- // are the same which we can only do by comparing the targets to
- // which they resolve. But we cannot search the ot's prerequisites
- // -- only the rule that matches can. Note, however, that if all
- // this works out, then our next step is to match the obj*{}
- // target. If things don't work out, then we fail, in which case
- // searching and matching speculatively doesn't really hurt.
- //
- bool found (false);
- for (prerequisite_member p1:
- reverse_group_prerequisite_members (ml, a, *pt))
- {
- // Most of the time we will have just a single source so fast-path
- // that case.
- //
- if (p1.is_a (x_src) || p1.is_a<c> ())
- {
- if (!found)
- {
- build2::match (ml, a, *pt); // Now p1 should be resolved.
+ wg.wait ();
+ }
- // Searching our own prerequisite is ok.
- //
- if (&p.search () != &p1.search ())
- fail << "synthesized target for prerequisite " << cp << " "
- << "would be incompatible with existing target " << *pt <<
- info << "existing prerequisite " << p1 << " does not match "
- << cp <<
- info << "specify corresponding " << tt.name << "{} target "
- << "explicitly";
+ // The "harvest" loop: finish matching the targets we have started. Note
+ // that we may have bailed out early (thus the parallel i/n for-loop).
+ //
+ {
+ size_t i (start), n (t.prerequisite_targets.size ());
- found = true;
- }
+ for (prerequisite_member p: group_prerequisite_members (act, t))
+ {
+ if (i == n)
+ break;
- continue; // Check the rest of the prerequisites.
- }
+ const target*& pt (t.prerequisite_targets[i++]);
+
+ uint8_t m;
- // Ignore some known target types (fsdir, headers, libraries).
+ // Skipped or not marked for completion (pass 1).
//
- if (p1.is_a<fsdir> () ||
- p1.is_a<lib> () ||
- p1.is_a<liba> () ||
- p1.is_a<libs> () ||
- (p.is_a (x_src) && x_header (p1)) ||
- (p.is_a<c> () && p1.is_a<h> ()))
+ if (pt == nullptr || (m = unmark (pt)) == 0)
continue;
- fail << "synthesized target for prerequisite " << cp
- << " would be incompatible with existing target " << *pt <<
- info << "unexpected existing prerequisite type " << p1 <<
- info << "specify corresponding obj{} target explicitly";
- }
+ build2::match (act, *pt);
- if (!found)
- {
- // Note: add the source to the group, not the member.
+ // Nothing else to do if not marked for verification.
//
- ot.prerequisites.push_back (p.as_prerequisite ());
+ if (m == 1)
+ continue;
- // Add our lib*{} prerequisites to the object file (see the export.*
- // machinery for details).
- //
- // Note that we don't resolve lib{} to liba{}/libs{} here instead
- // leaving it to whoever (e.g., the compile rule) will be needing
- // *.export.*. One reason for doing it there is that the object
- // target might be specified explicitly by the user in which case
- // they will have to specify the set of lib{} prerequisites and it's
- // much cleaner to do as lib{} rather than liba{}/libs{}.
+ // Finish verifying the existing dependency (which is now matched)
+ // compared to what we would have synthesized.
//
- // Initially, we were only adding imported libraries, but there is a
- // problem with this approach: the non-imported library might depend
- // on the imported one(s) which we will never "see" unless we start
- // with this library.
- //
- for (prerequisite& p: group_prerequisites (t))
+ bool group (!p.prerequisite.belongs (t)); // Group's prerequisite.
+ const target_type& tt (group ? obj::static_type : ott);
+
+ for (prerequisite_member p1: group_prerequisite_members (act, *pt))
{
- if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ())
- ot.prerequisites.emplace_back (p);
- }
+ if (p1.is_a (x_src) || p1.is_a<c> ())
+ {
+ // Searching our own prerequisite is ok; p1 must already be
+ // resolved.
+ //
+ if (&p.search () != &p1.search ())
+ fail << "synthesized dependency for prerequisite " << p << " "
+ << "would be incompatible with existing target " << *pt <<
+ info << "existing prerequisite " << p1 << " does not match "
+ << p <<
+ info << "specify corresponding " << tt.name << "{} "
+ << "dependency explicitly";
- build2::match (ml, a, *pt);
+ break;
+ }
+ }
}
-
- cpt = pt;
}
- switch (a)
+ switch (act)
{
case perform_update_id: return [this] (action a, const target& t)
{
@@ -655,7 +730,7 @@ namespace build2
void link::
append_libraries (strings& args,
const file& l, bool la,
- const scope& bs, lorder lo) const
+ const scope& bs, action act, lorder lo) const
{
// Note: lack of the "small function object" optimization will really
// kill us here since we are called in a loop.
@@ -686,7 +761,7 @@ namespace build2
{
// If we need an interface value, then use the group (lib{}).
//
- if (const target* g = exp && l.is_a<libs> () ? l.group.get () : &l)
+ if (const target* g = exp && l.is_a<libs> () ? l.group : &l)
{
const variable& var (
com
@@ -699,13 +774,14 @@ namespace build2
}
};
- process_libraries (bs, lo, sys_lib_dirs, l, la, imp, lib, opt, true);
+ process_libraries (
+ act, bs, lo, sys_lib_dirs, l, la, imp, lib, opt, true);
}
void link::
hash_libraries (sha256& cs,
const file& l, bool la,
- const scope& bs, lorder lo) const
+ const scope& bs, action act, lorder lo) const
{
bool win (tclass == "windows");
@@ -731,7 +807,7 @@ namespace build2
auto opt = [&cs, this] (
const file& l, const string& t, bool com, bool exp)
{
- if (const target* g = exp && l.is_a<libs> () ? l.group.get () : &l)
+ if (const target* g = exp && l.is_a<libs> () ? l.group : &l)
{
const variable& var (
com
@@ -744,13 +820,15 @@ namespace build2
}
};
- process_libraries (bs, lo, sys_lib_dirs, l, la, imp, lib, opt, true);
+ process_libraries (
+ act, bs, lo, sys_lib_dirs, l, la, imp, lib, opt, true);
}
void link::
rpath_libraries (strings& args,
const target& t,
const scope& bs,
+ action act,
lorder lo,
bool for_install) const
{
@@ -864,7 +942,7 @@ namespace build2
"-Wl,-rpath," + f->path ().directory ().string ());
}
- process_libraries (bs, lo, sys_lib_dirs,
+ process_libraries (act, bs, lo, sys_lib_dirs,
*f, a != nullptr,
impf, libf, nullptr);
}
@@ -882,13 +960,14 @@ namespace build2
msvc_machine (const string& cpu); // msvc.cxx
target_state link::
- perform_update (action a, const target& xt) const
+ perform_update (action act, const target& xt) const
{
tracer trace (x, "link::perform_update");
const file& t (xt.as<file> ());
+ const path& tp (t.path ());
- auto oop (a.outer_operation ());
+ auto oop (act.outer_operation ());
bool for_install (oop == install_id || oop == uninstall_id);
const scope& bs (t.base_scope ());
@@ -901,8 +980,8 @@ namespace build2
// out-of-date manually below.
//
bool update (false);
- timestamp mt (t.mtime ());
- target_state ts (straight_execute_prerequisites (a, t));
+ timestamp mt (t.load_mtime ());
+ target_state ts (straight_execute_prerequisites (act, t));
// If targeting Windows, take care of the manifest.
//
@@ -916,7 +995,7 @@ namespace build2
// it if we are updating for install.
//
if (!for_install)
- rpath_timestamp = windows_rpath_timestamp (t, bs, lo);
+ rpath_timestamp = windows_rpath_timestamp (t, bs, act, lo);
path mf (
windows_manifest (
@@ -1015,7 +1094,7 @@ namespace build2
// Check/update the dependency database.
//
- depdb dd (t.path () + ".d");
+ depdb dd (tp + ".d");
// First should come the rule name/version.
//
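
The depdb created from tp + ".d" above records the rule name/version (and, further on, option and input hashes) so that the next run can detect a needed relink even when timestamps alone would not. The following is a hypothetical sketch of the idea behind such a line-oriented dependency database, not build2's actual depdb interface: each expected line is compared against what was saved last time, and any mismatch marks the target out of date.

  #include <fstream>
  #include <string>

  struct dep_db
  {
    explicit dep_db (const std::string& path)
      : path_ (path), in_ (path) {}

    // Compare the next saved line against the expected value and remember
    // the new contents for rewriting.
    void
    expect (const std::string& line)
    {
      std::string l;
      if (!changed_ && (!std::getline (in_, l) || l != line))
        changed_ = true;

      lines_ += line + '\n';
    }

    // True if anything differed or the database did not exist.
    bool
    changed () const {return changed_;}

    // Rewrite the database with the new contents.
    void
    close ()
    {
      std::ofstream out (path_, std::ios::trunc);
      out << lines_;
    }

    std::string path_;
    std::ifstream in_;
    bool changed_ = false;
    std::string lines_;
  };
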
@@ -1157,7 +1236,7 @@ namespace build2
// rpath of the imported libraries (i.e., we assume they are also
// installed). But we add -rpath-link for some platforms.
//
- rpath_libraries (sargs, t, bs, lo, for_install);
+ rpath_libraries (sargs, t, bs, act, lo, for_install);
if (auto l = t["bin.rpath"])
for (const dir_path& p: cast<dir_paths> (l))
@@ -1207,7 +1286,7 @@ namespace build2
// and implementation (static), recursively.
//
if (a != nullptr || s != nullptr)
- hash_libraries (cs, *f, a != nullptr, bs, lo);
+ hash_libraries (cs, *f, a != nullptr, bs, act, lo);
else
cs.append (f->path ().string ());
}
@@ -1260,7 +1339,7 @@ namespace build2
// Translate paths to relative (to working directory) ones. This results
// in easier to read diagnostics.
//
- path relt (relative (t.path ()));
+ path relt (relative (tp));
const process_path* ld (nullptr);
switch (lt)
@@ -1372,7 +1451,6 @@ namespace build2
//
if (find_option ("/DEBUG", args, true))
{
-
auto& pdb (
(lt == otype::e ? t.member : t.member->member)->as<file> ());
out1 = "/PDB:" + relative (pdb.path ()).string ();
@@ -1441,7 +1519,7 @@ namespace build2
// and implementation (static), recursively.
//
if (a != nullptr || s != nullptr)
- append_libraries (sargs, *f, a != nullptr, bs, lo);
+ append_libraries (sargs, *f, a != nullptr, bs, act, lo);
else
sargs.push_back (relative (f->path ()).string ()); // string()&&
}
@@ -1566,7 +1644,7 @@ namespace build2
// install).
//
if (lt == otype::e && !for_install)
- windows_rpath_assembly (t, bs, lo,
+ windows_rpath_assembly (t, bs, act, lo,
cast<string> (rs[x_target_cpu]),
rpath_timestamp,
scratch);
@@ -1620,7 +1698,7 @@ namespace build2
}
target_state link::
- perform_clean (action a, const target& xt) const
+ perform_clean (action act, const target& xt) const
{
const file& t (xt.as<file> ());
@@ -1634,13 +1712,13 @@ namespace build2
{
if (tsys == "mingw32")
return clean_extra (
- a, t, {".d", ".dlls/", ".manifest.o", ".manifest"});
+ act, t, {".d", ".dlls/", ".manifest.o", ".manifest"});
else
// Assuming it's VC or alike. Clean up .ilk in case the user
// enabled incremental linking (note that .ilk replaces .exe).
//
return clean_extra (
- a, t, {".d", ".dlls/", ".manifest", "-.ilk"});
+ act, t, {".d", ".dlls/", ".manifest", "-.ilk"});
}
break;
@@ -1655,7 +1733,7 @@ namespace build2
// versioning their bases may not be the same.
//
if (tsys != "mingw32")
- return clean_extra (a, t, {{".d", "-.ilk"}, {"-.exp"}});
+ return clean_extra (act, t, {{".d", "-.ilk"}, {"-.exp"}});
}
else
{
@@ -1664,7 +1742,7 @@ namespace build2
//
const libs_paths& paths (t.data<libs_paths> ());
- return clean_extra (a, t, {".d",
+ return clean_extra (act, t, {".d",
paths.link.string ().c_str (),
paths.soname.string ().c_str (),
paths.interm.string ().c_str ()});
@@ -1674,7 +1752,7 @@ namespace build2
}
}
- return clean_extra (a, t, {".d"});
+ return clean_extra (act, t, {".d"});
}
}
}
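
The append_libraries(), hash_libraries(), and rpath_libraries() changes above all thread the action into the same callback-driven process_libraries() walk: the caller supplies small lambdas and the walker hands them every library (and, optionally, its exported options) in dependency order. Here is a minimal sketch of that shape using hypothetical simplified types and signatures; the real overload also takes the scope, link order, and system search directories, and distinguishes interface from implementation dependencies.

  #include <functional>
  #include <string>
  #include <vector>

  struct lib_node
  {
    std::string path;                        // Library path or -l name.
    std::vector<const lib_node*> interface_deps;
  };

  // Hand the library and its interface dependencies to the callback,
  // roughly what the proc_lib argument does above (assumes an acyclic graph).
  void
  process_libs (const lib_node& l,
                const std::function<void (const lib_node&)>& proc_lib)
  {
    proc_lib (l);
    for (const lib_node* d: l.interface_deps)
      process_libs (*d, proc_lib);
  }

  // Usage in the append_libraries() style: collect link arguments.
  void
  append_libs (std::vector<std::string>& args, const lib_node& l)
  {
    process_libs (l, [&args] (const lib_node& d) {args.push_back (d.path);});
  }
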
diff --git a/build2/cc/msvc.cxx b/build2/cc/msvc.cxx
index 94064ca..aa9389f 100644
--- a/build2/cc/msvc.cxx
+++ b/build2/cc/msvc.cxx
@@ -219,18 +219,17 @@ namespace build2
template <typename T>
static T*
- msvc_search_library (const char* mod,
- const process_path& ld,
+ msvc_search_library (const process_path& ld,
const dir_path& d,
const prerequisite_key& p,
otype lt,
const char* pfx,
const char* sfx,
- bool exist)
+ bool exist,
+ tracer& trace)
{
// Pretty similar logic to search_library().
//
- tracer trace (mod, "msvc_search_library");
const optional<string>& ext (p.tk.ext);
const string& name (*p.tk.name);
@@ -268,21 +267,13 @@ namespace build2
{
// Enter the target.
//
- auto p (targets.insert (T::static_type,
- d,
- dir_path (),
- name,
- e,
- true, // Implied.
- trace));
- assert (!exist || !p.second);
- T& t (p.first.template as<T> ());
-
- if (t.path ().empty ())
- t.path (move (f));
-
- t.mtime (mt);
- return &t;
+ T* t;
+ common::insert_library (t, name, d, e, exist, trace);
+
+ t->mtime (mt);
+ t->path (move (f));
+
+ return t;
}
return nullptr;
@@ -294,12 +285,15 @@ namespace build2
const prerequisite_key& p,
bool exist) const
{
+ tracer trace (x, "msvc_search_static");
+
liba* r (nullptr);
- auto search = [&r, &ld, &d, &p, exist, this] (
+ auto search = [&r, &ld, &d, &p, exist, &trace, this] (
const char* pf, const char* sf) -> bool
{
- r = msvc_search_library<liba> (x, ld, d, p, otype::a, pf, sf, exist);
+ r = msvc_search_library<liba> (
+ ld, d, p, otype::a, pf, sf, exist, trace);
return r != nullptr;
};
@@ -324,32 +318,33 @@ namespace build2
{
tracer trace (x, "msvc_search_shared");
- libs* r (nullptr);
+ libs* s (nullptr);
- auto search = [&r, &ld, &d, &pk, &trace, exist, this] (
+ auto search = [&s, &ld, &d, &pk, exist, &trace, this] (
const char* pf, const char* sf) -> bool
{
- if (libi* i =
- msvc_search_library<libi> (x, ld, d, pk, otype::s, pf, sf, exist))
+ if (libi* i = msvc_search_library<libi> (
+ ld, d, pk, otype::s, pf, sf, exist, trace))
{
- auto p (targets.insert (libs::static_type,
- d,
- dir_path (),
- *pk.tk.name,
- nullopt,
- true, // Implied.
- trace));
- assert (!exist || !p.second);
- r = &p.first.as<libs> ();
-
- if (r->member == nullptr)
+ ulock l (insert_library (s, *pk.tk.name, d, nullopt, exist, trace));
+
+ if (!exist)
{
- r->mtime (i->mtime ());
- r->member = i;
+ if (l.owns_lock ())
+ s->member = i;
+ else
+ assert (s->member == i);
+
+ l.unlock ();
+
+ // Presumably there is a DLL somewhere; we just don't know where.
+ //
+ s->mtime (i->mtime ());
+ s->path (path ());
}
}
- return r != nullptr;
+ return s != nullptr;
};
// Try:
@@ -360,7 +355,7 @@ namespace build2
return
search ("", "") ||
search ("lib", "") ||
- search ("", "dll") ? r : nullptr;
+ search ("", "dll") ? s : nullptr;
}
}
}
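
The msvc_search_static()/msvc_search_shared() rewrite above leans on the new common::insert_library() returning a lock: only the thread that actually inserted the target still owns the lock and gets to finish initializing it (for example, setting libs::member), while any other thread merely verifies. Below is a minimal sketch of that insert-or-find idiom, assuming a simple mutex-protected map; the real function inserts into build2's target set rather than a map.

  #include <cassert>
  #include <map>
  #include <mutex>
  #include <string>

  struct entry {const void* member = nullptr;};

  static std::map<std::string, entry> table;
  static std::mutex table_mutex;

  // Insert or find the named entry. The returned lock is owned only if this
  // call performed the insertion and the caller must finish initialization.
  std::unique_lock<std::mutex>
  insert_entry (entry*& e, const std::string& name)
  {
    std::unique_lock<std::mutex> l (table_mutex);

    auto p (table.emplace (name, entry ()));
    e = &p.first->second;

    if (!p.second) // Already there: nothing to initialize.
      l.unlock ();

    return l;
  }

  void
  set_member (const std::string& name, const void* m)
  {
    entry* e;
    std::unique_lock<std::mutex> l (insert_entry (e, name));

    if (l.owns_lock ())
      e->member = m;           // We inserted it: initialize.
    else
      assert (e->member == m); // Someone else did: just verify.
  }
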
diff --git a/build2/cc/pkgconfig.cxx b/build2/cc/pkgconfig.cxx
index da6614f..72ae31b 100644
--- a/build2/cc/pkgconfig.cxx
+++ b/build2/cc/pkgconfig.cxx
@@ -37,7 +37,8 @@ namespace build2
// search_library() POV.
//
bool common::
- pkgconfig_extract (const scope& s,
+ pkgconfig_extract (action act,
+ const scope& s,
lib& lt,
liba* at,
libs* st,
@@ -256,7 +257,7 @@ namespace build2
// Now parse --libs into loptions/libs (interface and implementation).
//
- auto parse_libs = [&s, &f, sysd, &next, this] (
+ auto parse_libs = [act, &s, &f, sysd, &next, this] (
const string& lstr, target& t)
{
strings lops;
@@ -421,7 +422,8 @@ namespace build2
prerequisite_key pk {
nullopt, {&lib::static_type, &out, &out, &name, nullopt}, &s};
- if (lib* lt = static_cast<lib*> (search_library (sysd, usrd, pk)))
+ if (lib* lt = static_cast<lib*> (
+ search_library (act, sysd, usrd, pk)))
{
// We used to pick a member but that doesn't seem right since the
// same target could be used with different link orders.
diff --git a/build2/cc/utility b/build2/cc/utility
index b1d07b8..ee3cb81 100644
--- a/build2/cc/utility
+++ b/build2/cc/utility
@@ -41,14 +41,8 @@ namespace build2
// Given the link order return the library member (liba or libs) to link.
//
- // Note that the const version assumes you have already called non-const
- // (which does the search, if necessary).
- //
- target&
- link_member (bin::lib&, lorder);
-
const target&
- link_member (const bin::lib&, lorder);
+ link_member (const bin::lib&, action, lorder);
}
}
diff --git a/build2/cc/utility.cxx b/build2/cc/utility.cxx
index 62febfa..4a931af 100644
--- a/build2/cc/utility.cxx
+++ b/build2/cc/utility.cxx
@@ -39,40 +39,14 @@ namespace build2
}
const target&
- link_member (const bin::lib& l, lorder lo)
+ link_member (const bin::lib& l, action a, lorder lo)
{
- bool ls (true);
- const string& at (cast<string> (l["bin.lib"])); // Available members.
-
- switch (lo)
- {
- case lorder::a:
- case lorder::a_s:
- ls = false; // Fall through.
- case lorder::s:
- case lorder::s_a:
- {
- if (ls ? at == "static" : at == "shared")
- {
- if (lo == lorder::a_s || lo == lorder::s_a)
- ls = !ls;
- else
- assert (false);
- }
- }
- }
-
- const target* r (ls ? static_cast<const target*> (l.s) : l.a);
- assert (r != nullptr);
- return *r;
- }
+ // Make sure group members are resolved.
+ //
+ group_view gv (resolve_group_members (a, l));
+ assert (gv.members != nullptr);
- target&
- link_member (bin::lib& l, lorder lo)
- {
bool ls (true);
- const string& at (cast<string> (l["bin.lib"])); // Available members.
-
switch (lo)
{
case lorder::a:
@@ -81,7 +55,7 @@ namespace build2
case lorder::s:
case lorder::s_a:
{
- if (ls ? at == "static" : at == "shared")
+ if (ls ? l.s == nullptr : l.a == nullptr)
{
if (lo == lorder::a_s || lo == lorder::s_a)
ls = !ls;
@@ -92,13 +66,7 @@ namespace build2
}
}
- target* r (ls ? static_cast<target*> (l.s) : l.a);
-
- if (r == nullptr)
- r = &search (ls ? libs::static_type : liba::static_type,
- prerequisite_key {nullopt, l.key (), nullptr});
-
- return *r;
+ return *(ls ? static_cast<const target*> (l.s) : l.a);
}
}
}
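
The new link_member() above picks the member purely from which group members exist after resolve_group_members(), flipping the preference only for the a_s/s_a orders. A standalone sketch of that selection logic with hypothetical stand-in types (the real function operates on bin::lib and its liba/libs members):

  #include <cassert>

  enum class lorder {a, s, a_s, s_a}; // Static, shared, and "preferred" pairs.

  struct lib_group
  {
    const void* a = nullptr; // Static member, if present.
    const void* s = nullptr; // Shared member, if present.
  };

  // Prefer the member named by the order; for the two-letter orders fall
  // back to the other member if the preferred one is missing.
  const void*
  link_member (const lib_group& l, lorder lo)
  {
    bool ls (true); // Link shared?

    switch (lo)
    {
    case lorder::a:
    case lorder::a_s:
      ls = false;
      // Fall through.
    case lorder::s:
    case lorder::s_a:
      {
        if (ls ? l.s == nullptr : l.a == nullptr)
        {
          assert (lo == lorder::a_s || lo == lorder::s_a); // No fallback otherwise.
          ls = !ls;
        }
      }
    }

    const void* r (ls ? l.s : l.a);
    assert (r != nullptr);
    return r;
  }
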
diff --git a/build2/cc/windows-manifest.cxx b/build2/cc/windows-manifest.cxx
index 0e38e7d..3b9c649 100644
--- a/build2/cc/windows-manifest.cxx
+++ b/build2/cc/windows-manifest.cxx
@@ -41,7 +41,7 @@ namespace build2
path link::
windows_manifest (const file& t, bool rpath_assembly) const
{
- tracer trace (x, "windows_manifest");
+ tracer trace (x, "link::windows_manifest");
const scope& rs (t.root_scope ());
diff --git a/build2/cc/windows-rpath.cxx b/build2/cc/windows-rpath.cxx
index 46a3d3a..383663f 100644
--- a/build2/cc/windows-rpath.cxx
+++ b/build2/cc/windows-rpath.cxx
@@ -46,7 +46,10 @@ namespace build2
// adding to the assembly or timestamp_nonexistent if there aren't any.
//
timestamp link::
- windows_rpath_timestamp (const file& t, const scope& bs, lorder lo) const
+ windows_rpath_timestamp (const file& t,
+ const scope& bs,
+ action act,
+ lorder lo) const
{
timestamp r (timestamp_nonexistent);
@@ -91,7 +94,9 @@ namespace build2
// Ok, this is a DLL.
//
- timestamp t (l != nullptr ? l->mtime () : file_mtime (f.c_str ()));
+ timestamp t (l != nullptr
+ ? l->load_mtime ()
+ : file_mtime (f.c_str ()));
if (t > r)
r = t;
@@ -104,7 +109,7 @@ namespace build2
if ((f = a = pt->is_a<liba> ()) ||
(f = pt->is_a<libs> ()))
- process_libraries (bs, lo, sys_lib_dirs,
+ process_libraries (act, bs, lo, sys_lib_dirs,
*f, a != nullptr,
imp, lib, nullptr, true);
}
@@ -118,6 +123,7 @@ namespace build2
auto link::
windows_rpath_dlls (const file& t,
const scope& bs,
+ action act,
lorder lo) const -> windows_dlls
{
windows_dlls r;
@@ -185,7 +191,7 @@ namespace build2
if ((f = a = pt->is_a<liba> ()) ||
(f = pt->is_a<libs> ()))
- process_libraries (bs, lo, sys_lib_dirs,
+ process_libraries (act, bs, lo, sys_lib_dirs,
*f, a != nullptr,
imp, lib, nullptr, true);
}
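
windows_rpath_timestamp() and windows_rpath_dlls() above follow the same pattern: walk the prerequisite libraries through process_libraries() and fold every DLL into a result, either the newest modification time or the set of DLL paths for the assembly. Here is a minimal sketch of the timestamp fold under hypothetical simplified types; the real callback also handles libraries known only by path rather than as targets.

  #include <algorithm>
  #include <chrono>
  #include <functional>
  #include <vector>

  using timestamp = std::chrono::system_clock::time_point;
  constexpr timestamp timestamp_nonexistent {}; // Epoch stands in for "none".

  struct dll {timestamp mtime;};

  // Hypothetical stand-in for the process_libraries() walk: hand every DLL
  // that would end up in the assembly to the callback.
  void
  for_each_dll (const std::vector<dll>& dlls,
                const std::function<void (const dll&)>& lib)
  {
    for (const dll& d: dlls)
      lib (d);
  }

  // Newest timestamp of any DLL, or timestamp_nonexistent if there are none,
  // mirroring what windows_rpath_timestamp() computes.
  timestamp
  rpath_timestamp (const std::vector<dll>& dlls)
  {
    timestamp r (timestamp_nonexistent);

    for_each_dll (dlls, [&r] (const dll& d)
                  {
                    r = std::max (r, d.mtime);
                  });

    return r;
  }
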
@@ -207,6 +213,7 @@ namespace build2
void link::
windows_rpath_assembly (const file& t,
const scope& bs,
+ action act,
lorder lo,
const string& tcpu,
timestamp ts,
@@ -244,7 +251,7 @@ namespace build2
windows_dlls dlls;
if (!empty)
- dlls = windows_rpath_dlls (t, bs, lo);
+ dlls = windows_rpath_dlls (t, bs, act, lo);
// Clean the assembly directory and make sure it exists. Maybe it would
// have been faster to overwrite the existing manifest rather than