-rw-r--r--  build2/algorithm.cxx  |   4
-rw-r--r--  build2/algorithm.hxx  |   3
-rw-r--r--  build2/c/init.cxx     |   1
-rw-r--r--  build2/cc/common.hxx  |   1
-rw-r--r--  build2/cc/compile.cxx | 147
-rw-r--r--  build2/cc/compile.hxx |   8
-rw-r--r--  build2/cc/init.cxx    |   5
-rw-r--r--  build2/cc/link.cxx    | 524
-rw-r--r--  build2/cc/utility.hxx |  15
-rw-r--r--  build2/cc/utility.ixx |  18
-rw-r--r--  build2/cxx/init.cxx   |   1
-rw-r--r--  build2/target.hxx     |   3
-rw-r--r--  build2/target.ixx     |   7
13 files changed, 474 insertions, 263 deletions
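
Before the diff itself, for orientation: the new compile::append_modules() added to build2/cc/compile.cxx below maps each imported bmi*{} prerequisite to a compiler-specific option. The following is a minimal, hypothetical sketch of that mapping only (the real function works on prerequisite targets and the args/storage vectors; the module name and BMI path arguments here are made up for illustration):

#include <string>
#include <vector>

enum class compiler_id {gcc, clang, msvc};

// Sketch of the option mapping implemented by compile::append_modules() in
// the diff below. 'name' stands for the cc.module_name value set on the
// bmi{} group; 'path' is the (relative) path to the module interface file.
//
static std::vector<std::string>
module_options (compiler_id cid,
                const std::string& name,
                const std::string& path)
{
  switch (cid)
  {
  case compiler_id::gcc:   return {"-fmodule-map=" + name + '=' + path};
  case compiler_id::clang: return {"-fmodule-file=" + path};
  case compiler_id::msvc:  return {"/module:reference", path};
  }
  return {};
}

For example, module_options (compiler_id::gcc, "foo", "foo.mxx.o.bmi") would yield {"-fmodule-map=foo=foo.mxx.o.bmi"} (both values hypothetical).
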
diff --git a/build2/algorithm.cxx b/build2/algorithm.cxx index 8f555dd..0dca9cc 100644 --- a/build2/algorithm.cxx +++ b/build2/algorithm.cxx @@ -706,7 +706,7 @@ namespace build2 { const target* m (ts[i]); - if (m == nullptr) + if (m == nullptr || marked (m)) continue; match_async (a, *m, target::count_busy (), t.task_count); @@ -720,7 +720,7 @@ namespace build2 { const target* m (ts[i]); - if (m == nullptr) + if (m == nullptr || marked (m)) continue; match (a, *m); diff --git a/build2/algorithm.hxx b/build2/algorithm.hxx index 7bb3a59..9f54c8d 100644 --- a/build2/algorithm.hxx +++ b/build2/algorithm.hxx @@ -201,7 +201,8 @@ namespace build2 match_prerequisite_members (action, target&, const scope&); // Match (already searched) members of a group or similar prerequisite-like - // dependencies. Similar in semantics to match_prerequisites(). + // dependencies. Similar in semantics to match_prerequisites(). Any marked + // target pointers are skipped. // void match_members (action, target&, const target*[], size_t); diff --git a/build2/c/init.cxx b/build2/c/init.cxx index 00482a7..3461086 100644 --- a/build2/c/init.cxx +++ b/build2/c/init.cxx @@ -174,6 +174,7 @@ namespace build2 v["cc.type"], v["cc.system"], + v["cc.module_name"], v["cc.reprocess"], v["cc.preprocessed"], diff --git a/build2/cc/common.hxx b/build2/cc/common.hxx index 7d520d0..24262b7 100644 --- a/build2/cc/common.hxx +++ b/build2/cc/common.hxx @@ -66,6 +66,7 @@ namespace build2 const variable& c_type; // cc.type const variable& c_system; // cc.system + const variable& c_module_name; // cc.module_name const variable& c_reprocess; // cc.reprocess const variable& c_preprocessed; // cc.preprocessed diff --git a/build2/cc/compile.cxx b/build2/cc/compile.cxx index 04113e7..d0fd153 100644 --- a/build2/cc/compile.cxx +++ b/build2/cc/compile.cxx @@ -189,7 +189,7 @@ namespace build2 // const function<void (const file&, const string&, bool, bool)> optf (opt); - // Note that here we don't need to see group members (see apply()). + // Note that here we don't need to see group members. // for (const prerequisite& p: group_prerequisites (t)) { @@ -377,6 +377,7 @@ namespace build2 otype ct (compile_type (t, mod)); lorder lo (link_order (bs, ct)); + compile_target_types tt (compile_types (ct)); // Derive file name from target name. // @@ -447,20 +448,11 @@ namespace build2 // if (mod) { - const target_type* tt (nullptr); - - switch (ct) - { - case otype::e: tt = &obje::static_type; break; - case otype::a: tt = &obja::static_type; break; - case otype::s: tt = &objs::static_type; break; - } - // The module interface unit can be the same as an implementation // (e.g., foo.mxx and foo.cxx) which means obj*{} targets could // collide. So we add the module extension to the target name. // - target_lock obj (add_adhoc_member (act, t, *tt, e.c_str ())); + target_lock obj (add_adhoc_member (act, t, tt.obj, e.c_str ())); obj.target->as<file> ().derive_path (o); match_recipe (obj, group_recipe); // Set recipe and unlock. } @@ -503,8 +495,6 @@ namespace build2 // libraries we don't need to do match() in order to get options // (if any, they would be set by search_library()). // - // @@ MOD: for now the same applies to modules? - // if (p.proj ()) { if (search_library (act, @@ -522,6 +512,14 @@ namespace build2 else continue; } + // + // For modules we pick only what we import which is done below so + // skip it here. One corner case is clean: we assume that someone + // else (normally library/executable) also depends on it and will + // clean it up. 
+ // + else if (p.is_a<bmi> () || p.is_a (tt.bmi)) + continue; else { pt = &p.search (t); @@ -668,10 +666,10 @@ namespace build2 // before extracting dependencies. The reasoning for source file is // pretty clear. What other prerequisites could we have? While // normally they will be some other sources (as in, static content - // from project's src_root), its possible they are some auto-generated - // stuff. And it's possible they affect the preprocessor result. Say - // some ad hoc/out-of-band compiler input file that is passed via the - // command line. So, to be safe, we make everything is up to date. + // from src_root), it's possible they are some auto-generated stuff. + // And it's possible they affect the preprocessor result. Say some ad + // hoc/out-of-band compiler input file that is passed via the command + // line. So, to be safe, we make sure everything is up to date. // for (const target* pt: pts) { @@ -722,6 +720,7 @@ namespace build2 if (u) // @@ TMP (depdb validation similar to extract_headers()). { extract_modules (act, t, lo, src, p.first, md, dd, u); + search_modules (bs, act, t, lo, tt.bmi); } // If the preprocessed output is suitable for compilation and is not @@ -2337,6 +2336,50 @@ namespace build2 // if (!modules) fail << "modules support not enabled or unavailable"; + + // Set the cc.module_name variable if this is an interface unit. We set + // it on the bmi{} group so we have to lock it. + // + if (tu.module_interface) + { + target_lock l (lock (act, *t.group)); + assert (l.target != nullptr); + + if (value& v = l.target->vars.assign (c_module_name)) + assert (cast<string> (v) == tu.module_name); + else + v = move (tu.module_name); // Note: move. + } + } + + // Resolve imported modules to bmi*{} targets. + // + void compile:: + search_modules (const scope& /*bs*/, + action act, + file& t, + lorder /*lo*/, + const target_type& mtt) const + { + auto& pts (t.prerequisite_targets); + size_t start (pts.size ()); // Index of the first to be added. + + for (prerequisite_member p: group_prerequisite_members (act, t)) + { + const target* pt (nullptr); + + if (p.is_a<bmi> ()) + pt = &search (t, mtt, p.key ()); //@@ MOD: fuzzy... + else if (p.is_a (mtt)) + pt = &p.search (t); + + if (pt != nullptr) + pts.push_back (pt); + } + + // Match in parallel and wait for completion. + // + match_members (act, t, pts, start); } // Filter cl.exe noise (msvc.cxx). @@ -2344,6 +2387,54 @@ namespace build2 void msvc_filter_cl (ifdstream&, const path& src); + void compile:: + append_modules (cstrings& args, strings& stor, const file& t) const + { + for (const target* pt: t.prerequisite_targets) + { + // Here we use whatever bmi type has been added. + // + const file* f; + if ((f = pt->is_a<bmie> ()) == nullptr && + (f = pt->is_a<bmia> ()) == nullptr && + (f = pt->is_a<bmis> ()) == nullptr) + continue; + + string s (relative (f->path ()).string ()); + + switch (cid) + { + case compiler_id::gcc: + { + s.insert (0, 1, '='); + s.insert (0, cast<string> (f->group->vars[c_module_name])); + s.insert (0, "-fmodule-map="); + break; + } + case compiler_id::clang: + { + s.insert (0, "-fmodule-file="); + break; + } + case compiler_id::msvc: + { + stor.push_back ("/module:reference"); + break; + } + case compiler_id::icc: + assert (false); + } + + stor.push_back (move (s)); + } + + // Shallow-copy storage to args. Why not do it as we go along pushing + // into storage? Because of potential reallocations. 
+ // + for (const string& a: stor) + args.push_back (a.c_str ()); + } + target_state compile:: perform_update (action act, const target& xt) const { @@ -2352,8 +2443,9 @@ namespace build2 match_data md (move (t.data<match_data> ())); bool mod (md.mod); - // While all our prerequisites are already up-to-date, we still have - // to execute them to keep the dependency counts straight. + // While all our prerequisites are already up-to-date, we still have to + // execute them to keep the dependency counts straight. Actually, no, we + // may also have to update the modules. // auto pr ( execute_prerequisites<file> ( @@ -2392,7 +2484,7 @@ namespace build2 else relo = relative (t.path ()); - // Build command line. + // Build the command line. // if (md.pp != preprocessed::all) { @@ -2416,7 +2508,8 @@ namespace build2 append_options (args, t, x_coptions); append_options (args, tstd); - string out, out1; // Storage. + string out, out1; // Output options storage. + strings mods; // Module options storage. size_t out_i (0); // Index of the -o option. if (cid == compiler_id::msvc) @@ -2463,6 +2556,9 @@ namespace build2 if (!find_option_prefixes ({"/MD", "/MT"}, args)) args.push_back ("/MD"); + if (modules) + append_modules (args, mods, t); + // The presence of /Zi or /ZI causes the compiler to write debug info // to the .pdb file. By default it is a shared file called vcNN.pdb // (where NN is the VC version) created (wait for it) in the current @@ -2525,6 +2621,9 @@ namespace build2 args.push_back ("-fPIC"); } + if (modules) + append_modules (args, mods, t); + // Note: the order of the following options is relied upon below. // out_i = args.size (); // Index of the -o option. @@ -2537,7 +2636,11 @@ namespace build2 { args.push_back ("-o"); args.push_back (relo.string ().c_str ()); - //@@ MOD: specify module output file. + + out = "-fmodule-output="; + out += relm.string (); + args.push_back (out.c_str ()); + args.push_back ("-c"); break; } diff --git a/build2/cc/compile.hxx b/build2/cc/compile.hxx index 30f101d..596245d 100644 --- a/build2/cc/compile.hxx +++ b/build2/cc/compile.hxx @@ -97,6 +97,14 @@ namespace build2 const file&, auto_rmfile&, const match_data&, depdb&, bool&) const; + void + search_modules (const scope&, + action, file&, lorder, + const target_type&) const; + + void + append_modules (cstrings&, strings&, const file&) const; + // Language selection option (for VC) or the value for the -x option. // const char* diff --git a/build2/cc/init.cxx b/build2/cc/init.cxx index f845712..ea42469 100644 --- a/build2/cc/init.cxx +++ b/build2/cc/init.cxx @@ -78,6 +78,11 @@ namespace build2 // v.insert<bool> ("cc.system"); + // C++ module name. Should be set on the bmi{} target by the matching + // rule. + // + v.insert<string> ("cc.module_name"); + // Ability to disable using preprocessed output for compilation. // v.insert<bool> ("config.cc.reprocess", true); diff --git a/build2/cc/link.cxx b/build2/cc/link.cxx index 6488153..6d12c4a 100644 --- a/build2/cc/link.cxx +++ b/build2/cc/link.cxx @@ -399,19 +399,37 @@ namespace build2 // inject_fsdir (act, t); - optional<dir_paths> usr_lib_dirs; // Extract lazily. - // Process prerequisites, pass 1: search and match prerequisite - // libraries. + // libraries, search obj/bmi{} targets, and search targets we do rule + // chaining for. // - // We do it first in order to indicate that we will execute these - // targets before matching any of the obj*{}. 
This makes it safe for + // We do libraries first in order to indicate that we will execute these + // targets before matching any of the obj/bmi{}. This makes it safe for // compile::apply() to unmatch them and therefore not to hinder // parallelism. // - // When cleaning, we ignore prerequisites that are not in the same or a - // subdirectory of our project root. + // We also create obj/bmi{} chain targets because we need to add + // (similar to lib{}) all the bmi{} as prerequisites to all the other + // obj/bmi{} that we are creating. Note that this doesn't mean that the + // compile rule will actually treat them all as prerequisite targets. + // Rather, they are used to resolve actual module imports. We don't + // really have to search obj{} targets here but it's the same code so we + // do it here to avoid duplication. + // + // Also, when cleaning, we ignore prerequisites that are not in the same + // or a subdirectory of our project root. // + optional<dir_paths> usr_lib_dirs; // Extract lazily. + compile_target_types tt (compile_types (lt)); + + auto skip = [&act, &rs] (const target*& pt) + { + if (act.operation () == clean_id && !pt->dir.sub (rs.out_path ())) + pt = nullptr; + + return pt == nullptr; + }; + size_t start (t.prerequisite_targets.size ()); for (prerequisite_member p: group_prerequisite_members (act, t)) @@ -420,11 +438,75 @@ namespace build2 // prerequisite target. // t.prerequisite_targets.push_back (nullptr); - const target*& rpt (t.prerequisite_targets.back ()); + const target*& pt (t.prerequisite_targets.back ()); - const target* pt (nullptr); + uint8_t m (0); // Mark: lib (0), src (1), mod (2), obj/bmi (3). - if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ()) + bool mod (x_mod != nullptr && p.is_a (*x_mod)); + + if (mod || p.is_a (x_src) || p.is_a<c> ()) + { + // Rule chaining, part 1. + // + + // Which scope shall we use to resolve the root? Unlikely, but + // possible, the prerequisite is from a different project + // altogether. So we are going to use the target's project. + // + + // If the source came from the lib{} group, then create the obj{} + // group and add the source as a prerequisite of the obj{} group, + // not the obj*{} member. This way we only need one prerequisite + // for, say, both liba{} and libs{}. The same goes for bmi{}. + // + bool group (!p.prerequisite.belongs (t)); // Group's prerequisite. + + const target_type& rtt (mod + ? (group ? bmi::static_type : tt.bmi) + : (group ? obj::static_type : tt.obj)); + + const prerequisite_key& cp (p.key ()); // Source key. + + // Come up with the obj*/bmi*{} target. The source prerequisite + // directory can be relative (to the scope) or absolute. If it is + // relative, then use it as is. If absolute, then translate it to + // the corresponding directory under out_root. While the source + // directory is most likely under src_root, it is also possible it + // is under out_root (e.g., generated source). + // + dir_path d; + { + const dir_path& cpd (*cp.tk.dir); + + if (cpd.relative () || cpd.sub (rs.out_path ())) + d = cpd; + else + { + if (!cpd.sub (rs.src_path ())) + fail << "out of project prerequisite " << cp << + info << "specify corresponding " << rtt.name << "{} " + << "target explicitly"; + + d = rs.out_path () / cpd.leaf (rs.src_path ()); + } + } + + // obj/bmi{} is always in the out tree. Note that currently it could + // be the group -- we will pick a member in part 2 below. 
+ // + pt = &search (t, rtt, d, dir_path (), *cp.tk.name, nullptr, cp.scope); + + // If we shouldn't clean obj{}, then it is fair to assume we + // shouldn't clean the source either (generated source will be in + // the same directory as obj{} and if not, well, go find yourself + // another build system ;-)). + // + if (skip (pt)) + continue; + + m = mod ? 2 : 1; + } + else if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ()) { // Handle imported libraries. // @@ -440,295 +522,263 @@ namespace build2 if (pt == nullptr) pt = &p.search (t); - if (act.operation () == clean_id && !pt->dir.sub (rs.out_path ())) - continue; // Skip. + if (skip (pt)) + continue; // If this is the lib{} target group, then pick the appropriate // member. // if (const lib* l = pt->is_a<lib> ()) pt = &link_member (*l, act, lo); + } + else + { + // If this is the obj{} or bmi{} target group, then pick the + // appropriate member. + // + if (p.is_a<obj> ()) pt = &search (t, tt.obj, p.key ()); + else if (p.is_a<bmi> ()) pt = &search (t, tt.bmi, p.key ()); + else pt = &p.search (t); - rpt = pt; + if (skip (pt)) + continue; + + m = 3; } + + mark (pt, m); } - // Match in parallel and wait for completion. + // Match lib{} (the only unmarked) in parallel and wait for completion. // match_members (act, t, t.prerequisite_targets, start); - // Process prerequisites, pass 2: search and match obj{} and do rule - // chaining for C and X source files. + // Process prerequisites, pass 2: finish rule chaining but don't start + // matching anything yet since that may trigger recursive matching of + // bmi{} targets we haven't completed yet. Hairy, I know. // - const target_type* ott (nullptr); - const target_type* mtt (nullptr); - switch (lt) + // Parallel prerequisite_targets loop. + // + size_t i (start), n (t.prerequisite_targets.size ()); + for (prerequisite_member p: group_prerequisite_members (act, t)) { - case otype::e: ott = &obje::static_type; mtt = &bmie::static_type; break; - case otype::a: ott = &obja::static_type; mtt = &bmia::static_type; break; - case otype::s: ott = &objs::static_type; mtt = &bmis::static_type; break; - } + const target*& pt (t.prerequisite_targets[i++]); - { - // Wait with unlocked phase to allow phase switching. - // - wait_guard wg (target::count_busy (), t.task_count, true); + if (pt == nullptr) + continue; - size_t i (start); // Parallel prerequisite_targets loop. + uint8_t m (unmark (pt)); // New mark: completion (1), verfication (2). - for (prerequisite_member p: group_prerequisite_members (act, t)) + if (m == 3) // obj/bmi{} + m = 1; // Just completion. + else if (m == 1 || m == 2) // Source/module chain. { - const target*& rpt (t.prerequisite_targets[i++]); - const target* pt (nullptr); + bool mod (m == 2); - if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ()) - continue; // Taken care of on pass 1. + m = 1; - uint8_t pm (1); // Completion (1) and verfication (2) mark. - - bool mod (x_mod != nullptr && p.is_a (*x_mod)); - - if (!mod && !p.is_a (x_src) && !p.is_a<c> ()) - { - // If this is the obj{} or bmi{} target group, then pick the - // appropriate member. - // - if (p.is_a<obj> ()) pt = &search (t, *ott, p.key ()); - else if (p.is_a<bmi> ()) pt = &search (t, *mtt, p.key ()); - else pt = &p.search (t); + const target& rt (*pt); + bool group (!p.prerequisite.belongs (t)); // Group's prerequisite. - if (act.operation () == clean_id && !pt->dir.sub (rs.out_path ())) - continue; // Skip. 
+ // If we have created a obj/bmi{} target group, pick one of its + // members; the rest would be primarily concerned with it. + // + pt = + group + ? &search (t, (mod ? tt.bmi : tt.obj), rt.dir, rt.out, rt.name) + : &rt; + + // If this obj*{} already has prerequisites, then verify they are + // "compatible" with what we are doing here. Otherwise, synthesize + // the dependency. Note that we may also end up synthesizing with + // someone beating up to it. In this case also verify. + // + bool verify (true); - // Fall through. - } - else + if (!pt->has_prerequisites ()) { - // The rest is rule chaining. - // + prerequisites ps; + ps.push_back (p.as_prerequisite ()); // Source. - // Which scope shall we use to resolve the root? Unlikely, but - // possible, the prerequisite is from a different project - // altogether. So we are going to use the target's project. + // Add our lib*{} (see the export.* machinery for details) and + // bmi*{} (both original and chanined; see module search logic) + // prerequisites. // - - // If the source came from the lib{} group, then create the obj{} - // group and add the source as a prerequisite of the obj{} group, - // not the obj*{} member. This way we only need one prerequisite - // for, say, both liba{} and libs{}. The same goes for bmi{}. + // Note that we don't resolve lib{} to liba{}/libs{} here + // instead leaving it to whomever (e.g., the compile rule) will + // be needing *.export.*. One reason for doing it there is that + // the object target might be specified explicitly by the user + // in which case they will have to specify the set of lib{} + // prerequisites and it's much cleaner to do as lib{} rather + // than liba{}/libs{}. // - bool group (!p.prerequisite.belongs (t)); // Group's prerequisite. - - const target_type& rtt (mod - ? (group ? bmi::static_type : *mtt) - : (group ? obj::static_type : *ott)); - - const prerequisite_key& cp (p.key ()); // C-source (X or C) key. - - // Come up with the obj*/bmi*{} target. The source prerequisite - // directory can be relative (to the scope) or absolute. If it is - // relative, then use it as is. If absolute, then translate it to - // the corresponding directory under out_root. While the source - // directory is most likely under src_root, it is also possible it - // is under out_root (e.g., generated source). + // Initially, we were only adding imported libraries, but there + // is a problem with this approach: the non-imported library + // might depend on the imported one(s) which we will never "see" + // unless we start with this library. // - dir_path d; + size_t j (start); + for (prerequisite_member p: group_prerequisite_members (act, t)) { - const dir_path& cpd (*cp.tk.dir); + const target* pt (t.prerequisite_targets[j++]); - if (cpd.relative () || cpd.sub (rs.out_path ())) - d = cpd; - else + if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> () || + p.is_a<bmi> () || p.is_a (tt.bmi)) { - if (!cpd.sub (rs.src_path ())) - fail << "out of project prerequisite " << cp << - info << "specify corresponding " << rtt.name << "{} " - << "target explicitly"; - - d = rs.out_path () / cpd.leaf (rs.src_path ()); + ps.emplace_back (p.as_prerequisite ()); + } + else if (x_mod != nullptr && p.is_a (*x_mod)) // Chained module. + { + // Searched during pass 1 but can be NULL or marked. + // + if (pt != nullptr && i != j) // Don't add self (note: both +1). + { + unmark (pt); + ps.emplace_back (prerequisite (*pt)); + } } } - // obj*/bmi*{} is always in the out tree. 
- // - const target& rt ( - search (t, rtt, d, dir_path (), *cp.tk.name, nullptr, cp.scope)); - - // If we are cleaning, check that this target is in the same or a - // subdirectory of our project root. + // Note: add to the group, not the member. // - if (act.operation () == clean_id && !rt.dir.sub (rs.out_path ())) - { - // If we shouldn't clean obj{}, then it is fair to assume we - // shouldn't clean the source either (generated source will be - // in the same directory as obj{} and if not, well, go find - // yourself another build system ;-)). - // - continue; // Skip. - } + verify = !rt.prerequisites (move (ps)); + } - // If we have created the obj/bmi{} target group, pick one of its - // members; the rest would be primarily concerned with it. - // - pt = (group - ? &search (t, (mod ? *mtt : *ott), rt.dir, rt.out, rt.name) - : &rt); - - // If this obj*{} already has prerequisites, then verify they are - // "compatible" with what we are doing here. Otherwise, synthesize - // the dependency. Note that we may also end up synthesizing with - // someone beating up to it. In this case also verify. + if (verify) + { + // This gets a bit tricky. We need to make sure the source files + // are the same which we can only do by comparing the targets to + // which they resolve. But we cannot search ot's prerequisites -- + // only the rule that matches can. Note, however, that if all this + // works out, then our next step is to match the obj*{} target. If + // things don't work out, then we fail, in which case searching + // and matching speculatively doesn't really hurt. So we start the + // async match here and finish this verification in the "harvest" + // loop below. // - bool verify (true); + const target_type& rtt (mod + ? (group ? bmi::static_type : tt.bmi) + : (group ? obj::static_type : tt.obj)); - if (!pt->has_prerequisites ()) + bool src (false); + for (prerequisite_member p1: group_prerequisite_members (act, *pt)) { - prerequisites ps; - ps.push_back (p.as_prerequisite ()); // Source. - - // Add our lib*{} prerequisites (see the export.* machinery for - // details). - // - // Note that we don't resolve lib{} to liba{}/libs{} here - // instead leaving it to whoever (e.g., the compile rule) will - // be needing *.export.*. One reason for doing it there is that - // the object target might be specified explicitly by the user - // in which case they will have to specify the set of lib{} - // prerequisites and it's much cleaner to do as lib{} rather - // than liba{}/libs{}. + // Most of the time we will have just a single source so fast- + // path that case. // - // Initially, we were only adding imported libraries, but there - // is a problem with this approach: the non-imported library - // might depend on the imported one(s) which we will never "see" - // unless we start with this library. - // - for (const prerequisite& p: group_prerequisites (t)) + if (p1.is_a (mod ? *x_mod : x_src) || p1.is_a<c> ()) { - if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libs> ()) - ps.emplace_back (p); + src = true; + continue; // Check the rest of the prerequisites. } - // Note: add to the group, not the member. + // Ignore some known target types (fsdir, headers, libraries, + // modules). // - verify = !rt.prerequisites (move (ps)); + if (p1.is_a<fsdir> () || + p1.is_a<lib> () || + p1.is_a<liba> () || p1.is_a<libs> () || + p1.is_a<bmi> () || + p1.is_a<bmie> () || p1.is_a<bmia> () || p1.is_a<bmis> () || + (p.is_a (mod ? 
*x_mod : x_src) && x_header (p1)) || + (p.is_a<c> () && p1.is_a<h> ())) + continue; + + fail << "synthesized dependency for prerequisite " << p + << " would be incompatible with existing target " << *pt << + info << "unexpected existing prerequisite type " << p1 << + info << "specify corresponding " << rtt.name << "{} " + << "dependency explicitly"; } - if (verify) - { - // This gets a bit tricky. We need to make sure the source files - // are the same which we can only do by comparing the targets to - // which they resolve. But we cannot search ot's prerequisites - // -- only the rule that matches can. Note, however, that if all - // this works out, then our next step is to match the obj*{} - // target. If things don't work out, then we fail, in which case - // searching and matching speculatively doesn't really hurt. So - // we start the async match here and finish this verification in - // the "harvest" loop below. - // - bool src (false); - for (prerequisite_member p1: - group_prerequisite_members (act, *pt)) - { - // Most of the time we will have just a single source so - // fast-path that case. - // - if (p1.is_a (mod ? *x_mod : x_src) || p1.is_a<c> ()) - { - src = true; - continue; // Check the rest of the prerequisites. - } + if (!src) + fail << "synthesized dependency for prerequisite " << p + << " would be incompatible with existing target " << *pt << + info << "no existing c/" << x_name << " source prerequisite" << + info << "specify corresponding " << rtt.name << "{} " + << "dependency explicitly"; - // Ignore some known target types (fsdir, headers, libraries). - // - if (p1.is_a<fsdir> () || - p1.is_a<lib> () || - p1.is_a<liba> () || - p1.is_a<libs> () || - (p.is_a (mod ? *x_mod : x_src) && x_header (p1)) || - (p.is_a<c> () && p1.is_a<h> ())) - continue; - - fail << "synthesized dependency for prerequisite " << p - << " would be incompatible with existing target " << *pt << - info << "unexpected existing prerequisite type " << p1 << - info << "specify corresponding " << rtt.name << "{} " - << "dependency explicitly"; - } - - if (!src) - fail << "synthesized dependency for prerequisite " << p - << " would be incompatible with existing target " << *pt << - info << "no existing c/" << x_name << " source prerequisite" << - info << "specify corresponding " << rtt.name << "{} " - << "dependency explicitly"; - - pm = 2; // Needs completion and verification. - } + m = 2; // Needs verification. } - - match_async (act, *pt, target::count_busy (), t.task_count); - rpt = pt; - mark (rpt, pm); // Mark for completion/verification. } - wg.wait (); + mark (pt, m); } - // The "harvest" loop: finish matching the targets we have started. Note - // that we may have bailed out early (thus the parallel i/n for-loop). + // Process prerequisites, pass 3: match everything and verify chains. + // + + // Wait with unlocked phase to allow phase switching. // + wait_guard wg (target::count_busy (), t.task_count, true); + + for (i = start; i != n; ++i) { - size_t i (start), n (t.prerequisite_targets.size ()); + const target*& pt (t.prerequisite_targets[i]); - for (prerequisite_member p: group_prerequisite_members (act, t)) - { - if (i == n) - break; + if (pt == nullptr) + continue; - const target*& pt (t.prerequisite_targets[i++]); + if (uint8_t m = unmark (pt)) + { + match_async (act, *pt, target::count_busy (), t.task_count); + mark (pt, m); + } + } - uint8_t m; + wg.wait (); - // Skipped or not marked for completion (pass 1). 
- // - if (pt == nullptr || (m = unmark (pt)) == 0) - continue; + // The "harvest" loop: finish matching the targets we have started. Note + // that we may have bailed out early (thus the parallel i/n for-loop). + // + i = start; + for (prerequisite_member p: group_prerequisite_members (act, t)) + { + const target*& pt (t.prerequisite_targets[i++]); - build2::match (act, *pt); + // Skipped or not marked for completion. + // + uint8_t m; + if (pt == nullptr || (m = unmark (pt)) == 0) + continue; - // Nothing else to do if not marked for verification. - // - if (m == 1) - continue; + build2::match (act, *pt); - // Finish verifying the existing dependency (which is now matched) - // compared to what we would have synthesized. - // - bool mod (x_mod != nullptr && p.is_a (*x_mod)); - bool group (!p.prerequisite.belongs (t)); // Group's prerequisite. + // Nothing else to do if not marked for verification. + // + if (m == 1) + continue; - const target_type& rtt (mod - ? (group ? bmi::static_type : *mtt) - : (group ? obj::static_type : *ott)); + // Finish verifying the existing dependency (which is now matched) + // compared to what we would have synthesized. + // + bool mod (x_mod != nullptr && p.is_a (*x_mod)); - for (prerequisite_member p1: group_prerequisite_members (act, *pt)) + for (prerequisite_member p1: group_prerequisite_members (act, *pt)) + { + if (p1.is_a (mod ? *x_mod : x_src) || p1.is_a<c> ()) { - if (p1.is_a (mod ? *x_mod : x_src) || p1.is_a<c> ()) + // Searching our own prerequisite is ok, p1 must already be + // resolved. + // + if (&p.search (t) != &p1.search (*pt)) { - // Searching our own prerequisite is ok, p1 must already be - // resolved. - // - if (&p.search (t) != &p1.search (*pt)) - fail << "synthesized dependency for prerequisite " << p << " " - << "would be incompatible with existing target " << *pt << - info << "existing prerequisite " << p1 << " does not match " - << p << - info << "specify corresponding " << rtt.name << "{} " - << "dependency explicitly"; - - break; + bool group (!p.prerequisite.belongs (t)); + + const target_type& rtt (mod + ? (group ? bmi::static_type : tt.bmi) + : (group ? obj::static_type : tt.obj)); + + fail << "synthesized dependency for prerequisite " << p << " " + << "would be incompatible with existing target " << *pt << + info << "existing prerequisite " << p1 << " does not match " + << p << + info << "specify corresponding " << rtt.name << "{} " + << "dependency explicitly"; } + + break; } } } @@ -1570,11 +1620,11 @@ namespace build2 if (!manifest.empty () && tsys == "mingw32") sargs.push_back (relative (manifest).string ()); - // Copy sargs to args. Why not do it as we go along pushing into sargs? - // Because of potential reallocations. + // Shallow-copy sargs to args. Why not do it as we go along pushing into + // sargs? Because of potential reallocations. // - for (size_t i (0); i != sargs.size (); ++i) - args.push_back (sargs[i].c_str ()); + for (const string& a: sargs) + args.push_back (a.c_str ()); if (lt != otype::a) { @@ -1594,7 +1644,7 @@ namespace build2 if (!p.empty ()) try { - if (verb >= 3) + if (verb >= 4) // Seeing this with -V doesn't really add any value. text << "rm " << p; auto rm = [&paths] (path&& m, const string&, bool interm) diff --git a/build2/cc/utility.hxx b/build2/cc/utility.hxx index 11abf90..62104d9 100644 --- a/build2/cc/utility.hxx +++ b/build2/cc/utility.hxx @@ -19,11 +19,24 @@ namespace build2 namespace cc { - // Compile/link output type. + // Compile output type. 
// otype compile_type (const target&, bool module); + // Compile target types. + // + struct compile_target_types + { + const target_type& obj; + const target_type& bmi; + }; + + compile_target_types + compile_types (otype); + + // Link output type. + // otype link_type (const target&); diff --git a/build2/cc/utility.ixx b/build2/cc/utility.ixx index b15791a..d372dac 100644 --- a/build2/cc/utility.ixx +++ b/build2/cc/utility.ixx @@ -27,5 +27,23 @@ namespace build2 t.is_a<liba> () ? otype::a : otype::s; } + + inline compile_target_types + compile_types (otype t) + { + using namespace bin; + + const target_type* o (nullptr); + const target_type* m (nullptr); + + switch (t) + { + case otype::e: o = &obje::static_type; m = &bmie::static_type; break; + case otype::a: o = &obja::static_type; m = &bmia::static_type; break; + case otype::s: o = &objs::static_type; m = &bmis::static_type; break; + } + + return compile_target_types {*o, *m}; + } } } diff --git a/build2/cxx/init.cxx b/build2/cxx/init.cxx index a5422d2..db2adf4 100644 --- a/build2/cxx/init.cxx +++ b/build2/cxx/init.cxx @@ -338,6 +338,7 @@ namespace build2 v["cc.type"], v["cc.system"], + v["cc.module_name"], v["cc.reprocess"], v["cc.preprocessed"], diff --git a/build2/target.hxx b/build2/target.hxx index 847e421..e6eda29 100644 --- a/build2/target.hxx +++ b/build2/target.hxx @@ -691,6 +691,9 @@ namespace build2 mark (const target*&, uint8_t = 1); uint8_t + marked (const target*); // Can be used as a predicate or to get the mark. + + uint8_t unmark (const target*&); // A "range" that presents the prerequisites of a group and one of diff --git a/build2/target.ixx b/build2/target.ixx index a213f16..d2edf89 100644 --- a/build2/target.ixx +++ b/build2/target.ixx @@ -164,6 +164,13 @@ namespace build2 } inline uint8_t + marked (const target* p) + { + uintptr_t i (reinterpret_cast<uintptr_t> (p)); + return uint8_t (i & 0x03); + } + + inline uint8_t unmark (const target*& p) { uintptr_t i (reinterpret_cast<uintptr_t> (p)); |
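
The mark()/marked()/unmark() functions shown in the build2/target.hxx and build2/target.ixx hunks above are what the link rule's passes rely on to tag prerequisite target pointers (lib 0, src 1, mod 2, obj/bmi 3) and what match_members() now checks in order to skip entries that are still marked. A standalone, simplified sketch of the idea, assuming target pointers are at least 4-byte aligned so the two low bits are free (the actual implementation lives in build2/target.ixx and differs in detail):

#include <cstdint>

struct target {int dummy;}; // Stand-in for build2::target (alignment >= 4).

// Store a 2-bit mark (0-3) in the low bits of the pointer.
//
inline void
mark (const target*& p, std::uint8_t m = 1)
{
  std::uintptr_t i (reinterpret_cast<std::uintptr_t> (p));
  p = reinterpret_cast<const target*> (i | (m & 0x03));
}

// Return the mark without clearing it (usable as a predicate).
//
inline std::uint8_t
marked (const target* p)
{
  std::uintptr_t i (reinterpret_cast<std::uintptr_t> (p));
  return std::uint8_t (i & 0x03);
}

// Clear the mark, returning its previous value.
//
inline std::uint8_t
unmark (const target*& p)
{
  std::uintptr_t i (reinterpret_cast<std::uintptr_t> (p));
  std::uint8_t m (std::uint8_t (i & 0x03));
  p = reinterpret_cast<const target*> (i & ~std::uintptr_t (0x03));
  return m;
}
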