author     Boris Kolpackov <boris@codesynthesis.com>   2016-01-05 11:55:15 +0200
committer  Boris Kolpackov <boris@codesynthesis.com>   2016-01-05 11:55:15 +0200
commit     9fb791e9fad6c63fc1dac49f4d05ae63b8a3db9b (patch)
tree       d60322d4382ca5f97b676c5abe2e39524f35eab4 /build2/cxx
parent     f159b1dac68c8714f7ba71ca168e3b695891aad9 (diff)
Rename build directory/namespace to build2
Diffstat (limited to 'build2/cxx')
-rw-r--r--  build2/cxx/compile       32
-rw-r--r--  build2/cxx/compile.cxx  794
-rw-r--r--  build2/cxx/install       29
-rw-r--r--  build2/cxx/install.cxx   66
-rw-r--r--  build2/cxx/link          70
-rw-r--r--  build2/cxx/link.cxx     875
-rw-r--r--  build2/cxx/module        23
-rw-r--r--  build2/cxx/module.cxx   230
-rw-r--r--  build2/cxx/target        78
-rw-r--r--  build2/cxx/target.cxx    81
-rw-r--r--  build2/cxx/utility       37
-rw-r--r--  build2/cxx/utility.cxx   29
-rw-r--r--  build2/cxx/utility.txx   35
13 files changed, 2379 insertions, 0 deletions
diff --git a/build2/cxx/compile b/build2/cxx/compile
new file mode 100644
index 0000000..42e0f2e
--- /dev/null
+++ b/build2/cxx/compile
@@ -0,0 +1,32 @@
+// file : build2/cxx/compile -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#ifndef BUILD2_CXX_COMPILE
+#define BUILD2_CXX_COMPILE
+
+#include <build2/types>
+#include <build2/rule>
+
+namespace build2
+{
+ namespace cxx
+ {
+ class compile: public rule
+ {
+ public:
+ virtual match_result
+ match (action, target&, const std::string& hint) const;
+
+ virtual recipe
+ apply (action, target&, const match_result&) const;
+
+ static target_state
+ perform_update (action, target&);
+
+ static compile instance;
+ };
+ }
+}
+
+#endif // BUILD2_CXX_COMPILE
diff --git a/build2/cxx/compile.cxx b/build2/cxx/compile.cxx
new file mode 100644
index 0000000..bc332b4
--- /dev/null
+++ b/build2/cxx/compile.cxx
@@ -0,0 +1,794 @@
+// file : build2/cxx/compile.cxx -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#include <build2/cxx/compile>
+
+#include <map>
+#include <string>
+#include <cstddef> // size_t
+#include <cstdlib> // exit()
+#include <utility> // move()
+
+#include <butl/process>
+#include <butl/utility> // reverse_iterate
+#include <butl/fdstream>
+#include <butl/path-map>
+
+#include <build2/types>
+#include <build2/scope>
+#include <build2/variable>
+#include <build2/algorithm>
+#include <build2/diagnostics>
+#include <build2/context>
+
+#include <build2/bin/target>
+#include <build2/cxx/target>
+
+#include <build2/cxx/utility>
+#include <build2/cxx/link>
+
+using namespace std;
+using namespace butl;
+
+namespace build2
+{
+ namespace cxx
+ {
+ using namespace bin;
+
+ match_result compile::
+ match (action a, target& t, const string&) const
+ {
+ tracer trace ("cxx::compile::match");
+
+ // @@ TODO:
+ //
+ // - check prerequisites: single source file
+ // - if path already assigned, verify extension?
+ //
+
+ // See if we have a C++ source file. Iterate in reverse so that
+ // a source file specified for an obj*{} member overrides the one
+ // specified for the group. Also "see through" groups.
+ //
+ for (prerequisite_member p: reverse_group_prerequisite_members (a, t))
+ {
+ if (p.is_a<cxx> ())
+ return p;
+ }
+
+ level4 ([&]{trace << "no c++ source file for target " << t;});
+ return nullptr;
+ }
+
+ static void
+ inject_prerequisites (action, target&, cxx&, scope&);
+
+ recipe compile::
+ apply (action a, target& xt, const match_result& mr) const
+ {
+ path_target& t (static_cast<path_target&> (xt));
+
+ // Derive file name from target name.
+ //
+ if (t.path ().empty ())
+ t.derive_path ("o", nullptr, (t.is_a<objso> () ? "-so" : nullptr));
+
+ // Inject dependency on the output directory.
+ //
+ inject_parent_fsdir (a, t);
+
+ // Search and match all the existing prerequisites. The injection
+ // code (below) takes care of the ones it is adding.
+ //
+ // When cleaning, ignore prerequisites that are not in the same
+ // or a subdirectory of our strong amalgamation.
+ //
+ const dir_path* amlg (
+ a.operation () != clean_id
+ ? nullptr
+ : &t.strong_scope ().out_path ());
+
+ link::search_paths_cache lib_paths; // Extract lazily.
+
+ for (prerequisite_member p: group_prerequisite_members (a, t))
+ {
+ // A dependency on a library is there so that we can get its
+ // cxx.export.poptions. In particular, making sure it is
+ // executed before us will only restrict parallelism. But we
+ // do need to pre-match it in order to get its
+ // prerequisite_targets populated. This is the "library
+ // meta-information protocol". See also append_lib_options()
+ // above.
+ //
+ if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libso> ())
+ {
+ if (a.operation () == update_id)
+ {
+ // Handle imported libraries. We know that for such libraries
+ // we don't need to do match() in order to get options (if
+ // any, they would be set by search_library()).
+ //
+ if (p.proj () == nullptr ||
+ link::search_library (lib_paths, p.prerequisite) == nullptr)
+ {
+ match_only (a, p.search ());
+ }
+ }
+
+ continue;
+ }
+
+ target& pt (p.search ());
+
+ if (a.operation () == clean_id && !pt.dir.sub (*amlg))
+ continue;
+
+ build2::match (a, pt);
+ t.prerequisite_targets.push_back (&pt);
+ }
+
+ // Inject additional prerequisites. We only do it when
+ // performing update since chances are we will have to
+ // update some of our prerequisites in the process (auto-
+ // generated source code).
+ //
+ if (a == perform_update_id)
+ {
+ // The cached prerequisite target should be the same as what
+ // is in t.prerequisite_targets since we used standard
+ // search() and match() above.
+ //
+ // @@ Ugly.
+ //
+ cxx& st (
+ dynamic_cast<cxx&> (
+ mr.target != nullptr ? *mr.target : *mr.prerequisite->target));
+ inject_prerequisites (a, t, st, mr.prerequisite->scope);
+ }
+
+ switch (a)
+ {
+ case perform_update_id: return &perform_update;
+ case perform_clean_id: return &perform_clean;
+ default: return noop_recipe; // Configure update.
+ }
+ }
+
+ // Reverse-lookup target type from extension.
+ //
+ static const target_type*
+ map_extension (scope& s, const string& n, const string& e)
+ {
+ // We will just have to try all of the possible ones, in the
+ // "most likely to match" order.
+ //
+ const variable& var (var_pool.find ("extension"));
+
+ auto test = [&s, &n, &e, &var] (const target_type& tt)
+ -> const target_type*
+ {
+ if (auto l = s.lookup (tt, n, var))
+ if (as<string> (*l) == e)
+ return &tt;
+
+ return nullptr;
+ };
+
+ if (auto r = test (hxx::static_type)) return r;
+ if (auto r = test (h::static_type)) return r;
+ if (auto r = test (ixx::static_type)) return r;
+ if (auto r = test (txx::static_type)) return r;
+ if (auto r = test (cxx::static_type)) return r;
+ if (auto r = test (c::static_type)) return r;
+
+ return nullptr;
+ }
+
+ // Mapping of include prefixes (e.g., foo in <foo/bar>) for auto-
+ // generated headers to directories where they will be generated.
+ //
+ // We are using a prefix map of directories (dir_path_map) instead
+    // of just a map in order to also cover sub-paths (e.g., <foo/more/bar>
+ // if we continue with the example). Specifically, we need to make
+ // sure we don't treat foobar as a sub-directory of foo.
+ //
+ // @@ The keys should be canonicalized.
+ //
+ using prefix_map = dir_path_map<dir_path>;
+
+ static void
+ append_prefixes (prefix_map& m, target& t, const char* var)
+ {
+ tracer trace ("cxx::append_prefixes");
+
+      // If this target does not belong to any project (e.g., an
+ // "imported as installed" library), then it can't possibly
+ // generate any headers for us.
+ //
+ scope* rs (t.base_scope ().root_scope ());
+ if (rs == nullptr)
+ return;
+
+ const dir_path& out_base (t.dir);
+ const dir_path& out_root (rs->out_path ());
+
+ if (auto l = t[var])
+ {
+ const auto& v (as<strings> (*l));
+
+ for (auto i (v.begin ()), e (v.end ()); i != e; ++i)
+ {
+ // -I can either be in the "-Ifoo" or "-I foo" form.
+ //
+ dir_path d;
+ if (*i == "-I")
+ {
+ if (++i == e)
+ break; // Let the compiler complain.
+
+ d = dir_path (*i);
+ }
+ else if (i->compare (0, 2, "-I") == 0)
+ d = dir_path (*i, 2, string::npos);
+ else
+ continue;
+
+ level6 ([&]{trace << "-I '" << d << "'";});
+
+          // If the path is relative or not inside our project root, then
+ // ignore.
+ //
+ if (d.relative () || !d.sub (out_root))
+ continue;
+
+ // If the target directory is a sub-directory of the include
+ // directory, then the prefix is the difference between the
+ // two. Otherwise, leave it empty.
+ //
+ // The idea here is to make this "canonical" setup work auto-
+ // magically:
+ //
+ // 1. We include all files with a prefix, e.g., <foo/bar>.
+ // 2. The library target is in the foo/ sub-directory, e.g.,
+ // /tmp/foo/.
+ // 3. The poptions variable contains -I/tmp.
+ //
+ dir_path p (out_base.sub (d) ? out_base.leaf (d) : dir_path ());
+
+ auto j (m.find (p));
+
+ if (j != m.end ())
+ {
+ if (j->second != d)
+ {
+ // We used to reject duplicates but it seems this can
+ // be reasonably expected to work according to the order
+ // of the -I options.
+ //
+ if (verb >= 4)
+ trace << "overriding dependency prefix '" << p << "'\n"
+ << " old mapping to " << j->second << "\n"
+ << " new mapping to " << d;
+
+ j->second = d;
+ }
+ }
+ else
+ {
+ level6 ([&]{trace << "'" << p << "' = '" << d << "'";});
+ m.emplace (move (p), move (d));
+ }
+ }
+ }
+ }
+
+ // Append library prefixes based on the cxx.export.poptions variables
+ // recursively, prerequisite libraries first.
+ //
+ static void
+ append_lib_prefixes (prefix_map& m, target& l)
+ {
+ for (target* t: l.prerequisite_targets)
+ {
+ if (t == nullptr)
+ continue;
+
+ if (t->is_a<lib> () || t->is_a<liba> () || t->is_a<libso> ())
+ append_lib_prefixes (m, *t);
+ }
+
+ append_prefixes (m, l, "cxx.export.poptions");
+ }
+
+ static prefix_map
+ build_prefix_map (target& t)
+ {
+ prefix_map m;
+
+ // First process the include directories from prerequisite
+ // libraries. Note that here we don't need to see group
+ // members (see apply()).
+ //
+ for (prerequisite& p: group_prerequisites (t))
+ {
+ target& pt (*p.target); // Already searched and matched.
+
+ if (pt.is_a<lib> () || pt.is_a<liba> () || pt.is_a<libso> ())
+ append_lib_prefixes (m, pt);
+ }
+
+ // Then process our own.
+ //
+ append_prefixes (m, t, "cxx.poptions");
+
+ return m;
+ }
+
+ // Return the next make prerequisite starting from the specified
+ // position and update position to point to the start of the
+ // following prerequisite or l.size() if there are none left.
+ //
+ static string
+ next (const string& l, size_t& p)
+ {
+ size_t n (l.size ());
+
+ // Skip leading spaces.
+ //
+ for (; p != n && l[p] == ' '; p++) ;
+
+ // Lines containing multiple prerequisites are 80 characters max.
+ //
+ string r;
+ r.reserve (n);
+
+ // Scan the next prerequisite while watching out for escape sequences.
+ //
+ for (; p != n && l[p] != ' '; p++)
+ {
+ char c (l[p]);
+
+ if (c == '\\')
+ c = l[++p];
+
+ r += c;
+ }
+
+ // Skip trailing spaces.
+ //
+ for (; p != n && l[p] == ' '; p++) ;
+
+ // Skip final '\'.
+ //
+ if (p == n - 1 && l[p] == '\\')
+ p++;
+
+ return r;
+ }
+
+ static void
+ inject_prerequisites (action a, target& t, cxx& s, scope& ds)
+ {
+ tracer trace ("cxx::compile::inject_prerequisites");
+
+ scope& rs (t.root_scope ());
+ const string& cxx (as<string> (*rs["config.cxx"]));
+
+ cstrings args {cxx.c_str ()};
+
+ // Add cxx.export.poptions from prerequisite libraries. Note
+ // that here we don't need to see group members (see apply()).
+ //
+ for (prerequisite& p: group_prerequisites (t))
+ {
+ target& pt (*p.target); // Already searched and matched.
+
+ if (pt.is_a<lib> () || pt.is_a<liba> () || pt.is_a<libso> ())
+ append_lib_options (args, pt, "cxx.export.poptions");
+ }
+
+ append_options (args, t, "cxx.poptions");
+
+ // @@ Some C++ options (e.g., -std, -m) affect the preprocessor.
+ // Or maybe they are not C++ options? Common options?
+ //
+ append_options (args, t, "cxx.coptions");
+
+ string std; // Storage.
+ append_std (args, t, std);
+
+ if (t.is_a<objso> ())
+ args.push_back ("-fPIC");
+
+ args.push_back ("-M"); // Note: -MM -MG skips missing <>-included.
+ args.push_back ("-MG"); // Treat missing headers as generated.
+ args.push_back ("-MQ"); // Quoted target name.
+ args.push_back ("*"); // Old versions can't handle empty target name.
+
+ // We are using absolute source file path in order to get absolute
+ // paths in the result. Any relative paths in the result are non-
+ // existent, potentially auto-generated headers.
+ //
+ // @@ We will also have to use absolute -I paths to guarantee
+ // that. Or just detect relative paths and error out?
+ //
+ args.push_back (s.path ().string ().c_str ());
+ args.push_back (nullptr);
+
+ level6 ([&]{trace << "target: " << t;});
+
+ // Build the prefix map lazily only if we have non-existent files.
+ // Also reuse it over restarts since it doesn't change.
+ //
+ prefix_map pm;
+
+ // If any prerequisites that we have extracted changed, then we
+ // have to redo the whole thing. The reason for this is auto-
+ // generated headers: the updated header may now include a yet-
+ // non-existent header. Unless we discover this and generate it
+ // (which, BTW, will trigger another restart since that header,
+ // in turn, can also include auto-generated headers), we will
+ // end up with an error during compilation proper.
+ //
+ // One complication with this restart logic is that we will see
+ // a "prefix" of prerequisites that we have already processed
+ // (i.e., they are already in our prerequisite_targets list) and
+ // we don't want to keep redoing this over and over again. One
+ // thing to note, however, is that the prefix that we have seen
+ // on the previous run must appear exactly the same in the
+ // subsequent run. The reason for this is that none of the files
+ // that it can possibly be based on have changed and thus it
+ // should be exactly the same. To put it another way, the
+ // presence or absence of a file in the dependency output can
+ // only depend on the previous files (assuming the compiler
+ // outputs them as it encounters them and it is hard to think
+      // of a reason why someone would do otherwise). And we have
+ // already made sure that all those files are up to date. And
+ // here is the way we are going to exploit this: we are going
+ // to keep track of how many prerequisites we have processed so
+ // far and on restart skip right to the next one.
+ //
+ // Also, before we do all that, make sure the source file itself
+      // is up to date.
+ //
+ execute_direct (a, s);
+
+ size_t skip_count (0);
+ for (bool restart (true); restart; )
+ {
+ restart = false;
+
+ if (verb >= 3)
+ print_process (args);
+
+ try
+ {
+ process pr (args.data (), 0, -1); // Open pipe to stdout.
+ ifdstream is (pr.in_ofd);
+
+ size_t skip (skip_count);
+ for (bool first (true), second (true); !(restart || is.eof ()); )
+ {
+ string l;
+ getline (is, l);
+
+ if (is.fail () && !is.eof ())
+ fail << "error reading C++ compiler -M output";
+
+ size_t pos (0);
+
+ if (first)
+ {
+ // Empty output should mean the wait() call below will return
+ // false.
+ //
+ if (l.empty ())
+ break;
+
+ assert (l[0] == '*' && l[1] == ':' && l[2] == ' ');
+
+ first = false;
+
+ // While normally we would have the source file on the
+ // first line, if too long, it will be moved to the next
+ // line and all we will have on this line is "*: \".
+ //
+ if (l.size () == 4 && l[3] == '\\')
+ continue;
+ else
+ pos = 3; // Skip "*: ".
+
+ // Fall through to the 'second' block.
+ }
+
+ if (second)
+ {
+ second = false;
+ next (l, pos); // Skip the source file.
+ }
+
+ // If things go wrong (and they often do in this area), give
+ // the user a bit extra context.
+ //
+ auto g (
+ make_exception_guard (
+ [&s]()
+ {
+ info << "while extracting dependencies from " << s;
+ }));
+
+ while (pos != l.size ())
+ {
+ string fs (next (l, pos));
+
+ // Skip until where we left off.
+ //
+ if (skip != 0)
+ {
+ skip--;
+ continue;
+ }
+
+ path f (move (fs));
+ f.normalize ();
+
+ if (!f.absolute ())
+ {
+ // This is probably as often an error as an auto-generated
+ // file, so trace at level 4.
+ //
+ level4 ([&]{trace << "non-existent header '" << f << "'";});
+
+ // If we already did it and build_prefix_map() returned empty,
+ // then we would have failed below.
+ //
+ if (pm.empty ())
+ pm = build_prefix_map (t);
+
+ // First try the whole file. Then just the directory.
+ //
+ // @@ Has to be a separate map since the prefix can be
+ // the same as the file name.
+ //
+ // auto i (pm.find (f));
+
+ // Find the most qualified prefix of which we are a
+ // sub-path.
+ //
+ auto i (pm.end ());
+
+ if (!pm.empty ())
+ {
+ const dir_path& d (f.directory ());
+ i = pm.upper_bound (d);
+
+ // Get the greatest less than, if any. We might
+ // still not be a sub. Note also that we still
+                  // have to check the last element if upper_bound()
+ // returned end().
+ //
+ if (i == pm.begin () || !d.sub ((--i)->first))
+ i = pm.end ();
+ }
+
+ if (i == pm.end ())
+ fail << "unable to map presumably auto-generated header '"
+ << f << "' to a project";
+
+ f = i->second / f;
+ }
+
+ level6 ([&]{trace << "injecting " << f;});
+
+ // Split the name into its directory part, the name part, and
+ // extension. Here we can assume the name part is a valid
+ // filesystem name.
+ //
+ // Note that if the file has no extension, we record an empty
+ // extension rather than NULL (which would signify that the
+ // default extension should be added).
+ //
+ dir_path d (f.directory ());
+ string n (f.leaf ().base ().string ());
+ const char* es (f.extension ());
+ const string* e (&extension_pool.find (es != nullptr ? es : ""));
+
+ // Determine the target type.
+ //
+ const target_type* tt (nullptr);
+
+ // See if this directory is part of any project out_root
+ // hierarchy. Note that this will miss all the headers
+ // that come from src_root (so they will be treated as
+ // generic C headers below). Generally, we don't have
+ // the ability to determine that some file belongs to
+ // src_root of some project. But that's not a problem
+ // for our purposes: it is only important for us to
+ // accurately determine target types for headers that
+ // could be auto-generated.
+ //
+ scope& b (scopes.find (d));
+ if (b.root_scope () != nullptr)
+ tt = map_extension (b, n, *e);
+
+ // If it is outside any project, or the project doesn't have
+ // such an extension, assume it is a plain old C header.
+ //
+ if (tt == nullptr)
+ tt = &h::static_type;
+
+ // Find or insert target.
+ //
+ path_target& pt (
+ static_cast<path_target&> (search (*tt, d, n, e, &ds)));
+
+ // Assign path.
+ //
+ if (pt.path ().empty ())
+ pt.path (move (f));
+
+ // Match to a rule.
+ //
+ build2::match (a, pt);
+
+ // Update it.
+ //
+ // There would normally be a lot of headers for every source
+ // file (think all the system headers) and this can get
+ // expensive. At the same time, most of these headers are
+              // existing files that will never be updated (again,
+ // system headers, for example) and the rule that will match
+ // them is fallback file_rule. That rule has an optimization
+ // in that it returns noop_recipe (which causes the target
+ // state to be automatically set to unchanged) if the file
+ // is known to be up to date.
+ //
+ if (pt.state () != target_state::unchanged)
+ {
+ // We only want to restart if our call to execute() actually
+ // caused an update. In particular, the target could already
+ // have been in target_state::changed because of a dependency
+ // extraction run for some other source file.
+ //
+ target_state os (pt.state ());
+ target_state ns (execute_direct (a, pt));
+
+ if (ns != os && ns != target_state::unchanged)
+ {
+ level6 ([&]{trace << "updated " << pt << ", restarting";});
+ restart = true;
+ }
+ }
+
+ // Add to our prerequisite target list.
+ //
+ t.prerequisite_targets.push_back (&pt);
+ skip_count++;
+ }
+ }
+
+ // We may not have read all the output (e.g., due to a restart),
+ // so close the file descriptor before waiting to avoid blocking
+ // the other end.
+ //
+ is.close ();
+
+ // We assume the child process issued some diagnostics.
+ //
+ if (!pr.wait ())
+ {
+            // In case of a restart, we closed our end of the pipe early
+ // which might have caused the other end to fail. So far we
+ // experienced this on Fedora 23 with GCC 5.3.1 and there were
+ // no diagnostics issued, just the non-zero exit status. If we
+ // do get diagnostics, then we will have to read and discard the
+ // output until eof.
+ //
+ if (!restart)
+ throw failed ();
+ }
+ }
+ catch (const process_error& e)
+ {
+ error << "unable to execute " << args[0] << ": " << e.what ();
+
+ // In a multi-threaded program that fork()'ed but did not exec(),
+ // it is unwise to try to do any kind of cleanup (like unwinding
+ // the stack and running destructors).
+ //
+ if (e.child ())
+ exit (1);
+
+ throw failed ();
+ }
+ }
+ }
+
+ target_state compile::
+ perform_update (action a, target& xt)
+ {
+ path_target& t (static_cast<path_target&> (xt));
+ cxx* s (execute_prerequisites<cxx> (a, t, t.mtime ()));
+
+ if (s == nullptr)
+ return target_state::unchanged;
+
+ // Translate paths to relative (to working directory) ones. This
+ // results in easier to read diagnostics.
+ //
+ path relo (relative (t.path ()));
+ path rels (relative (s->path ()));
+
+ scope& rs (t.root_scope ());
+ const string& cxx (as<string> (*rs["config.cxx"]));
+
+ cstrings args {cxx.c_str ()};
+
+ // Add cxx.export.poptions from prerequisite libraries. Note that
+ // here we don't need to see group members (see apply()).
+ //
+ for (prerequisite& p: group_prerequisites (t))
+ {
+ target& pt (*p.target); // Already searched and matched.
+
+ if (pt.is_a<lib> () || pt.is_a<liba> () || pt.is_a<libso> ())
+ append_lib_options (args, pt, "cxx.export.poptions");
+ }
+
+ append_options (args, t, "cxx.poptions");
+ append_options (args, t, "cxx.coptions");
+
+ string std; // Storage.
+ append_std (args, t, std);
+
+ if (t.is_a<objso> ())
+ args.push_back ("-fPIC");
+
+ args.push_back ("-o");
+ args.push_back (relo.string ().c_str ());
+
+ args.push_back ("-c");
+ args.push_back (rels.string ().c_str ());
+
+ args.push_back (nullptr);
+
+ if (verb >= 2)
+ print_process (args);
+ else if (verb)
+ text << "c++ " << *s;
+
+ try
+ {
+ process pr (args.data ());
+
+ if (!pr.wait ())
+ throw failed ();
+
+ // Should we go to the filesystem and get the new mtime? We
+ // know the file has been modified, so instead just use the
+ // current clock time. It has the advantage of having the
+ // subseconds precision.
+ //
+ t.mtime (system_clock::now ());
+ return target_state::changed;
+ }
+ catch (const process_error& e)
+ {
+ error << "unable to execute " << args[0] << ": " << e.what ();
+
+ // In a multi-threaded program that fork()'ed but did not exec(),
+ // it is unwise to try to do any kind of cleanup (like unwinding
+ // the stack and running destructors).
+ //
+ if (e.child ())
+ exit (1);
+
+ throw failed ();
+ }
+ }
+
+ compile compile::instance;
+ }
+}
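For illustration, a minimal standalone sketch of the dependency-line tokenization implemented by next() above: it splits one line of g++ -M output into prerequisite paths, honoring backslash-escaped spaces and the trailing line-continuation backslash. The next_prereq() name and the sample line are invented for this example and are not part of the commit.

#include <string>
#include <cstddef>  // size_t
#include <iostream>

static std::string
next_prereq (const std::string& l, std::size_t& p)
{
  std::size_t n (l.size ());

  for (; p != n && l[p] == ' '; p++) ;  // Skip leading spaces.

  std::string r;
  for (; p != n && l[p] != ' '; p++)    // Scan one prerequisite.
  {
    char c (l[p]);

    if (c == '\\')                      // Unescape "\ " and friends.
      c = l[++p];

    r += c;
  }

  for (; p != n && l[p] == ' '; p++) ;  // Skip trailing spaces.

  if (p == n - 1 && l[p] == '\\')       // Swallow the continuation '\'.
    p++;

  return r;
}

int
main ()
{
  std::string l ("*: /usr/include/stdio.h my\\ header.hxx \\");

  // Prints "/usr/include/stdio.h" and "my header.hxx"; 3 skips "*: ".
  //
  for (std::size_t p (3); p != l.size (); )
    std::cout << next_prereq (l, p) << '\n';
}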
diff --git a/build2/cxx/install b/build2/cxx/install
new file mode 100644
index 0000000..154a62a
--- /dev/null
+++ b/build2/cxx/install
@@ -0,0 +1,29 @@
+// file : build2/cxx/install -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#ifndef BUILD2_CXX_INSTALL
+#define BUILD2_CXX_INSTALL
+
+#include <build2/types>
+#include <build2/install/rule>
+
+namespace build2
+{
+ namespace cxx
+ {
+ class install: public build2::install::file_rule
+ {
+ public:
+ virtual target*
+ filter (action, target&, prerequisite_member) const;
+
+ virtual match_result
+ match (action, target&, const std::string&) const;
+
+ static install instance;
+ };
+ }
+}
+
+#endif // BUILD2_CXX_INSTALL
diff --git a/build2/cxx/install.cxx b/build2/cxx/install.cxx
new file mode 100644
index 0000000..1bde9ec
--- /dev/null
+++ b/build2/cxx/install.cxx
@@ -0,0 +1,66 @@
+// file : build2/cxx/install.cxx -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#include <build2/cxx/install>
+
+#include <build2/bin/target>
+
+#include <build2/cxx/target>
+#include <build2/cxx/link>
+
+using namespace std;
+
+namespace build2
+{
+ namespace cxx
+ {
+ using namespace bin;
+
+ target* install::
+ filter (action a, target& t, prerequisite_member p) const
+ {
+ if (t.is_a<exe> ())
+ {
+ // Don't install executable's prerequisite headers.
+ //
+ if (p.is_a<hxx> () || p.is_a<ixx> () || p.is_a<txx> () || p.is_a<h> ())
+ return nullptr;
+ }
+
+ // If this is a shared library prerequisite, install it as long as it
+ // is in the same amalgamation as we are.
+ //
+ if ((t.is_a<exe> () || t.is_a<libso> ()) &&
+ (p.is_a<lib> () || p.is_a<libso> ()))
+ {
+ target* pt (&p.search ());
+
+ // If this is the lib{} group, pick a member which we would link.
+ //
+ if (lib* l = pt->is_a<lib> ())
+ pt = &link::link_member (*l, link::link_order (t));
+
+ if (pt->is_a<libso> ()) // Can be liba{}.
+ return pt->in (t.weak_scope ()) ? pt : nullptr;
+ }
+
+ return file_rule::filter (a, t, p);
+ }
+
+ match_result install::
+ match (action a, target& t, const std::string& hint) const
+ {
+ // @@ How do we split the hint between the two?
+ //
+
+ // We only want to handle installation if we are also the
+ // ones building this target. So first run link's match().
+ //
+ match_result r (link::instance.match (a, t, hint));
+ return r ? install::file_rule::match (a, t, "") : r;
+ }
+
+ install install::instance;
+ }
+}
diff --git a/build2/cxx/link b/build2/cxx/link
new file mode 100644
index 0000000..c7f7019
--- /dev/null
+++ b/build2/cxx/link
@@ -0,0 +1,70 @@
+// file : build2/cxx/link -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#ifndef BUILD2_CXX_LINK
+#define BUILD2_CXX_LINK
+
+#include <vector>
+
+#include <butl/optional>
+
+#include <build2/types>
+#include <build2/rule>
+
+#include <build2/bin/target>
+
+namespace build2
+{
+ namespace cxx
+ {
+ class link: public rule
+ {
+ public:
+ virtual match_result
+ match (action, target&, const std::string& hint) const;
+
+ virtual recipe
+ apply (action, target&, const match_result&) const;
+
+ static target_state
+ perform_update (action, target&);
+
+ static link instance;
+
+ public:
+ enum class type {e, a, so};
+ enum class order {a, so, a_so, so_a};
+
+ static type
+ link_type (target& t)
+ {
+ return t.is_a<bin::exe> ()
+ ? type::e
+ : (t.is_a<bin::liba> () ? type::a : type::so);
+ }
+
+ static order
+ link_order (target&);
+
+ // Determine the library member (liba or libso) to link.
+ //
+ static target&
+ link_member (bin::lib&, order);
+
+ private:
+ friend class compile;
+
+ using search_paths = std::vector<dir_path>;
+ using search_paths_cache = butl::optional<search_paths>;
+
+ static target*
+ search_library (search_paths_cache&, prerequisite&);
+
+ static search_paths
+ extract_library_paths (scope&);
+ };
+ }
+}
+
+#endif // BUILD2_CXX_LINK
diff --git a/build2/cxx/link.cxx b/build2/cxx/link.cxx
new file mode 100644
index 0000000..96584d9
--- /dev/null
+++ b/build2/cxx/link.cxx
@@ -0,0 +1,875 @@
+// file : build2/cxx/link.cxx -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#include <build2/cxx/link>
+
+#include <vector>
+#include <string>
+#include <cstddef> // size_t
+#include <cstdlib> // exit()
+#include <utility> // move()
+
+#include <butl/process>
+#include <butl/utility> // reverse_iterate
+#include <butl/fdstream>
+#include <butl/optional>
+#include <butl/path-map>
+#include <butl/filesystem>
+
+#include <build2/types>
+#include <build2/scope>
+#include <build2/variable>
+#include <build2/algorithm>
+#include <build2/diagnostics>
+#include <build2/context>
+
+#include <build2/bin/target>
+#include <build2/cxx/target>
+
+#include <build2/cxx/utility>
+
+using namespace std;
+using namespace butl;
+
+namespace build2
+{
+ namespace cxx
+ {
+ using namespace bin;
+
+ link::order link::
+ link_order (target& t)
+ {
+ const char* var;
+
+ switch (link_type (t))
+ {
+ case type::e: var = "bin.exe.lib"; break;
+ case type::a: var = "bin.liba.lib"; break;
+ case type::so: var = "bin.libso.lib"; break;
+ }
+
+ const auto& v (as<strings> (*t[var]));
+ return v[0] == "shared"
+ ? v.size () > 1 && v[1] == "static" ? order::so_a : order::so
+ : v.size () > 1 && v[1] == "shared" ? order::a_so : order::a;
+ }
+
+ target& link::
+ link_member (bin::lib& l, order lo)
+ {
+ bool lso (true);
+ const string& at (as<string> (*l["bin.lib"])); // Available types.
+
+ switch (lo)
+ {
+ case order::a:
+ case order::a_so:
+ lso = false; // Fall through.
+ case order::so:
+ case order::so_a:
+ {
+ if (lso ? at == "static" : at == "shared")
+ {
+ if (lo == order::a_so || lo == order::so_a)
+ lso = !lso;
+ else
+ fail << (lso ? "shared" : "static") << " build of " << l
+ << " is not available";
+ }
+ }
+ }
+
+ target* r (lso ? static_cast<target*> (l.so) : l.a);
+
+ if (r == nullptr)
+ r = &search (lso ? libso::static_type : liba::static_type,
+ prerequisite_key {nullptr, l.key (), nullptr});
+
+ return *r;
+ }
+
+ link::search_paths link::
+ extract_library_paths (scope& bs)
+ {
+ search_paths r;
+ scope& rs (*bs.root_scope ());
+
+ // Extract user-supplied search paths (i.e., -L).
+ //
+ if (auto l = bs["cxx.loptions"])
+ {
+ const auto& v (as<strings> (*l));
+
+ for (auto i (v.begin ()), e (v.end ()); i != e; ++i)
+ {
+ // -L can either be in the "-Lfoo" or "-L foo" form.
+ //
+ dir_path d;
+ if (*i == "-L")
+ {
+ if (++i == e)
+ break; // Let the compiler complain.
+
+ d = dir_path (*i);
+ }
+ else if (i->compare (0, 2, "-L") == 0)
+ d = dir_path (*i, 2, string::npos);
+ else
+ continue;
+
+ // Ignore relative paths. Or maybe we should warn?
+ //
+ if (!d.relative ())
+ r.push_back (move (d));
+ }
+ }
+
+ // Extract system search paths.
+ //
+ cstrings args;
+ string std_storage;
+
+ args.push_back (as<string> (*rs["config.cxx"]).c_str ());
+ append_options (args, bs, "cxx.coptions");
+ append_std (args, bs, std_storage);
+ append_options (args, bs, "cxx.loptions");
+ args.push_back ("-print-search-dirs");
+ args.push_back (nullptr);
+
+ if (verb >= 3)
+ print_process (args);
+
+ string l;
+ try
+ {
+ process pr (args.data (), 0, -1); // Open pipe to stdout.
+ ifdstream is (pr.in_ofd);
+
+ while (!is.eof ())
+ {
+ string s;
+ getline (is, s);
+
+ if (is.fail () && !is.eof ())
+ fail << "error reading C++ compiler -print-search-dirs output";
+
+ if (s.compare (0, 12, "libraries: =") == 0)
+ {
+ l.assign (s, 12, string::npos);
+ break;
+ }
+ }
+
+ is.close (); // Don't block.
+
+ if (!pr.wait ())
+ throw failed ();
+ }
+ catch (const process_error& e)
+ {
+ error << "unable to execute " << args[0] << ": " << e.what ();
+
+ if (e.child ())
+ exit (1);
+
+ throw failed ();
+ }
+
+ if (l.empty ())
+ fail << "unable to extract C++ compiler system library paths";
+
+ // Now the fun part: figuring out which delimiter is used.
+ // Normally it is ':' but on Windows it is ';' (or can be;
+ // who knows for sure). Also note that these paths are
+ // absolute (or should be). So here is what we are going
+ // to do: first look for ';'. If found, then that's the
+ // delimiter. If not found, then there are two cases:
+ // it is either a single Windows path or the delimiter
+ // is ':'. To distinguish these two cases we check if
+ // the path starts with a Windows drive.
+ //
+ char d (';');
+ string::size_type e (l.find (d));
+
+ if (e == string::npos &&
+ (l.size () < 2 || l[0] == '/' || l[1] != ':'))
+ {
+ d = ':';
+ e = l.find (d);
+ }
+
+ // Now chop it up. We already have the position of the
+ // first delimiter (if any).
+ //
+ for (string::size_type b (0);; e = l.find (d, (b = e + 1)))
+ {
+ r.emplace_back (l, b, (e != string::npos ? e - b : e));
+ r.back ().normalize ();
+
+ if (e == string::npos)
+ break;
+ }
+
+ return r;
+ }
+
+ target* link::
+ search_library (search_paths_cache& spc, prerequisite& p)
+ {
+ tracer trace ("cxx::link::search_library");
+
+ // First check the cache.
+ //
+ if (p.target != nullptr)
+ return p.target;
+
+ bool l (p.is_a<lib> ());
+ const string* ext (l ? nullptr : p.ext); // Only for liba/libso.
+
+ // Then figure out what we need to search for.
+ //
+
+ // liba
+ //
+ path an;
+ const string* ae;
+
+ if (l || p.is_a<liba> ())
+ {
+ an = path ("lib" + p.name);
+
+ // Note that p.scope should be the same as the target's for
+ // which we are looking for this library. The idea here is
+ // that we have to use the same "extension configuration" as
+ // the target's.
+ //
+ ae = ext == nullptr
+ ? &liba::static_type.extension (p.key ().tk, p.scope)
+ : ext;
+
+ if (!ae->empty ())
+ {
+ an += '.';
+ an += *ae;
+ }
+ }
+
+ // libso
+ //
+ path sn;
+ const string* se;
+
+ if (l || p.is_a<libso> ())
+ {
+ sn = path ("lib" + p.name);
+ se = ext == nullptr
+ ? &libso::static_type.extension (p.key ().tk, p.scope)
+ : ext;
+
+ if (!se->empty ())
+ {
+ sn += '.';
+ sn += *se;
+ }
+ }
+
+ // Now search.
+ //
+ if (!spc)
+ spc = extract_library_paths (p.scope);
+
+ liba* a (nullptr);
+ libso* s (nullptr);
+
+ path f; // Reuse the buffer.
+ const dir_path* pd;
+ for (const dir_path& d: *spc)
+ {
+ timestamp mt;
+
+ // liba
+ //
+ if (!an.empty ())
+ {
+ f = d;
+ f /= an;
+
+ if ((mt = file_mtime (f)) != timestamp_nonexistent)
+ {
+ // Enter the target. Note that because the search paths are
+ // normalized, the result is automatically normalized as well.
+ //
+ a = &targets.insert<liba> (d, p.name, ae, trace);
+
+ if (a->path ().empty ())
+ a->path (move (f));
+
+ a->mtime (mt);
+ }
+ }
+
+ // libso
+ //
+ if (!sn.empty ())
+ {
+ f = d;
+ f /= sn;
+
+ if ((mt = file_mtime (f)) != timestamp_nonexistent)
+ {
+ s = &targets.insert<libso> (d, p.name, se, trace);
+
+ if (s->path ().empty ())
+ s->path (move (f));
+
+ s->mtime (mt);
+ }
+ }
+
+ if (a != nullptr || s != nullptr)
+ {
+ pd = &d;
+ break;
+ }
+ }
+
+ if (a == nullptr && s == nullptr)
+ return nullptr;
+
+ if (l)
+ {
+ // Enter the target group.
+ //
+ lib& l (targets.insert<lib> (*pd, p.name, p.ext, trace));
+
+ // It should automatically link-up to the members we have found.
+ //
+ assert (l.a == a);
+ assert (l.so == s);
+
+ // Set the bin.lib variable to indicate what's available.
+ //
+ const char* bl (a != nullptr
+ ? (s != nullptr ? "both" : "static")
+ : "shared");
+ l.assign ("bin.lib") = bl;
+
+ p.target = &l;
+ }
+ else
+ p.target = p.is_a<liba> () ? static_cast<target*> (a) : s;
+
+ return p.target;
+ }
+
+ match_result link::
+ match (action a, target& t, const string& hint) const
+ {
+ tracer trace ("cxx::link::match");
+
+ // @@ TODO:
+ //
+ // - if path already assigned, verify extension?
+ //
+ // @@ Q:
+ //
+ // - if there is no .o, are we going to check if the one derived
+      //     from target exists or can be built? A: No.
+ // What if there is a library. Probably ok if .a, not if .so.
+ // (i.e., a utility library).
+ //
+
+ type lt (link_type (t));
+
+ // Scan prerequisites and see if we can work with what we've got.
+ //
+ bool seen_cxx (false), seen_c (false), seen_obj (false),
+ seen_lib (false);
+
+ for (prerequisite_member p: group_prerequisite_members (a, t))
+ {
+ if (p.is_a<cxx> ())
+ {
+ seen_cxx = seen_cxx || true;
+ }
+ else if (p.is_a<c> ())
+ {
+ seen_c = seen_c || true;
+ }
+ else if (p.is_a<obja> ())
+ {
+ if (lt == type::so)
+ fail << "shared library " << t << " prerequisite " << p
+ << " is static object";
+
+ seen_obj = seen_obj || true;
+ }
+ else if (p.is_a<objso> () ||
+ p.is_a<obj> ())
+ {
+ seen_obj = seen_obj || true;
+ }
+ else if (p.is_a<liba> () ||
+ p.is_a<libso> () ||
+ p.is_a<lib> ())
+ {
+ seen_lib = seen_lib || true;
+ }
+ }
+
+ // We will only chain a C source if there is also a C++ source or we
+ // were explicitly told to.
+ //
+ if (seen_c && !seen_cxx && hint < "cxx")
+ {
+ level4 ([&]{trace << "c prerequisite(s) without c++ or hint";});
+ return nullptr;
+ }
+
+ // If we have any prerequisite libraries (which also means that
+ // we match), search/import and pre-match them to implement the
+ // "library meta-information protocol". Don't do this if we are
+ // called from the install rule just to check if we would match.
+ //
+ if (seen_lib && lt != type::e &&
+ a.operation () != install_id && a.outer_operation () != install_id)
+ {
+ if (t.group != nullptr)
+ t.group->prerequisite_targets.clear (); // lib{}'s
+
+ search_paths_cache lib_paths; // Extract lazily.
+
+ for (prerequisite_member p: group_prerequisite_members (a, t))
+ {
+ if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libso> ())
+ {
+ target* pt (nullptr);
+
+ // Handle imported libraries.
+ //
+ if (p.proj () != nullptr)
+ pt = search_library (lib_paths, p.prerequisite);
+
+ if (pt == nullptr)
+ {
+ pt = &p.search ();
+ match_only (a, *pt);
+ }
+
+ // If the prerequisite came from the lib{} group, then also
+ // add it to lib's prerequisite_targets.
+ //
+ if (!p.prerequisite.belongs (t))
+ t.group->prerequisite_targets.push_back (pt);
+
+ t.prerequisite_targets.push_back (pt);
+ }
+ }
+ }
+
+ return seen_cxx || seen_c || seen_obj || seen_lib ? &t : nullptr;
+ }
+
+ recipe link::
+ apply (action a, target& xt, const match_result&) const
+ {
+ tracer trace ("cxx::link::apply");
+
+ path_target& t (static_cast<path_target&> (xt));
+
+ type lt (link_type (t));
+ bool so (lt == type::so);
+ order lo (link_order (t));
+
+ // Derive file name from target name.
+ //
+ if (t.path ().empty ())
+ {
+ auto l (t["extension"]);
+ const char* e (l ? as<string> (*l).c_str () : nullptr);
+
+ switch (lt)
+ {
+ case type::e:
+ {
+ t.derive_path (e != nullptr ? e : "");
+ break;
+ }
+ case type::a:
+ case type::so:
+ {
+ auto l (t["bin.libprefix"]);
+ t.derive_path (e != nullptr ? e : (lt == type::a ? "a" : "so"),
+ l ? as<string> (*l).c_str () : "lib");
+ break;
+ }
+ }
+ }
+
+ t.prerequisite_targets.clear (); // See lib pre-match in match() above.
+
+ // Inject dependency on the output directory.
+ //
+ inject_parent_fsdir (a, t);
+
+ // We may need the project roots for rule chaining (see below).
+ // We will resolve them lazily only if needed.
+ //
+ scope* root (nullptr);
+ const dir_path* out_root (nullptr);
+ const dir_path* src_root (nullptr);
+
+ search_paths_cache lib_paths; // Extract lazily.
+
+ // Process prerequisites: do rule chaining for C and C++ source
+ // files as well as search and match.
+ //
+ // When cleaning, ignore prerequisites that are not in the same
+ // or a subdirectory of our strong amalgamation.
+ //
+ const dir_path* amlg (
+ a.operation () != clean_id
+ ? nullptr
+ : &t.strong_scope ().out_path ());
+
+ for (prerequisite_member p: group_prerequisite_members (a, t))
+ {
+ bool group (!p.prerequisite.belongs (t)); // Group's prerequisite.
+ target* pt (nullptr);
+
+ if (!p.is_a<c> () && !p.is_a<cxx> ())
+ {
+ // Handle imported libraries.
+ //
+ if (p.proj () != nullptr)
+ pt = search_library (lib_paths, p.prerequisite);
+
+ // The rest is the same basic logic as in search_and_match().
+ //
+ if (pt == nullptr)
+ pt = &p.search ();
+
+ if (a.operation () == clean_id && !pt->dir.sub (*amlg))
+ continue; // Skip.
+
+ // If this is the obj{} or lib{} target group, then pick the
+ // appropriate member and make sure it is searched and matched.
+ //
+ if (obj* o = pt->is_a<obj> ())
+ {
+ pt = so ? static_cast<target*> (o->so) : o->a;
+
+ if (pt == nullptr)
+ pt = &search (so ? objso::static_type : obja::static_type,
+ p.key ());
+ }
+ else if (lib* l = pt->is_a<lib> ())
+ {
+ pt = &link_member (*l, lo);
+ }
+
+ build2::match (a, *pt);
+ t.prerequisite_targets.push_back (pt);
+ continue;
+ }
+
+ if (root == nullptr)
+ {
+ // Which scope shall we use to resolve the root? Unlikely,
+ // but possible, the prerequisite is from a different project
+ // altogether. So we are going to use the target's project.
+ //
+ root = &t.root_scope ();
+ out_root = &root->out_path ();
+ src_root = &root->src_path ();
+ }
+
+ const prerequisite_key& cp (p.key ()); // c(xx){} prerequisite key.
+ const target_type& o_type (
+ group
+ ? obj::static_type
+ : (so ? objso::static_type : obja::static_type));
+
+ // Come up with the obj*{} target. The c(xx){} prerequisite
+ // directory can be relative (to the scope) or absolute. If it is
+ // relative, then use it as is. If it is absolute, then translate
+ // it to the corresponding directory under out_root. While the
+ // c(xx){} directory is most likely under src_root, it is also
+ // possible it is under out_root (e.g., generated source).
+ //
+ dir_path d;
+ {
+ const dir_path& cpd (*cp.tk.dir);
+
+ if (cpd.relative () || cpd.sub (*out_root))
+ d = cpd;
+ else
+ {
+ if (!cpd.sub (*src_root))
+ fail << "out of project prerequisite " << cp <<
+ info << "specify corresponding " << o_type.name << "{} "
+ << "target explicitly";
+
+ d = *out_root / cpd.leaf (*src_root);
+ }
+ }
+
+ target& ot (search (o_type, d, *cp.tk.name, nullptr, cp.scope));
+
+ // If we are cleaning, check that this target is in the same or
+ // a subdirectory of our strong amalgamation.
+ //
+ if (a.operation () == clean_id && !ot.dir.sub (*amlg))
+ {
+ // If we shouldn't clean obj{}, then it is fair to assume
+ // we shouldn't clean cxx{} either (generated source will
+ // be in the same directory as obj{} and if not, well, go
+ // find yourself another build system ;-)).
+ //
+ continue; // Skip.
+ }
+
+ // If we have created the obj{} target group, pick one of its
+ // members; the rest would be primarily concerned with it.
+ //
+ if (group)
+ {
+ obj& o (static_cast<obj&> (ot));
+ pt = so ? static_cast<target*> (o.so) : o.a;
+
+ if (pt == nullptr)
+ pt = &search (so ? objso::static_type : obja::static_type,
+ o.dir, o.name, o.ext, nullptr);
+ }
+ else
+ pt = &ot;
+
+ // If this obj*{} target already exists, then it needs to be
+ // "compatible" with what we are doing here.
+ //
+ // This gets a bit tricky. We need to make sure the source files
+ // are the same which we can only do by comparing the targets to
+ // which they resolve. But we cannot search the ot's prerequisites
+ // -- only the rule that matches can. Note, however, that if all
+ // this works out, then our next step is to match the obj*{}
+ // target. If things don't work out, then we fail, in which case
+ // searching and matching speculatively doesn't really hurt.
+ //
+ bool found (false);
+ for (prerequisite_member p1:
+ reverse_group_prerequisite_members (a, *pt))
+ {
+ // Ignore some known target types (fsdir, headers, libraries).
+ //
+ if (p1.is_a<fsdir> () ||
+ p1.is_a<h> () ||
+ (p.is_a<cxx> () && (p1.is_a<hxx> () ||
+ p1.is_a<ixx> () ||
+ p1.is_a<txx> ())) ||
+ p1.is_a<lib> () ||
+ p1.is_a<liba> () ||
+ p1.is_a<libso> ())
+ {
+ continue;
+ }
+
+ if (!p1.is_a<cxx> ())
+ fail << "synthesized target for prerequisite " << cp
+ << " would be incompatible with existing target " << *pt <<
+ info << "unexpected existing prerequisite type " << p1 <<
+ info << "specify corresponding obj{} target explicitly";
+
+ if (!found)
+ {
+ build2::match (a, *pt); // Now p1 should be resolved.
+
+ // Searching our own prerequisite is ok.
+ //
+ if (&p.search () != &p1.search ())
+ fail << "synthesized target for prerequisite " << cp << " would "
+ << "be incompatible with existing target " << *pt <<
+ info << "existing prerequisite " << p1 << " does not match "
+ << cp <<
+ info << "specify corresponding " << o_type.name << "{} target "
+ << "explicitly";
+
+ found = true;
+ // Check the rest of the prerequisites.
+ }
+ }
+
+ if (!found)
+ {
+ // Note: add the source to the group, not the member.
+ //
+ ot.prerequisites.emplace_back (p.as_prerequisite (trace));
+
+ // Add our lib*{} prerequisites to the object file (see
+ // cxx.export.poptions above for details). Note: no need
+ // to go into group members.
+ //
+ // Initially, we were only adding imported libraries, but
+ // there is a problem with this approach: the non-imported
+ // library might depend on the imported one(s) which we will
+ // never "see" unless we start with this library.
+ //
+ for (prerequisite& p: group_prerequisites (t))
+ {
+ if (p.is_a<lib> () || p.is_a<liba> () || p.is_a<libso> ())
+ ot.prerequisites.emplace_back (p);
+ }
+
+ build2::match (a, *pt);
+ }
+
+ t.prerequisite_targets.push_back (pt);
+ }
+
+ switch (a)
+ {
+ case perform_update_id: return &perform_update;
+ case perform_clean_id: return &perform_clean;
+ default: return noop_recipe; // Configure update.
+ }
+ }
+
+ target_state link::
+ perform_update (action a, target& xt)
+ {
+ path_target& t (static_cast<path_target&> (xt));
+
+ type lt (link_type (t));
+ bool so (lt == type::so);
+
+ if (!execute_prerequisites (a, t, t.mtime ()))
+ return target_state::unchanged;
+
+ // Translate paths to relative (to working directory) ones. This
+ // results in easier to read diagnostics.
+ //
+ path relt (relative (t.path ()));
+
+ scope& rs (t.root_scope ());
+ cstrings args;
+
+ // Storage.
+ //
+ string std;
+ string soname;
+ strings sargs;
+
+ if (lt == type::a)
+ {
+ //@@ ranlib
+ //
+ args.push_back ("ar");
+ args.push_back ("-rc");
+ args.push_back (relt.string ().c_str ());
+ }
+ else
+ {
+ args.push_back (as<string> (*rs["config.cxx"]).c_str ());
+ append_options (args, t, "cxx.coptions");
+ append_std (args, t, std);
+
+ if (so)
+ args.push_back ("-shared");
+
+ args.push_back ("-o");
+ args.push_back (relt.string ().c_str ());
+
+ // Set soname.
+ //
+ if (so)
+ {
+ soname = "-Wl,-soname," + relt.leaf ().string ();
+ args.push_back (soname.c_str ());
+ }
+
+ // Add rpaths. First the ones specified by the user so that they
+ // take precedence.
+ //
+ if (auto l = t["bin.rpath"])
+ for (const string& p: as<strings> (*l))
+ sargs.push_back ("-Wl,-rpath," + p);
+
+ // Then the paths of the shared libraries we are linking to.
+ //
+ for (target* pt: t.prerequisite_targets)
+ {
+ if (libso* ls = pt->is_a<libso> ())
+ sargs.push_back (
+ "-Wl,-rpath," + ls->path ().directory ().string ());
+ }
+ }
+
+ size_t oend (sargs.size ()); // Note the end of options.
+
+ for (target* pt: t.prerequisite_targets)
+ {
+ path_target* ppt;
+
+ if ((ppt = pt->is_a<obja> ()) ||
+ (ppt = pt->is_a<objso> ()) ||
+ (ppt = pt->is_a<liba> ()) ||
+ (ppt = pt->is_a<libso> ()))
+ {
+ sargs.push_back (relative (ppt->path ()).string ()); // string()&&
+ }
+ }
+
+ // Finish assembling args from sargs.
+ //
+ for (size_t i (0); i != sargs.size (); ++i)
+ {
+ if (lt != type::a && i == oend)
+ append_options (args, t, "cxx.loptions");
+
+ args.push_back (sargs[i].c_str ());
+ }
+
+ if (lt != type::a)
+ append_options (args, t, "cxx.libs");
+
+ args.push_back (nullptr);
+
+ if (verb >= 2)
+ print_process (args);
+ else if (verb)
+ text << "ld " << t;
+
+ try
+ {
+ process pr (args.data ());
+
+ if (!pr.wait ())
+ throw failed ();
+
+ // Should we go to the filesystem and get the new mtime? We
+ // know the file has been modified, so instead just use the
+ // current clock time. It has the advantage of having the
+ // subseconds precision.
+ //
+ t.mtime (system_clock::now ());
+ return target_state::changed;
+ }
+ catch (const process_error& e)
+ {
+ error << "unable to execute " << args[0] << ": " << e.what ();
+
+ // In a multi-threaded program that fork()'ed but did not exec(),
+ // it is unwise to try to do any kind of cleanup (like unwinding
+ // the stack and running destructors).
+ //
+ if (e.child ())
+ exit (1);
+
+ throw failed ();
+ }
+ }
+
+ link link::instance;
+ }
+}
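For illustration, a minimal standalone sketch of the delimiter heuristic used by extract_library_paths() above to split the compiler's "libraries: =" output: look for ';' first (the Windows case); if absent, fall back to ':' unless the value looks like a single Windows path starting with a drive letter. The split_search_dirs() name and the sample input are invented for this example and are not part of the commit.

#include <string>
#include <vector>
#include <iostream>

static std::vector<std::string>
split_search_dirs (const std::string& l)
{
  std::vector<std::string> r;

  // Assume ';' (Windows). If not found and this doesn't look like a
  // single Windows path (drive letter followed by ':'), assume ':'.
  //
  char d (';');
  std::string::size_type e (l.find (d));

  if (e == std::string::npos &&
      (l.size () < 2 || l[0] == '/' || l[1] != ':'))
  {
    d = ':';
    e = l.find (d);
  }

  // Chop it up using the position of the first delimiter (if any).
  //
  for (std::string::size_type b (0);; e = l.find (d, (b = e + 1)))
  {
    r.push_back (l.substr (b, e != std::string::npos ? e - b : e));

    if (e == std::string::npos)
      break;
  }

  return r;
}

int
main ()
{
  // Prints each directory on its own line.
  //
  for (const std::string& d:
         split_search_dirs ("/usr/lib/gcc/x86_64-linux-gnu/5:/usr/lib:/lib"))
    std::cout << d << '\n';
}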
diff --git a/build2/cxx/module b/build2/cxx/module
new file mode 100644
index 0000000..c712d0b
--- /dev/null
+++ b/build2/cxx/module
@@ -0,0 +1,23 @@
+// file : build2/cxx/module -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#ifndef BUILD2_CXX_MODULE
+#define BUILD2_CXX_MODULE
+
+#include <build2/types>
+#include <build2/utility>
+
+#include <build2/module>
+
+namespace build2
+{
+ namespace cxx
+ {
+ extern "C" bool
+ cxx_init (
+ scope&, scope&, const location&, unique_ptr<module>&, bool, bool);
+ }
+}
+
+#endif // BUILD2_CXX_MODULE
diff --git a/build2/cxx/module.cxx b/build2/cxx/module.cxx
new file mode 100644
index 0000000..4829a17
--- /dev/null
+++ b/build2/cxx/module.cxx
@@ -0,0 +1,230 @@
+// file : build2/cxx/module.cxx -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#include <build2/cxx/module>
+
+#include <butl/process>
+#include <butl/fdstream>
+
+#include <build2/scope>
+#include <build2/diagnostics>
+
+#include <build2/config/utility>
+#include <build2/install/utility>
+
+#include <build2/bin/target>
+
+#include <build2/cxx/target>
+#include <build2/cxx/compile>
+#include <build2/cxx/link>
+#include <build2/cxx/install>
+
+using namespace std;
+using namespace butl;
+
+namespace build2
+{
+ namespace cxx
+ {
+ extern "C" bool
+ cxx_init (scope& r,
+ scope& b,
+ const location& loc,
+ std::unique_ptr<module>&,
+ bool first,
+ bool)
+ {
+ tracer trace ("cxx::init");
+ level5 ([&]{trace << "for " << b.out_path ();});
+
+ // Initialize the bin module. Only do this if it hasn't already
+ // been loaded so that we don't overwrite user's bin.* settings.
+ //
+ {
+ auto l (b["bin.loaded"]);
+
+ if (!l || !as<bool> (*l))
+ load_module (false, "bin", r, b, loc);
+ }
+
+ // Enter module variables.
+ //
+ // @@ Probably should only be done on load; make sure reset() unloads
+ // modules.
+ //
+ // @@ Should probably cache the variable pointers so we don't have
+ // to keep looking them up.
+ //
+ if (first)
+ {
+ auto& v (var_pool);
+
+ v.find ("config.cxx", string_type); //@@ VAR type
+
+ v.find ("config.cxx.poptions", strings_type);
+ v.find ("config.cxx.coptions", strings_type);
+ v.find ("config.cxx.loptions", strings_type);
+ v.find ("config.cxx.libs", strings_type);
+
+ v.find ("cxx.poptions", strings_type);
+ v.find ("cxx.coptions", strings_type);
+ v.find ("cxx.loptions", strings_type);
+ v.find ("cxx.libs", strings_type);
+
+ v.find ("cxx.export.poptions", strings_type);
+ v.find ("cxx.export.coptions", strings_type);
+ v.find ("cxx.export.loptions", strings_type);
+ v.find ("cxx.export.libs", strings_type);
+
+ v.find ("cxx.std", string_type);
+ }
+
+ // Register target types.
+ //
+ {
+ auto& t (b.target_types);
+
+ t.insert<h> ();
+ t.insert<c> ();
+
+ t.insert<cxx> ();
+ t.insert<hxx> ();
+ t.insert<ixx> ();
+ t.insert<txx> ();
+ }
+
+ // Register rules.
+ //
+ {
+ using namespace bin;
+
+ auto& r (b.rules);
+
+ r.insert<obja> (perform_update_id, "cxx.compile", compile::instance);
+ r.insert<obja> (perform_clean_id, "cxx.compile", compile::instance);
+
+ r.insert<objso> (perform_update_id, "cxx.compile", compile::instance);
+ r.insert<objso> (perform_clean_id, "cxx.compile", compile::instance);
+
+ r.insert<exe> (perform_update_id, "cxx.link", link::instance);
+ r.insert<exe> (perform_clean_id, "cxx.link", link::instance);
+
+ r.insert<liba> (perform_update_id, "cxx.link", link::instance);
+ r.insert<liba> (perform_clean_id, "cxx.link", link::instance);
+
+ r.insert<libso> (perform_update_id, "cxx.link", link::instance);
+ r.insert<libso> (perform_clean_id, "cxx.link", link::instance);
+
+ // Register for configure so that we detect unresolved imports
+      // during configuration rather than later, e.g., during update.
+ //
+ r.insert<obja> (configure_update_id, "cxx.compile", compile::instance);
+ r.insert<objso> (configure_update_id, "cxx.compile", compile::instance);
+
+ r.insert<exe> (configure_update_id, "cxx.link", link::instance);
+ r.insert<liba> (configure_update_id, "cxx.link", link::instance);
+ r.insert<libso> (configure_update_id, "cxx.link", link::instance);
+
+ //@@ Should we check if install module was loaded (see bin)?
+ //
+ r.insert<exe> (perform_install_id, "cxx.install", install::instance);
+ r.insert<liba> (perform_install_id, "cxx.install", install::instance);
+ r.insert<libso> (perform_install_id, "cxx.install", install::instance);
+ }
+
+ // Configure.
+ //
+
+ // config.cxx
+ //
+ if (first)
+ {
+ auto p (config::required (r, "config.cxx", "g++"));
+
+ // If we actually set a new value, test it by trying to execute.
+ //
+ if (p.second)
+ {
+ const string& cxx (as<string> (p.first));
+ const char* args[] = {cxx.c_str (), "-dumpversion", nullptr};
+
+ if (verb >= 2)
+ print_process (args);
+ else if (verb)
+ text << "test " << cxx;
+
+ string ver;
+ try
+ {
+ process pr (args, 0, -1); // Open pipe to stdout.
+ ifdstream is (pr.in_ofd);
+
+ bool r (getline (is, ver));
+
+ if (!r)
+ fail << "unexpected output from " << cxx;
+
+ if (!pr.wait ())
+ throw failed ();
+ }
+ catch (const process_error& e)
+ {
+ error << "unable to execute " << cxx << ": " << e.what ();
+
+ if (e.child ())
+ exit (1);
+
+ throw failed ();
+ }
+
+ if (verb >= 2)
+ text << cxx << " " << ver;
+ }
+ }
+
+ // config.cxx.{p,c,l}options
+ // config.cxx.libs
+ //
+ // These are optional. We also merge them into the corresponding
+ // cxx.* variables.
+ //
+ // The merging part gets a bit tricky if this module has already
+ // been loaded in one of the outer scopes. By doing the straight
+ // append we would just be repeating the same options over and
+ // over. So what we are going to do is only append to a value if
+ // it came from this scope. Then the usage for merging becomes:
+ //
+ // cxx.coptions = <overridable options> # Note: '='.
+ // using cxx
+ // cxx.coptions += <overriding options> # Note: '+='.
+ //
+ if (const value& v = config::optional (r, "config.cxx.poptions"))
+ b.assign ("cxx.poptions") += as<strings> (v);
+
+ if (const value& v = config::optional (r, "config.cxx.coptions"))
+ b.assign ("cxx.coptions") += as<strings> (v);
+
+ if (const value& v = config::optional (r, "config.cxx.loptions"))
+ b.assign ("cxx.loptions") += as<strings> (v);
+
+ if (const value& v = config::optional (r, "config.cxx.libs"))
+ b.assign ("cxx.libs") += as<strings> (v);
+
+ // Configure "installability" of our target types.
+ //
+ {
+ using build2::install::path;
+
+ path<hxx> (b, dir_path ("include")); // Install into install.include.
+ path<ixx> (b, dir_path ("include"));
+ path<txx> (b, dir_path ("include"));
+ path<h> (b, dir_path ("include"));
+ }
+
+ return true;
+ }
+ }
+}
diff --git a/build2/cxx/target b/build2/cxx/target
new file mode 100644
index 0000000..c51c964
--- /dev/null
+++ b/build2/cxx/target
@@ -0,0 +1,78 @@
+// file : build2/cxx/target -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#ifndef BUILD2_CXX_TARGET
+#define BUILD2_CXX_TARGET
+
+#include <build2/target>
+
+namespace build2
+{
+ namespace cxx
+ {
+ class hxx: public file
+ {
+ public:
+ using file::file;
+
+ public:
+ static const target_type static_type;
+ virtual const target_type& dynamic_type () const {return static_type;}
+ };
+
+ class ixx: public file
+ {
+ public:
+ using file::file;
+
+ public:
+ static const target_type static_type;
+ virtual const target_type& dynamic_type () const {return static_type;}
+ };
+
+ class txx: public file
+ {
+ public:
+ using file::file;
+
+ public:
+ static const target_type static_type;
+ virtual const target_type& dynamic_type () const {return static_type;}
+ };
+
+ class cxx: public file
+ {
+ public:
+ using file::file;
+
+ public:
+ static const target_type static_type;
+ virtual const target_type& dynamic_type () const {return static_type;}
+ };
+
+ //@@ TMP
+ //
+ class h: public file
+ {
+ public:
+ using file::file;
+
+ public:
+ static const target_type static_type;
+ virtual const target_type& dynamic_type () const {return static_type;}
+ };
+
+ class c: public file
+ {
+ public:
+ using file::file;
+
+ public:
+ static const target_type static_type;
+ virtual const target_type& dynamic_type () const {return static_type;}
+ };
+ }
+}
+
+#endif // BUILD2_CXX_TARGET
diff --git a/build2/cxx/target.cxx b/build2/cxx/target.cxx
new file mode 100644
index 0000000..0990945
--- /dev/null
+++ b/build2/cxx/target.cxx
@@ -0,0 +1,81 @@
+// file : build2/cxx/target.cxx -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#include <build2/cxx/target>
+
+using namespace std;
+
+namespace build2
+{
+ namespace cxx
+ {
+ constexpr const char ext_var[] = "extension";
+
+ constexpr const char hxx_ext_def[] = "hxx";
+ const target_type hxx::static_type
+ {
+ "hxx",
+ &file::static_type,
+ &target_factory<hxx>,
+ &target_extension_var<ext_var, hxx_ext_def>,
+ &search_file,
+ false
+ };
+
+ constexpr const char ixx_ext_def[] = "ixx";
+ const target_type ixx::static_type
+ {
+ "ixx",
+ &file::static_type,
+ &target_factory<ixx>,
+ &target_extension_var<ext_var, ixx_ext_def>,
+ &search_file,
+ false
+ };
+
+ constexpr const char txx_ext_def[] = "txx";
+ const target_type txx::static_type
+ {
+ "txx",
+ &file::static_type,
+ &target_factory<txx>,
+ &target_extension_var<ext_var, txx_ext_def>,
+ &search_file,
+ false
+ };
+
+ constexpr const char cxx_ext_def[] = "cxx";
+ const target_type cxx::static_type
+ {
+ "cxx",
+ &file::static_type,
+ &target_factory<cxx>,
+ &target_extension_var<ext_var, cxx_ext_def>,
+ &search_file,
+ false
+ };
+
+ constexpr const char h_ext_def[] = "h";
+ const target_type h::static_type
+ {
+ "h",
+ &file::static_type,
+ &target_factory<h>,
+ &target_extension_var<ext_var, h_ext_def>,
+ &search_file,
+ false
+ };
+
+ constexpr const char c_ext_def[] = "c";
+ const target_type c::static_type
+ {
+ "c",
+ &file::static_type,
+ &target_factory<c>,
+ &target_extension_var<ext_var, c_ext_def>,
+ &search_file,
+ false
+ };
+ }
+}
diff --git a/build2/cxx/utility b/build2/cxx/utility
new file mode 100644
index 0000000..b0deb08
--- /dev/null
+++ b/build2/cxx/utility
@@ -0,0 +1,37 @@
+// file : build2/cxx/utility -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#ifndef BUILD2_CXX_UTILITY
+#define BUILD2_CXX_UTILITY
+
+#include <string>
+
+#include <build2/types>
+#include <build2/target>
+
+#include <build2/config/utility>
+
+namespace build2
+{
+ namespace cxx
+ {
+ using config::append_options;
+
+ // T is either target or scope.
+ //
+ template <typename T>
+ void
+ append_std (cstrings& args, T&, std::string& storage);
+
+ // Append library options from one of the cxx.export.* variables
+ // recursively, prerequisite libraries first.
+ //
+ void
+ append_lib_options (cstrings& args, target&, const char* variable);
+ }
+}
+
+#include <build2/cxx/utility.txx>
+
+#endif // BUILD2_CXX_UTILITY
diff --git a/build2/cxx/utility.cxx b/build2/cxx/utility.cxx
new file mode 100644
index 0000000..fead1b4
--- /dev/null
+++ b/build2/cxx/utility.cxx
@@ -0,0 +1,29 @@
+// file : build2/cxx/utility.cxx -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+#include <build2/cxx/utility>
+
+#include <build2/bin/target>
+
+using namespace std;
+
+namespace build2
+{
+ namespace cxx
+ {
+ void
+ append_lib_options (cstrings& args, target& l, const char* var)
+ {
+ using namespace bin;
+
+ for (target* t: l.prerequisite_targets)
+ {
+ if (t->is_a<lib> () || t->is_a<liba> () || t->is_a<libso> ())
+ append_lib_options (args, *t, var);
+ }
+
+ append_options (args, l, var);
+ }
+ }
+}
diff --git a/build2/cxx/utility.txx b/build2/cxx/utility.txx
new file mode 100644
index 0000000..b35649e
--- /dev/null
+++ b/build2/cxx/utility.txx
@@ -0,0 +1,35 @@
+// file : build2/cxx/utility.txx -*- C++ -*-
+// copyright : Copyright (c) 2014-2015 Code Synthesis Ltd
+// license : MIT; see accompanying LICENSE file
+
+using namespace std;
+
+namespace build2
+{
+ namespace cxx
+ {
+ template <typename T>
+ void
+ append_std (cstrings& args, T& t, std::string& s)
+ {
+ if (auto l = t["cxx.std"])
+ {
+ const std::string& v (as<string> (*l));
+
+ // Translate 11 to 0x and 14 to 1y for compatibility with
+ // older versions of the compiler.
+ //
+ s = "-std=c++";
+
+ if (v == "11")
+ s += "0x";
+ else if (v == "14")
+ s += "1y";
+ else
+ s += v;
+
+ args.push_back (s.c_str ());
+ }
+ }
+ }
+}
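For illustration, a minimal standalone sketch of the -std translation performed by append_std() above: the 11 and 14 values are mapped to the 0x and 1y spellings understood by older compilers while any other value is passed through. The translate_std() name is invented for this example and is not part of the commit.

#include <string>
#include <iostream>

static std::string
translate_std (const std::string& v)
{
  std::string s ("-std=c++");

  // Translate 11 to 0x and 14 to 1y for compatibility with older
  // versions of the compiler.
  //
  if (v == "11")
    s += "0x";
  else if (v == "14")
    s += "1y";
  else
    s += v;

  return s;
}

int
main ()
{
  std::cout << translate_std ("11") << '\n'   // -std=c++0x
            << translate_std ("14") << '\n'   // -std=c++1y
            << translate_std ("98") << '\n';  // -std=c++98
}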