diff options
author | Boris Kolpackov <boris@codesynthesis.com> | 2018-07-09 09:48:07 +0200 |
---|---|---|
committer | Karen Arutyunov <karen@codesynthesis.com> | 2018-07-27 14:23:07 +0300 |
commit | 07fdebdbb02fde71d6e656ddd46b967347417502 (patch) | |
tree | 594c2f352499aaac0756e3071a4b7ce2aee0fd34 | |
parent | 8a87a8bc08f0d692f53a0373da3a0a959de13e52 (diff) |
Implement publish command for publishing packages to archive repositories
-rw-r--r-- | bdep/bdep.cli | 5 | ||||
-rw-r--r-- | bdep/bdep.cxx | 22 | ||||
-rw-r--r-- | bdep/buildfile | 50 | ||||
-rw-r--r-- | bdep/git.cxx | 66 | ||||
-rw-r--r-- | bdep/git.hxx | 29 | ||||
-rw-r--r-- | bdep/git.ixx | 28 | ||||
-rw-r--r-- | bdep/project-email.cxx | 69 | ||||
-rw-r--r-- | bdep/publish.cli | 131 | ||||
-rw-r--r-- | bdep/publish.cxx | 461 | ||||
-rw-r--r-- | bdep/publish.hxx | 19 | ||||
-rw-r--r-- | bdep/types-parsers.cxx | 22 | ||||
-rw-r--r-- | bdep/types-parsers.hxx | 7 | ||||
-rw-r--r-- | bdep/types.hxx | 5 | ||||
-rw-r--r-- | bdep/utility.cxx | 17 | ||||
-rw-r--r-- | bdep/utility.hxx | 22 | ||||
-rw-r--r-- | bdep/utility.txx | 38 | ||||
-rwxr-xr-x | doc/cli.sh | 5 |
17 files changed, 868 insertions, 128 deletions
diff --git a/bdep/bdep.cli b/bdep/bdep.cli index 6d6eb04..284f1a5 100644 --- a/bdep/bdep.cli +++ b/bdep/bdep.cli @@ -443,6 +443,11 @@ namespace bdep "\l{bdep-status(1)} \- print status of project and/or its dependencies" } + bool publish + { + "\l{bdep-publish(1)} \- publish project to archive repository" + } + bool deinit { "\l{bdep-deinit(1)} \- deinitialize project in build configurations" diff --git a/bdep/bdep.cxx b/bdep/bdep.cxx index d0327d8..339be05 100644 --- a/bdep/bdep.cxx +++ b/bdep/bdep.cxx @@ -25,6 +25,7 @@ #include <bdep/sync.hxx> #include <bdep/fetch.hxx> #include <bdep/status.hxx> +#include <bdep/publish.hxx> #include <bdep/deinit.hxx> #include <bdep/config.hxx> #include <bdep/test.hxx> @@ -265,16 +266,17 @@ try break; \ } - COMMAND_IMPL (new_, new, "new"); - COMMAND_IMPL (init, init, "init"); - COMMAND_IMPL (sync, sync, "sync"); - COMMAND_IMPL (fetch, fetch, "fetch"); - COMMAND_IMPL (status, status, "status"); - COMMAND_IMPL (deinit, deinit, "deinit"); - COMMAND_IMPL (config, config, "config"); - COMMAND_IMPL (test, test, "test"); - COMMAND_IMPL (update, update, "update"); - COMMAND_IMPL (clean, clean, "clean"); + COMMAND_IMPL (new_, new, "new"); + COMMAND_IMPL (init, init, "init"); + COMMAND_IMPL (sync, sync, "sync"); + COMMAND_IMPL (fetch, fetch, "fetch"); + COMMAND_IMPL (status, status, "status"); + COMMAND_IMPL (publish, publish, "publish"); + COMMAND_IMPL (deinit, deinit, "deinit"); + COMMAND_IMPL (config, config, "config"); + COMMAND_IMPL (test, test, "test"); + COMMAND_IMPL (update, update, "update"); + COMMAND_IMPL (clean, clean, "clean"); assert (false); fail << "unhandled command"; diff --git a/bdep/buildfile b/bdep/buildfile index c3d0707..a234d96 100644 --- a/bdep/buildfile +++ b/bdep/buildfile @@ -17,20 +17,21 @@ import libs += libodb-sqlite%lib{odb-sqlite} # @@ Why don't we generate these with wildcard patterns (and rules below with # a for-loop)? 
# -options_topics = \ -bdep-options \ -common-options \ -project-options \ -help-options \ -new-options \ -init-options \ -sync-options \ -fetch-options \ -status-options \ -deinit-options \ -config-options \ -test-options \ -update-options \ +options_topics = \ +bdep-options \ +common-options \ +project-options \ +help-options \ +new-options \ +init-options \ +sync-options \ +fetch-options \ +status-options \ +publish-options \ +deinit-options \ +config-options \ +test-options \ +update-options \ clean-options help_topics = projects-configs @@ -65,16 +66,17 @@ if $cli.configured # cli.cxx{help-options}: cli{help} - cli.cxx{new-options}: cli{new} - cli.cxx{init-options}: cli{init} - cli.cxx{sync-options}: cli{sync} - cli.cxx{fetch-options}: cli{fetch} - cli.cxx{status-options}: cli{status} - cli.cxx{deinit-options}: cli{deinit} - cli.cxx{config-options}: cli{config} - cli.cxx{test-options}: cli{test} - cli.cxx{update-options}: cli{update} - cli.cxx{clean-options}: cli{clean} + cli.cxx{new-options}: cli{new} + cli.cxx{init-options}: cli{init} + cli.cxx{sync-options}: cli{sync} + cli.cxx{fetch-options}: cli{fetch} + cli.cxx{status-options}: cli{status} + cli.cxx{publish-options}: cli{publish} + cli.cxx{deinit-options}: cli{deinit} + cli.cxx{config-options}: cli{config} + cli.cxx{test-options}: cli{test} + cli.cxx{update-options}: cli{update} + cli.cxx{clean-options}: cli{clean} # Help topics. # diff --git a/bdep/git.cxx b/bdep/git.cxx new file mode 100644 index 0000000..c26d257 --- /dev/null +++ b/bdep/git.cxx @@ -0,0 +1,66 @@ +// file : bdep/git.cxx -*- C++ -*- +// copyright : Copyright (c) 2014-2018 Code Synthesis Ltd +// license : MIT; see accompanying LICENSE file + +#include <bdep/git.hxx> + +#include <libbutl/filesystem.mxx> + +#include <bdep/diagnostics.hxx> + +using namespace butl; + +namespace bdep +{ + bool + git (const dir_path& d) + { + // .git can be either a directory or a file in case of a submodule. 
+ // + return entry_exists (d / ".git", + true /* follow_symlinks */, + true /* ignore_errors */); + } + + optional<string> + git_line (process&& pr, fdpipe&& pipe, bool ie) + { + optional<string> r; + + bool io (false); + try + { + pipe.out.close (); + ifdstream is (move (pipe.in), fdstream_mode::skip, ifdstream::badbit); + + string l; + if (!eof (getline (is, l))) + r = move (l); + + is.close (); // Detect errors. + } + catch (const io_error&) + { + io = true; // Presumably git failed so check that first. + } + + // Note: cannot use finish() since ignoring normal error. + // + if (!pr.wait ()) + { + const process_exit& e (*pr.exit); + + if (!e.normal ()) + fail << "process git " << e; + + if (ie) + r = nullopt; + else + throw failed (); // Assume git issued diagnostics. + } + else if (io) + fail << "unable to read git output"; + + return r; + } +} diff --git a/bdep/git.hxx b/bdep/git.hxx new file mode 100644 index 0000000..f56f38b --- /dev/null +++ b/bdep/git.hxx @@ -0,0 +1,29 @@ +// file : bdep/git.hxx -*- C++ -*- +// copyright : Copyright (c) 2014-2018 Code Synthesis Ltd +// license : MIT; see accompanying LICENSE file + +#ifndef BDEP_GIT_HXX +#define BDEP_GIT_HXX + +#include <bdep/types.hxx> +#include <bdep/utility.hxx> + +namespace bdep +{ + // Return true if the specified directory is a git repository root (contains + // the .git filesystem entry). + // + bool + git (const dir_path&); + + // Return the first line of the git output. If ignore_error is true, then + // suppress stderr, ignore (normal) error exit status, and return nullopt. + // + template <typename... A> + optional<string> + git_line (const dir_path& repo, bool ignore_error, A&&... 
args); +} + +#include <bdep/git.ixx> + +#endif // BDEP_GIT_HXX diff --git a/bdep/git.ixx b/bdep/git.ixx new file mode 100644 index 0000000..0ee94ac --- /dev/null +++ b/bdep/git.ixx @@ -0,0 +1,28 @@ +// file : bdep/git.ixx -*- C++ -*- +// copyright : Copyright (c) 2014-2018 Code Synthesis Ltd +// license : MIT; see accompanying LICENSE file + +using namespace butl; + +namespace bdep +{ + optional<string> + git_line (process&&, fdpipe&&, bool); + + template <typename... A> + inline optional<string> + git_line (const dir_path& repo, bool ie, A&&... args) + { + fdpipe pipe (fdopen_pipe ()); + auto_fd null (ie ? fdnull () : auto_fd ()); + + process pr (start (0 /* stdin */, + pipe /* stdout */, + ie ? null.get () : 2 /* stderr */, + "git", + "-C", repo, + forward<A> (args)...)); + + return git_line (move (pr), move (pipe), ie); + } +} diff --git a/bdep/project-email.cxx b/bdep/project-email.cxx index b85a738..2b836f2 100644 --- a/bdep/project-email.cxx +++ b/bdep/project-email.cxx @@ -4,8 +4,7 @@ #include <bdep/project-email.hxx> -#include <libbutl/filesystem.mxx> - +#include <bdep/git.hxx> #include <bdep/diagnostics.hxx> using namespace butl; @@ -28,12 +27,7 @@ namespace bdep // See if this is a VCS repository we recognize. // - - // .git can be either a directory or a file in case of a submodule. - // - if (entry_exists (prj / ".git", - true /* follow_symlinks */, - true /* ignore_errors */)) + if (git (prj)) { // In git the author email can be specified with the GIT_AUTHOR_EMAIL // environment variable after which things fall back to the committer @@ -41,26 +35,10 @@ namespace bdep // resolved value can be queried with the GIT_AUTHOR_IDENT logical // variable. // - process pr; - bool io (false); - try + if (optional<string> l = git_line (prj, + true /* ignore_error */, + "var", "GIT_AUTHOR_IDENT")) { - fdpipe pipe (fdopen_pipe ()); - - // If git cannot determine the author name/email, it fails verbosely - // so we suppress all diagnostics. 
- // - pr = start (0 /* stdin */, - pipe /* stdout */, - fdnull () /* stderr */, - "git", - "-C", prj, - "var", - "GIT_AUTHOR_IDENT"); - - pipe.out.close (); - ifdstream is (move (pipe.in), ifdstream::badbit); - // The output should be a single line in this form: // // NAME <EMAIL> TIME ZONE @@ -72,40 +50,15 @@ namespace bdep // The <> delimiters are there even if the email is empty so we use // them as anchors. // - string l; - if (!eof (getline (is, l))) - { - size_t p1, p2; - - if ((p2 = l.rfind ('>' )) == string::npos || - (p1 = l.rfind ('<', p2)) == string::npos) - fail << "no email in git-var output" << endf; + size_t p1, p2; - if (++p1 != p2) - r = string (l, p1, p2 - p1); - } + if ((p2 = l->rfind ('>' )) == string::npos || + (p1 = l->rfind ('<', p2)) == string::npos) + fail << "no email in GIT_AUTHOR_IDENT" << endf; - is.close (); // Detect errors. + if (++p1 != p2) + return string (*l, p1, p2 - p1); } - catch (const io_error&) - { - io = true; // Presumably git failed so check that first. - } - - if (!pr.wait ()) - { - const process_exit& e (*pr.exit); - - if (!e.normal ()) - fail << "process git " << e; - - r = nullopt; - } - else if (io) - fail << "unable to read git-var output"; - - if (r) - return r; } if ((r = getenv ("EMAIL"))) diff --git a/bdep/publish.cli b/bdep/publish.cli new file mode 100644 index 0000000..f44ecc0 --- /dev/null +++ b/bdep/publish.cli @@ -0,0 +1,131 @@ +// file : bdep/publish.cli +// copyright : Copyright (c) 2014-2018 Code Synthesis Ltd +// license : MIT; see accompanying LICENSE file + +include <bdep/project.cli>; + +"\section=1" +"\name=bdep-publish" +"\summary=publish project to archive repository" + +namespace bdep +{ + { + "<options> + <prj-spec> <prj-dir> + <pkg-spec> <pkg-dir> + <cfg-spec> <cfg-name> <cfg-dir>", + + "\h|SYNOPSIS| + + \c{\b{bdep publish} [<options>] [<cfg-spec>] [<pkg-spec>]} + + \c{<pkg-spec> = (\b{--directory}|\b{-d} <pkg-dir>)... 
| <prj-spec>\n + <prj-spec> = \b{--directory}|\b{-d} <prj-dir>\n + <cfg-spec> = \b{@}<cfg-name> | \b{--config}|\b{-c} <cfg-dir>} + + \h|DESCRIPTION| + + The \cb{publish} command publishes the project packages to an + archive-based repository. + + If no project or package directory is specified, then the current working + directory is assumed. If no configuration is specified, then the default + configuration is used to prepare the package distributions. See + \l{bdep-projects-configs(1)} for details on specifying projects and + configurations. + + For each specified package the \cb{publish} command prepares a package + archive and sends it as part of the package submission request to an + archive-based repository. If the repository is not explicitly specified + with the \cb{--repository} option, packages are published to + \cb{cppget.org} by default. + + Along with the package archive, the submission request specifies the + project the package belongs to, the repository section to publish the + package under, the control repository URL to use for authorization, and + the publisher's email address for notifications. While the exact usage + and interpretation of this information depends on the specific + repository, the following semantics apply when submitting to + \cb{cppget.org}. + + The project information is used to group related packages together. For + example, \cb{hello} and \cb{libhello} are likely to be part of the same + project called \cb{hello}. @@ TODO. + + The section specifies the desired repository section to publish the + project under. If not explicitly specified with the \cb{--section} + option, one of the \cb{alpha}, \cb{beta}, or \cb{stable} values is + automatically derived from the package version. 
+ + The control repository URL is a publicly accessible, read-only URL to a + version control repository (normally the same as the project's) that is + used to authenticate the publisher as someone authorized to publish under + this package name (currently only \cb{git(1)} is supported). + + Prior to sending the submission request, the \cb{publish} command adds + the package archive checksum to the \cb{build2-control} branch of the + project's version control repository. Upon receiving the submission + request, the archive repository either (1) associates the control + repository with the package name if this is the first time this package + name is published or (2) compares the submitted control repository to the + previously associated and, if matching, queries the \cb{build2-control} + branch to verify that the submitter is authorized to publish this archive + under this package name. + + Unless the control repository URL is specified with the \cb{--control} + option, it will be automatically derived from the version control's + \"remote\" URL. In case of \cb{git(1)}, it will be based on the + \cb{remote.origin.url} configuration value unless overridden with + \cb{remote.origin.build2ControlUrl}. The special \cb{none} value to the + \cb{--control} option can be used to disable this functionality. + + See \l{brep#submit Package Submission} for details on the submission + request handling by archive repositories. + " + } + + class cmd_publish_options: project_options + { + "\h|PUBLISH OPTIONS|" + + bool --yes|-y + { + "Don't prompt for confirmation before publishing." + } + + string --control + { + "<url>", + "Control repository URL for the packages being published." + } + + url --repository = "https://cppget.org" + { + "<url>", + "Repository to publish the packages to." + } + + string --section + { + "<name>", + "Repository section to publish the packages under." + } + + string --email + { + "<email>", + "Publisher's email address for notifications. 
If unspecified, one will be + obtained from the environment and/or version control system. See the + ENVIRONMENT section for details." + } + }; + + "\h|ENVIRONMENT| + + The \cb{BDEP_EMAIL} environment variable can be used to specify the + publisher's email address. If not set, the \cb{publish} command will first + try to obtain the email from the version control system (if used) and then + from the \cb{EMAIL} environment variable. See also the \cb{--email} option. + " +} diff --git a/bdep/publish.cxx b/bdep/publish.cxx new file mode 100644 index 0000000..0fbe02d --- /dev/null +++ b/bdep/publish.cxx @@ -0,0 +1,461 @@ +// file : bdep/publish.cxx -*- C++ -*- +// copyright : Copyright (c) 2014-2018 Code Synthesis Ltd +// license : MIT; see accompanying LICENSE file + +#include <bdep/publish.hxx> + +#include <libbutl/standard-version.mxx> + +#include <bdep/git.hxx> +#include <bdep/project.hxx> +#include <bdep/project-email.hxx> +#include <bdep/database.hxx> +#include <bdep/diagnostics.hxx> + +#include <bdep/sync.hxx> + +using namespace std; +using namespace butl; + +namespace bdep +{ + static inline url + parse_url (const string& s, const char* what) + { + try + { + return url (s); + } + catch (const invalid_argument& e) + { + fail << "invalid " << what << " value '" << s << "': " << e << endf; + } + }; + + // Get the project's control repository URL. + // + static url + control_url (const dir_path& prj) + { + if (git (prj)) + { + // First try remote.origin.build2ControlUrl which can be used to specify + // a custom URL (e.g., if a correct one cannot be automatically derived + // from remote.origin.url). + // + if (optional<string> l = git_line (prj, + true /* ignore_error */, + "config", + "--get", + "remote.origin.build2ControlUrl")) + { + return parse_url (*l, "remote.origin.build2ControlUrl"); + } + + // Otherwise, get remote.origin.url and try to derive an https URL from + // it. 
+ // + if (optional<string> l = git_line (prj, + true /* ignore_error */, + "config", + "--get", + "remote.origin.url")) + { + string& s (*l); + + // This one will be fuzzy and hairy. Here are some representative + // examples of what we can encounter: + // + // example.org:/path/to/repo.git + // user@example.org:/path/to/repo.git + // user@example.org:~user/path/to/repo.git + // ssh://user@example.org/path/to/repo.git + // + // git://example.org/path/to/repo.git + // + // http://example.org/path/to/repo.git + // https://example.org/path/to/repo.git + // + // /path/to/repo.git + // file:///path/to/repo.git + // + // Note that git seem to always make remote.origin.url absolute in + // case of a local filesystem path. + // + // So the algorithm will be as follows: + // + // 1. If there is scheme, then parse as URL. + // + // 2. Otherwise, check if this is an absolute path. + // + // 3. Otherwise, assume SSH <host>:<path> thing. + // + url u; + + // Find the scheme. + // + // Note that in example.org:/path/... example.org is a valid scheme. + // To distinguish this, we check if the scheme contains any dots (none + // of the schemes recognized by git currently do and probably never + // will). + // + size_t p (s.find (':')); + if (p != string::npos && // Has ':'. + url::traits::find (s, p) == 0 && // Scheme starts at 0. + s.rfind ('.', p - 1) == string::npos) // No dots in scheme. + { + u = parse_url (s, "remote.origin.url"); + } + else + { + // Absolute path or the SSH thing. + // + if (path::traits::absolute (s)) + { + // This is what we want to end up with: + // + // file:///tmp + // file:///c:/tmp + // + const char* h (s[0] == '/' ? "file://" : "file:///"); + u = parse_url (h + s, "remote.origin.url"); + } + else if (p != string::npos) + { + // This can still include user (user@host) so let's add the + // scheme, replace/erase ':', and parse it as a string + // representation of a URL. + // + if (s[p + 1] == '/') // POSIX notation. 
+ s.erase (p, 1); + else + s[p] = '/'; + + u = parse_url ("ssh://" + s, "remote.origin.url"); + } + else + fail << "invalid remote.origin.url value '" << s << "': not a URL"; + } + + if (u.scheme == "http" || u.scheme == "https") + return u; + + // Derive an HTTPS URL from a remote URL (and hope for the best). + // + if (u.scheme != "file" && u.authority && u.path) + return url ("https", u.authority->host, *u.path); + + fail << "unable to derive control repository URL from " << u << + info << "consider setting remote.origin.build2ControlUrl" << + info << "or use --control to specify explicitly"; + } + + fail << "unable to discover control repository URL: no git " + << "remote.origin.url value"; + } + + fail << "unable to discover control repository URL" << + info << "use --control to specify explicitly" << endf; + } + + static standard_version + package_version (const common_options& o, + const dir_path& cfg, + const string& p) + { + // We could have used bpkg-pkg-status but then we would have to deal with + // iterations. So we use the build system's info meta-operation directly. + // + string v; + { + process pr; + bool io (false); + try + { + fdpipe pipe (fdopen_pipe ()); // Text mode seems appropriate. + + // Note: the package directory inside the configuration is a bit of an + // assumption. + // + pr = start_b (o, + pipe /* stdout */, + 2 /* stderr */, + "info:", (dir_path (cfg) /= p).representation ()); + + pipe.out.close (); + ifdstream is (move (pipe.in), fdstream_mode::skip, ifdstream::badbit); + + for (string l; !eof (getline (is, l)); ) + { + // Verify the name for good measure (comes before version). + // + if (l.compare (0, 9, "project: ") == 0) + { + if (l.compare (9, string::npos, p) != 0) + fail << "name mismatch for package " << p; + } + else if (l.compare (0, 9, "version: ") == 0) + { + v = string (l, 9); + break; + } + } + + is.close (); // Detect errors. 
+ } + catch (const io_error&) + { + // Presumably the child process failed and issued diagnostics so let + // finish_b() try to deal with that first. + // + io = true; + } + + finish_b (o, pr, io); + } + + try + { + return standard_version (v); + } + catch (const invalid_argument& e) + { + fail << "invalid package " << p << " version " << v << ": " << e << endf; + } + } + + static int + cmd_publish (const cmd_publish_options& o, + const dir_path& prj, + const dir_path& cfg, + const cstrings& pkg_names) + { + const url& repo (o.repository ()); + + optional<url> ctrl; + if (o.control_specified ()) + { + if (o.control () != "none") + ctrl = parse_url (o.control (), "--control option"); + } + else + ctrl = control_url (prj); + + string email; + if (o.email_specified ()) + email = o.email (); + else if (optional<string> r = project_email (prj)) + email = move (*r); + else + fail << "unable to obtain publisher's email" << + info << "use --email to specify explicitly"; + + + // Collect package information (version, project, section). + // + // @@ It would have been nice to publish them in the dependency order. + // Perhaps we need something like bpkg-pkg-order (also would be needed + // in init --clone). + // + struct package + { + string name; + standard_version version; + string project; + string section; // alpha|beta|stable (or --section) + + path archive; + string checksum; + }; + vector<package> pkgs; + + for (string n: pkg_names) + { + standard_version v (package_version (o, cfg, n)); + + // Should we allow publishing snapshots and, if so, to which section? + // For example, is it correct to consider a "between betas" snapshot a + // beta version? + // + if (v.snapshot ()) + fail << "package " << n << " version " << v << " is a snapshot"; + + string p (prj.leaf ().string ()); // @@ TODO/TMP + + // Per semver we treat zero major versions as alpha. + // + string s (o.section_specified () ? o.section () : + v.alpha () || v.major () == 0 ? "alpha" : + v.beta () ? 
"beta" : "stable"); + + pkgs.push_back ( + package {move (n), move (v), move (p), move (s), path (), string ()}); + } + + // Print the plan and ask for confirmation. + // + if (!o.yes ()) + { + text << "publishing:" << '\n' + << " to: " << repo << '\n' + << " as: " << email + << '\n'; + + for (size_t i (0); i != pkgs.size (); ++i) + { + const package& p (pkgs[i]); + + diag_record dr (text); + + // If printing multiple packages, separate them with a blank line. + // + if (i != 0) + dr << '\n'; + + // While currently the control repository is the same for all + // packages, this could change in the future (e.g., multi-project + // publishing). + // + dr << " package: " << p.name << '\n' + << " version: " << p.version << '\n' + << " project: " << p.project << '\n' + << " section: " << p.section; + + if (ctrl) + dr << '\n' + << " control: " << *ctrl; + } + + if (!yn_prompt ("continue? [y/n]")) + return 1; + } + + // Prepare package archives and calculate their checksums. Also verify + // each archive with bpkg-pkg-verify for good measure. + // + auto_rmdir dr_rm (dir_path ("/tmp/publish")); //@@ TODO tmp facility like in bpkg. + const dir_path& dr (dr_rm.path); // dist.root + mk (dr); + + for (package& p: pkgs) + { + // Similar to extracting package version, we call the build system + // directly to prepare the distribution. If/when we have bpkg-pkg-dist, + // we may want to switch to that. + // + run_b (o, + "dist:", (dir_path (cfg) /= p.name).representation (), + "config.dist.root=" + dr.representation (), + "config.dist.archives=tar.gz", + "config.dist.checksums=sha256"); + + // This is the canonical package archive name that we expect dist to + // produce. 
+ // + path a (dr / p.name + '-' + p.version.string () + ".tar.gz"); + path c (a + ".sha256"); + + if (!exists (a)) + fail << "package distribution did not produce expected archive " << a; + + if (!exists (c)) + fail << "package distribution did not produce expected checksum " << c; + + //@@ TODO: call bpkg-pkg-verify to verify archive name/content all match. + + //@@ TODO: read checksum from .sha256 file and store in p.checksum. + + p.archive = move (a); + } + + // Submit each package. + // + for (const package& p: pkgs) + { + //@@ TODO: call curl to upload the archive, parse response manifest, + // and print message/reference. + + text << "submitting " << p.archive; + + //@@ TODO [phase 2]: add checksum file to build2-control branch, commit + // and push (this will need some more discussion). + // + // - name (abbrev 12 char checksum) and subdir? + // + // - make the checksum file a manifest with basic info (name, version) + // + // - what if already exists (previous failed attempt)? Ignore? + // + // - make a separate checkout (in tmp facility) reusing the external + // .git/ dir? + // + // - should probably first fetch to avoid push conflicts. Or maybe + // fetch on push conflict (more efficient/robust)? + // + } + + return 0; + } + + int + cmd_publish (const cmd_publish_options& o, cli::scanner&) + { + tracer trace ("publish"); + + // The same ignore/load story as in sync. + // + project_packages pp ( + find_project_packages (o, + false /* ignore_packages */, + false /* load_packages */)); + + const dir_path& prj (pp.project); + database db (open (prj, trace)); + + // We need a single configuration to prepare package distribution. 
+ // + shared_ptr<configuration> cfg; + { + transaction t (db.begin ()); + configurations cfgs (find_configurations (o, prj, t)); + t.commit (); + + if (cfgs.size () > 1) + fail << "multiple configurations specified for publish"; + + shared_ptr<configuration>& c (cfgs[0]); + + // If specified, verify packages are present in the configuration. + // Otherwise, make sure the configuration is not empty. + // + if (!pp.packages.empty ()) + verify_project_packages (pp, cfgs); + else if (c->packages.empty ()) + fail << "no packages initialized in configuration " << *c; + + cfg = move (c); + } + + // Pre-sync the configuration to avoid triggering the build system hook + // (see sync for details). + // + cmd_sync (o, prj, cfg, strings () /* pkg_args */, true /* implicit */); + + // If no packages were explicitly specified, then we publish all that have + // been initialized in the configuration. + // + cstrings pkgs; + if (pp.packages.empty ()) + { + for (const package_state& p: cfg->packages) + pkgs.push_back (p.name.string ().c_str ()); + } + else + { + for (const package_location& p: pp.packages) + pkgs.push_back (p.name.string ().c_str ()); + } + + return cmd_publish (o, prj, cfg->path, pkgs); + } +} diff --git a/bdep/publish.hxx b/bdep/publish.hxx new file mode 100644 index 0000000..8cc5b94 --- /dev/null +++ b/bdep/publish.hxx @@ -0,0 +1,19 @@ +// file : bdep/publish.hxx -*- C++ -*- +// copyright : Copyright (c) 2014-2018 Code Synthesis Ltd +// license : MIT; see accompanying LICENSE file + +#ifndef BDEP_PUBLISH_HXX +#define BDEP_PUBLISH_HXX + +#include <bdep/types.hxx> +#include <bdep/utility.hxx> + +#include <bdep/publish-options.hxx> + +namespace bdep +{ + int + cmd_publish (const cmd_publish_options&, cli::scanner& args); +} + +#endif // BDEP_PUBLISH_HXX diff --git a/bdep/types-parsers.cxx b/bdep/types-parsers.cxx index 6b92d8e..c93424a 100644 --- a/bdep/types-parsers.cxx +++ b/bdep/types-parsers.cxx @@ -10,6 +10,28 @@ namespace bdep { namespace cli { + void 
parser<url>:: + parse (url& x, bool& xs, scanner& s) + { + const char* o (s.next ()); + + if (!s.more ()) + throw missing_value (o); + + const char* v (s.next ()); + + try + { + x = url (v); + } + catch (const invalid_argument& e) + { + throw invalid_value (o, v, e.what ()); + } + + xs = true; + } + template <typename T> static void parse_path (T& x, scanner& s) diff --git a/bdep/types-parsers.hxx b/bdep/types-parsers.hxx index 3c23d2e..1ce0eef 100644 --- a/bdep/types-parsers.hxx +++ b/bdep/types-parsers.hxx @@ -21,6 +21,13 @@ namespace bdep struct parser; template <> + struct parser<url> + { + static void + parse (url&, bool&, scanner&); + }; + + template <> struct parser<path> { static void diff --git a/bdep/types.hxx b/bdep/types.hxx index 6c2ea8d..a9adee6 100644 --- a/bdep/types.hxx +++ b/bdep/types.hxx @@ -22,6 +22,7 @@ #include <odb/sqlite/forward.hxx> +#include <libbutl/url.mxx> #include <libbutl/path.mxx> #include <libbutl/process.mxx> #include <libbutl/optional.mxx> @@ -77,6 +78,10 @@ namespace bdep using butl::optional; using butl::nullopt; + // <libbutl/url.mxx> + // + using butl::url; + // <libbutl/path.mxx> // using butl::path; diff --git a/bdep/utility.cxx b/bdep/utility.cxx index 11e77f8..2600de9 100644 --- a/bdep/utility.cxx +++ b/bdep/utility.cxx @@ -109,23 +109,6 @@ namespace bdep : "bpkg" BDEP_EXE_SUFFIX; } - void - finish_bpkg (const common_options& co, process& pr, bool io) - { - if (!pr.wait ()) - { - const process_exit& e (*pr.exit); - - if (e.normal ()) - throw failed (); // Assume the child issued diagnostics. 
- - fail << "process " << name_bpkg (co) << " " << e; - } - - if (io) - fail << "error reading " << name_bpkg (co) << " output"; - } - const char* name_b (const common_options& co) { diff --git a/bdep/utility.hxx b/bdep/utility.hxx index ea8f5d1..7bc1522 100644 --- a/bdep/utility.hxx +++ b/bdep/utility.hxx @@ -15,6 +15,7 @@ #include <libbutl/ft/lang.hxx> #include <libbutl/utility.mxx> // casecmp(), reverse_iterate(), etc +#include <libbutl/prompt.mxx> #include <libbutl/fdstream.mxx> #include <libbutl/filesystem.mxx> @@ -54,6 +55,10 @@ namespace bdep using butl::setenv; using butl::unsetenv; + // <libbutl/prompt.mxx> + // + using butl::yn_prompt; + // <libbutl/filesystem.mxx> // using butl::auto_rmfile; @@ -114,6 +119,10 @@ namespace bdep process start (I&& in, O&& out, E&& err, const P& prog, A&&... args); + template <typename P> + void + finish (const P& prog, process&, bool io_read = false, bool io_write = false); + template <typename P, typename... A> void run (const P& prog, A&&... args); @@ -131,8 +140,11 @@ namespace bdep E&& err, A&&... args); - void - finish_bpkg (const common_options&, process&, bool io_error = false); + inline void + finish_bpkg (const common_options& co, process& pr, bool io_read = false) + { + finish (name_bpkg (co), pr, io_read); + } template <typename... A> void @@ -147,6 +159,12 @@ namespace bdep process start_b (const common_options&, O&& out, E&& err, A&&... args); + inline void + finish_b (const common_options& co, process& pr, bool io_read = false) + { + finish (name_b (co), pr, io_read); + } + template <typename... A> void run_b (const common_options&, A&&... args); diff --git a/bdep/utility.txx b/bdep/utility.txx index 23a8c8a..84248d0 100644 --- a/bdep/utility.txx +++ b/bdep/utility.txx @@ -35,15 +35,10 @@ namespace bdep } } - template <typename P, typename... A> + template <typename P> void - run (const P& prog, A&&... 
args) + finish (const P& prog, process& pr, bool io_read, bool io_write) { - process pr (start (0 /* stdin */, - 1 /* stdout */, - 2 /* stderr */, - prog, - forward<A> (args)...)); if (!pr.wait ()) { const process_exit& e (*pr.exit); @@ -53,6 +48,25 @@ namespace bdep fail << "process " << prog << " " << e; } + + if (io_read) + fail << "error reading " << prog << " output"; + + if (io_write) + fail << "error writing " << prog << " input"; + } + + template <typename P, typename... A> + void + run (const P& prog, A&&... args) + { + process pr (start (0 /* stdin */, + 1 /* stdout */, + 2 /* stderr */, + prog, + forward<A> (args)...)); + + finish (prog, pr); } // *_bpkg() @@ -215,15 +229,7 @@ namespace bdep 1 /* stdout */, 2 /* stderr */, forward<A> (args)...)); - if (!pr.wait ()) - { - const process_exit& e (*pr.exit); - - if (e.normal ()) - throw failed (); // Assume the child issued diagnostics. - - fail << "process " << name_b (co) << " " << e; - } + finish_b (co, pr); } // *_manifest() @@ -42,6 +42,7 @@ man-prologue.xhtml --html-epilogue-file man-epilogue.xhtml --html-suffix .xhtml --link-regex '%b([-.].+)%../../build2/doc/b$1%' \ --link-regex '%bpkg([-.].+)%../../bpkg/doc/bpkg$1%' \ --link-regex '%bpkg(#.+)?%../../bpkg/doc/build2-package-manager-manual.xhtml$1%' \ +--link-regex '%brep(#.+)?%../../brep/doc/build2-repository-interface-manual.xhtml$1%' \ --link-regex '%bdep(#.+)?%build2-project-manager-manual.xhtml$1%' \ ../bdep/$n.cli @@ -49,6 +50,7 @@ man-prologue.xhtml --html-epilogue-file man-epilogue.xhtml --html-suffix .xhtml --include-base-last "${o[@]}" --generate-man --man-prologue-file \ man-prologue.1 --man-epilogue-file man-epilogue.1 --man-suffix .1 \ --link-regex '%bpkg(#.+)?%$1%' \ +--link-regex '%brep(#.+)?%$1%' \ --link-regex '%bdep(#.+)?%$1%' \ ../bdep/$n.cli } @@ -60,7 +62,8 @@ o="--suppress-undocumented --output-prefix bdep- --class-doc bdep::common_option compile "common" $o --output-suffix "-options" --class-doc bdep::common_options=long compile 
"bdep" $o --output-prefix "" --class-doc bdep::commands=short --class-doc bdep::topics=short -pages="new help init sync fetch status deinit config test update clean projects-configs" +pages="new help init sync fetch status publish deinit config test update \ +clean projects-configs" for p in $pages; do compile $p $o |