From e3ce9cc60846cc2e4e4775f6d24c28089bad30ff Mon Sep 17 00:00:00 2001
From: Boris Kolpackov
Date: Mon, 7 Nov 2016 10:52:01 +0200
Subject: Only do effective escaping when re-parsing expansions in testscript

Doing unconditional escaping messes up expansions that are Windows paths.
---
 build2/test/script/lexer      |  7 +++++--
 build2/test/script/parser.cxx | 16 ++++++++++++++--
 2 files changed, 19 insertions(+), 4 deletions(-)

(limited to 'build2/test/script')

diff --git a/build2/test/script/lexer b/build2/test/script/lexer
index 65ef297..5597e9a 100644
--- a/build2/test/script/lexer
+++ b/build2/test/script/lexer
@@ -44,8 +44,11 @@ namespace build2
         using base_lexer = build2::lexer;
         using base_mode = build2::lexer_mode;
 
-        lexer (istream& is, const path& name, lexer_mode m)
-            : base_lexer (is, name, nullptr, nullptr, false) {mode (m);}
+        lexer (istream& is,
+               const path& name,
+               lexer_mode m,
+               const char* escapes = nullptr)
+            : base_lexer (is, name, escapes, nullptr, false) {mode (m);}
 
         virtual void
         mode (base_mode, char = '\0') override;
diff --git a/build2/test/script/parser.cxx b/build2/test/script/parser.cxx
index df1cd9f..0cc54f7 100644
--- a/build2/test/script/parser.cxx
+++ b/build2/test/script/parser.cxx
@@ -1595,7 +1595,7 @@ namespace build2
           // interesting characters (operators plus quotes/escapes),
           // then no need to re-lex.
           //
-          // NOTE: updated quoting (script.cxx:to_stream_q()) if adding
+          // NOTE: update quoting (script.cxx:to_stream_q()) if adding
           // any new characters.
           //
           if (q || s.find_first_of ("|&<>\'\"\\") == string::npos)
@@ -1631,8 +1631,20 @@ namespace build2
             name = path (move (n));
           }
 
+          // When re-parsing we do "effective escaping" and only for
+          // ['"\] (quotes plus the backslash itself). In particular,
+          // there is no way to escape redirects, operators, etc. The
+          // idea is to prefer quoting except for passing literal
+          // quotes, for example:
+          //
+          // args = \"&foo\"
+          // cmd $args               # cmd &foo
+          //
+          // args = 'x=\"foo bar\"'
+          // cmd $args               # cmd x="foo bar"
+          //
           istringstream is (s);
-          lexer lex (is, name, lexer_mode::command_line);
+          lexer lex (is, name, lexer_mode::command_line, "\'\"\\");
 
           // Treat the first "sub-token" as always separated from what
           // we saw earlier.
--
cgit v1.1
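
As a brief illustration of the motivation above (a hypothetical testscript sketch, not part of the patch; the variable and command names are made up): with unconditional escaping a backslash in a re-lexed expansion acts as an escape for whatever follows it, which mangles Windows paths; with effective escaping limited to ['"\], a backslash before any other character passes through verbatim.

# Hypothetical example; 'dir' and 'cmd' are illustrative names.
#
dir = C:\projects\foo

# Unconditional escaping would consume the backslashes when the expansion
# is re-lexed; effective escaping leaves them alone.
#
cmd $dir                  # cmd C:\projects\foo

# Literal quotes can still be passed through by escaping them, as in the
# example from the patch comment:
#
args = 'x=\"foo bar\"'
cmd $args                 # cmd x="foo bar"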