From 0cef93b4e2e9bf39b0ca542876f9ab1af6d0f01d Mon Sep 17 00:00:00 2001
From: Boris Kolpackov <boris@codesynthesis.com>
Date: Wed, 24 May 2017 13:24:31 +0200
Subject: Implement support for tokenization of preprocessed C/C++ source

---
 unit-tests/cc/lexer/driver.cxx | 66 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 66 insertions(+)
 create mode 100644 unit-tests/cc/lexer/driver.cxx

(limited to 'unit-tests/cc/lexer/driver.cxx')

diff --git a/unit-tests/cc/lexer/driver.cxx b/unit-tests/cc/lexer/driver.cxx
new file mode 100644
index 0000000..db3f516
--- /dev/null
+++ b/unit-tests/cc/lexer/driver.cxx
@@ -0,0 +1,66 @@
+// file      : unit-tests/cc/lexer/driver.cxx -*- C++ -*-
+// copyright : Copyright (c) 2014-2017 Code Synthesis Ltd
+// license   : MIT; see accompanying LICENSE file
+
+#include <cassert>
+#include <iostream>
+
+#include <build2/types>
+#include <build2/utility>
+
+#include <build2/cc/lexer>
+
+using namespace std;
+
+namespace build2
+{
+  namespace cc
+  {
+    // Usage: argv[0] [<file>]
+    //
+    int
+    main (int argc, char* argv[])
+    {
+      try
+      {
+        istream* is;
+        const char* in;
+
+        // Reading from file is several times faster.
+        //
+        ifdstream ifs;
+        if (argc > 1)
+        {
+          in = argv[1];
+          ifs.open (in);
+          is = &ifs;
+        }
+        else
+        {
+          in = "stdin";
+          cin.exceptions (istream::failbit | istream::badbit);
+          is = &cin;
+        }
+
+        lexer l (*is, path (in));
+
+        // No use printing eos since we will either get it or loop forever.
+        //
+        for (token t; l.next (t) != token_type::eos; )
+          cout << t << endl;
+      }
+      catch (const failed&)
+      {
+        return 1;
+      }
+
+      return 0;
+    }
+  }
+}
--
cgit v1.1