//  Copyright (c) 2001-2011 Hartmut Kaiser
//
//  Distributed under the Boost Software License, Version 1.0. (See accompanying
//  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#include <boost/config/warning_disable.hpp>
#include <boost/detail/lightweight_test.hpp>

#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/lex_lexertl_position_token.hpp>
#include <boost/spirit/include/phoenix_core.hpp>
#include <boost/spirit/include/phoenix_operator.hpp>
#include <boost/spirit/include/phoenix_container.hpp>
#include <boost/foreach.hpp>
#include <boost/optional.hpp>

#include <string>
#include <vector>

namespace spirit = boost::spirit;
namespace lex = boost::spirit::lex;
namespace phoenix = boost::phoenix;
namespace mpl = boost::mpl;

///////////////////////////////////////////////////////////////////////////////
enum tokenids
{
    ID_INT = 1000,
    ID_DOUBLE
};

///////////////////////////////////////////////////////////////////////////////
// lexer without state switching: integers, doubles, and skipped whitespace
template <typename Lexer>
struct token_definitions : lex::lexer<Lexer>
{
    token_definitions()
    {
        this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
        this->self.add_pattern("OCTALDIGIT", "[0-7]");
        this->self.add_pattern("DIGIT", "[0-9]");

        this->self.add_pattern("OPTSIGN", "[-+]?");
        this->self.add_pattern("EXPSTART", "[eE][-+]");
        this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");

        // define tokens and associate them with the lexer
        int_ = "(0x|0X){HEXDIGIT}+|0{OCTALDIGIT}*|{OPTSIGN}[1-9]{DIGIT}*";
        int_.id(ID_INT);

        double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
        double_.id(ID_DOUBLE);

        whitespace = "[ \t\n]+";

        this->self =
                double_
            |   int_
            |   whitespace[ lex::_pass = lex::pass_flags::pass_ignore ]
            ;
    }

    lex::token_def<> int_;
    lex::token_def<> double_;
    lex::token_def<> whitespace;
};

///////////////////////////////////////////////////////////////////////////////
// same token definitions, but every match switches the lexer state
template <typename Lexer>
struct token_definitions_with_state : lex::lexer<Lexer>
{
    token_definitions_with_state()
    {
        this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
        this->self.add_pattern("OCTALDIGIT", "[0-7]");
        this->self.add_pattern("DIGIT", "[0-9]");

        this->self.add_pattern("OPTSIGN", "[-+]?");
        this->self.add_pattern("EXPSTART", "[eE][-+]");
        this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");

        this->self.add_state();
        this->self.add_state("INT");
        this->self.add_state("DOUBLE");

        // define tokens and associate them with the lexer
        int_ = "(0x|0X){HEXDIGIT}+|0{OCTALDIGIT}*|{OPTSIGN}[1-9]{DIGIT}*";
        int_.id(ID_INT);

        double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
        double_.id(ID_DOUBLE);

        whitespace = "[ \t\n]+";

        this->self("*") =
                double_ [ lex::_state = "DOUBLE" ]
            |   int_ [ lex::_state = "INT" ]
            |   whitespace[ lex::_pass = lex::pass_flags::pass_ignore ]
            ;
    }

    lex::token_def<> int_;
    lex::token_def<> double_;
    lex::token_def<> whitespace;
};

///////////////////////////////////////////////////////////////////////////////
// compare the ids of the matched tokens against the expected ids
// (the expected data is terminated by -1)
template <typename Token>
inline bool
test_token_ids(int const* ids, std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (*ids == -1)
            return false;           // reached end of expected data

        if (t.id() != static_cast<std::size_t>(*ids))   // token id must match
            return false;

        ++ids;
    }
    return (*ids == -1) ? true : false;
}

///////////////////////////////////////////////////////////////////////////////
// compare the lexer states of the matched tokens against the expected states
// (the expected data is terminated by std::size_t(-1))
template <typename Token>
inline bool
test_token_states(std::size_t const* states, std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (*states == std::size_t(-1))
            return false;           // reached end of expected data

        if (t.state() != *states)   // token state must match
            return false;

        ++states;
    }
    return (*states == std::size_t(-1)) ? true : false;
}
///////////////////////////////////////////////////////////////////////////////
// compare the matched input positions against the expected positions
// (the expected data is terminated by a { -1, -1 } entry)
struct position_type
{
    std::size_t begin, end;
};

template <typename Iterator, typename Token>
inline bool
test_token_positions(Iterator begin, position_type const* positions,
    std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (positions->begin == std::size_t(-1) &&
            positions->end == std::size_t(-1))
        {
            return false;           // reached end of expected data
        }

        boost::iterator_range<Iterator> matched = t.matched();
        std::size_t start = std::distance(begin, matched.begin());
        std::size_t end = std::distance(begin, matched.end());

        // position must match
        if (start != positions->begin || end != positions->end)
            return false;

        ++positions;
    }
    return (positions->begin == std::size_t(-1) &&
            positions->end == std::size_t(-1)) ? true : false;
}

///////////////////////////////////////////////////////////////////////////////
// convert each token value on demand and compare it against the expected
// value (an initialized optional holding 0 terminates the expected data)
template <typename T, typename Token>
inline bool
test_token_values(boost::optional<T> const* values,
    std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (values->is_initialized() && values->get() == 0)
            return false;           // reached end of expected data

        if (values->is_initialized()) {
            T val;
            spirit::traits::assign_to(t, val);
            if (val != values->get())       // token value must match
                return false;
        }

        ++values;
    }
    return (values->is_initialized() && values->get() == 0) ? true : false;
}

///////////////////////////////////////////////////////////////////////////////
int main()
{
    using boost::none;

    typedef std::string::iterator base_iterator_type;

    // note: two spaces in front of "2.3e6", as required by the expected
    //       positions below
    std::string input(" 01 1.2 -2 03  2.3e6 -3.4");
    int ids[] = { ID_INT, ID_DOUBLE, ID_INT, ID_INT, ID_DOUBLE, ID_DOUBLE, -1 };
    std::size_t states[] = { 0, 1, 2, 1, 1, 2, std::size_t(-1) };
    position_type positions[] = {
        { 1, 3 }, { 4, 7 }, { 8, 10 }, { 11, 13 }, { 15, 20 }, { 21, 25 },
        { std::size_t(-1), std::size_t(-1) }
    };
    boost::optional<int> ivalues[] = { 1, none, -2, 3, none, none, 0 };
    boost::optional<double> dvalues[] = { none, 1.2, none, none, 2.3e6, -3.4, 0.0 };

    // token type: token id, iterator_pair as token value, no state
    {
        typedef lex::lexertl::token<
            base_iterator_type, mpl::vector<>, mpl::false_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_values(ivalues, tokens));
        BOOST_TEST(test_token_values(dvalues, tokens));
    }

    // token type: token id, iterator_pair as token value, no state,
    // stores the matched input positions
    {
        typedef lex::lexertl::position_token<
            base_iterator_type, mpl::vector<>, mpl::false_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
        BOOST_TEST(test_token_values(ivalues, tokens));
        BOOST_TEST(test_token_values(dvalues, tokens));
    }

    // token type: holds token id, state, iterator_pair as token value
    {
        typedef lex::lexertl::token<
            base_iterator_type, mpl::vector<>, mpl::true_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions_with_state<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));
        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_states(states, tokens));
        BOOST_TEST(test_token_values(ivalues, tokens));
        BOOST_TEST(test_token_values(dvalues, tokens));
    }

    // token type: holds token id, state, iterator_pair as token value,
    // stores the matched input positions
    {
        typedef lex::lexertl::position_token<
            base_iterator_type, mpl::vector<>, mpl::true_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions_with_state<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_states(states, tokens));
        BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
        BOOST_TEST(test_token_values(ivalues, tokens));
        BOOST_TEST(test_token_values(dvalues, tokens));
    }

    return boost::report_errors();
}