// token_onetype.cpp
//  Copyright (c) 2001-2011 Hartmut Kaiser
//
//  Distributed under the Boost Software License, Version 1.0. (See accompanying
//  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include <boost/config/warning_disable.hpp>
#include <boost/detail/lightweight_test.hpp>
#include <boost/foreach.hpp>
#include <boost/range/iterator_range.hpp>
#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/lex_lexertl_position_token.hpp>
#include <boost/spirit/include/phoenix_container.hpp>
#include <boost/spirit/include/phoenix_object.hpp>
#include <boost/spirit/include/phoenix_operator.hpp>
#include <boost/spirit/include/qi_numeric.hpp>

#include <cstddef>
#include <iterator>
#include <string>
#include <vector>

namespace spirit = boost::spirit;
namespace lex = boost::spirit::lex;
namespace phoenix = boost::phoenix;
namespace mpl = boost::mpl;
  17. ///////////////////////////////////////////////////////////////////////////////
  18. enum tokenids
  19. {
  20. ID_INT = 1000,
  21. ID_DOUBLE
  22. };
  23. template <typename Lexer>
  24. struct token_definitions : lex::lexer<Lexer>
  25. {
  26. token_definitions()
  27. {
  28. this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
  29. this->self.add_pattern("OCTALDIGIT", "[0-7]");
  30. this->self.add_pattern("DIGIT", "[0-9]");
  31. this->self.add_pattern("OPTSIGN", "[-+]?");
  32. this->self.add_pattern("EXPSTART", "[eE][-+]");
  33. this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");
  34. // define tokens and associate them with the lexer
  35. int_ = "{OPTSIGN}[1-9]{DIGIT}*";
  36. int_.id(ID_INT);
  37. double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
  38. double_.id(ID_DOUBLE);
  39. whitespace = "[ \t\n]+";
  40. this->self =
  41. double_
  42. | int_
  43. | whitespace[ lex::_pass = lex::pass_flags::pass_ignore ]
  44. ;
  45. }
  46. lex::token_def<double> int_;
  47. lex::token_def<double> double_;
  48. lex::token_def<lex::omit> whitespace;
  49. };
  50. template <typename Lexer>
  51. struct token_definitions_with_state : lex::lexer<Lexer>
  52. {
  53. token_definitions_with_state()
  54. {
  55. this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
  56. this->self.add_pattern("OCTALDIGIT", "[0-7]");
  57. this->self.add_pattern("DIGIT", "[0-9]");
  58. this->self.add_pattern("OPTSIGN", "[-+]?");
  59. this->self.add_pattern("EXPSTART", "[eE][-+]");
  60. this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");
  61. this->self.add_state();
  62. this->self.add_state("INT");
  63. this->self.add_state("DOUBLE");
  64. // define tokens and associate them with the lexer
  65. int_ = "{OPTSIGN}[1-9]{DIGIT}*";
  66. int_.id(ID_INT);
  67. double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
  68. double_.id(ID_DOUBLE);
  69. whitespace = "[ \t\n]+";
  70. this->self("*") =
  71. double_ [ lex::_state = "DOUBLE"]
  72. | int_ [ lex::_state = "INT" ]
  73. | whitespace[ lex::_pass = lex::pass_flags::pass_ignore ]
  74. ;
  75. }
  76. lex::token_def<double> int_;
  77. lex::token_def<double> double_;
  78. lex::token_def<lex::omit> whitespace;
  79. };
  80. ///////////////////////////////////////////////////////////////////////////////
  81. template <typename Token>
  82. inline bool
  83. test_token_ids(int const* ids, std::vector<Token> const& tokens)
  84. {
  85. BOOST_FOREACH(Token const& t, tokens)
  86. {
  87. if (*ids == -1)
  88. return false; // reached end of expected data
  89. if (t.id() != static_cast<std::size_t>(*ids)) // token id must match
  90. return false;
  91. ++ids;
  92. }
  93. return (*ids == -1) ? true : false;
  94. }
  95. ///////////////////////////////////////////////////////////////////////////////
  96. template <typename Token>
  97. inline bool
  98. test_token_states(std::size_t const* states, std::vector<Token> const& tokens)
  99. {
  100. BOOST_FOREACH(Token const& t, tokens)
  101. {
  102. if (*states == std::size_t(-1))
  103. return false; // reached end of expected data
  104. if (t.state() != *states) // token state must match
  105. return false;
  106. ++states;
  107. }
  108. return (*states == std::size_t(-1)) ? true : false;
  109. }
  110. ///////////////////////////////////////////////////////////////////////////////
  111. struct position_type
  112. {
  113. std::size_t begin, end;
  114. };
  115. template <typename Iterator, typename Token>
  116. inline bool
  117. test_token_positions(Iterator begin, position_type const* positions,
  118. std::vector<Token> const& tokens)
  119. {
  120. BOOST_FOREACH(Token const& t, tokens)
  121. {
  122. if (positions->begin == std::size_t(-1) &&
  123. positions->end == std::size_t(-1))
  124. {
  125. return false; // reached end of expected data
  126. }
  127. boost::iterator_range<Iterator> matched = t.matched();
  128. std::size_t start = std::distance(begin, matched.begin());
  129. std::size_t end = std::distance(begin, matched.end());
  130. // position must match
  131. if (start != positions->begin || end != positions->end)
  132. return false;
  133. ++positions;
  134. }
  135. return (positions->begin == std::size_t(-1) &&
  136. positions->end == std::size_t(-1)) ? true : false;
  137. }
  138. ///////////////////////////////////////////////////////////////////////////////
  139. template <typename Token>
  140. inline bool
  141. test_token_values(double const* values, std::vector<Token> const& tokens)
  142. {
  143. BOOST_FOREACH(Token const& t, tokens)
  144. {
  145. if (*values == 0.0)
  146. return false; // reached end of expected data
  147. double val;
  148. spirit::traits::assign_to(t, val);
  149. if (val != *values) // token value must match
  150. return false;
  151. ++values;
  152. }
  153. return (*values == 0.0) ? true : false;
  154. }
  155. ///////////////////////////////////////////////////////////////////////////////
  156. int main()
  157. {
  158. typedef std::string::iterator base_iterator_type;
  159. std::string input(" 1 1.2 -2 3 2.3e6 -3.4");
  160. int ids[] = { ID_INT, ID_DOUBLE, ID_INT, ID_INT, ID_DOUBLE, ID_DOUBLE, -1 };
  161. std::size_t states[] = { 0, 1, 2, 1, 1, 2, std::size_t(-1) };
  162. position_type positions[] =
  163. {
  164. { 2, 3 }, { 4, 7 }, { 8, 10 }, { 13, 14 }, { 15, 20 }, { 21, 25 },
  165. { std::size_t(-1), std::size_t(-1) }
  166. };
  167. double values[] = { 1.0, 1.2, -2.0, 3.0, 2.3e6, -3.4, 0.0 };
  168. // token type: token id, iterator_pair as token value, no state
  169. {
  170. typedef lex::lexertl::token<
  171. base_iterator_type, mpl::vector<double>, mpl::false_> token_type;
  172. typedef lex::lexertl::actor_lexer<token_type> lexer_type;
  173. token_definitions<lexer_type> lexer;
  174. std::vector<token_type> tokens;
  175. base_iterator_type first = input.begin();
  176. using phoenix::arg_names::_1;
  177. BOOST_TEST(lex::tokenize(first, input.end(), lexer
  178. , phoenix::push_back(phoenix::ref(tokens), _1)));
  179. BOOST_TEST(test_token_ids(ids, tokens));
  180. BOOST_TEST(test_token_values(values, tokens));
  181. }
  182. {
  183. typedef lex::lexertl::position_token<
  184. base_iterator_type, mpl::vector<double>, mpl::false_> token_type;
  185. typedef lex::lexertl::actor_lexer<token_type> lexer_type;
  186. token_definitions<lexer_type> lexer;
  187. std::vector<token_type> tokens;
  188. base_iterator_type first = input.begin();
  189. using phoenix::arg_names::_1;
  190. BOOST_TEST(lex::tokenize(first, input.end(), lexer
  191. , phoenix::push_back(phoenix::ref(tokens), _1)));
  192. BOOST_TEST(test_token_ids(ids, tokens));
  193. BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
  194. BOOST_TEST(test_token_values(values, tokens));
  195. }
  196. // token type: holds token id, state, iterator_pair as token value
  197. {
  198. typedef lex::lexertl::token<
  199. base_iterator_type, mpl::vector<double>, mpl::true_> token_type;
  200. typedef lex::lexertl::actor_lexer<token_type> lexer_type;
  201. token_definitions_with_state<lexer_type> lexer;
  202. std::vector<token_type> tokens;
  203. base_iterator_type first = input.begin();
  204. using phoenix::arg_names::_1;
  205. BOOST_TEST(lex::tokenize(first, input.end(), lexer
  206. , phoenix::push_back(phoenix::ref(tokens), _1)));
  207. BOOST_TEST(test_token_ids(ids, tokens));
  208. BOOST_TEST(test_token_states(states, tokens));
  209. BOOST_TEST(test_token_values(values, tokens));
  210. }
  211. {
  212. typedef lex::lexertl::position_token<
  213. base_iterator_type, mpl::vector<double>, mpl::true_> token_type;
  214. typedef lex::lexertl::actor_lexer<token_type> lexer_type;
  215. token_definitions_with_state<lexer_type> lexer;
  216. std::vector<token_type> tokens;
  217. base_iterator_type first = input.begin();
  218. using phoenix::arg_names::_1;
  219. BOOST_TEST(lex::tokenize(first, input.end(), lexer
  220. , phoenix::push_back(phoenix::ref(tokens), _1)));
  221. BOOST_TEST(test_token_ids(ids, tokens));
  222. BOOST_TEST(test_token_states(states, tokens));
  223. BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
  224. BOOST_TEST(test_token_values(values, tokens));
  225. }
  226. return boost::report_errors();
  227. }