regression_word_count.cpp

// Copyright (c) 2001-2010 Hartmut Kaiser
// Copyright (c) 2009 Tor Brede Vekterli
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#include <boost/detail/lightweight_test.hpp>
#include <boost/config/warning_disable.hpp>
#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/qi_parse.hpp>
#include <boost/spirit/include/qi_operator.hpp>
#include <boost/spirit/include/qi_action.hpp>
#include <boost/spirit/include/qi_char.hpp>
#include <boost/spirit/include/qi_grammar.hpp>
#include <boost/spirit/include/phoenix_operator.hpp>

#include <iostream>
#include <string>

namespace qi = boost::spirit::qi;
namespace lex = boost::spirit::lex;

enum tokenids
{
    IDANY = lex::min_token_id + 10    // Lower 8 bits is 0x0a, same as '\n'
};
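
// The lexer: the pattern {TEST} matches the single character "A" and is
// exposed as the `word` token; '\n' is added as a plain character token and
// every other character is reported with the id IDANY, whose lower byte
// happens to equal '\n' (presumably the situation this regression test
// exercises).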
template <typename Lexer>
struct word_count_tokens : lex::lexer<Lexer>
{
    word_count_tokens()
    {
        this->self.add_pattern
            ("TEST", "A")
        ;
        word = "{TEST}";

        this->self.add
            (word)
            ('\n')
            (".", IDANY)
        ;
    }

    lex::token_def<std::string> word;
};
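
// The grammar: each token recognized by the lexer bumps one of three
// counters via a Phoenix semantic action: w for words, l for newlines
// and c for any other character.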
template <typename Iterator>
struct word_count_grammar : qi::grammar<Iterator>
{
    template <typename TokenDef>
    word_count_grammar(TokenDef const& tok)
      : word_count_grammar::base_type(start)
      , c(0), w(0), l(0)
    {
        using boost::phoenix::ref;
        using qi::lit;
        using qi::token;

        start = *( tok.word      [++ref(w)]
                 | lit('\n')     [++ref(l)]
                 | token(IDANY)  [++ref(c)]
                 )
            ;
    }

    std::size_t c, w, l;
    qi::rule<Iterator> start;
};
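
// The driver: tokenize_and_parse() runs the lexer over the character range
// and feeds the resulting token stream straight into the grammar. For the
// input "A\nBCDEFGHI" we expect one word, one newline and eight IDANY
// characters.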
int main()
{
    typedef lex::lexertl::token<
        const char*, boost::mpl::vector<std::string>
    > token_type;
    typedef lex::lexertl::lexer<token_type> lexer_type;
    typedef word_count_tokens<lexer_type>::iterator_type iterator_type;

    word_count_tokens<lexer_type> word_count;           // Our lexer
    word_count_grammar<iterator_type> g (word_count);   // Our parser

    std::string str ("A\nBCDEFGHI");
    char const* first = str.c_str();
    char const* last = &first[str.size()];

    BOOST_TEST(lex::tokenize_and_parse(first, last, word_count, g));
    BOOST_TEST(g.l == 1 && g.w == 1 && g.c == 8);

    return boost::report_errors();
}