//  Copyright (c) 2001-2010 Hartmut Kaiser
//  Copyright (c) 2009 Tor Brede Vekterli
//
//  Distributed under the Boost Software License, Version 1.0. (See accompanying
//  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#include <boost/detail/lightweight_test.hpp>
#include <boost/config/warning_disable.hpp>

#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/qi_parse.hpp>
#include <boost/spirit/include/qi_operator.hpp>
#include <boost/spirit/include/qi_action.hpp>
#include <boost/spirit/include/qi_char.hpp>
#include <boost/spirit/include/qi_grammar.hpp>
#include <boost/spirit/include/phoenix_operator.hpp>

#include <iostream>
#include <string>

namespace qi = boost::spirit::qi;
namespace lex = boost::spirit::lex;

enum tokenids
{
    IDANY = lex::min_token_id + 10 // Lower 8 bits is 0x0a, same as '\n'
};

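// Token definitions: the named pattern TEST ("A") backs the 'word' token;
// '\n' and the catch-all "." (reported as IDANY) are added as single-char tokens.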
template <typename Lexer>
struct word_count_tokens : lex::lexer<Lexer>
{
    word_count_tokens()
    {
        this->self.add_pattern
            ("TEST", "A")
        ;
        word = "{TEST}";
        this->self.add
            (word)
            ('\n')
            (".", IDANY)
        ;
    }
    lex::token_def<std::string> word;
};

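// Grammar that counts words, newlines, and other characters by attaching
// Phoenix semantic actions (++ref(...)) to the corresponding tokens.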
template <typename Iterator>
struct word_count_grammar : qi::grammar<Iterator>
{
    template <typename TokenDef>
    word_count_grammar(TokenDef const& tok)
      : word_count_grammar::base_type(start)
      , c(0), w(0), l(0)
    {
        using boost::phoenix::ref;
        using qi::lit;
        using qi::token;

        start =  *(   tok.word      [++ref(w)]
                  |   lit('\n')     [++ref(l)]
                  |   token(IDANY)  [++ref(c)]
                  )
              ;
    }
    std::size_t c, w, l;
    qi::rule<Iterator> start;
};


int main()
{
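    // Token type: iterates over char const* input and can expose the matched
    // text as a std::string attribute (used by the 'word' token).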
    typedef lex::lexertl::token<
        const char*, boost::mpl::vector<std::string>
    > token_type;

    typedef lex::lexertl::lexer<token_type> lexer_type;
    typedef word_count_tokens<lexer_type>::iterator_type iterator_type;
    word_count_tokens<lexer_type> word_count;          // Our lexer
    word_count_grammar<iterator_type> g (word_count);  // Our parser

    std::string str ("A\nBCDEFGHI");
    char const* first = str.c_str();
    char const* last = &first[str.size()];

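    // "A\nBCDEFGHI": one word ("A"), one newline, eight other characters.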
    BOOST_TEST(lex::tokenize_and_parse(first, last, word_count, g));
    BOOST_TEST(g.l == 1 && g.w == 1 && g.c == 8);

    return boost::report_errors();
}