//  Copyright (c) 2001-2010 Hartmut Kaiser
//
//  Distributed under the Boost Software License, Version 1.0. (See accompanying
//  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

//  This example is equivalent to the following lex program:
//
//     %{
//     #include <stdio.h>
//     %}
//     %%
//     [0-9]+   { printf("%s\n", yytext); }
//     .|\n     ;
//     %%
//     main()
//     {
//             yylex();
//     }
//
//  The lex program above prints all the (integer) numbers found in a file;
//  this variant prints the ids of the tokens matched by the lexer instead.

#include <boost/config/warning_disable.hpp>
#include <boost/spirit/include/qi.hpp>
#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/phoenix_operator.hpp>

#include <iostream>
#include <string>

#include "example.hpp"

using namespace boost::spirit;

///////////////////////////////////////////////////////////////////////////////
//  Token definition: We use the lexertl based lexer engine as the underlying
//                    lexer type.
///////////////////////////////////////////////////////////////////////////////
template <typename Lexer>
struct print_numbers_tokenids : lex::lexer<Lexer>
{
    // define the tokens and associate them with the lexer; the lexer flags are
    // set so that '.' does not match newlines, which is why '\n' has to be
    // added explicitly below
    print_numbers_tokenids()
      : print_numbers_tokenids::base_type(lex::match_flags::match_not_dot_newline)
    {
        this->self = lex::token_def<int>("[0-9]+") | ".|\n";
    }
};
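
///////////////////////////////////////////////////////////////////////////////
// Illustrative sketch only (not used by main() below; struct and member names
// are hypothetical additions, not part of the original example): the same two
// tokens could also be kept as named lex::token_def members, which lets a
// grammar refer to them directly instead of going through lex::min_token_id.
///////////////////////////////////////////////////////////////////////////////
template <typename Lexer>
struct print_numbers_named_tokens : lex::lexer<Lexer>
{
    print_numbers_named_tokens()
      : print_numbers_named_tokens::base_type(lex::match_flags::match_not_dot_newline)
      , number("[0-9]+")        // an integer literal, attribute type is int
      , any(".|\n")             // any other single character (or a newline)
    {
        // register both token definitions with the lexer
        this->self = number | any;
    }

    lex::token_def<int> number;
    lex::token_def<> any;
};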

///////////////////////////////////////////////////////////////////////////////
//  Grammar definition
///////////////////////////////////////////////////////////////////////////////
template <typename Iterator>
struct print_numbers_grammar : qi::grammar<Iterator>
{
    print_numbers_grammar()
      : print_numbers_grammar::base_type(start)
    {
        // we know that token ids are assigned starting at lex::min_token_id,
        // so "[0-9]+" gets the id 'min_token_id' and ".|\n" gets the id
        // 'min_token_id+1'

        // this prints the token ids of the matched tokens
        start =  *(   qi::tokenid(lex::min_token_id)
                  |   qi::tokenid(lex::min_token_id+1)
                  )
                  [ std::cout << _1 << "\n" ]
              ;
    }

    qi::rule<Iterator> start;
};
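
///////////////////////////////////////////////////////////////////////////////
// Illustrative sketch only: since lex::tokenize_and_parse() accepts any parser
// expression, the grammar class above could in principle be replaced by
// writing the rule inline at the call site, e.g.
//
//     lex::tokenize_and_parse(first, str.end(), print_tokens,
//         *(   qi::tokenid(lex::min_token_id)
//          |   qi::tokenid(lex::min_token_id+1)
//          )[ std::cout << _1 << "\n" ]);
//
// Wrapping the rule in a qi::grammar, as done here, merely keeps it named and
// reusable.
///////////////////////////////////////////////////////////////////////////////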

///////////////////////////////////////////////////////////////////////////////
int main(int argc, char* argv[])
{
    // iterator type used to expose the underlying input stream
    typedef std::string::iterator base_iterator_type;

    // the token type to be used: 'int' is made available as the token
    // attribute type, and no lexer state is supported
    typedef lex::lexertl::token<base_iterator_type, boost::mpl::vector<int>
      , boost::mpl::false_> token_type;
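    // (For comparison, not needed here: passing boost::mpl::true_ as the last
    // template argument would select a token type that also carries the lexer
    // state; this example uses only a single lexer state.)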

    // lexer type
    typedef lex::lexertl::lexer<token_type> lexer_type;

    // iterator type exposed by the lexer
    typedef print_numbers_tokenids<lexer_type>::iterator_type iterator_type;

    // now we use the types defined above to create the lexer and grammar
    // object instances needed to invoke the parsing process
    print_numbers_tokenids<lexer_type> print_tokens;  // Our lexer
    print_numbers_grammar<iterator_type> print;       // Our parser

    // Parsing is done based on the token stream, not the character
    // stream read from the input.
    std::string str (read_from_file(1 == argc ? "print_numbers.input" : argv[1]));
    base_iterator_type first = str.begin();
    bool r = lex::tokenize_and_parse(first, str.end(), print_tokens, print);
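
    // tokenize_and_parse() returns false if either lexing or parsing fails;
    // 'first' is then left at the position where matching stopped, which the
    // diagnostic below prints.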
    if (r) {
        std::cout << "-------------------------\n";
        std::cout << "Parsing succeeded\n";
        std::cout << "-------------------------\n";
    }
    else {
        std::string rest(first, str.end());
        std::cout << "-------------------------\n";
        std::cout << "Parsing failed\n";
        std::cout << "stopped at: \"" << rest << "\"\n";
        std::cout << "-------------------------\n";
    }

    std::cout << "Bye... :-) \n\n";
    return 0;
}
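
///////////////////////////////////////////////////////////////////////////////
// Usage sketch (assumptions: the file is saved as, e.g., the hypothetical name
// print_number_tokenids.cpp; the Boost headers and example.hpp are on the
// include path; a file 'print_numbers.input' exists in the working directory;
// compiler and flags are illustrative only):
//
//     g++ -O2 print_number_tokenids.cpp -o print_number_tokenids
//     ./print_number_tokenids print_numbers.input
//
// For every recognized token the program prints its numeric id: the id of
// "[0-9]+" (lex::min_token_id) for each integer and that id plus one for
// every other character.
///////////////////////////////////////////////////////////////////////////////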