//  Copyright (c) 2001-2011 Hartmut Kaiser
//
//  Distributed under the Boost Software License, Version 1.0. (See accompanying
//  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

// #define BOOST_SPIRIT_LEXERTL_DEBUG 1

#include <boost/config/warning_disable.hpp>

#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/qi.hpp>
#include <boost/spirit/include/phoenix.hpp>

#include <iostream>
#include <string>

namespace lex = boost::spirit::lex;
namespace qi = boost::spirit::qi;
namespace phoenix = boost::phoenix;

///////////////////////////////////////////////////////////////////////////////
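//  Token definition: the set of tokens recognized by the lexer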
template <typename Lexer>
struct language_tokens : lex::lexer<Lexer>
{
    language_tokens()
    {
        tok_float = "float";
        tok_int = "int";
        floatlit = "[0-9]+\\.[0-9]*";
        intlit = "[0-9]+";
        ws = "[ \t\n]+";
        identifier = "[a-zA-Z_][a-zA-Z_0-9]*";

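        // Associate the tokens with the lexer. Whitespace is matched but
        // discarded via pass_ignore, so the parser never sees it; the keyword
        // tokens are added before 'identifier' so they take precedence on
        // equal-length matches.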
        this->self = ws [lex::_pass = lex::pass_flags::pass_ignore];
        this->self += tok_float | tok_int | floatlit | intlit | identifier;
        this->self += lex::char_('=');
    }

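    // The template argument of a token_def determines the value the token
    // exposes to the parser: floatlit carries a double, intlit an int; the
    // remaining tokens carry no typed value.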
    lex::token_def<> tok_float, tok_int;
    lex::token_def<> ws;
    lex::token_def<double> floatlit;
    lex::token_def<int> intlit;
    lex::token_def<> identifier;
};

///////////////////////////////////////////////////////////////////////////////
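//  Grammar definition: parses the token sequence produced by the lexer above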
template <typename Iterator>
struct language_grammar : qi::grammar<Iterator>
{
    template <typename Lexer>
    language_grammar(language_tokens<Lexer> const& tok)
      : language_grammar::base_type(declarations)
    {
        declarations = +number;
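        // A 'number' declaration is either "float <name> = <float literal>"
        // or "int <name> = <int literal>"; the '=' literal matches the
        // char_('=') token registered with the lexer.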
        number =
                tok.tok_float >> tok.identifier >> '=' >> tok.floatlit
            |   tok.tok_int >> tok.identifier >> '=' >> tok.intlit
            ;

        declarations.name("declarations");
        number.name("number");
        debug(declarations);
        debug(number);
    }

    qi::rule<Iterator> declarations;
    qi::rule<Iterator> number;
};

///////////////////////////////////////////////////////////////////////////////
int main()
{
    // iterator type used to expose the underlying input stream
    typedef std::string::iterator base_iterator_type;

    // lexer type
    typedef lex::lexertl::actor_lexer<
        lex::lexertl::token<
            base_iterator_type, boost::mpl::vector2<double, int>
        > > lexer_type;
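    // mpl::vector2<double, int> lists the token value types used by the
    // token_defs above; actor_lexer (rather than plain lexertl::lexer) is
    // used because the whitespace token definition carries a semantic action.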

    // iterator type exposed by the lexer
    typedef language_tokens<lexer_type>::iterator_type iterator_type;

    // now we use the types defined above to create the lexer and grammar
    // object instances needed to invoke the parsing process
    language_tokens<lexer_type> tokenizer;           // Our lexer
    language_grammar<iterator_type> g (tokenizer);   // Our parser

    // Parsing is done based on the token stream, not the character
    // stream read from the input.
    std::string str ("float f = 3.4\nint i = 6\n");
    base_iterator_type first = str.begin();

    bool r = lex::tokenize_and_parse(first, str.end(), tokenizer, g);
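    // On failure 'first' is left at the position where lexing/parsing
    // stopped, so the remaining input can be reported below.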

    if (r) {
        std::cout << "-------------------------\n";
        std::cout << "Parsing succeeded\n";
        std::cout << "-------------------------\n";
    }
    else {
        std::string rest(first, str.end());
        std::cout << "-------------------------\n";
        std::cout << "Parsing failed\n";
        std::cout << "stopped at: \"" << rest << "\"\n";
        std::cout << "-------------------------\n";
    }

    std::cout << "Bye... :-) \n\n";
    return 0;
}