• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 //  Copyright (c) 2001-2011 Hartmut Kaiser
2 //
3 //  Distributed under the Boost Software License, Version 1.0. (See accompanying
4 //  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
5 
6 #include <boost/config/warning_disable.hpp>
7 #include <boost/detail/lightweight_test.hpp>
8 
9 #include <boost/spirit/include/lex_lexertl.hpp>
10 #include <boost/spirit/include/lex_lexertl_position_token.hpp>
11 #include <boost/spirit/include/phoenix_object.hpp>
12 #include <boost/spirit/include/phoenix_operator.hpp>
13 #include <boost/spirit/include/phoenix_container.hpp>
14 
15 namespace lex = boost::spirit::lex;
16 namespace phoenix = boost::phoenix;
17 namespace mpl = boost::mpl;
18 
19 ///////////////////////////////////////////////////////////////////////////////
// Token ids assigned to the recognized tokens; starting at 1000 keeps them
// clear of the ids Spirit.Lex reserves/assigns internally.
enum tokenids
{
    ID_INT = 1000,      // integer literal (decimal, octal, or hex)
    ID_DOUBLE           // floating point literal
};
25 
26 template <typename Lexer>
27 struct token_definitions : lex::lexer<Lexer>
28 {
token_definitionstoken_definitions29     token_definitions()
30     {
31         this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
32         this->self.add_pattern("OCTALDIGIT", "[0-7]");
33         this->self.add_pattern("DIGIT", "[0-9]");
34 
35         this->self.add_pattern("OPTSIGN", "[-+]?");
36         this->self.add_pattern("EXPSTART", "[eE][-+]");
37         this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");
38 
39         // define tokens and associate them with the lexer
40         int_ = "(0x|0X){HEXDIGIT}+|0{OCTALDIGIT}*|{OPTSIGN}[1-9]{DIGIT}*";
41         int_.id(ID_INT);
42 
43         double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
44         double_.id(ID_DOUBLE);
45 
46         whitespace = "[ \t\n]+";
47 
48         this->self =
49                 double_
50             |   int_
51             |   whitespace[ lex::_pass = lex::pass_flags::pass_ignore ]
52             ;
53     }
54 
55     lex::token_def<lex::omit> int_;
56     lex::token_def<lex::omit> double_;
57     lex::token_def<lex::omit> whitespace;
58 };
59 
60 template <typename Lexer>
61 struct token_definitions_with_state : lex::lexer<Lexer>
62 {
token_definitions_with_statetoken_definitions_with_state63     token_definitions_with_state()
64     {
65         this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
66         this->self.add_pattern("OCTALDIGIT", "[0-7]");
67         this->self.add_pattern("DIGIT", "[0-9]");
68 
69         this->self.add_pattern("OPTSIGN", "[-+]?");
70         this->self.add_pattern("EXPSTART", "[eE][-+]");
71         this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");
72 
73         this->self.add_state();
74         this->self.add_state("INT");
75         this->self.add_state("DOUBLE");
76 
77         // define tokens and associate them with the lexer
78         int_ = "(0x|0X){HEXDIGIT}+|0{OCTALDIGIT}*|{OPTSIGN}[1-9]{DIGIT}*";
79         int_.id(ID_INT);
80 
81         double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
82         double_.id(ID_DOUBLE);
83 
84         whitespace = "[ \t\n]+";
85 
86         this->self("*") =
87                 double_ [ lex::_state = "DOUBLE"]
88             |   int_ [ lex::_state = "INT" ]
89             |   whitespace[ lex::_pass = lex::pass_flags::pass_ignore ]
90             ;
91     }
92 
93     lex::token_def<lex::omit> int_;
94     lex::token_def<lex::omit> double_;
95     lex::token_def<lex::omit> whitespace;
96 };
97 
98 ///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
// Verify that the ids of the given tokens match the expected sequence 'ids',
// which is terminated by -1.  Returns true only if both sequences are
// exhausted at the same time.
template <typename Token>
inline bool
test_token_ids(int const* ids, std::vector<Token> const& tokens)
{
    typedef typename std::vector<Token>::const_iterator iterator;
    for (iterator it = tokens.begin(), end = tokens.end(); it != end; ++it)
    {
        if (*ids == -1)
            return false;       // more tokens than expected ids

        if (it->id() != static_cast<std::size_t>(*ids))
            return false;       // token id mismatch

        ++ids;
    }

    // every expected id must have been consumed
    return *ids == -1;
}
116 
117 ///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
// Verify that the lexer state recorded in each token matches the expected
// sequence 'states', terminated by std::size_t(-1).  Returns true only if
// both sequences are exhausted at the same time.
template <typename Token>
inline bool
test_token_states(std::size_t const* states, std::vector<Token> const& tokens)
{
    typedef typename std::vector<Token>::const_iterator iterator;
    for (iterator it = tokens.begin(), end = tokens.end(); it != end; ++it)
    {
        if (*states == std::size_t(-1))
            return false;       // more tokens than expected states

        if (it->state() != *states)
            return false;       // token state mismatch

        ++states;
    }

    // every expected state must have been consumed
    return *states == std::size_t(-1);
}
135 
136 ///////////////////////////////////////////////////////////////////////////////
// Expected [begin, end) character offsets of a matched token relative to the
// start of the input; an entry of { size_t(-1), size_t(-1) } terminates an
// expectations array (see test_token_positions below).
struct position_type
{
    std::size_t begin, end;
};
141 
142 template <typename Iterator, typename Token>
143 inline bool
test_token_positions(Iterator begin,position_type const * positions,std::vector<Token> const & tokens)144 test_token_positions(Iterator begin, position_type const* positions,
145     std::vector<Token> const& tokens)
146 {
147     BOOST_FOREACH(Token const& t, tokens)
148     {
149         if (positions->begin == std::size_t(-1) &&
150             positions->end == std::size_t(-1))
151         {
152             return false;           // reached end of expected data
153         }
154 
155         boost::iterator_range<Iterator> matched = t.matched();
156         std::size_t start = std::distance(begin, matched.begin());
157         std::size_t end = std::distance(begin, matched.end());
158 
159         // position must match
160         if (start != positions->begin || end != positions->end)
161             return false;
162 
163         ++positions;
164     }
165 
166     return (positions->begin == std::size_t(-1) &&
167             positions->end == std::size_t(-1)) ? true : false;
168 }
169 
170 ///////////////////////////////////////////////////////////////////////////////
int main()
{
    // All four sub-tests tokenize the same input string and check the token
    // ids -- and, where the token type supports it, the lexer states and the
    // matched positions -- against the expectation arrays below.
    typedef std::string::iterator base_iterator_type;
    std::string input(" 01 1.2 -2 0x3 2.3e6 -3.4");
    // expected token ids, terminated by -1
    int ids[] = { ID_INT, ID_DOUBLE, ID_INT, ID_INT, ID_DOUBLE, ID_DOUBLE, -1 };
    // expected lexer state in effect when each token was matched (set by the
    // actions in token_definitions_with_state), terminated by size_t(-1)
    std::size_t states[] = { 0, 1, 2, 1, 1, 2, std::size_t(-1) };
    // expected [begin, end) offsets of each token in 'input'
    position_type positions[] =
    {
        { 1, 3 }, { 4, 7 }, { 8, 10 }, { 11, 14 }, { 15, 20 }, { 21, 25 },
        { std::size_t(-1), std::size_t(-1) }
    };

    // minimal token type: holds just token id, no state, no value
    {
        typedef lex::lexertl::token<
            base_iterator_type, lex::omit, mpl::false_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        // collect every emitted token via a Phoenix semantic action
        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
    }

    // position_token: records the matched iterator range; no state, no value
    {
        typedef lex::lexertl::position_token<
            base_iterator_type, lex::omit, mpl::false_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
    }

    // minimal token type: holds just token id and state, no value
    {
        typedef lex::lexertl::token<
            base_iterator_type, lex::omit, mpl::true_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions_with_state<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_states(states, tokens));
    }

    // position_token holding token id, state, and position; no value
    {
        typedef lex::lexertl::position_token<
            base_iterator_type, lex::omit, mpl::true_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions_with_state<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_states(states, tokens));
        BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
    }

    return boost::report_errors();
}
255